author     Chanho Park <chanho61.park@samsung.com>    2014-12-11 18:55:56 +0900
committer  Chanho Park <chanho61.park@samsung.com>    2014-12-11 18:55:56 +0900
commit     08c1e93fa36a49f49325a07fe91ff92c964c2b6c (patch)
tree       7a7053ceb8874b28ec4b868d4c49b500008a102e /tools
parent     bb4dd8289b351fae6b55e303f189127a394a1edd (diff)
download   boost-08c1e93fa36a49f49325a07fe91ff92c964c2b6c.tar.gz
           boost-08c1e93fa36a49f49325a07fe91ff92c964c2b6c.tar.bz2
           boost-08c1e93fa36a49f49325a07fe91ff92c964c2b6c.zip
Imported Upstream version 1.57.0 (upstream/1.57.0)
Diffstat (limited to 'tools')
-rw-r--r--tools/Jamfile.v23
-rw-r--r--tools/auto_index/src/auto_index.cpp5
-rw-r--r--tools/bcp/add_dependent_lib.cpp6
-rw-r--r--tools/bcp/copy_path.cpp43
-rw-r--r--tools/bcp/doc/bcp.qbk1
-rw-r--r--tools/bcp/doc/html/index.html65
-rw-r--r--tools/bcp/fileview.cpp7
-rw-r--r--tools/boostbook/doc/Jamfile.v22
-rw-r--r--tools/boostbook/doc/boostbook.xml6
-rw-r--r--tools/boostbook/doc/documenting.xml6
-rw-r--r--tools/boostbook/doc/reference.xml128
-rw-r--r--tools/boostbook/doc/together.xml2
-rw-r--r--tools/boostbook/dtd/1.1/boostbook.dtd4
-rw-r--r--tools/boostbook/dtd/boostbook.dtd4
-rw-r--r--tools/boostbook/setup_boostbook.py6
-rwxr-xr-x[-rw-r--r--]tools/boostbook/setup_boostbook.sh6
-rw-r--r--tools/boostbook/test/Jamfile.v22
-rw-r--r--tools/boostbook/test/doxygen/autodoc.gold84
-rw-r--r--tools/boostbook/test/doxygen/boost/example.hpp66
-rwxr-xr-xtools/boostbook/test/more/run-tests.py22
-rw-r--r--tools/boostbook/test/more/tests/libs/hash-ref.gold54
-rw-r--r--tools/boostbook/test/more/tests/libs/unordered-ref.gold48
-rw-r--r--tools/boostbook/test/more/tests/ref/methodname.gold26
-rw-r--r--tools/boostbook/test/more/tests/ref/methodname.xml36
-rw-r--r--tools/boostbook/test/more/tests/syntax-highlight/language-attribute.gold13
-rw-r--r--tools/boostbook/test/more/tests/syntax-highlight/language-attribute.xml25
-rw-r--r--tools/boostbook/xsl/annotation.xsl22
-rw-r--r--tools/boostbook/xsl/docbook-layout.xsl51
-rw-r--r--tools/boostbook/xsl/docbook.xsl27
-rw-r--r--tools/boostbook/xsl/doxygen/doxygen2boostbook.xsl278
-rw-r--r--tools/boostbook/xsl/fo.xsl125
-rw-r--r--tools/boostbook/xsl/html-base.xsl46
-rw-r--r--tools/boostbook/xsl/html-help.xsl98
-rw-r--r--tools/boostbook/xsl/html-single.xsl16
-rw-r--r--tools/boostbook/xsl/macro.xsl38
-rw-r--r--tools/boostbook/xsl/source-highlight.xsl6
-rw-r--r--tools/boostdep/README.md4
-rw-r--r--tools/boostdep/examples/report.bat26
-rw-r--r--tools/boostdep/src/Jamfile.v217
-rw-r--r--tools/boostdep/src/boostdep.cpp1349
-rw-r--r--tools/build/Jamroot.jam47
-rw-r--r--tools/build/boost-build.jam8
-rw-r--r--tools/build/boost.css65
-rw-r--r--tools/build/bootstrap.bat49
-rwxr-xr-xtools/build/bootstrap.sh120
-rw-r--r--tools/build/doc/bjam.qbk1696
-rw-r--r--tools/build/doc/development_plan.html (renamed from tools/build/v2/doc/development_plan.html)0
-rw-r--r--tools/build/doc/history.qbk (renamed from tools/build/v2/doc/history.qbk)0
-rw-r--r--tools/build/doc/jamfile.jam26
-rw-r--r--tools/build/doc/src/abstract-target.xml (renamed from tools/build/v2/doc/src/abstract-target.xml)0
-rw-r--r--tools/build/doc/src/architecture.xml668
-rw-r--r--tools/build/doc/src/basic-target.xml (renamed from tools/build/v2/doc/src/basic-target.xml)0
-rw-r--r--tools/build/doc/src/extending.xml1216
-rw-r--r--tools/build/doc/src/faq.xml489
-rw-r--r--tools/build/doc/src/fragments.xml (renamed from tools/build/v2/doc/src/fragments.xml)0
-rw-r--r--tools/build/doc/src/howto.xml (renamed from tools/build/v2/doc/src/howto.xml)0
-rw-r--r--tools/build/doc/src/install.xml150
-rw-r--r--tools/build/doc/src/main-target.xml (renamed from tools/build/v2/doc/src/main-target.xml)0
-rw-r--r--tools/build/doc/src/overview.xml1700
-rw-r--r--tools/build/doc/src/path.xml249
-rw-r--r--tools/build/doc/src/project-target.xml (renamed from tools/build/v2/doc/src/project-target.xml)0
-rw-r--r--tools/build/doc/src/property-set.xml (renamed from tools/build/v2/doc/src/property-set.xml)0
-rw-r--r--tools/build/doc/src/recipes.xml11
-rw-r--r--tools/build/doc/src/reference.xml2737
-rw-r--r--tools/build/doc/src/regex.xml170
-rw-r--r--tools/build/doc/src/sequence.xml135
-rw-r--r--tools/build/doc/src/standalone.xml48
-rw-r--r--tools/build/doc/src/tasks.xml842
-rw-r--r--tools/build/doc/src/tutorial.xml682
-rw-r--r--tools/build/doc/src/type.xml (renamed from tools/build/v2/doc/src/type.xml)0
-rw-r--r--tools/build/doc/src/typed-target.xml (renamed from tools/build/v2/doc/src/typed-target.xml)0
-rw-r--r--tools/build/doc/src/userman.xml41
-rw-r--r--tools/build/doc/tools.html (renamed from tools/build/v2/doc/tools.html)0
-rw-r--r--tools/build/example/boost-build.jam6
-rw-r--r--tools/build/example/built_tool/Jamroot.jam (renamed from tools/build/v2/example/built_tool/Jamroot.jam)0
-rw-r--r--tools/build/example/built_tool/core/Jamfile.jam (renamed from tools/build/v2/example/built_tool/core/Jamfile.jam)0
-rw-r--r--tools/build/example/built_tool/core/a.td (renamed from tools/build/v2/example/built_tool/core/a.td)0
-rw-r--r--tools/build/example/built_tool/core/core.cpp (renamed from tools/build/v2/example/built_tool/core/core.cpp)0
-rw-r--r--tools/build/example/built_tool/readme.txt (renamed from tools/build/v2/example/built_tool/readme.txt)0
-rw-r--r--tools/build/example/built_tool/tblgen/Jamfile.jam (renamed from tools/build/v2/example/built_tool/tblgen/Jamfile.jam)0
-rw-r--r--tools/build/example/built_tool/tblgen/tblgen.cpp (renamed from tools/build/v2/example/built_tool/tblgen/tblgen.cpp)0
-rw-r--r--tools/build/example/customization/class.verbatim (renamed from tools/build/v2/example/customization/class.verbatim)0
-rw-r--r--tools/build/example/customization/codegen.cpp (renamed from tools/build/v2/example/customization/codegen.cpp)0
-rw-r--r--[-rwxr-xr-x]tools/build/example/customization/inline_file.py (renamed from tools/build/v2/example/customization/inline_file.py)0
-rw-r--r--tools/build/example/customization/jamroot.jam (renamed from tools/build/v2/example/customization/jamroot.jam)0
-rw-r--r--tools/build/example/customization/readme.txt (renamed from tools/build/v2/example/customization/readme.txt)0
-rw-r--r--tools/build/example/customization/t1.verbatim (renamed from tools/build/v2/example/customization/t1.verbatim)0
-rw-r--r--tools/build/example/customization/t2.verbatim (renamed from tools/build/v2/example/customization/t2.verbatim)0
-rw-r--r--tools/build/example/customization/usage.verbatim (renamed from tools/build/v2/example/customization/usage.verbatim)0
-rw-r--r--tools/build/example/customization/verbatim.jam (renamed from tools/build/v2/example/customization/verbatim.jam)0
-rw-r--r--tools/build/example/customization/verbatim.py (renamed from tools/build/v2/example/customization/verbatim.py)0
-rw-r--r--tools/build/example/generate/REAME.txt (renamed from tools/build/v2/example/generate/REAME.txt)0
-rw-r--r--tools/build/example/generate/a.cpp (renamed from tools/build/v2/example/generate/a.cpp)0
-rw-r--r--tools/build/example/generate/gen.jam (renamed from tools/build/v2/example/generate/gen.jam)0
-rw-r--r--tools/build/example/generate/gen.py (renamed from tools/build/v2/example/generate/gen.py)0
-rw-r--r--tools/build/example/generate/jamroot.jam (renamed from tools/build/v2/example/generate/jamroot.jam)0
-rw-r--r--tools/build/example/generator/README.txt (renamed from tools/build/v2/example/generator/README.txt)0
-rw-r--r--tools/build/example/generator/foo.gci (renamed from tools/build/v2/example/generator/foo.gci)0
-rw-r--r--tools/build/example/generator/jamroot.jam (renamed from tools/build/v2/example/generator/jamroot.jam)0
-rw-r--r--tools/build/example/generator/soap.jam (renamed from tools/build/v2/example/generator/soap.jam)0
-rw-r--r--tools/build/example/gettext/jamfile.jam (renamed from tools/build/v2/example/gettext/jamfile.jam)0
-rw-r--r--tools/build/example/gettext/jamroot.jam (renamed from tools/build/v2/example/gettext/jamroot.jam)0
-rw-r--r--tools/build/example/gettext/main.cpp (renamed from tools/build/v2/example/gettext/main.cpp)0
-rw-r--r--tools/build/example/gettext/readme.txt (renamed from tools/build/v2/example/gettext/readme.txt)0
-rw-r--r--tools/build/example/gettext/russian.po (renamed from tools/build/v2/example/gettext/russian.po)0
-rw-r--r--tools/build/example/hello/hello.cpp (renamed from tools/build/v2/example/hello/hello.cpp)0
-rw-r--r--tools/build/example/hello/jamroot.jam (renamed from tools/build/v2/example/hello/jamroot.jam)0
-rw-r--r--tools/build/example/hello/readme.txt (renamed from tools/build/v2/example/hello/readme.txt)0
-rw-r--r--tools/build/example/libraries/app/app.cpp (renamed from tools/build/v2/example/libraries/app/app.cpp)0
-rw-r--r--tools/build/example/libraries/app/jamfile.jam (renamed from tools/build/v2/example/libraries/app/jamfile.jam)0
-rw-r--r--tools/build/example/libraries/jamroot.jam (renamed from tools/build/v2/example/libraries/jamroot.jam)0
-rw-r--r--tools/build/example/libraries/util/foo/bar.cpp (renamed from tools/build/v2/example/libraries/util/foo/bar.cpp)0
-rw-r--r--tools/build/example/libraries/util/foo/include/lib1.h (renamed from tools/build/v2/example/libraries/util/foo/include/lib1.h)0
-rw-r--r--tools/build/example/libraries/util/foo/jamfile.jam (renamed from tools/build/v2/example/libraries/util/foo/jamfile.jam)0
-rw-r--r--tools/build/example/make/foo.py (renamed from tools/build/v2/example/make/foo.py)0
-rw-r--r--tools/build/example/make/jamroot.jam (renamed from tools/build/v2/example/make/jamroot.jam)0
-rw-r--r--tools/build/example/make/main_cpp.pro (renamed from tools/build/v2/example/make/main_cpp.pro)0
-rw-r--r--tools/build/example/make/readme.txt (renamed from tools/build/v2/example/make/readme.txt)0
-rw-r--r--tools/build/example/pch/include/pch.hpp (renamed from tools/build/v2/example/pch/include/pch.hpp)0
-rw-r--r--tools/build/example/pch/jamroot.jam (renamed from tools/build/v2/example/pch/jamroot.jam)0
-rw-r--r--tools/build/example/pch/source/hello_world.cpp (renamed from tools/build/v2/example/pch/source/hello_world.cpp)0
-rw-r--r--tools/build/example/python_modules/jamroot.jam (renamed from tools/build/v2/example/python_modules/jamroot.jam)0
-rw-r--r--tools/build/example/python_modules/python_helpers.jam (renamed from tools/build/v2/example/python_modules/python_helpers.jam)0
-rw-r--r--tools/build/example/python_modules/python_helpers.py (renamed from tools/build/v2/example/python_modules/python_helpers.py)0
-rw-r--r--tools/build/example/python_modules/readme.txt (renamed from tools/build/v2/example/python_modules/readme.txt)0
-rw-r--r--tools/build/example/qt/README.txt (renamed from tools/build/v2/example/qt/README.txt)0
-rw-r--r--tools/build/example/qt/qt3/hello/canvas.cpp (renamed from tools/build/v2/example/qt/qt3/hello/canvas.cpp)0
-rw-r--r--tools/build/example/qt/qt3/hello/canvas.h (renamed from tools/build/v2/example/qt/qt3/hello/canvas.h)0
-rw-r--r--tools/build/example/qt/qt3/hello/jamroot.jam (renamed from tools/build/v2/example/qt/qt3/hello/jamroot.jam)0
-rw-r--r--tools/build/example/qt/qt3/hello/main.cpp (renamed from tools/build/v2/example/qt/qt3/hello/main.cpp)0
-rw-r--r--tools/build/example/qt/qt3/moccable-cpp/jamroot.jam (renamed from tools/build/v2/example/qt/qt3/moccable-cpp/jamroot.jam)0
-rw-r--r--tools/build/example/qt/qt3/moccable-cpp/main.cpp (renamed from tools/build/v2/example/qt/qt3/moccable-cpp/main.cpp)0
-rw-r--r--tools/build/example/qt/qt3/uic/hello_world_widget.ui (renamed from tools/build/v2/example/qt/qt3/uic/hello_world_widget.ui)0
-rw-r--r--tools/build/example/qt/qt3/uic/jamroot.jam (renamed from tools/build/v2/example/qt/qt3/uic/jamroot.jam)0
-rw-r--r--tools/build/example/qt/qt3/uic/main.cpp (renamed from tools/build/v2/example/qt/qt3/uic/main.cpp)0
-rw-r--r--tools/build/example/qt/qt4/hello/arrow.cpp (renamed from tools/build/v2/example/qt/qt4/hello/arrow.cpp)0
-rw-r--r--tools/build/example/qt/qt4/hello/arrow.h (renamed from tools/build/v2/example/qt/qt4/hello/arrow.h)0
-rw-r--r--tools/build/example/qt/qt4/hello/jamroot.jam (renamed from tools/build/v2/example/qt/qt4/hello/jamroot.jam)0
-rw-r--r--tools/build/example/qt/qt4/hello/main.cpp (renamed from tools/build/v2/example/qt/qt4/hello/main.cpp)0
-rw-r--r--tools/build/example/qt/qt4/moccable-cpp/jamroot.jam (renamed from tools/build/v2/example/qt/qt4/moccable-cpp/jamroot.jam)0
-rw-r--r--tools/build/example/qt/qt4/moccable-cpp/main.cpp (renamed from tools/build/v2/example/qt/qt4/moccable-cpp/main.cpp)0
-rw-r--r--tools/build/example/qt/qt4/uic/hello_world_widget.ui (renamed from tools/build/v2/example/qt/qt4/uic/hello_world_widget.ui)0
-rw-r--r--tools/build/example/qt/qt4/uic/jamroot.jam (renamed from tools/build/v2/example/qt/qt4/uic/jamroot.jam)0
-rw-r--r--tools/build/example/qt/qt4/uic/main.cpp (renamed from tools/build/v2/example/qt/qt4/uic/main.cpp)0
-rw-r--r--tools/build/example/site-config.jam (renamed from tools/build/v2/site-config.jam)0
-rw-r--r--tools/build/example/testing/compile-fail.cpp16
-rw-r--r--tools/build/example/testing/fail.cpp16
-rw-r--r--tools/build/example/testing/jamroot.jam10
-rw-r--r--tools/build/example/testing/success.cpp16
-rw-r--r--tools/build/example/user-config.jam (renamed from tools/build/v2/user-config.jam)0
-rw-r--r--tools/build/example/variant/a.cpp (renamed from tools/build/v2/example/variant/a.cpp)0
-rw-r--r--tools/build/example/variant/jamfile.jam (renamed from tools/build/v2/example/variant/jamfile.jam)0
-rw-r--r--tools/build/example/variant/jamroot.jam (renamed from tools/build/v2/example/variant/jamroot.jam)0
-rw-r--r--tools/build/example/variant/libs/jamfile.jam (renamed from tools/build/v2/example/variant/libs/jamfile.jam)0
-rw-r--r--tools/build/example/variant/libs/l.cpp (renamed from tools/build/v2/example/variant/libs/l.cpp)0
-rw-r--r--tools/build/example/variant/readme.txt (renamed from tools/build/v2/example/variant/readme.txt)0
-rw-r--r--tools/build/index.html184
-rw-r--r--tools/build/notes/README.txt8
-rw-r--r--tools/build/notes/build_dir_option.txt (renamed from tools/build/v2/notes/build_dir_option.txt)0
-rw-r--r--tools/build/notes/changes.txt (renamed from tools/build/v2/changes.txt)0
-rw-r--r--tools/build/notes/hacking.txt138
-rw-r--r--tools/build/notes/relative_source_paths.txt (renamed from tools/build/v2/notes/relative_source_paths.txt)0
-rw-r--r--tools/build/notes/release_procedure.txt (renamed from tools/build/v2/release_procedure.txt)0
-rwxr-xr-xtools/build/scripts/nightly.sh (renamed from tools/build/v2/nightly.sh)0
-rwxr-xr-xtools/build/scripts/roll.sh64
-rwxr-xr-xtools/build/scripts/to_merge.sh (renamed from tools/build/v2/to_merge.sh)0
-rw-r--r--tools/build/src/bootstrap.jam (renamed from tools/build/v2/bootstrap.jam)0
-rw-r--r--tools/build/src/build-system.jam981
-rw-r--r--tools/build/src/build/__init__.py (renamed from tools/build/v2/build/__init__.py)0
-rw-r--r--tools/build/src/build/ac.jam303
-rw-r--r--tools/build/src/build/alias.jam74
-rwxr-xr-xtools/build/src/build/alias.py (renamed from tools/build/v2/build/alias.py)0
-rw-r--r--tools/build/src/build/build-request.jam322
-rw-r--r--tools/build/src/build/build_request.py216
-rw-r--r--tools/build/src/build/config-cache.jam64
-rw-r--r--tools/build/src/build/configure.jam292
-rw-r--r--tools/build/src/build/configure.py (renamed from tools/build/v2/build/configure.py)0
-rw-r--r--tools/build/src/build/engine.py202
-rw-r--r--tools/build/src/build/errors.py (renamed from tools/build/v2/build/errors.py)0
-rw-r--r--tools/build/src/build/feature.jam1350
-rw-r--r--tools/build/src/build/feature.py907
-rw-r--r--tools/build/src/build/generators.jam1420
-rw-r--r--tools/build/src/build/generators.py1097
-rw-r--r--tools/build/src/build/project.ann.py (renamed from tools/build/v2/build/project.ann.py)0
-rw-r--r--tools/build/src/build/project.jam1228
-rw-r--r--tools/build/src/build/project.py1148
-rw-r--r--tools/build/src/build/property-set.jam517
-rw-r--r--tools/build/src/build/property.jam905
-rw-r--r--tools/build/src/build/property.py611
-rw-r--r--tools/build/src/build/property_set.py460
-rw-r--r--tools/build/src/build/readme.txt11
-rw-r--r--tools/build/src/build/scanner.jam163
-rw-r--r--tools/build/src/build/scanner.py (renamed from tools/build/v2/build/scanner.py)0
-rw-r--r--tools/build/src/build/targets.jam1698
-rw-r--r--tools/build/src/build/targets.py1401
-rw-r--r--tools/build/src/build/toolset.jam603
-rw-r--r--tools/build/src/build/toolset.py399
-rw-r--r--tools/build/src/build/type.jam401
-rw-r--r--tools/build/src/build/type.py (renamed from tools/build/v2/build/type.py)0
-rw-r--r--tools/build/src/build/version.jam165
-rw-r--r--tools/build/src/build/virtual-target.jam1344
-rw-r--r--tools/build/src/build/virtual_target.py1107
-rw-r--r--tools/build/src/build_system.py860
-rw-r--r--tools/build/src/contrib/__init__.py (renamed from tools/build/v2/tools/__init__.py)0
-rw-r--r--tools/build/src/contrib/boost.jam304
-rw-r--r--tools/build/src/contrib/boost.py279
-rw-r--r--tools/build/src/contrib/tntnet.jam (renamed from tools/build/v2/contrib/tntnet.jam)0
-rw-r--r--tools/build/src/contrib/wxFormBuilder.jam (renamed from tools/build/v2/contrib/wxFormBuilder.jam)0
-rw-r--r--tools/build/src/engine/Jambase (renamed from tools/build/v2/engine/Jambase)0
-rw-r--r--tools/build/src/engine/boehm_gc/AmigaOS.c (renamed from tools/build/v2/engine/boehm_gc/AmigaOS.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/BCC_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/BCC_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/ChangeLog (renamed from tools/build/v2/engine/boehm_gc/ChangeLog)0
-rw-r--r--tools/build/src/engine/boehm_gc/EMX_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/EMX_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/MacOS.c (renamed from tools/build/v2/engine/boehm_gc/MacOS.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/MacProjects.sit.hqx (renamed from tools/build/v2/engine/boehm_gc/MacProjects.sit.hqx)0
-rw-r--r--tools/build/src/engine/boehm_gc/Mac_files/MacOS_Test_config.h (renamed from tools/build/v2/engine/boehm_gc/Mac_files/MacOS_Test_config.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/Mac_files/MacOS_config.h (renamed from tools/build/v2/engine/boehm_gc/Mac_files/MacOS_config.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/Mac_files/dataend.c (renamed from tools/build/v2/engine/boehm_gc/Mac_files/dataend.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/Mac_files/datastart.c (renamed from tools/build/v2/engine/boehm_gc/Mac_files/datastart.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/Makefile.DLLs (renamed from tools/build/v2/engine/boehm_gc/Makefile.DLLs)0
-rw-r--r--tools/build/src/engine/boehm_gc/Makefile.am (renamed from tools/build/v2/engine/boehm_gc/Makefile.am)0
-rw-r--r--tools/build/src/engine/boehm_gc/Makefile.direct (renamed from tools/build/v2/engine/boehm_gc/Makefile.direct)0
-rw-r--r--tools/build/src/engine/boehm_gc/Makefile.dj (renamed from tools/build/v2/engine/boehm_gc/Makefile.dj)0
-rw-r--r--tools/build/src/engine/boehm_gc/Makefile.in (renamed from tools/build/v2/engine/boehm_gc/Makefile.in)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/NT_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/NT_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/NT_STATIC_THREADS_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/NT_STATIC_THREADS_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/NT_THREADS_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/NT_THREADS_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/NT_X64_STATIC_THREADS_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/NT_X64_STATIC_THREADS_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/OS2_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/OS2_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/PCR-Makefile (renamed from tools/build/v2/engine/boehm_gc/PCR-Makefile)0
-rw-r--r--tools/build/src/engine/boehm_gc/README.QUICK (renamed from tools/build/v2/engine/boehm_gc/README.QUICK)0
-rw-r--r--tools/build/src/engine/boehm_gc/SMakefile.amiga (renamed from tools/build/v2/engine/boehm_gc/SMakefile.amiga)0
-rw-r--r--tools/build/src/engine/boehm_gc/WCC_MAKEFILE (renamed from tools/build/v2/engine/boehm_gc/WCC_MAKEFILE)0
-rw-r--r--tools/build/src/engine/boehm_gc/acinclude.m4 (renamed from tools/build/v2/engine/boehm_gc/acinclude.m4)0
-rw-r--r--tools/build/src/engine/boehm_gc/aclocal.m4 (renamed from tools/build/v2/engine/boehm_gc/aclocal.m4)0
-rw-r--r--tools/build/src/engine/boehm_gc/add_gc_prefix.c (renamed from tools/build/v2/engine/boehm_gc/add_gc_prefix.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/allchblk.c (renamed from tools/build/v2/engine/boehm_gc/allchblk.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/alloc.c (renamed from tools/build/v2/engine/boehm_gc/alloc.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/alpha_mach_dep.S (renamed from tools/build/v2/engine/boehm_gc/alpha_mach_dep.S)0
-rw-r--r--tools/build/src/engine/boehm_gc/backgraph.c (renamed from tools/build/v2/engine/boehm_gc/backgraph.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/bdw-gc.pc (renamed from tools/build/v2/engine/boehm_gc/bdw-gc.pc)0
-rw-r--r--tools/build/src/engine/boehm_gc/bdw-gc.pc.in (renamed from tools/build/v2/engine/boehm_gc/bdw-gc.pc.in)0
-rw-r--r--tools/build/src/engine/boehm_gc/blacklst.c (renamed from tools/build/v2/engine/boehm_gc/blacklst.c)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/callprocs (renamed from tools/build/v2/engine/boehm_gc/callprocs)0
-rw-r--r--tools/build/src/engine/boehm_gc/checksums.c (renamed from tools/build/v2/engine/boehm_gc/checksums.c)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/compile (renamed from tools/build/v2/engine/boehm_gc/compile)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/config.guess (renamed from tools/build/v2/engine/boehm_gc/config.guess)0
-rw-r--r--tools/build/src/engine/boehm_gc/config.sub (renamed from tools/build/v2/engine/boehm_gc/config.sub)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/configure (renamed from tools/build/v2/engine/boehm_gc/configure)0
-rw-r--r--tools/build/src/engine/boehm_gc/configure.ac (renamed from tools/build/v2/engine/boehm_gc/configure.ac)0
-rw-r--r--tools/build/src/engine/boehm_gc/configure.host (renamed from tools/build/v2/engine/boehm_gc/configure.host)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/configure_atomic_ops.sh (renamed from tools/build/v2/engine/boehm_gc/configure_atomic_ops.sh)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/cord.am (renamed from tools/build/v2/engine/boehm_gc/cord/cord.am)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/cordbscs.c (renamed from tools/build/v2/engine/boehm_gc/cord/cordbscs.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/cordprnt.c (renamed from tools/build/v2/engine/boehm_gc/cord/cordprnt.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/cordtest.c (renamed from tools/build/v2/engine/boehm_gc/cord/cordtest.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/cordxtra.c (renamed from tools/build/v2/engine/boehm_gc/cord/cordxtra.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/de.c (renamed from tools/build/v2/engine/boehm_gc/cord/de.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/de_cmds.h (renamed from tools/build/v2/engine/boehm_gc/cord/de_cmds.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/de_win.ICO (renamed from tools/build/v2/engine/boehm_gc/cord/de_win.ICO)bin766 -> 766 bytes
-rw-r--r--tools/build/src/engine/boehm_gc/cord/de_win.RC (renamed from tools/build/v2/engine/boehm_gc/cord/de_win.RC)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/de_win.c (renamed from tools/build/v2/engine/boehm_gc/cord/de_win.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/cord/de_win.h (renamed from tools/build/v2/engine/boehm_gc/cord/de_win.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/darwin_stop_world.c (renamed from tools/build/v2/engine/boehm_gc/darwin_stop_world.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/dbg_mlc.c (renamed from tools/build/v2/engine/boehm_gc/dbg_mlc.c)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/depcomp (renamed from tools/build/v2/engine/boehm_gc/depcomp)0
-rw-r--r--tools/build/src/engine/boehm_gc/digimars.mak (renamed from tools/build/v2/engine/boehm_gc/digimars.mak)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README (renamed from tools/build/v2/engine/boehm_gc/doc/README)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.DGUX386 (renamed from tools/build/v2/engine/boehm_gc/doc/README.DGUX386)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.Mac (renamed from tools/build/v2/engine/boehm_gc/doc/README.Mac)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.MacOSX (renamed from tools/build/v2/engine/boehm_gc/doc/README.MacOSX)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.OS2 (renamed from tools/build/v2/engine/boehm_gc/doc/README.OS2)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.amiga (renamed from tools/build/v2/engine/boehm_gc/doc/README.amiga)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.arm.cross (renamed from tools/build/v2/engine/boehm_gc/doc/README.arm.cross)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.autoconf (renamed from tools/build/v2/engine/boehm_gc/doc/README.autoconf)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.changes (renamed from tools/build/v2/engine/boehm_gc/doc/README.changes)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.contributors (renamed from tools/build/v2/engine/boehm_gc/doc/README.contributors)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.cords (renamed from tools/build/v2/engine/boehm_gc/doc/README.cords)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.darwin (renamed from tools/build/v2/engine/boehm_gc/doc/README.darwin)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.dj (renamed from tools/build/v2/engine/boehm_gc/doc/README.dj)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.environment (renamed from tools/build/v2/engine/boehm_gc/doc/README.environment)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.ews4800 (renamed from tools/build/v2/engine/boehm_gc/doc/README.ews4800)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.hp (renamed from tools/build/v2/engine/boehm_gc/doc/README.hp)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.linux (renamed from tools/build/v2/engine/boehm_gc/doc/README.linux)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.macros (renamed from tools/build/v2/engine/boehm_gc/doc/README.macros)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.rs6000 (renamed from tools/build/v2/engine/boehm_gc/doc/README.rs6000)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.sgi (renamed from tools/build/v2/engine/boehm_gc/doc/README.sgi)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.solaris2 (renamed from tools/build/v2/engine/boehm_gc/doc/README.solaris2)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.uts (renamed from tools/build/v2/engine/boehm_gc/doc/README.uts)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.win32 (renamed from tools/build/v2/engine/boehm_gc/doc/README.win32)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/README.win64 (renamed from tools/build/v2/engine/boehm_gc/doc/README.win64)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/barrett_diagram (renamed from tools/build/v2/engine/boehm_gc/doc/barrett_diagram)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/debugging.html (renamed from tools/build/v2/engine/boehm_gc/doc/debugging.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/doc.am (renamed from tools/build/v2/engine/boehm_gc/doc/doc.am)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/gc.man (renamed from tools/build/v2/engine/boehm_gc/doc/gc.man)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/gcdescr.html (renamed from tools/build/v2/engine/boehm_gc/doc/gcdescr.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/gcinterface.html (renamed from tools/build/v2/engine/boehm_gc/doc/gcinterface.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/leak.html (renamed from tools/build/v2/engine/boehm_gc/doc/leak.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/overview.html (renamed from tools/build/v2/engine/boehm_gc/doc/overview.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/porting.html (renamed from tools/build/v2/engine/boehm_gc/doc/porting.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/scale.html (renamed from tools/build/v2/engine/boehm_gc/doc/scale.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/simple_example.html (renamed from tools/build/v2/engine/boehm_gc/doc/simple_example.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/doc/tree.html (renamed from tools/build/v2/engine/boehm_gc/doc/tree.html)0
-rw-r--r--tools/build/src/engine/boehm_gc/dyn_load.c (renamed from tools/build/v2/engine/boehm_gc/dyn_load.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/finalize.c (renamed from tools/build/v2/engine/boehm_gc/finalize.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/gc.mak (renamed from tools/build/v2/engine/boehm_gc/gc.mak)0
-rw-r--r--tools/build/src/engine/boehm_gc/gc_cpp.cc (renamed from tools/build/v2/engine/boehm_gc/gc_cpp.cc)0
-rw-r--r--tools/build/src/engine/boehm_gc/gc_cpp.cpp (renamed from tools/build/v2/engine/boehm_gc/gc_cpp.cpp)0
-rw-r--r--tools/build/src/engine/boehm_gc/gc_dlopen.c (renamed from tools/build/v2/engine/boehm_gc/gc_dlopen.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/gcj_mlc.c (renamed from tools/build/v2/engine/boehm_gc/gcj_mlc.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/gcname.c (renamed from tools/build/v2/engine/boehm_gc/gcname.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/headers.c (renamed from tools/build/v2/engine/boehm_gc/headers.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/hpux_test_and_clear.s (renamed from tools/build/v2/engine/boehm_gc/hpux_test_and_clear.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/ia64_save_regs_in_stack.s (renamed from tools/build/v2/engine/boehm_gc/ia64_save_regs_in_stack.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/if_mach.c (renamed from tools/build/v2/engine/boehm_gc/if_mach.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/if_not_there.c (renamed from tools/build/v2/engine/boehm_gc/if_not_there.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/cord.h (renamed from tools/build/v2/engine/boehm_gc/include/cord.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/ec.h (renamed from tools/build/v2/engine/boehm_gc/include/ec.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc.h (renamed from tools/build/v2/engine/boehm_gc/include/gc.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_allocator.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_allocator.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_amiga_redirects.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_amiga_redirects.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_backptr.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_backptr.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_config_macros.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_config_macros.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_cpp.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_cpp.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_gcj.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_gcj.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_inline.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_inline.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_mark.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_mark.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_pthread_redirects.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_pthread_redirects.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_tiny_fl.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_tiny_fl.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/gc_typed.h (renamed from tools/build/v2/engine/boehm_gc/include/gc_typed.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/include.am (renamed from tools/build/v2/engine/boehm_gc/include/include.am)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/javaxfc.h (renamed from tools/build/v2/engine/boehm_gc/include/javaxfc.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/leak_detector.h (renamed from tools/build/v2/engine/boehm_gc/include/leak_detector.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/new_gc_alloc.h (renamed from tools/build/v2/engine/boehm_gc/include/new_gc_alloc.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/cord_pos.h (renamed from tools/build/v2/engine/boehm_gc/include/private/cord_pos.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/darwin_semaphore.h (renamed from tools/build/v2/engine/boehm_gc/include/private/darwin_semaphore.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/darwin_stop_world.h (renamed from tools/build/v2/engine/boehm_gc/include/private/darwin_stop_world.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/dbg_mlc.h (renamed from tools/build/v2/engine/boehm_gc/include/private/dbg_mlc.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/gc_hdrs.h (renamed from tools/build/v2/engine/boehm_gc/include/private/gc_hdrs.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/gc_locks.h (renamed from tools/build/v2/engine/boehm_gc/include/private/gc_locks.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/gc_pmark.h (renamed from tools/build/v2/engine/boehm_gc/include/private/gc_pmark.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/gc_priv.h (renamed from tools/build/v2/engine/boehm_gc/include/private/gc_priv.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/gcconfig.h (renamed from tools/build/v2/engine/boehm_gc/include/private/gcconfig.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/msvc_dbg.h (renamed from tools/build/v2/engine/boehm_gc/include/private/msvc_dbg.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/pthread_stop_world.h (renamed from tools/build/v2/engine/boehm_gc/include/private/pthread_stop_world.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/pthread_support.h (renamed from tools/build/v2/engine/boehm_gc/include/private/pthread_support.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/specific.h (renamed from tools/build/v2/engine/boehm_gc/include/private/specific.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/private/thread_local_alloc.h (renamed from tools/build/v2/engine/boehm_gc/include/private/thread_local_alloc.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/include/weakpointer.h (renamed from tools/build/v2/engine/boehm_gc/include/weakpointer.h)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/install-sh (renamed from tools/build/v2/engine/boehm_gc/install-sh)0
-rw-r--r--tools/build/src/engine/boehm_gc/libtool.m4 (renamed from tools/build/v2/engine/boehm_gc/libtool.m4)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/ltmain.sh (renamed from tools/build/v2/engine/boehm_gc/ltmain.sh)0
-rw-r--r--tools/build/src/engine/boehm_gc/mach_dep.c (renamed from tools/build/v2/engine/boehm_gc/mach_dep.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/malloc.c (renamed from tools/build/v2/engine/boehm_gc/malloc.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/mallocx.c (renamed from tools/build/v2/engine/boehm_gc/mallocx.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/mark.c (renamed from tools/build/v2/engine/boehm_gc/mark.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/mark_rts.c (renamed from tools/build/v2/engine/boehm_gc/mark_rts.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/mips_sgi_mach_dep.s (renamed from tools/build/v2/engine/boehm_gc/mips_sgi_mach_dep.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/mips_ultrix_mach_dep.s (renamed from tools/build/v2/engine/boehm_gc/mips_ultrix_mach_dep.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/misc.c (renamed from tools/build/v2/engine/boehm_gc/misc.c)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/missing (renamed from tools/build/v2/engine/boehm_gc/missing)0
-rwxr-xr-xtools/build/src/engine/boehm_gc/mkinstalldirs (renamed from tools/build/v2/engine/boehm_gc/mkinstalldirs)0
-rw-r--r--tools/build/src/engine/boehm_gc/msvc_dbg.c (renamed from tools/build/v2/engine/boehm_gc/msvc_dbg.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/new_hblk.c (renamed from tools/build/v2/engine/boehm_gc/new_hblk.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/obj_map.c (renamed from tools/build/v2/engine/boehm_gc/obj_map.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/os_dep.c (renamed from tools/build/v2/engine/boehm_gc/os_dep.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/pcr_interface.c (renamed from tools/build/v2/engine/boehm_gc/pcr_interface.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/pthread_stop_world.c (renamed from tools/build/v2/engine/boehm_gc/pthread_stop_world.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/pthread_support.c (renamed from tools/build/v2/engine/boehm_gc/pthread_support.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/ptr_chck.c (renamed from tools/build/v2/engine/boehm_gc/ptr_chck.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/real_malloc.c (renamed from tools/build/v2/engine/boehm_gc/real_malloc.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/reclaim.c (renamed from tools/build/v2/engine/boehm_gc/reclaim.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/rs6000_mach_dep.s (renamed from tools/build/v2/engine/boehm_gc/rs6000_mach_dep.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/setjmp_t.c (renamed from tools/build/v2/engine/boehm_gc/setjmp_t.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/sparc_mach_dep.S (renamed from tools/build/v2/engine/boehm_gc/sparc_mach_dep.S)0
-rw-r--r--tools/build/src/engine/boehm_gc/sparc_netbsd_mach_dep.s (renamed from tools/build/v2/engine/boehm_gc/sparc_netbsd_mach_dep.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/sparc_sunos4_mach_dep.s (renamed from tools/build/v2/engine/boehm_gc/sparc_sunos4_mach_dep.s)0
-rw-r--r--tools/build/src/engine/boehm_gc/specific.c (renamed from tools/build/v2/engine/boehm_gc/specific.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/stubborn.c (renamed from tools/build/v2/engine/boehm_gc/stubborn.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/tests/leak_test.c (renamed from tools/build/v2/engine/boehm_gc/tests/leak_test.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/tests/middle.c (renamed from tools/build/v2/engine/boehm_gc/tests/middle.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/tests/test.c (renamed from tools/build/v2/engine/boehm_gc/tests/test.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/tests/test_cpp.cc (renamed from tools/build/v2/engine/boehm_gc/tests/test_cpp.cc)0
-rw-r--r--tools/build/src/engine/boehm_gc/tests/tests.am (renamed from tools/build/v2/engine/boehm_gc/tests/tests.am)0
-rw-r--r--tools/build/src/engine/boehm_gc/tests/thread_leak_test.c (renamed from tools/build/v2/engine/boehm_gc/tests/thread_leak_test.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/thread_local_alloc.c (renamed from tools/build/v2/engine/boehm_gc/thread_local_alloc.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/threadlibs.c (renamed from tools/build/v2/engine/boehm_gc/threadlibs.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/typd_mlc.c (renamed from tools/build/v2/engine/boehm_gc/typd_mlc.c)0
-rw-r--r--tools/build/src/engine/boehm_gc/version.h (renamed from tools/build/v2/engine/boehm_gc/version.h)0
-rw-r--r--tools/build/src/engine/boehm_gc/win32_threads.c (renamed from tools/build/v2/engine/boehm_gc/win32_threads.c)0
-rw-r--r--tools/build/src/engine/boost-jam.spec (renamed from tools/build/v2/engine/boost-jam.spec)0
-rw-r--r--tools/build/src/engine/boost-no-inspect (renamed from tools/build/v2/engine/boost-no-inspect)0
-rw-r--r--tools/build/src/engine/build.bat615
-rw-r--r--tools/build/src/engine/build.jam1030
-rwxr-xr-xtools/build/src/engine/build.sh303
-rw-r--r--tools/build/src/engine/builtins.c2479
-rw-r--r--tools/build/src/engine/builtins.h71
-rw-r--r--tools/build/src/engine/bump_version.py98
-rw-r--r--tools/build/src/engine/class.c191
-rw-r--r--tools/build/src/engine/class.h (renamed from tools/build/v2/engine/class.h)0
-rw-r--r--tools/build/src/engine/command.c121
-rw-r--r--tools/build/src/engine/command.h100
-rw-r--r--tools/build/src/engine/compile.c232
-rw-r--r--tools/build/src/engine/compile.h59
-rw-r--r--tools/build/src/engine/constants.c186
-rw-r--r--tools/build/src/engine/constants.h73
-rw-r--r--tools/build/src/engine/cwd.c83
-rw-r--r--tools/build/src/engine/cwd.h35
-rw-r--r--tools/build/src/engine/debian/changelog (renamed from tools/build/v2/engine/debian/changelog)0
-rw-r--r--tools/build/src/engine/debian/control (renamed from tools/build/v2/engine/debian/control)0
-rw-r--r--tools/build/src/engine/debian/copyright (renamed from tools/build/v2/engine/debian/copyright)0
-rw-r--r--tools/build/src/engine/debian/jam.man.sgml (renamed from tools/build/v2/engine/debian/jam.man.sgml)0
-rwxr-xr-xtools/build/src/engine/debian/rules (renamed from tools/build/v2/engine/debian/rules)0
-rw-r--r--tools/build/src/engine/debug.c145
-rw-r--r--tools/build/src/engine/debug.h55
-rw-r--r--tools/build/src/engine/execcmd.c121
-rw-r--r--tools/build/src/engine/execcmd.h102
-rw-r--r--tools/build/src/engine/execnt.c1400
-rw-r--r--tools/build/src/engine/execunix.c559
-rw-r--r--tools/build/src/engine/filent.c448
-rw-r--r--tools/build/src/engine/filesys.c326
-rw-r--r--tools/build/src/engine/filesys.h57
-rw-r--r--tools/build/src/engine/fileunix.c464
-rw-r--r--tools/build/src/engine/frames.c29
-rw-r--r--tools/build/src/engine/frames.h42
-rw-r--r--tools/build/src/engine/function.c4870
-rw-r--r--tools/build/src/engine/function.h (renamed from tools/build/v2/engine/function.h)0
-rw-r--r--tools/build/src/engine/glob.c (renamed from tools/build/v2/engine/glob.c)0
-rw-r--r--tools/build/src/engine/hash.c387
-rw-r--r--tools/build/src/engine/hash.h79
-rw-r--r--tools/build/src/engine/hcache.c519
-rw-r--r--tools/build/src/engine/hcache.h19
-rw-r--r--tools/build/src/engine/hdrmacro.c139
-rw-r--r--tools/build/src/engine/hdrmacro.h21
-rw-r--r--tools/build/src/engine/headers.c197
-rw-r--r--tools/build/src/engine/headers.h (renamed from tools/build/v2/engine/headers.h)0
-rw-r--r--tools/build/src/engine/jam.c656
-rw-r--r--tools/build/src/engine/jam.h475
-rw-r--r--tools/build/src/engine/jambase.c (renamed from tools/build/v2/engine/jambase.c)0
-rw-r--r--tools/build/src/engine/jambase.h (renamed from tools/build/v2/engine/jambase.h)0
-rw-r--r--tools/build/src/engine/jamgram.c (renamed from tools/build/v2/engine/jamgram.c)0
-rw-r--r--tools/build/src/engine/jamgram.h (renamed from tools/build/v2/engine/jamgram.h)0
-rw-r--r--tools/build/src/engine/jamgram.y (renamed from tools/build/v2/engine/jamgram.y)0
-rw-r--r--tools/build/src/engine/jamgram.yy (renamed from tools/build/v2/engine/jamgram.yy)0
-rw-r--r--tools/build/src/engine/jamgramtab.h (renamed from tools/build/v2/engine/jamgramtab.h)0
-rw-r--r--tools/build/src/engine/lists.c475
-rw-r--r--tools/build/src/engine/lists.h113
-rw-r--r--tools/build/src/engine/make.c935
-rw-r--r--tools/build/src/engine/make.h44
-rw-r--r--tools/build/src/engine/make1.c1460
-rw-r--r--tools/build/src/engine/md5.c (renamed from tools/build/v2/engine/md5.c)0
-rw-r--r--tools/build/src/engine/md5.h (renamed from tools/build/v2/engine/md5.h)0
-rw-r--r--tools/build/src/engine/mem.c (renamed from tools/build/v2/engine/mem.c)0
-rw-r--r--tools/build/src/engine/mem.h133
-rw-r--r--tools/build/src/engine/mkjambase.c (renamed from tools/build/v2/engine/mkjambase.c)0
-rw-r--r--tools/build/src/engine/modules.c431
-rw-r--r--tools/build/src/engine/modules.h52
-rw-r--r--tools/build/src/engine/modules/order.c160
-rw-r--r--tools/build/src/engine/modules/path.c25
-rw-r--r--tools/build/src/engine/modules/property-set.c330
-rw-r--r--tools/build/src/engine/modules/readme.txt (renamed from tools/build/v2/engine/modules/readme.txt)0
-rw-r--r--tools/build/src/engine/modules/regex.c220
-rw-r--r--tools/build/src/engine/modules/sequence.c97
-rw-r--r--tools/build/src/engine/modules/set.c (renamed from tools/build/v2/engine/modules/set.c)0
-rw-r--r--tools/build/src/engine/native.c34
-rw-r--r--tools/build/src/engine/native.h34
-rw-r--r--tools/build/src/engine/object.c394
-rw-r--r--tools/build/src/engine/object.h44
-rw-r--r--tools/build/src/engine/option.c (renamed from tools/build/v2/engine/option.c)0
-rw-r--r--tools/build/src/engine/option.h23
-rw-r--r--tools/build/src/engine/output.c98
-rw-r--r--tools/build/src/engine/output.h30
-rw-r--r--tools/build/src/engine/parse.c132
-rw-r--r--tools/build/src/engine/parse.h76
-rw-r--r--tools/build/src/engine/patchlevel.h17
-rw-r--r--tools/build/src/engine/pathnt.c308
-rw-r--r--tools/build/src/engine/pathsys.c285
-rw-r--r--tools/build/src/engine/pathsys.h85
-rw-r--r--tools/build/src/engine/pathunix.c71
-rw-r--r--tools/build/src/engine/regexp.c1329
-rw-r--r--tools/build/src/engine/regexp.h34
-rw-r--r--tools/build/src/engine/rules.c740
-rw-r--r--tools/build/src/engine/rules.h275
-rw-r--r--tools/build/src/engine/scan.c404
-rw-r--r--tools/build/src/engine/scan.h61
-rw-r--r--tools/build/src/engine/search.c274
-rw-r--r--tools/build/src/engine/search.h22
-rw-r--r--tools/build/src/engine/strings.c223
-rw-r--r--tools/build/src/engine/strings.h36
-rw-r--r--tools/build/src/engine/subst.c116
-rw-r--r--tools/build/src/engine/subst.h14
-rw-r--r--tools/build/src/engine/timestamp.c262
-rw-r--r--tools/build/src/engine/timestamp.h46
-rw-r--r--tools/build/src/engine/variable.c345
-rw-r--r--tools/build/src/engine/variable.h34
-rw-r--r--tools/build/src/engine/w32_getreg.c (renamed from tools/build/v2/engine/w32_getreg.c)0
-rw-r--r--tools/build/src/engine/yyacc.c (renamed from tools/build/v2/engine/yyacc.c)0
-rw-r--r--tools/build/src/exceptions.py (renamed from tools/build/v2/exceptions.py)0
-rwxr-xr-x[-rw-r--r--]tools/build/src/kernel/boost-build.jam (renamed from tools/build/v2/kernel/boost-build.jam)0
-rw-r--r--tools/build/src/kernel/bootstrap.jam266
-rw-r--r--tools/build/src/kernel/bootstrap.py (renamed from tools/build/v2/kernel/bootstrap.py)0
-rw-r--r--tools/build/src/kernel/class.jam420
-rw-r--r--tools/build/src/kernel/errors.jam287
-rw-r--r--tools/build/src/kernel/modules.jam359
-rw-r--r--tools/build/src/manager.py (renamed from tools/build/v2/manager.py)0
-rwxr-xr-xtools/build/src/options/help.jam222
-rw-r--r--tools/build/src/tools/__init__.py (renamed from tools/build/v2/debian/copyright)0
-rw-r--r--tools/build/src/tools/acc.jam (renamed from tools/build/v2/tools/acc.jam)0
-rw-r--r--tools/build/src/tools/auto-index.jam204
-rw-r--r--tools/build/src/tools/bison.jam (renamed from tools/build/v2/tools/bison.jam)0
-rw-r--r--tools/build/src/tools/boostbook-config.jam (renamed from tools/build/v2/tools/boostbook-config.jam)0
-rw-r--r--tools/build/src/tools/boostbook.jam771
-rw-r--r--tools/build/src/tools/borland.jam221
-rw-r--r--tools/build/src/tools/builtin.jam974
-rw-r--r--tools/build/src/tools/builtin.py728
-rw-r--r--tools/build/src/tools/cast.jam91
-rw-r--r--tools/build/src/tools/cast.py (renamed from tools/build/v2/tools/cast.py)0
-rw-r--r--tools/build/src/tools/clang-darwin.jam170
-rw-r--r--tools/build/src/tools/clang-linux.jam213
-rw-r--r--tools/build/src/tools/clang-win.jam175
-rw-r--r--tools/build/src/tools/clang.jam (renamed from tools/build/v2/tools/clang.jam)0
-rw-r--r--tools/build/src/tools/common.jam980
-rw-r--r--tools/build/src/tools/common.py858
-rw-r--r--tools/build/src/tools/common_clang_vc.jam987
-rw-r--r--tools/build/src/tools/como-linux.jam (renamed from tools/build/v2/tools/como-linux.jam)0
-rw-r--r--tools/build/src/tools/como-win.jam (renamed from tools/build/v2/tools/como-win.jam)0
-rw-r--r--tools/build/src/tools/como.jam (renamed from tools/build/v2/tools/como.jam)0
-rw-r--r--tools/build/src/tools/convert.jam (renamed from tools/build/v2/tools/convert.jam)0
-rw-r--r--tools/build/src/tools/cray.jam125
-rw-r--r--tools/build/src/tools/cw-config.jam (renamed from tools/build/v2/tools/cw-config.jam)0
-rw-r--r--tools/build/src/tools/cw.jam (renamed from tools/build/v2/tools/cw.jam)0
-rw-r--r--tools/build/src/tools/darwin.jam590
-rw-r--r--tools/build/src/tools/darwin.py (renamed from tools/build/v2/tools/darwin.py)0
-rw-r--r--tools/build/src/tools/dmc.jam (renamed from tools/build/v2/tools/dmc.jam)0
-rw-r--r--tools/build/src/tools/docutils.jam99
-rw-r--r--tools/build/src/tools/doxproc.py (renamed from tools/build/v2/tools/doxproc.py)0
-rw-r--r--tools/build/src/tools/doxygen-config.jam (renamed from tools/build/v2/tools/doxygen-config.jam)0
-rw-r--r--tools/build/src/tools/doxygen.jam775
-rw-r--r--tools/build/src/tools/doxygen/windows-paths-check.doxyfile (renamed from tools/build/v2/tools/doxygen/windows-paths-check.doxyfile)0
-rw-r--r--tools/build/src/tools/doxygen/windows-paths-check.hpp (renamed from tools/build/v2/tools/doxygen/windows-paths-check.hpp)0
-rw-r--r--tools/build/src/tools/fop.jam (renamed from tools/build/v2/tools/fop.jam)0
-rw-r--r--tools/build/src/tools/fortran.jam (renamed from tools/build/v2/tools/fortran.jam)0
-rw-r--r--tools/build/src/tools/gcc.jam1189
-rw-r--r--tools/build/src/tools/gcc.py842
-rw-r--r--tools/build/src/tools/generate.jam (renamed from tools/build/v2/tools/generate.jam)0
-rw-r--r--tools/build/src/tools/gettext.jam (renamed from tools/build/v2/tools/gettext.jam)0
-rw-r--r--tools/build/src/tools/gfortran.jam (renamed from tools/build/v2/tools/gfortran.jam)0
-rw-r--r--tools/build/src/tools/hp_cxx.jam (renamed from tools/build/v2/tools/hp_cxx.jam)0
-rw-r--r--tools/build/src/tools/hpfortran.jam (renamed from tools/build/v2/tools/hpfortran.jam)0
-rw-r--r--tools/build/src/tools/ifort.jam (renamed from tools/build/v2/tools/ifort.jam)0
-rw-r--r--tools/build/src/tools/intel-darwin.jam227
-rw-r--r--tools/build/src/tools/intel-linux.jam (renamed from tools/build/v2/tools/intel-linux.jam)0
-rw-r--r--tools/build/src/tools/intel-win.jam487
-rw-r--r--tools/build/src/tools/intel.jam (renamed from tools/build/v2/tools/intel.jam)0
-rw-r--r--tools/build/src/tools/lex.jam (renamed from tools/build/v2/tools/lex.jam)0
-rw-r--r--tools/build/src/tools/libjpeg.jam232
-rw-r--r--tools/build/src/tools/libpng.jam226
-rw-r--r--tools/build/src/tools/libtiff.jam229
-rw-r--r--tools/build/src/tools/link.jam500
-rw-r--r--tools/build/src/tools/make.jam63
-rw-r--r--tools/build/src/tools/make.py (renamed from tools/build/v2/tools/make.py)0
-rw-r--r--tools/build/src/tools/mc.jam (renamed from tools/build/v2/tools/mc.jam)0
-rw-r--r--tools/build/src/tools/mc.py (renamed from tools/build/v2/tools/mc.py)0
-rw-r--r--tools/build/src/tools/message.jam62
-rw-r--r--tools/build/src/tools/message.py (renamed from tools/build/v2/tools/message.py)0
-rw-r--r--tools/build/src/tools/midl.jam (renamed from tools/build/v2/tools/midl.jam)0
-rw-r--r--tools/build/src/tools/midl.py134
-rw-r--r--tools/build/src/tools/mipspro.jam (renamed from tools/build/v2/tools/mipspro.jam)0
-rw-r--r--tools/build/src/tools/mpi.jam600
-rw-r--r--tools/build/src/tools/msvc-config.jam (renamed from tools/build/v2/tools/msvc-config.jam)0
-rw-r--r--tools/build/src/tools/msvc.jam1625
-rw-r--r--tools/build/src/tools/msvc.py1237
-rw-r--r--tools/build/src/tools/notfile.jam65
-rw-r--r--tools/build/src/tools/notfile.py (renamed from tools/build/v2/tools/notfile.py)0
-rw-r--r--tools/build/src/tools/package.jam (renamed from tools/build/v2/tools/package.jam)0
-rw-r--r--tools/build/src/tools/package.py (renamed from tools/build/v2/tools/package.py)0
-rw-r--r--tools/build/src/tools/pathscale.jam178
-rw-r--r--tools/build/src/tools/pch.jam (renamed from tools/build/v2/tools/pch.jam)0
-rw-r--r--tools/build/src/tools/pch.py (renamed from tools/build/v2/tools/pch.py)0
-rw-r--r--tools/build/src/tools/pgi.jam147
-rw-r--r--tools/build/src/tools/python-config.jam (renamed from tools/build/v2/tools/python-config.jam)0
-rw-r--r--tools/build/src/tools/python.jam1258
-rw-r--r--tools/build/src/tools/qcc.jam238
-rw-r--r--tools/build/src/tools/qt.jam (renamed from tools/build/v2/tools/qt.jam)0
-rw-r--r--tools/build/src/tools/qt3.jam (renamed from tools/build/v2/tools/qt3.jam)0
-rw-r--r--tools/build/src/tools/qt4.jam755
-rw-r--r--tools/build/src/tools/qt5.jam753
-rw-r--r--tools/build/src/tools/quickbook-config.jam (renamed from tools/build/v2/tools/quickbook-config.jam)0
-rw-r--r--tools/build/src/tools/quickbook.jam (renamed from tools/build/v2/tools/quickbook.jam)0
-rw-r--r--tools/build/src/tools/rc.jam155
-rw-r--r--tools/build/src/tools/rc.py196
-rw-r--r--tools/build/src/tools/stage.jam519
-rw-r--r--tools/build/src/tools/stage.py350
-rw-r--r--tools/build/src/tools/stlport.jam309
-rw-r--r--tools/build/src/tools/sun.jam (renamed from tools/build/v2/tools/sun.jam)0
-rw-r--r--tools/build/src/tools/symlink.jam (renamed from tools/build/v2/tools/symlink.jam)0
-rw-r--r--tools/build/src/tools/symlink.py (renamed from tools/build/v2/tools/symlink.py)0
-rw-r--r--tools/build/src/tools/testing-aux.jam220
-rw-r--r--tools/build/src/tools/testing.jam673
-rw-r--r--tools/build/src/tools/testing.py345
-rw-r--r--tools/build/src/tools/types/__init__.py (renamed from tools/build/v2/tools/types/__init__.py)0
-rw-r--r--tools/build/src/tools/types/asm.jam (renamed from tools/build/v2/tools/types/asm.jam)0
-rw-r--r--tools/build/src/tools/types/asm.py33
-rw-r--r--tools/build/src/tools/types/cpp.jam90
-rw-r--r--tools/build/src/tools/types/cpp.py84
-rw-r--r--tools/build/src/tools/types/exe.jam (renamed from tools/build/v2/tools/types/exe.jam)0
-rw-r--r--tools/build/src/tools/types/exe.py (renamed from tools/build/v2/tools/types/exe.py)0
-rw-r--r--tools/build/src/tools/types/html.jam (renamed from tools/build/v2/tools/types/html.jam)0
-rw-r--r--tools/build/src/tools/types/html.py (renamed from tools/build/v2/tools/types/html.py)0
-rw-r--r--tools/build/src/tools/types/lib.jam (renamed from tools/build/v2/tools/types/lib.jam)0
-rw-r--r--tools/build/src/tools/types/lib.py (renamed from tools/build/v2/tools/types/lib.py)0
-rw-r--r--tools/build/src/tools/types/obj.jam (renamed from tools/build/v2/tools/types/obj.jam)0
-rw-r--r--tools/build/src/tools/types/obj.py (renamed from tools/build/v2/tools/types/obj.py)0
-rw-r--r--tools/build/src/tools/types/objc.jam (renamed from tools/build/v2/tools/types/objc.jam)0
-rw-r--r--tools/build/src/tools/types/preprocessed.jam (renamed from tools/build/v2/tools/types/preprocessed.jam)0
-rw-r--r--tools/build/src/tools/types/preprocessed.py (renamed from tools/build/v2/tools/types/preprocessed.py)0
-rw-r--r--tools/build/src/tools/types/qt.jam12
-rw-r--r--tools/build/src/tools/types/register.jam (renamed from tools/build/v2/tools/types/register.jam)0
-rw-r--r--tools/build/src/tools/types/rsp.jam (renamed from tools/build/v2/tools/types/rsp.jam)0
-rw-r--r--tools/build/src/tools/types/rsp.py (renamed from tools/build/v2/tools/types/rsp.py)0
-rw-r--r--tools/build/src/tools/unix.jam (renamed from tools/build/v2/tools/unix.jam)0
-rw-r--r--tools/build/src/tools/unix.py (renamed from tools/build/v2/tools/unix.py)0
-rw-r--r--tools/build/src/tools/vacpp.jam (renamed from tools/build/v2/tools/vacpp.jam)0
-rw-r--r--tools/build/src/tools/whale.jam (renamed from tools/build/v2/tools/whale.jam)0
-rw-r--r--tools/build/src/tools/xlf.jam (renamed from tools/build/v2/tools/xlf.jam)0
-rw-r--r--tools/build/src/tools/xsltproc-config.jam36
-rw-r--r--tools/build/src/tools/xsltproc.jam205
-rw-r--r--tools/build/src/tools/xsltproc/included.xsl (renamed from tools/build/v2/tools/xsltproc/included.xsl)0
-rw-r--r--tools/build/src/tools/xsltproc/test.xml (renamed from tools/build/v2/tools/xsltproc/test.xml)0
-rw-r--r--tools/build/src/tools/xsltproc/test.xsl (renamed from tools/build/v2/tools/xsltproc/test.xsl)0
-rw-r--r--tools/build/src/tools/zlib.jam227
-rw-r--r--tools/build/src/util/__init__.py (renamed from tools/build/v2/util/__init__.py)0
-rw-r--r--tools/build/src/util/assert.jam346
-rw-r--r--tools/build/src/util/container.jam (renamed from tools/build/v2/util/container.jam)0
-rw-r--r--tools/build/src/util/doc.jam1009
-rw-r--r--tools/build/src/util/indirect.jam117
-rw-r--r--tools/build/src/util/indirect.py (renamed from tools/build/v2/util/indirect.py)0
-rw-r--r--tools/build/src/util/logger.py (renamed from tools/build/v2/util/logger.py)0
-rw-r--r--tools/build/src/util/numbers.jam (renamed from tools/build/v2/util/numbers.jam)0
-rw-r--r--tools/build/src/util/option.jam (renamed from tools/build/v2/util/option.jam)0
-rw-r--r--tools/build/src/util/option.py (renamed from tools/build/v2/util/option.py)0
-rw-r--r--tools/build/src/util/order.jam (renamed from tools/build/v2/util/order.jam)0
-rw-r--r--tools/build/src/util/order.py (renamed from tools/build/v2/util/order.py)0
-rw-r--r--tools/build/src/util/os.jam (renamed from tools/build/v2/util/os.jam)0
-rw-r--r--tools/build/src/util/os_j.py (renamed from tools/build/v2/util/os_j.py)0
-rw-r--r--tools/build/src/util/path.jam910
-rw-r--r--tools/build/src/util/path.py936
-rw-r--r--tools/build/src/util/print.jam488
-rw-r--r--tools/build/src/util/regex.jam203
-rw-r--r--tools/build/src/util/regex.py54
-rw-r--r--tools/build/src/util/sequence.jam342
-rw-r--r--tools/build/src/util/sequence.py (renamed from tools/build/v2/util/sequence.py)0
-rw-r--r--tools/build/src/util/set.jam (renamed from tools/build/v2/util/set.jam)0
-rw-r--r--tools/build/src/util/set.py (renamed from tools/build/v2/util/set.py)0
-rw-r--r--tools/build/src/util/string.jam (renamed from tools/build/v2/util/string.jam)0
-rw-r--r--tools/build/src/util/utility.jam235
-rw-r--r--tools/build/src/util/utility.py (renamed from tools/build/v2/util/utility.py)0
-rw-r--r--tools/build/test/BoostBuild.py1317
-rwxr-xr-xtools/build/test/MockToolset.py250
-rw-r--r--tools/build/test/TestCmd.py589
-rw-r--r--tools/build/test/abs_workdir.py26
-rw-r--r--tools/build/test/absolute_sources.py73
-rw-r--r--tools/build/test/alias.py109
-rw-r--r--tools/build/test/alternatives.py113
-rw-r--r--tools/build/test/bad_dirname.py (renamed from tools/build/v2/test/bad_dirname.py)0
-rw-r--r--tools/build/test/boost-build.jam14
-rw-r--r--tools/build/test/boostbook.py (renamed from tools/build/v2/test/boostbook.py)0
-rw-r--r--tools/build/test/boostbook/a.hpp (renamed from tools/build/v2/test/boostbook/a.hpp)0
-rw-r--r--tools/build/test/boostbook/docs.xml (renamed from tools/build/v2/test/boostbook/docs.xml)0
-rw-r--r--tools/build/test/boostbook/jamroot.jam (renamed from tools/build/v2/test/boostbook/jamroot.jam)0
-rw-r--r--tools/build/test/build_dir.py107
-rw-r--r--tools/build/test/build_file.py170
-rw-r--r--tools/build/test/build_no.py23
-rwxr-xr-xtools/build/test/builtin_echo.py30
-rwxr-xr-xtools/build/test/builtin_exit.py42
-rwxr-xr-xtools/build/test/builtin_glob.py87
-rwxr-xr-xtools/build/test/builtin_readlink.py24
-rwxr-xr-xtools/build/test/builtin_split_by_characters.py57
-rw-r--r--tools/build/test/c_file.py36
-rw-r--r--tools/build/test/chain.py56
-rw-r--r--tools/build/test/clean.py104
-rwxr-xr-xtools/build/test/collect_debug_info.py341
-rw-r--r--tools/build/test/composite.py25
-rw-r--r--tools/build/test/conditionals.py48
-rw-r--r--tools/build/test/conditionals2.py (renamed from tools/build/v2/test/conditionals2.py)0
-rw-r--r--tools/build/test/conditionals3.py30
-rwxr-xr-xtools/build/test/conditionals_multiple.py312
-rwxr-xr-xtools/build/test/configuration.py328
-rwxr-xr-xtools/build/test/copy_time.py69
-rw-r--r--tools/build/test/core-language/test.jam1400
-rwxr-xr-xtools/build/test/core_action_output.py62
-rwxr-xr-xtools/build/test/core_action_status.py27
-rwxr-xr-xtools/build/test/core_actions_quietly.py61
-rwxr-xr-xtools/build/test/core_arguments.py103
-rwxr-xr-xtools/build/test/core_at_file.py63
-rwxr-xr-xtools/build/test/core_bindrule.py45
-rw-r--r--tools/build/test/core_d12.py32
-rw-r--r--tools/build/test/core_delete_module.py51
-rw-r--r--tools/build/test/core_dependencies.py (renamed from tools/build/v2/test/core_dependencies.py)0
-rw-r--r--tools/build/test/core_import_module.py82
-rw-r--r--tools/build/test/core_jamshell.py54
-rwxr-xr-xtools/build/test/core_language.py12
-rw-r--r--tools/build/test/core_modifiers.py (renamed from tools/build/v2/test/core_modifiers.py)0
-rwxr-xr-xtools/build/test/core_multifile_actions.py202
-rwxr-xr-xtools/build/test/core_nt_cmd_line.py266
-rwxr-xr-xtools/build/test/core_option_d2.py55
-rwxr-xr-xtools/build/test/core_option_l.py44
-rwxr-xr-xtools/build/test/core_option_n.py51
-rwxr-xr-xtools/build/test/core_parallel_actions.py103
-rwxr-xr-xtools/build/test/core_parallel_multifile_actions_1.py78
-rwxr-xr-xtools/build/test/core_parallel_multifile_actions_2.py71
-rwxr-xr-xtools/build/test/core_source_line_tracking.py74
-rw-r--r--tools/build/test/core_typecheck.py47
-rwxr-xr-xtools/build/test/core_update_now.py377
-rwxr-xr-xtools/build/test/core_variables_in_actions.py39
-rw-r--r--tools/build/test/core_varnames.py38
-rw-r--r--tools/build/test/custom_generator.py (renamed from tools/build/v2/test/custom_generator.py)0
-rw-r--r--tools/build/test/default_build.py80
-rw-r--r--tools/build/test/default_features.py50
-rwxr-xr-xtools/build/test/default_toolset.py215
-rw-r--r--tools/build/test/dependency_property.py36
-rw-r--r--tools/build/test/dependency_test.py239
-rw-r--r--tools/build/test/direct_request_test.py68
-rw-r--r--tools/build/test/disambiguation.py32
-rw-r--r--tools/build/test/dll_path.py146
-rw-r--r--tools/build/test/double_loading.py31
-rw-r--r--tools/build/test/duplicate.py (renamed from tools/build/v2/test/duplicate.py)0
-rw-r--r--tools/build/test/example_customization.py (renamed from tools/build/v2/test/example_customization.py)0
-rw-r--r--tools/build/test/example_gettext.py (renamed from tools/build/v2/test/example_gettext.py)0
-rw-r--r--tools/build/test/example_libraries.py21
-rw-r--r--tools/build/test/example_make.py17
-rw-r--r--tools/build/test/example_qt4.py (renamed from tools/build/v2/test/example_qt4.py)0
-rwxr-xr-xtools/build/test/exit_status.py (renamed from tools/build/v2/test/exit_status.py)0
-rw-r--r--tools/build/test/expansion.py80
-rw-r--r--tools/build/test/explicit.py58
-rwxr-xr-xtools/build/test/feature_cxxflags.py37
-rw-r--r--tools/build/test/free_features_request.py42
-rw-r--r--tools/build/test/gcc_runtime.py28
-rwxr-xr-xtools/build/test/generator_selection.py157
-rw-r--r--tools/build/test/generators_test.py433
-rw-r--r--tools/build/test/implicit_dependency.py81
-rw-r--r--tools/build/test/indirect_conditional.py105
-rw-r--r--tools/build/test/inherit_toolset.py51
-rwxr-xr-xtools/build/test/inherited_dependency.py237
-rw-r--r--tools/build/test/inline.py62
-rw-r--r--tools/build/test/lib_source_property.py45
-rwxr-xr-xtools/build/test/libjpeg.py119
-rwxr-xr-xtools/build/test/libpng.py119
-rw-r--r--tools/build/test/library_chain.py152
-rw-r--r--tools/build/test/library_order.py94
-rw-r--r--tools/build/test/library_property.py56
-rwxr-xr-xtools/build/test/libtiff.py119
-rwxr-xr-xtools/build/test/link.py154
-rw-r--r--tools/build/test/load_dir.py (renamed from tools/build/v2/test/load_dir.py)0
-rw-r--r--tools/build/test/load_order.py71
-rw-r--r--tools/build/test/loop.py24
-rw-r--r--tools/build/test/make_rule.py54
-rwxr-xr-xtools/build/test/message.py38
-rw-r--r--tools/build/test/module_actions.py105
-rw-r--r--tools/build/test/ndebug.py33
-rw-r--r--tools/build/test/no_type.py19
-rw-r--r--tools/build/test/notfile.py36
-rw-r--r--tools/build/test/ordered_include.py173
-rw-r--r--tools/build/test/ordered_properties.py (renamed from tools/build/v2/test/ordered_properties.py)0
-rw-r--r--tools/build/test/out_of_tree.py29
-rw-r--r--tools/build/test/path_features.py152
-rw-r--r--tools/build/test/pch.py (renamed from tools/build/v2/test/pch.py)0
-rw-r--r--tools/build/test/prebuilt.py43
-rw-r--r--tools/build/test/prebuilt/ext/a.cpp (renamed from tools/build/v2/test/prebuilt/ext/a.cpp)0
-rw-r--r--tools/build/test/prebuilt/ext/debug/a.h (renamed from tools/build/v2/test/prebuilt/ext/debug/a.h)0
-rw-r--r--tools/build/test/prebuilt/ext/jamfile.jam (renamed from tools/build/v2/test/prebuilt/ext/jamfile.jam)0
-rw-r--r--tools/build/test/prebuilt/ext/jamfile2.jam39
-rw-r--r--tools/build/test/prebuilt/ext/jamfile3.jam46
-rw-r--r--tools/build/test/prebuilt/ext/jamroot.jam (renamed from tools/build/v2/test/prebuilt/ext/jamroot.jam)0
-rw-r--r--tools/build/test/prebuilt/ext/release/a.h (renamed from tools/build/v2/test/prebuilt/ext/release/a.h)0
-rw-r--r--tools/build/test/prebuilt/hello.cpp (renamed from tools/build/v2/test/prebuilt/hello.cpp)0
-rw-r--r--tools/build/test/prebuilt/jamfile.jam (renamed from tools/build/v2/test/prebuilt/jamfile.jam)0
-rw-r--r--tools/build/test/prebuilt/jamroot.jam (renamed from tools/build/v2/test/prebuilt/jamroot.jam)0
-rwxr-xr-xtools/build/test/preprocessor.py (renamed from tools/build/v2/test/preprocessor.py)0
-rw-r--r--tools/build/test/print.py (renamed from tools/build/v2/test/print.py)0
-rw-r--r--tools/build/test/project-test3/a.cpp (renamed from tools/build/v2/test/project-test3/a.cpp)0
-rw-r--r--tools/build/test/project-test3/jamfile.jam (renamed from tools/build/v2/test/project-test3/jamfile.jam)0
-rw-r--r--tools/build/test/project-test3/jamroot.jam (renamed from tools/build/v2/test/project-test3/jamroot.jam)0
-rw-r--r--tools/build/test/project-test3/lib/b.cpp (renamed from tools/build/v2/test/project-test3/lib/b.cpp)0
-rw-r--r--tools/build/test/project-test3/lib/jamfile.jam (renamed from tools/build/v2/test/project-test3/lib/jamfile.jam)0
-rw-r--r--tools/build/test/project-test3/lib2/c.cpp (renamed from tools/build/v2/test/project-test3/lib2/c.cpp)0
-rw-r--r--tools/build/test/project-test3/lib2/d.cpp (renamed from tools/build/v2/test/project-test3/lib2/d.cpp)0
-rw-r--r--tools/build/test/project-test3/lib2/helper/e.cpp (renamed from tools/build/v2/test/generators-test/e.cpp)0
-rw-r--r--tools/build/test/project-test3/lib2/helper/jamfile.jam (renamed from tools/build/v2/test/project-test3/lib2/helper/jamfile.jam)0
-rw-r--r--tools/build/test/project-test3/lib2/jamfile.jam (renamed from tools/build/v2/test/project-test3/lib2/jamfile.jam)0
-rw-r--r--tools/build/test/project-test3/lib3/f.cpp (renamed from tools/build/v2/test/project-test3/lib3/f.cpp)0
-rw-r--r--tools/build/test/project-test3/lib3/jamfile.jam (renamed from tools/build/v2/test/project-test3/lib3/jamfile.jam)0
-rw-r--r--tools/build/test/project-test3/lib3/jamroot.jam (renamed from tools/build/v2/test/project-test3/lib3/jamroot.jam)0
-rw-r--r--tools/build/test/project-test3/readme.txt (renamed from tools/build/v2/test/project-test3/readme.txt)0
-rw-r--r--tools/build/test/project-test4/a.cpp (renamed from tools/build/v2/test/project-test4/a.cpp)0
-rw-r--r--tools/build/test/project-test4/a_gcc.cpp (renamed from tools/build/v2/test/project-test4/a_gcc.cpp)0
-rw-r--r--tools/build/test/project-test4/jamfile.jam (renamed from tools/build/v2/test/project-test4/jamfile.jam)0
-rw-r--r--tools/build/test/project-test4/jamfile3.jam (renamed from tools/build/v2/test/project-test4/jamfile3.jam)0
-rw-r--r--tools/build/test/project-test4/jamfile4.jam (renamed from tools/build/v2/test/project-test4/jamfile4.jam)0
-rw-r--r--tools/build/test/project-test4/jamfile5.jam (renamed from tools/build/v2/test/project-test4/jamfile5.jam)0
-rw-r--r--tools/build/test/project-test4/jamroot.jam (renamed from tools/build/v2/test/project-test4/jamroot.jam)0
-rw-r--r--tools/build/test/project-test4/lib/b.cpp (renamed from tools/build/v2/test/project-test4/lib/b.cpp)0
-rw-r--r--tools/build/test/project-test4/lib/jamfile.jam (renamed from tools/build/v2/test/project-test4/lib/jamfile.jam)0
-rw-r--r--tools/build/test/project-test4/lib/jamfile1.jam (renamed from tools/build/v2/test/project-test4/lib/jamfile1.jam)0
-rw-r--r--tools/build/test/project-test4/lib/jamfile2.jam (renamed from tools/build/v2/test/project-test4/lib/jamfile2.jam)0
-rw-r--r--tools/build/test/project-test4/lib/jamfile3.jam (renamed from tools/build/v2/test/project-test4/lib/jamfile3.jam)0
-rw-r--r--tools/build/test/project-test4/lib2/jamfile.jam (renamed from tools/build/v2/test/project-test4/lib2/jamfile.jam)0
-rw-r--r--tools/build/test/project-test4/lib2/jamfile2.jam (renamed from tools/build/v2/test/project-test4/lib2/jamfile2.jam)0
-rw-r--r--tools/build/test/project-test4/readme.txt (renamed from tools/build/v2/test/project-test4/readme.txt)0
-rw-r--r--tools/build/test/project_dependencies.py51
-rw-r--r--tools/build/test/project_glob.py212
-rwxr-xr-xtools/build/test/project_id.py414
-rw-r--r--tools/build/test/project_root_constants.py62
-rw-r--r--tools/build/test/project_root_rule.py34
-rw-r--r--tools/build/test/project_test3.py136
-rw-r--r--tools/build/test/project_test4.py (renamed from tools/build/v2/test/project_test4.py)0
-rw-r--r--tools/build/test/property_expansion.py28
-rwxr-xr-xtools/build/test/qt4.py (renamed from tools/build/v2/test/qt4.py)0
-rw-r--r--tools/build/test/qt4/jamroot.jam82
-rw-r--r--tools/build/test/qt4/mock.cpp (renamed from tools/build/v2/test/qt4/mock.cpp)0
-rw-r--r--tools/build/test/qt4/mock.h (renamed from tools/build/v2/test/qt4/mock.h)0
-rw-r--r--tools/build/test/qt4/phonon.cpp (renamed from tools/build/v2/test/qt4/phonon.cpp)0
-rw-r--r--tools/build/test/qt4/qt3support.cpp (renamed from tools/build/v2/test/qt4/qt3support.cpp)0
-rw-r--r--tools/build/test/qt4/qtassistant.cpp (renamed from tools/build/v2/test/qt4/qtassistant.cpp)0
-rw-r--r--tools/build/test/qt4/qtcore.cpp (renamed from tools/build/v2/test/qt4/qtcore.cpp)0
-rw-r--r--tools/build/test/qt4/qtcorefail.cpp (renamed from tools/build/v2/test/qt4/qtcorefail.cpp)0
-rw-r--r--tools/build/test/qt4/qtdeclarative.cpp (renamed from tools/build/v2/test/qt4/qtdeclarative.cpp)0
-rw-r--r--tools/build/test/qt4/qtgui.cpp (renamed from tools/build/v2/test/qt4/qtgui.cpp)0
-rw-r--r--tools/build/test/qt4/qthelp.cpp (renamed from tools/build/v2/test/qt4/qthelp.cpp)0
-rw-r--r--tools/build/test/qt4/qtmultimedia.cpp (renamed from tools/build/v2/test/qt4/qtmultimedia.cpp)0
-rw-r--r--tools/build/test/qt4/qtnetwork.cpp (renamed from tools/build/v2/test/qt4/qtnetwork.cpp)0
-rw-r--r--tools/build/test/qt4/qtscript.cpp (renamed from tools/build/v2/test/qt4/qtscript.cpp)0
-rw-r--r--tools/build/test/qt4/qtscripttools.cpp (renamed from tools/build/v2/test/qt4/qtscripttools.cpp)0
-rw-r--r--tools/build/test/qt4/qtsql.cpp (renamed from tools/build/v2/test/qt4/qtsql.cpp)0
-rw-r--r--tools/build/test/qt4/qtsvg.cpp (renamed from tools/build/v2/test/qt4/qtsvg.cpp)0
-rw-r--r--tools/build/test/qt4/qttest.cpp (renamed from tools/build/v2/test/qt4/qttest.cpp)0
-rw-r--r--tools/build/test/qt4/qtwebkit.cpp (renamed from tools/build/v2/test/qt4/qtwebkit.cpp)0
-rw-r--r--tools/build/test/qt4/qtxml.cpp (renamed from tools/build/v2/test/qt4/qtxml.cpp)0
-rw-r--r--tools/build/test/qt4/qtxmlpatterns.cpp (renamed from tools/build/v2/test/qt4/qtxmlpatterns.cpp)0
-rw-r--r--tools/build/test/qt4/rcc.cpp20
-rw-r--r--tools/build/test/qt4/rcc.qrc5
-rwxr-xr-xtools/build/test/qt5.py19
-rw-r--r--tools/build/test/qt5/jamroot.jam68
-rw-r--r--tools/build/test/qt5/mock.cpp26
-rw-r--r--tools/build/test/qt5/mock.h21
-rw-r--r--tools/build/test/qt5/qtassistant.cpp21
-rw-r--r--tools/build/test/qt5/qtcore.cpp22
-rw-r--r--tools/build/test/qt5/qtcorefail.cpp23
-rw-r--r--tools/build/test/qt5/qtdeclarative.cpp26
-rw-r--r--tools/build/test/qt5/qthelp.cpp22
-rw-r--r--tools/build/test/qt5/qtmultimedia.cpp25
-rw-r--r--tools/build/test/qt5/qtnetwork.cpp33
-rw-r--r--tools/build/test/qt5/qtquick.cpp43
-rw-r--r--tools/build/test/qt5/qtquick.qml20
-rw-r--r--tools/build/test/qt5/qtscript.cpp37
-rw-r--r--tools/build/test/qt5/qtscripttools.cpp47
-rw-r--r--tools/build/test/qt5/qtsql.cpp37
-rw-r--r--tools/build/test/qt5/qtsvg.cpp21
-rw-r--r--tools/build/test/qt5/qttest.cpp30
-rw-r--r--tools/build/test/qt5/qtwebkit.cpp22
-rw-r--r--tools/build/test/qt5/qtwebkitwidgets.cpp23
-rw-r--r--tools/build/test/qt5/qtwidgets.cpp43
-rw-r--r--tools/build/test/qt5/qtxml.cpp29
-rw-r--r--tools/build/test/qt5/qtxmlpatterns.cpp76
-rw-r--r--tools/build/test/qt5/rcc.cpp20
-rw-r--r--tools/build/test/qt5/rcc.qrc5
-rw-r--r--tools/build/test/railsys.py (renamed from tools/build/v2/test/railsys.py)0
-rw-r--r--tools/build/test/railsys/libx/include/test_libx.h (renamed from tools/build/v2/test/railsys/libx/include/test_libx.h)0
-rw-r--r--tools/build/test/railsys/libx/jamroot.jam (renamed from tools/build/v2/test/railsys/libx/jamroot.jam)0
-rw-r--r--tools/build/test/railsys/libx/src/jamfile.jam (renamed from tools/build/v2/test/railsys/libx/src/jamfile.jam)0
-rw-r--r--tools/build/test/railsys/libx/src/test_libx.cpp (renamed from tools/build/v2/test/railsys/libx/src/test_libx.cpp)0
-rw-r--r--tools/build/test/railsys/program/include/test_a.h (renamed from tools/build/v2/test/railsys/program/include/test_a.h)0
-rw-r--r--tools/build/test/railsys/program/jamfile.jam45
-rw-r--r--tools/build/test/railsys/program/jamroot.jam (renamed from tools/build/v2/test/railsys/program/jamroot.jam)0
-rw-r--r--tools/build/test/railsys/program/liba/jamfile.jam (renamed from tools/build/v2/test/railsys/program/liba/jamfile.jam)0
-rw-r--r--tools/build/test/railsys/program/liba/test_a.cpp (renamed from tools/build/v2/test/railsys/program/liba/test_a.cpp)0
-rw-r--r--tools/build/test/railsys/program/main/jamfile.jam (renamed from tools/build/v2/test/railsys/program/main/jamfile.jam)0
-rw-r--r--tools/build/test/railsys/program/main/main.cpp (renamed from tools/build/v2/test/railsys/program/main/main.cpp)0
-rw-r--r--tools/build/test/readme.txt (renamed from tools/build/v2/test/readme.txt)0
-rw-r--r--tools/build/test/rebuilds.py68
-rw-r--r--tools/build/test/regression.py113
-rw-r--r--tools/build/test/relative_sources.py38
-rw-r--r--tools/build/test/remove_requirement.py89
-rwxr-xr-xtools/build/test/rescan_header.py265
-rw-r--r--tools/build/test/resolution.py35
-rw-r--r--tools/build/test/results-python.txt132
-rwxr-xr-xtools/build/test/scanner_causing_rebuilds.py84
-rw-r--r--tools/build/test/searched_lib.py183
-rw-r--r--tools/build/test/skipping.py27
-rwxr-xr-xtools/build/test/sort_rule.py96
-rw-r--r--tools/build/test/source_locations.py42
-rwxr-xr-xtools/build/test/source_order.py53
-rwxr-xr-xtools/build/test/space_in_path.py21
-rw-r--r--tools/build/test/stage.py207
-rw-r--r--tools/build/test/standalone.py53
-rw-r--r--tools/build/test/startup/boost-root/boost-build.jam (renamed from tools/build/v2/test/startup/boost-root/boost-build.jam)0
-rw-r--r--tools/build/test/startup/boost-root/build/boost-build.jam (renamed from tools/build/v2/test/startup/boost-root/build/boost-build.jam)0
-rw-r--r--tools/build/test/startup/boost-root/build/bootstrap.jam (renamed from tools/build/v2/test/startup/boost-root/build/bootstrap.jam)0
-rw-r--r--tools/build/test/startup/bootstrap-env/boost-build.jam (renamed from tools/build/v2/test/startup/bootstrap-env/boost-build.jam)0
-rw-r--r--tools/build/test/startup/bootstrap-explicit/boost-build.jam (renamed from tools/build/v2/test/startup/bootstrap-explicit/boost-build.jam)0
-rw-r--r--tools/build/test/startup/bootstrap-implicit/readme.txt (renamed from tools/build/v2/test/startup/bootstrap-implicit/readme.txt)0
-rw-r--r--tools/build/test/startup/no-bootstrap1/boost-build.jam (renamed from tools/build/v2/test/startup/no-bootstrap1/boost-build.jam)0
-rw-r--r--tools/build/test/startup/no-bootstrap1/subdir/readme.txt (renamed from tools/build/v2/test/startup/no-bootstrap1/subdir/readme.txt)0
-rw-r--r--tools/build/test/startup/no-bootstrap2/boost-build.jam (renamed from tools/build/v2/test/startup/no-bootstrap2/boost-build.jam)0
-rw-r--r--tools/build/test/startup/no-bootstrap3/boost-build.jam (renamed from tools/build/v2/test/startup/no-bootstrap3/boost-build.jam)0
-rw-r--r--tools/build/test/startup_v2.py94
-rwxr-xr-xtools/build/test/static_and_shared_library.py38
-rw-r--r--tools/build/test/suffix.py (renamed from tools/build/v2/test/suffix.py)0
-rw-r--r--tools/build/test/symlink.py41
-rw-r--r--tools/build/test/tag.py122
-rw-r--r--tools/build/test/template.py (renamed from tools/build/v2/test/template.py)0
-rw-r--r--tools/build/test/test-config-example.jam (renamed from tools/build/v2/test/test-config-example.jam)0
-rw-r--r--tools/build/test/test.jam (renamed from tools/build/v2/test/test.jam)0
-rw-r--r--tools/build/test/test1.py (renamed from tools/build/v2/test/test1.py)0
-rw-r--r--tools/build/test/test2.py (renamed from tools/build/v2/test/test2.py)0
-rw-r--r--tools/build/test/test2/foo.cpp (renamed from tools/build/v2/test/test2/foo.cpp)0
-rw-r--r--tools/build/test/test2/jamroot.jam (renamed from tools/build/v2/test/test2/jamroot.jam)0
-rw-r--r--tools/build/test/test_all.py307
-rwxr-xr-xtools/build/test/test_rc.py110
-rwxr-xr-xtools/build/test/test_result_dumping.py33
-rw-r--r--tools/build/test/test_system.html (renamed from tools/build/v2/test/test_system.html)0
-rwxr-xr-xtools/build/test/testing_support.py61
-rw-r--r--tools/build/test/timedata.py175
-rw-r--r--tools/build/test/toolset_requirements.py44
-rw-r--r--tools/build/test/tree.py243
-rw-r--r--tools/build/test/unit_test.py36
-rw-r--r--tools/build/test/unit_tests.py11
-rw-r--r--tools/build/test/unused.py81
-rw-r--r--tools/build/test/use_requirements.py283
-rw-r--r--tools/build/test/using.py32
-rw-r--r--tools/build/test/wrapper.py38
-rw-r--r--tools/build/test/wrong_project.py39
-rwxr-xr-xtools/build/test/zlib.py119
-rw-r--r--tools/build/v2/Jamroot.jam47
-rw-r--r--tools/build/v2/boost-build.jam8
-rw-r--r--tools/build/v2/bootstrap.bat49
-rwxr-xr-xtools/build/v2/bootstrap.sh120
-rw-r--r--tools/build/v2/build-system.jam1008
-rw-r--r--tools/build/v2/build/ac.jam198
-rw-r--r--tools/build/v2/build/alias.jam73
-rw-r--r--tools/build/v2/build/build-request.jam322
-rw-r--r--tools/build/v2/build/build_request.py216
-rw-r--r--tools/build/v2/build/configure.jam237
-rw-r--r--tools/build/v2/build/engine.py177
-rw-r--r--tools/build/v2/build/feature.jam1335
-rw-r--r--tools/build/v2/build/feature.py907
-rw-r--r--tools/build/v2/build/generators.jam1380
-rw-r--r--tools/build/v2/build/generators.py1097
-rw-r--r--tools/build/v2/build/modifiers.jam232
-rw-r--r--tools/build/v2/build/project.jam1121
-rw-r--r--tools/build/v2/build/project.py1120
-rw-r--r--tools/build/v2/build/property-set.jam489
-rw-r--r--tools/build/v2/build/property.jam788
-rw-r--r--tools/build/v2/build/property.py593
-rw-r--r--tools/build/v2/build/property_set.py449
-rw-r--r--tools/build/v2/build/readme.txt13
-rw-r--r--tools/build/v2/build/scanner.jam153
-rw-r--r--tools/build/v2/build/targets.jam1659
-rw-r--r--tools/build/v2/build/targets.py1402
-rw-r--r--tools/build/v2/build/toolset.jam575
-rw-r--r--tools/build/v2/build/toolset.py399
-rw-r--r--tools/build/v2/build/type.jam425
-rw-r--r--tools/build/v2/build/version.jam161
-rw-r--r--tools/build/v2/build/virtual-target.jam1317
-rw-r--r--tools/build/v2/build/virtual_target.py1118
-rw-r--r--tools/build/v2/build_system.py881
-rw-r--r--tools/build/v2/contrib/boost.jam321
-rw-r--r--tools/build/v2/debian/boost-build.docs4
-rw-r--r--tools/build/v2/debian/boost-build.examples1
-rw-r--r--tools/build/v2/debian/changelog6
-rw-r--r--tools/build/v2/debian/conffiles1
-rw-r--r--tools/build/v2/debian/control13
-rw-r--r--tools/build/v2/debian/excludes14
-rwxr-xr-xtools/build/v2/debian/rules56
-rw-r--r--tools/build/v2/doc/bjam.qbk1696
-rw-r--r--tools/build/v2/doc/jamfile.jam26
-rw-r--r--tools/build/v2/doc/src/architecture.xml636
-rw-r--r--tools/build/v2/doc/src/extending.xml1216
-rw-r--r--tools/build/v2/doc/src/faq.xml458
-rw-r--r--tools/build/v2/doc/src/install.xml150
-rw-r--r--tools/build/v2/doc/src/overview.xml1684
-rw-r--r--tools/build/v2/doc/src/path.xml248
-rw-r--r--tools/build/v2/doc/src/recipes.xml11
-rw-r--r--tools/build/v2/doc/src/reference.xml2545
-rw-r--r--tools/build/v2/doc/src/standalone.xml47
-rw-r--r--tools/build/v2/doc/src/tasks.xml782
-rw-r--r--tools/build/v2/doc/src/tutorial.xml682
-rw-r--r--tools/build/v2/doc/src/userman.xml40
-rw-r--r--tools/build/v2/engine/build.bat560
-rw-r--r--tools/build/v2/engine/build.jam1013
-rwxr-xr-xtools/build/v2/engine/build.sh303
-rw-r--r--tools/build/v2/engine/builtins.c2312
-rw-r--r--tools/build/v2/engine/builtins.h68
-rw-r--r--tools/build/v2/engine/bump_version.py80
-rw-r--r--tools/build/v2/engine/class.c167
-rw-r--r--tools/build/v2/engine/command.c94
-rw-r--r--tools/build/v2/engine/command.h70
-rw-r--r--tools/build/v2/engine/compile.c347
-rw-r--r--tools/build/v2/engine/compile.h63
-rw-r--r--tools/build/v2/engine/constants.c183
-rw-r--r--tools/build/v2/engine/constants.h72
-rw-r--r--tools/build/v2/engine/debug.c141
-rw-r--r--tools/build/v2/engine/debug.h55
-rw-r--r--tools/build/v2/engine/execcmd.h47
-rw-r--r--tools/build/v2/engine/execnt.c1303
-rw-r--r--tools/build/v2/engine/execunix.c581
-rw-r--r--tools/build/v2/engine/filent.c411
-rw-r--r--tools/build/v2/engine/filesys.c99
-rw-r--r--tools/build/v2/engine/filesys.h61
-rw-r--r--tools/build/v2/engine/fileunix.c513
-rw-r--r--tools/build/v2/engine/frames.c23
-rw-r--r--tools/build/v2/engine/frames.h39
-rw-r--r--tools/build/v2/engine/function.c4553
-rw-r--r--tools/build/v2/engine/hash.c396
-rw-r--r--tools/build/v2/engine/hash.h78
-rw-r--r--tools/build/v2/engine/hcache.c506
-rw-r--r--tools/build/v2/engine/hcache.h18
-rw-r--r--tools/build/v2/engine/hdrmacro.c142
-rw-r--r--tools/build/v2/engine/hdrmacro.h19
-rw-r--r--tools/build/v2/engine/headers.c216
-rw-r--r--tools/build/v2/engine/jam.c662
-rw-r--r--tools/build/v2/engine/jam.h496
-rw-r--r--tools/build/v2/engine/lists.c526
-rw-r--r--tools/build/v2/engine/lists.h124
-rw-r--r--tools/build/v2/engine/make.c819
-rw-r--r--tools/build/v2/engine/make.h47
-rw-r--r--tools/build/v2/engine/make1.c1170
-rw-r--r--tools/build/v2/engine/mem.h134
-rw-r--r--tools/build/v2/engine/modules.c436
-rw-r--r--tools/build/v2/engine/modules.h53
-rw-r--r--tools/build/v2/engine/modules/order.c147
-rw-r--r--tools/build/v2/engine/modules/path.c32
-rw-r--r--tools/build/v2/engine/modules/property-set.c117
-rw-r--r--tools/build/v2/engine/modules/regex.c98
-rw-r--r--tools/build/v2/engine/modules/sequence.c47
-rw-r--r--tools/build/v2/engine/native.c39
-rw-r--r--tools/build/v2/engine/native.h37
-rw-r--r--tools/build/v2/engine/object.c379
-rw-r--r--tools/build/v2/engine/object.h43
-rw-r--r--tools/build/v2/engine/option.h23
-rw-r--r--tools/build/v2/engine/output.c125
-rw-r--r--tools/build/v2/engine/output.h30
-rw-r--r--tools/build/v2/engine/parse.c132
-rw-r--r--tools/build/v2/engine/parse.h78
-rw-r--r--tools/build/v2/engine/patchlevel.h17
-rw-r--r--tools/build/v2/engine/pathsys.h97
-rw-r--r--tools/build/v2/engine/pathunix.c587
-rw-r--r--tools/build/v2/engine/pwd.c76
-rw-r--r--tools/build/v2/engine/pwd.h11
-rw-r--r--tools/build/v2/engine/regexp.c1328
-rw-r--r--tools/build/v2/engine/regexp.h32
-rw-r--r--tools/build/v2/engine/rules.c724
-rw-r--r--tools/build/v2/engine/rules.h269
-rw-r--r--tools/build/v2/engine/scan.c419
-rw-r--r--tools/build/v2/engine/scan.h57
-rw-r--r--tools/build/v2/engine/search.c252
-rw-r--r--tools/build/v2/engine/search.h20
-rw-r--r--tools/build/v2/engine/strings.c201
-rw-r--r--tools/build/v2/engine/strings.h34
-rw-r--r--tools/build/v2/engine/subst.c113
-rw-r--r--tools/build/v2/engine/timestamp.c223
-rw-r--r--tools/build/v2/engine/timestamp.h20
-rw-r--r--tools/build/v2/engine/variable.c353
-rw-r--r--tools/build/v2/engine/variable.h30
-rw-r--r--tools/build/v2/example/boost-build.jam6
-rw-r--r--tools/build/v2/example/versioned/hello.cpp12
-rw-r--r--tools/build/v2/example/versioned/jamfile.jam9
-rw-r--r--tools/build/v2/example/versioned/jamroot.jam8
-rw-r--r--tools/build/v2/hacking.txt154
-rw-r--r--tools/build/v2/index.html165
-rw-r--r--tools/build/v2/kernel/bootstrap.jam263
-rw-r--r--tools/build/v2/kernel/class.jam420
-rw-r--r--tools/build/v2/kernel/errors.jam274
-rw-r--r--tools/build/v2/kernel/modules.jam354
-rw-r--r--tools/build/v2/notes/README.txt8
-rw-r--r--tools/build/v2/options/help.jam212
-rwxr-xr-xtools/build/v2/roll.sh66
-rw-r--r--tools/build/v2/test/BoostBuild.py949
-rw-r--r--tools/build/v2/test/TestCmd.py651
-rw-r--r--tools/build/v2/test/abs_workdir.py34
-rw-r--r--tools/build/v2/test/absolute_sources.py97
-rw-r--r--tools/build/v2/test/alias.py107
-rw-r--r--tools/build/v2/test/alternatives.py113
-rw-r--r--tools/build/v2/test/boost-build.jam14
-rw-r--r--tools/build/v2/test/build_dir.py106
-rw-r--r--tools/build/v2/test/build_file.py170
-rw-r--r--tools/build/v2/test/build_no.py28
-rwxr-xr-xtools/build/v2/test/builtin_echo.py30
-rwxr-xr-xtools/build/v2/test/builtin_exit.py54
-rw-r--r--tools/build/v2/test/c_file.py36
-rw-r--r--tools/build/v2/test/chain.py56
-rw-r--r--tools/build/v2/test/clean.py116
-rw-r--r--tools/build/v2/test/composite.py25
-rw-r--r--tools/build/v2/test/conditionals.py47
-rw-r--r--tools/build/v2/test/conditionals3.py30
-rwxr-xr-xtools/build/v2/test/conditionals_multiple.py312
-rwxr-xr-xtools/build/v2/test/configuration.py118
-rwxr-xr-xtools/build/v2/test/copy_time.py76
-rw-r--r--tools/build/v2/test/core-language/test.jam1353
-rwxr-xr-xtools/build/v2/test/core_action_status.py27
-rwxr-xr-xtools/build/v2/test/core_actions_quietly.py59
-rwxr-xr-xtools/build/v2/test/core_arguments.py109
-rwxr-xr-xtools/build/v2/test/core_at_file.py75
-rwxr-xr-xtools/build/v2/test/core_bindrule.py47
-rw-r--r--tools/build/v2/test/core_d12.py35
-rw-r--r--tools/build/v2/test/core_delete_module.py51
-rw-r--r--tools/build/v2/test/core_import_module.py80
-rwxr-xr-xtools/build/v2/test/core_language.py14
-rwxr-xr-xtools/build/v2/test/core_nt_line_length.py52
-rwxr-xr-xtools/build/v2/test/core_option_d2.py59
-rwxr-xr-xtools/build/v2/test/core_option_l.py47
-rwxr-xr-xtools/build/v2/test/core_option_n.py51
-rwxr-xr-xtools/build/v2/test/core_parallel_actions.py104
-rwxr-xr-xtools/build/v2/test/core_parallel_multifile_actions_1.py68
-rwxr-xr-xtools/build/v2/test/core_parallel_multifile_actions_2.py72
-rw-r--r--tools/build/v2/test/core_typecheck.py47
-rwxr-xr-xtools/build/v2/test/core_update_now.py198
-rw-r--r--tools/build/v2/test/core_varnames.py38
-rw-r--r--tools/build/v2/test/default_build.py93
-rw-r--r--tools/build/v2/test/default_features.py50
-rwxr-xr-xtools/build/v2/test/default_toolset.py211
-rw-r--r--tools/build/v2/test/dependency-test/a.cpp17
-rw-r--r--tools/build/v2/test/dependency-test/a.h8
-rw-r--r--tools/build/v2/test/dependency-test/a_c.c12
-rw-r--r--tools/build/v2/test/dependency-test/b.cpp14
-rw-r--r--tools/build/v2/test/dependency-test/b.h8
-rw-r--r--tools/build/v2/test/dependency-test/c.cpp14
-rw-r--r--tools/build/v2/test/dependency-test/e.cpp15
-rw-r--r--tools/build/v2/test/dependency-test/foo.jam34
-rw-r--r--tools/build/v2/test/dependency-test/foo.py26
-rw-r--r--tools/build/v2/test/dependency-test/jamfile.jam25
-rw-r--r--tools/build/v2/test/dependency-test/jamroot.jam7
-rw-r--r--tools/build/v2/test/dependency-test/src1/a.h10
-rw-r--r--tools/build/v2/test/dependency-test/src1/b.h10
-rw-r--r--tools/build/v2/test/dependency-test/src1/c.h8
-rw-r--r--tools/build/v2/test/dependency-test/src1/z.h5
-rw-r--r--tools/build/v2/test/dependency-test/src2/b.h8
-rw-r--r--tools/build/v2/test/dependency-test/y.foo0
-rw-r--r--tools/build/v2/test/dependency_property.py36
-rw-r--r--tools/build/v2/test/dependency_test.py104
-rw-r--r--tools/build/v2/test/direct_request_test.py73
-rw-r--r--tools/build/v2/test/disambiguation.py32
-rw-r--r--tools/build/v2/test/dll_path.py158
-rw-r--r--tools/build/v2/test/double_loading.py30
-rw-r--r--tools/build/v2/test/example_libraries.py21
-rw-r--r--tools/build/v2/test/example_make.py20
-rw-r--r--tools/build/v2/test/expansion.py80
-rw-r--r--tools/build/v2/test/explicit.py64
-rw-r--r--tools/build/v2/test/free_features_request.py42
-rw-r--r--tools/build/v2/test/gcc_runtime.py33
-rwxr-xr-xtools/build/v2/test/generator_selection.py140
-rw-r--r--tools/build/v2/test/generators-test/a.cpp22
-rw-r--r--tools/build/v2/test/generators-test/b.cxx10
-rw-r--r--tools/build/v2/test/generators-test/c.tui0
-rw-r--r--tools/build/v2/test/generators-test/d.wd0
-rw-r--r--tools/build/v2/test/generators-test/extra.jam120
-rw-r--r--tools/build/v2/test/generators-test/jamfile.jam19
-rw-r--r--tools/build/v2/test/generators-test/jamroot.jam95
-rw-r--r--tools/build/v2/test/generators-test/lex.jam26
-rw-r--r--tools/build/v2/test/generators-test/lib/c.cpp10
-rw-r--r--tools/build/v2/test/generators-test/lib/jamfile.jam9
-rw-r--r--tools/build/v2/test/generators-test/nm.jam43
-rw-r--r--tools/build/v2/test/generators-test/qt.jam34
-rw-r--r--tools/build/v2/test/generators-test/x.l5
-rw-r--r--tools/build/v2/test/generators-test/y.x_pro0
-rw-r--r--tools/build/v2/test/generators-test/z.cpp16
-rw-r--r--tools/build/v2/test/generators_test.py30
-rw-r--r--tools/build/v2/test/implicit_dependency.py81
-rw-r--r--tools/build/v2/test/indirect_conditional.py78
-rw-r--r--tools/build/v2/test/inherit_toolset.py59
-rwxr-xr-xtools/build/v2/test/inherited_dependency.py237
-rw-r--r--tools/build/v2/test/inline.py64
-rw-r--r--tools/build/v2/test/lib_source_property.py45
-rw-r--r--tools/build/v2/test/library_chain.py166
-rw-r--r--tools/build/v2/test/library_order.py100
-rw-r--r--tools/build/v2/test/library_property.py56
-rw-r--r--tools/build/v2/test/load_order.py88
-rw-r--r--tools/build/v2/test/loop.py25
-rw-r--r--tools/build/v2/test/make_rule.py58
-rw-r--r--tools/build/v2/test/module-actions/boost-build.jam5
-rw-r--r--tools/build/v2/test/module-actions/bootstrap.jam61
-rw-r--r--tools/build/v2/test/module_actions.py53
-rw-r--r--tools/build/v2/test/ndebug.py39
-rw-r--r--tools/build/v2/test/no_type.py23
-rw-r--r--tools/build/v2/test/notfile.py44
-rw-r--r--tools/build/v2/test/ordered_include.py41
-rw-r--r--tools/build/v2/test/out_of_tree.py39
-rw-r--r--tools/build/v2/test/path_features.py85
-rw-r--r--tools/build/v2/test/prebuilt.py43
-rw-r--r--tools/build/v2/test/prebuilt/ext/jamfile2.jam39
-rw-r--r--tools/build/v2/test/prebuilt/ext/jamfile3.jam46
-rw-r--r--tools/build/v2/test/project-test3/lib2/helper/e.cpp8
-rw-r--r--tools/build/v2/test/project_dependencies.py51
-rw-r--r--tools/build/v2/test/project_glob.py161
-rw-r--r--tools/build/v2/test/project_root_constants.py66
-rw-r--r--tools/build/v2/test/project_root_rule.py34
-rw-r--r--tools/build/v2/test/project_test3.py133
-rw-r--r--tools/build/v2/test/property_expansion.py29
-rw-r--r--tools/build/v2/test/qt4/jamroot.jam79
-rw-r--r--tools/build/v2/test/railsys/program/jamfile.jam45
-rw-r--r--tools/build/v2/test/rebuilds.py51
-rw-r--r--tools/build/v2/test/regression.py124
-rw-r--r--tools/build/v2/test/relative_sources.py38
-rw-r--r--tools/build/v2/test/remove_requirement.py89
-rw-r--r--tools/build/v2/test/resolution.py37
-rw-r--r--tools/build/v2/test/searched_lib.py187
-rw-r--r--tools/build/v2/test/skipping.py36
-rwxr-xr-xtools/build/v2/test/sort_rule.py95
-rw-r--r--tools/build/v2/test/source_locations.py42
-rw-r--r--tools/build/v2/test/stage.py258
-rw-r--r--tools/build/v2/test/standalone.py59
-rw-r--r--tools/build/v2/test/startup_v2.py79
-rw-r--r--tools/build/v2/test/svn_tree.py668
-rw-r--r--tools/build/v2/test/symlink.py41
-rw-r--r--tools/build/v2/test/tag.py122
-rw-r--r--tools/build/v2/test/test_all.py255
-rwxr-xr-xtools/build/v2/test/test_result_dumping.py33
-rwxr-xr-xtools/build/v2/test/testing_support.py61
-rw-r--r--tools/build/v2/test/timedata.py155
-rw-r--r--tools/build/v2/test/tree.py120
-rw-r--r--tools/build/v2/test/unit_test.py36
-rw-r--r--tools/build/v2/test/unit_tests.py13
-rw-r--r--tools/build/v2/test/unused.py33
-rw-r--r--tools/build/v2/test/unused/a.cpp9
-rw-r--r--tools/build/v2/test/unused/b.cpp4
-rw-r--r--tools/build/v2/test/unused/b.x0
-rw-r--r--tools/build/v2/test/unused/jamfile.jam11
-rw-r--r--tools/build/v2/test/unused/jamroot.jam60
-rw-r--r--tools/build/v2/test/use_requirements.py295
-rw-r--r--tools/build/v2/test/using.py42
-rw-r--r--tools/build/v2/test/wrapper.py38
-rw-r--r--tools/build/v2/test/wrong_project.py41
-rw-r--r--tools/build/v2/tools/auto-index.jam212
-rw-r--r--tools/build/v2/tools/boostbook.jam730
-rw-r--r--tools/build/v2/tools/borland.jam220
-rw-r--r--tools/build/v2/tools/builtin.jam960
-rw-r--r--tools/build/v2/tools/builtin.py725
-rw-r--r--tools/build/v2/tools/cast.jam91
-rw-r--r--tools/build/v2/tools/clang-darwin.jam170
-rw-r--r--tools/build/v2/tools/clang-linux.jam196
-rw-r--r--tools/build/v2/tools/common.jam983
-rw-r--r--tools/build/v2/tools/common.py844
-rw-r--r--tools/build/v2/tools/cray.jam112
-rw-r--r--tools/build/v2/tools/darwin.jam583
-rw-r--r--tools/build/v2/tools/docutils.jam85
-rw-r--r--tools/build/v2/tools/doxygen.jam776
-rw-r--r--tools/build/v2/tools/gcc.jam1190
-rw-r--r--tools/build/v2/tools/gcc.py800
-rw-r--r--tools/build/v2/tools/intel-darwin.jam220
-rw-r--r--tools/build/v2/tools/intel-win.jam184
-rw-r--r--tools/build/v2/tools/make.jam72
-rw-r--r--tools/build/v2/tools/message.jam55
-rw-r--r--tools/build/v2/tools/midl.py134
-rw-r--r--tools/build/v2/tools/mpi.jam583
-rw-r--r--tools/build/v2/tools/msvc.jam1398
-rw-r--r--tools/build/v2/tools/msvc.py1198
-rw-r--r--tools/build/v2/tools/notfile.jam74
-rw-r--r--tools/build/v2/tools/pathscale.jam168
-rw-r--r--tools/build/v2/tools/pgi.jam147
-rw-r--r--tools/build/v2/tools/python.jam1267
-rw-r--r--tools/build/v2/tools/qcc.jam236
-rw-r--r--tools/build/v2/tools/qt4.jam724
-rw-r--r--tools/build/v2/tools/rc.jam156
-rw-r--r--tools/build/v2/tools/rc.py189
-rw-r--r--tools/build/v2/tools/stage.jam524
-rw-r--r--tools/build/v2/tools/stage.py350
-rw-r--r--tools/build/v2/tools/stlport.jam303
-rw-r--r--tools/build/v2/tools/testing-aux.jam210
-rw-r--r--tools/build/v2/tools/testing.jam581
-rw-r--r--tools/build/v2/tools/testing.py342
-rw-r--r--tools/build/v2/tools/types/asm.py13
-rw-r--r--tools/build/v2/tools/types/cpp.jam86
-rw-r--r--tools/build/v2/tools/types/cpp.py13
-rw-r--r--tools/build/v2/tools/types/qt.jam10
-rw-r--r--tools/build/v2/tools/xsltproc-config.jam37
-rw-r--r--tools/build/v2/tools/xsltproc.jam194
-rw-r--r--tools/build/v2/tools/zlib.jam92
-rw-r--r--tools/build/v2/util/assert.jam336
-rw-r--r--tools/build/v2/util/doc.jam997
-rw-r--r--tools/build/v2/util/indirect.jam115
-rw-r--r--tools/build/v2/util/path.jam934
-rw-r--r--tools/build/v2/util/path.py904
-rw-r--r--tools/build/v2/util/print.jam488
-rw-r--r--tools/build/v2/util/regex.jam193
-rw-r--r--tools/build/v2/util/regex.py25
-rw-r--r--tools/build/v2/util/sequence.jam335
-rw-r--r--tools/build/v2/util/utility.jam235
-rw-r--r--tools/build/website/boost.css (renamed from tools/build/v2/boost.css)0
-rw-r--r--tools/build/website/boost_build.png (renamed from tools/build/v2/boost_build.png)bin7437 -> 7437 bytes
-rw-r--r--tools/build/website/boost_build.svg (renamed from tools/build/v2/boost_build.svg)0
-rw-r--r--tools/inspect/apple_macro_check.cpp2
-rw-r--r--tools/inspect/ascii_check.hpp2
-rw-r--r--tools/inspect/assert_macro_check.cpp6
-rw-r--r--tools/inspect/build/Jamfile.v21
-rw-r--r--tools/inspect/build/msvc/boost_inspect.sln6
-rw-r--r--tools/inspect/build/msvc/boost_inspect.vcproj247
-rw-r--r--tools/inspect/build/msvc/boost_inspect.vcxproj117
-rw-r--r--tools/inspect/build/msvc/readme.txt10
-rw-r--r--tools/inspect/deprecated_macro_check.cpp146
-rw-r--r--tools/inspect/deprecated_macro_check.hpp40
-rw-r--r--tools/inspect/doc/inspect.qbk5
-rw-r--r--tools/inspect/inspect.cpp265
-rw-r--r--tools/inspect/inspector.hpp2
-rw-r--r--tools/inspect/link_check.cpp8
-rw-r--r--tools/inspect/link_check.hpp2
-rw-r--r--tools/inspect/path_name_check.cpp2
-rw-r--r--tools/inspect/tab_check.hpp2
-rw-r--r--tools/inspect/time_string.hpp2
-rw-r--r--tools/inspect/unnamed_namespace_check.cpp2
-rw-r--r--tools/quickbook/doc/1_6.qbk226
-rw-r--r--tools/quickbook/doc/1_7.qbk191
-rw-r--r--tools/quickbook/doc/Jamfile.v28
-rw-r--r--tools/quickbook/doc/block.qbk191
-rw-r--r--tools/quickbook/doc/change_log.qbk83
-rw-r--r--tools/quickbook/doc/language_versions.qbk46
-rw-r--r--tools/quickbook/doc/phrase.qbk36
-rw-r--r--tools/quickbook/doc/quickbook.qbk6
-rw-r--r--tools/quickbook/doc/structure.qbk65
-rw-r--r--tools/quickbook/src/Jamfile.v212
-rw-r--r--tools/quickbook/src/actions.cpp702
-rw-r--r--tools/quickbook/src/actions.hpp138
-rw-r--r--tools/quickbook/src/block_element_grammar.cpp39
-rw-r--r--tools/quickbook/src/block_tags.hpp2
-rw-r--r--tools/quickbook/src/code_snippet.cpp49
-rw-r--r--tools/quickbook/src/dependency_tracker.cpp184
-rw-r--r--tools/quickbook/src/dependency_tracker.hpp53
-rw-r--r--tools/quickbook/src/doc_info_actions.cpp41
-rw-r--r--tools/quickbook/src/doc_info_grammar.cpp50
-rw-r--r--tools/quickbook/src/document_state.cpp472
-rw-r--r--tools/quickbook/src/document_state.hpp89
-rw-r--r--tools/quickbook/src/document_state_impl.hpp147
-rw-r--r--tools/quickbook/src/files.cpp368
-rw-r--r--tools/quickbook/src/files.hpp34
-rw-r--r--tools/quickbook/src/fwd.hpp6
-rw-r--r--tools/quickbook/src/glob.cpp301
-rw-r--r--tools/quickbook/src/glob.hpp30
-rw-r--r--tools/quickbook/src/grammar.cpp6
-rw-r--r--tools/quickbook/src/grammar.hpp5
-rw-r--r--tools/quickbook/src/grammar_impl.hpp76
-rw-r--r--tools/quickbook/src/id_generation.cpp379
-rw-r--r--tools/quickbook/src/id_manager.cpp1143
-rw-r--r--tools/quickbook/src/id_manager.hpp86
-rw-r--r--tools/quickbook/src/id_xml.cpp153
-rw-r--r--tools/quickbook/src/include_paths.cpp291
-rw-r--r--tools/quickbook/src/include_paths.hpp68
-rw-r--r--tools/quickbook/src/input_path.cpp318
-rw-r--r--tools/quickbook/src/input_path.hpp130
-rw-r--r--tools/quickbook/src/main_grammar.cpp672
-rw-r--r--tools/quickbook/src/markups.cpp1
-rw-r--r--tools/quickbook/src/native_text.cpp336
-rw-r--r--tools/quickbook/src/native_text.hpp144
-rw-r--r--tools/quickbook/src/phrase_element_grammar.cpp29
-rw-r--r--tools/quickbook/src/quickbook.cpp209
-rw-r--r--tools/quickbook/src/state.cpp136
-rw-r--r--tools/quickbook/src/state.hpp43
-rw-r--r--tools/quickbook/src/state_save.hpp23
-rw-r--r--tools/quickbook/src/string_ref.cpp37
-rw-r--r--tools/quickbook/src/string_ref.hpp89
-rw-r--r--tools/quickbook/src/syntax_highlight.cpp209
-rw-r--r--tools/quickbook/src/syntax_highlight.hpp58
-rw-r--r--tools/quickbook/src/template_tags.hpp1
-rw-r--r--tools/quickbook/src/utils.cpp59
-rw-r--r--tools/quickbook/src/utils.hpp18
-rw-r--r--tools/quickbook/src/values.cpp14
-rw-r--r--tools/quickbook/src/values.hpp6
-rw-r--r--tools/quickbook/test/Jamfile.v224
-rw-r--r--tools/quickbook/test/anchor-1_7.gold151
-rw-r--r--tools/quickbook/test/anchor-1_7.quickbook97
-rw-r--r--tools/quickbook/test/code_cpp-1_5.gold15
-rw-r--r--tools/quickbook/test/code_cpp-1_5.quickbook12
-rw-r--r--tools/quickbook/test/command-line/Jamfile.v24
-rw-r--r--tools/quickbook/test/doc-info/Jamfile.v24
-rw-r--r--tools/quickbook/test/doc-info/macros1-1_5.gold11
-rw-r--r--tools/quickbook/test/doc-info/macros1-1_5.quickbook6
-rw-r--r--tools/quickbook/test/doc-info/macros1-1_6.gold8
-rw-r--r--tools/quickbook/test/doc-info/macros1-1_6.quickbook7
-rw-r--r--tools/quickbook/test/doc-info/macros1-inc_1_6.quickbook4
-rw-r--r--tools/quickbook/test/elements-1_6.gold24
-rw-r--r--tools/quickbook/test/elements-1_6.quickbook10
-rw-r--r--tools/quickbook/test/empty-inc.quickbook (renamed from tools/build/v2/test/dependency-test/x.foo)0
-rw-r--r--tools/quickbook/test/empty.qbk0
-rw-r--r--tools/quickbook/test/heading-1_7.gold113
-rw-r--r--tools/quickbook/test/heading-1_7.quickbook83
-rw-r--r--tools/quickbook/test/include-1_5.quickbook14
-rw-r--r--tools/quickbook/test/include-1_6.quickbook10
-rw-r--r--tools/quickbook/test/include-1_7.gold36
-rw-r--r--tools/quickbook/test/include-1_7.quickbook18
-rw-r--r--tools/quickbook/test/include-inc-1_5.quickbook (renamed from tools/quickbook/test/include_sub-1_5.qbk)0
-rw-r--r--tools/quickbook/test/include-inc.quickbook (renamed from tools/quickbook/test/include_sub.qbk)0
-rw-r--r--tools/quickbook/test/include/Jamfile.v26
-rw-r--r--tools/quickbook/test/include/compatibility-1_1.quickbook8
-rw-r--r--tools/quickbook/test/include/compatibility-1_5.quickbook8
-rw-r--r--tools/quickbook/test/include/compatibility-1_6.quickbook8
-rw-r--r--tools/quickbook/test/include/compatibility-inc.quickbook (renamed from tools/quickbook/test/include/compatibility-inc.qbk)0
-rw-r--r--tools/quickbook/test/include/compatibility-inc_1_1.quickbook (renamed from tools/quickbook/test/include/compatibility-inc_1_1.qbk)0
-rw-r--r--tools/quickbook/test/include/compatibility-inc_1_5.quickbook (renamed from tools/quickbook/test/include/compatibility-inc_1_5.qbk)0
-rw-r--r--tools/quickbook/test/include/compatibility-inc_1_6.quickbook (renamed from tools/quickbook/test/include/compatibility-inc_1_6.qbk)0
-rw-r--r--tools/quickbook/test/include/filename-1_7.gold46
-rw-r--r--tools/quickbook/test/include/filename-1_7.quickbook21
-rw-r--r--tools/quickbook/test/include/filename_path-1_7.gold18
-rw-r--r--tools/quickbook/test/include/filename_path-1_7.quickbook7
-rw-r--r--tools/quickbook/test/include/glob-1_7.gold41
-rw-r--r--tools/quickbook/test/include/glob-1_7.quickbook39
-rw-r--r--tools/quickbook/test/include/glob1/a.qbk1
-rw-r--r--tools/quickbook/test/include/glob1/glob1-1/b.qbk1
-rw-r--r--tools/quickbook/test/include/glob2/a.qbk3
-rw-r--r--tools/quickbook/test/include/glob2/glob2-1/b.qbk1
-rw-r--r--tools/quickbook/test/include/in_section-1_5.gold28
-rw-r--r--tools/quickbook/test/include/in_section-1_5.quickbook12
-rw-r--r--tools/quickbook/test/include/in_section-1_6.gold24
-rw-r--r--tools/quickbook/test/include/in_section-1_6.quickbook12
-rw-r--r--tools/quickbook/test/include/in_section-inc1.quickbook13
-rw-r--r--tools/quickbook/test/include/in_section-inc2.quickbook14
-rw-r--r--tools/quickbook/test/include/in_section-include1.qbk11
-rw-r--r--tools/quickbook/test/include/in_section-include2.qbk12
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-1_6.quickbook4
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-inc1.qbk4
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-inc1.quickbook4
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-inc1a.quickbook (renamed from tools/quickbook/test/include/include_id_unbalanced-inc1a.qbk)0
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-inc2.qbk5
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-inc2.quickbook5
-rw-r--r--tools/quickbook/test/include/include_id_unbalanced-inc2a.quickbook (renamed from tools/quickbook/test/include/include_id_unbalanced-inc2a.qbk)0
-rw-r--r--tools/quickbook/test/include/nested_compatibility-1_5.quickbook8
-rw-r--r--tools/quickbook/test/include/nested_compatibility-1_6.quickbook8
-rw-r--r--tools/quickbook/test/include/nested_compatibility-inc-1_5.quickbook (renamed from tools/quickbook/test/include/nested_compatibility_inc-1_5.qbk)0
-rw-r--r--tools/quickbook/test/include/nested_compatibility-inc-1_6.quickbook (renamed from tools/quickbook/test/include/nested_compatibility_inc-1_6.qbk)0
-rw-r--r--tools/quickbook/test/include/source_mode-1_5.gold2
-rw-r--r--tools/quickbook/test/include/source_mode-1_5.quickbook3
-rw-r--r--tools/quickbook/test/include/source_mode-1_6.gold2
-rw-r--r--tools/quickbook/test/include/source_mode-1_6.quickbook3
-rw-r--r--tools/quickbook/test/include/source_mode-inc1.quickbook (renamed from tools/quickbook/test/include/source_mode-inc1.qbk)0
-rw-r--r--tools/quickbook/test/include/source_mode-inc2.quickbook3
-rw-r--r--tools/quickbook/test/include/template_include-1_7.gold12
-rw-r--r--tools/quickbook/test/include/template_include-1_7.quickbook8
-rw-r--r--tools/quickbook/test/include2-1_6.quickbook6
-rw-r--r--tools/quickbook/test/include_invalid_path1-1_7-fail.quickbook5
-rw-r--r--tools/quickbook/test/include_invalid_path2-1_7-fail.quickbook5
-rw-r--r--tools/quickbook/test/include_invalid_path3-1_7-fail.quickbook5
-rw-r--r--tools/quickbook/test/include_unicode_glob-1_7-fail.quickbook5
-rw-r--r--tools/quickbook/test/include_win_path-1_6-fail.quickbook2
-rw-r--r--tools/quickbook/test/link-1_1.gold11
-rw-r--r--tools/quickbook/test/link-1_1.quickbook7
-rw-r--r--tools/quickbook/test/link-1_6.gold11
-rw-r--r--tools/quickbook/test/link-1_6.quickbook7
-rw-r--r--tools/quickbook/test/link-1_7-fail.quickbook5
-rw-r--r--tools/quickbook/test/link-1_7-fail2.quickbook6
-rw-r--r--tools/quickbook/test/link-1_7.gold18
-rw-r--r--tools/quickbook/test/link-1_7.quickbook15
-rw-r--r--tools/quickbook/test/list_test-1_6-fail.quickbook46
-rw-r--r--tools/quickbook/test/list_test-1_6.gold160
-rw-r--r--tools/quickbook/test/list_test-1_6.quickbook49
-rw-r--r--tools/quickbook/test/list_test-1_7-fail1.quickbook9
-rw-r--r--tools/quickbook/test/list_test-1_7.gold479
-rw-r--r--tools/quickbook/test/list_test-1_7.quickbook134
-rw-r--r--tools/quickbook/test/mismatched_brackets3-1_1.gold12
-rw-r--r--tools/quickbook/test/mismatched_brackets3-1_1.quickbook6
-rw-r--r--tools/quickbook/test/python/include_glob.qbk11
-rw-r--r--tools/quickbook/test/python/include_glob_deps.txt6
-rw-r--r--tools/quickbook/test/python/include_glob_locs.txt9
-rw-r--r--tools/quickbook/test/python/include_path.qbk8
-rw-r--r--tools/quickbook/test/python/include_path_deps.txt3
-rw-r--r--tools/quickbook/test/python/include_path_locs.txt3
-rw-r--r--tools/quickbook/test/python/missing_relative.qbk8
-rw-r--r--tools/quickbook/test/python/missing_relative_deps.txt3
-rw-r--r--tools/quickbook/test/python/missing_relative_locs.txt3
-rw-r--r--tools/quickbook/test/python/output-deps.py38
-rw-r--r--tools/quickbook/test/python/sub1/a.qbk8
-rw-r--r--tools/quickbook/test/python/sub2/b.qbk8
-rw-r--r--tools/quickbook/test/python/svg_missing.qbk8
-rw-r--r--tools/quickbook/test/python/svg_missing_deps.txt3
-rw-r--r--tools/quickbook/test/python/svg_missing_locs.txt3
-rw-r--r--tools/quickbook/test/role-1_7-fail.quickbook5
-rw-r--r--tools/quickbook/test/role-1_7.gold13
-rw-r--r--tools/quickbook/test/role-1_7.quickbook13
-rw-r--r--tools/quickbook/test/section-1_7.gold26
-rw-r--r--tools/quickbook/test/section-1_7.quickbook18
-rw-r--r--tools/quickbook/test/snippets/Jamfile.v22
-rw-r--r--tools/quickbook/test/snippets/unbalanced_snippet1.cpp10
-rw-r--r--tools/quickbook/test/snippets/unbalanced_snippet2.cpp11
-rw-r--r--tools/quickbook/test/source_mode-1_7.gold27
-rw-r--r--tools/quickbook/test/source_mode-1_7.quickbook14
-rw-r--r--tools/quickbook/test/table-1_7.gold520
-rw-r--r--tools/quickbook/test/table-1_7.quickbook151
-rw-r--r--tools/quickbook/test/templates-1_3.gold23
-rw-r--r--tools/quickbook/test/templates-1_3.quickbook16
-rw-r--r--tools/quickbook/test/templates-1_4.gold7
-rw-r--r--tools/quickbook/test/templates-1_4.quickbook18
-rw-r--r--tools/quickbook/test/templates-1_5.gold269
-rw-r--r--tools/quickbook/test/templates-1_5.quickbook255
-rw-r--r--tools/quickbook/test/templates-1_6-fail1.quickbook8
-rw-r--r--tools/quickbook/test/templates-1_6.gold257
-rw-r--r--tools/quickbook/test/templates-1_6.quickbook313
-rw-r--r--tools/quickbook/test/templates-1_7-fail1.quickbook12
-rw-r--r--tools/quickbook/test/templates-1_7-fail2.quickbook11
-rw-r--r--tools/quickbook/test/templates-1_7.gold302
-rw-r--r--tools/quickbook/test/templates-1_7.quickbook322
-rw-r--r--tools/quickbook/test/unit/Jamfile.v26
-rw-r--r--tools/quickbook/test/unit/glob_test.cpp122
-rw-r--r--tools/quickbook/test/unit/source_map_test.cpp395
-rw-r--r--tools/quickbook/test/unit/values_test.cpp6
-rw-r--r--tools/quickbook/test/versions/Jamfile.v22
-rw-r--r--tools/quickbook/test/versions/invalid_macro-1_6.quickbook4
-rw-r--r--tools/quickbook/test/versions/invalid_macro-inc-1_1.quickbook (renamed from tools/quickbook/test/versions/invalid_macro-inc-1_1.qbk)0
-rw-r--r--tools/quickbook/test/versions/templates-inc-1_1.quickbook (renamed from tools/quickbook/test/versions/templates-1_1.qbk)0
-rw-r--r--tools/quickbook/test/versions/templates-inc-1_4.quickbook (renamed from tools/quickbook/test/versions/templates-1_4.qbk)0
-rw-r--r--tools/quickbook/test/versions/templates-inc-1_5.quickbook (renamed from tools/quickbook/test/versions/templates-1_5.qbk)0
-rw-r--r--tools/quickbook/test/versions/versions-1_6.quickbook12
-rw-r--r--tools/quickbook/test/xinclude/Jamfile.v22
-rw-r--r--tools/quickbook/test/xinclude/sub/boost-no-inspect2
-rw-r--r--tools/regression/build/Jamroot.jam25
-rw-r--r--tools/regression/doc/index.html2
-rw-r--r--tools/regression/doc/library_status.html2
-rw-r--r--tools/regression/src/compiler_status.cpp34
-rw-r--r--tools/regression/src/library_status.cpp572
-rw-r--r--tools/regression/src/regression.py8
-rw-r--r--tools/regression/src/run_tests.sh4
-rw-r--r--tools/regression/test/test-cases/general/bjam.log6
-rw-r--r--tools/release/2release.bat16
-rw-r--r--tools/release/README11
-rw-r--r--tools/release/bjam_warnings.bat17
-rwxr-xr-xtools/release/build_docs.sh20
-rwxr-xr-xtools/release/build_release.sh20
-rw-r--r--tools/release/build_release_packages.bat47
-rwxr-xr-xtools/release/build_release_packages.sh43
-rw-r--r--tools/release/index.html64
-rwxr-xr-xtools/release/inspect.sh31
-rw-r--r--tools/release/inspect_trunk.bat42
-rw-r--r--tools/release/linux_user-config.jam21
-rwxr-xr-xtools/release/load_posix.sh10
-rwxr-xr-xtools/release/load_windows.sh10
-rwxr-xr-xtools/release/make_packages.sh45
-rw-r--r--tools/release/merge2release.bat22
-rw-r--r--tools/release/merge_release_cycle_init.bat16
-rw-r--r--tools/release/release-mgt-msvc/compare_trees/compare_trees.vcproj197
-rw-r--r--tools/release/release-mgt-msvc/msvc.sln26
-rw-r--r--tools/release/release-mgt-msvc/strftime/strftime.vcproj193
-rwxr-xr-xtools/release/release_reports.sh31
-rw-r--r--tools/release/revision_number.bat13
-rw-r--r--tools/release/snapshot.bat39
-rwxr-xr-xtools/release/snapshot.sh16
-rw-r--r--tools/release/snapshot_download_docs.bat31
-rwxr-xr-xtools/release/snapshot_inspect.sh31
-rw-r--r--tools/release/snapshot_inspection.bat28
-rw-r--r--tools/release/snapshot_posix.bat69
-rwxr-xr-xtools/release/snapshot_posix.sh65
-rw-r--r--tools/release/snapshot_windows.bat75
-rwxr-xr-xtools/release/snapshot_windows.sh58
-rw-r--r--tools/release/strftime.cpp68
-rw-r--r--tools/release/unmerged.bat17
-rw-r--r--tools/release/unmerged_all.bat86
-rw-r--r--tools/release/unmerged_whatever.bat13
-rw-r--r--tools/release/upload2sourceforge.bat13
-rw-r--r--tools/wave/build/Jamfile.v269
-rw-r--r--tools/wave/cpp.cpp1473
-rw-r--r--tools/wave/cpp.hpp43
-rw-r--r--tools/wave/cpp_config.hpp63
-rw-r--r--tools/wave/cpp_version.hpp25
-rw-r--r--tools/wave/stop_watch.hpp84
-rw-r--r--tools/wave/trace_macro_expansion.hpp1494
1559 files changed, 118596 insertions, 102982 deletions
diff --git a/tools/Jamfile.v2 b/tools/Jamfile.v2
index d7fab287a8..86cbbefdd1 100644
--- a/tools/Jamfile.v2
+++ b/tools/Jamfile.v2
@@ -12,6 +12,9 @@ project
:
requirements
<link>static
+ <implicit-dependency>/boost//headers
+ :
+ usage-requirements <implicit-dependency>/boost//headers
;
use-project /boost/regression : regression/build ;
diff --git a/tools/auto_index/src/auto_index.cpp b/tools/auto_index/src/auto_index.cpp
index f108885c79..e8d5db2011 100644
--- a/tools/auto_index/src/auto_index.cpp
+++ b/tools/auto_index/src/auto_index.cpp
@@ -94,6 +94,8 @@ const std::string* get_current_block_id(node_id const* id)
{
while((id->id == 0) && (id->prev))
id = id->prev;
+ if(!id->id)
+ BOOST_THROW_EXCEPTION(std::runtime_error("Current XML block has no enclosing ID: XML is not valid Boostbook?"));
return id->id;
}
//
@@ -588,6 +590,8 @@ void process_node(boost::tiny_xml::element_ptr node, node_id* prev, title_info*
preferred_term = true;
}
parent = parent->parent.lock();
+ if(!parent)
+ break;
}
}
catch(const std::exception&){}
@@ -762,6 +766,7 @@ int main(int argc, char* argv[])
catch(boost::exception& e)
{
std::cerr << diagnostic_information(e);
+ return 1;
}
catch(const std::exception& e)
{
diff --git a/tools/bcp/add_dependent_lib.cpp b/tools/bcp/add_dependent_lib.cpp
index bb1818a2e2..41cc59742b 100644
--- a/tools/bcp/add_dependent_lib.cpp
+++ b/tools/bcp/add_dependent_lib.cpp
@@ -81,7 +81,7 @@ static void init_library_scanner(const fs::path& p, bool cvs_mode, const std::st
"\\("
"[^\\(\\);{}]*" // argument list
"\\)"
- "\\s*"
+ "\\s*(?:BOOST[_A-Z]+\\s*)?"
"\\{" // start of definition
"|"
"(\\<\\w+\\>)" // Maybe class name
@@ -93,7 +93,7 @@ static void init_library_scanner(const fs::path& p, bool cvs_mode, const std::st
"\\("
"[^\\(\\);{}]*" // argument list
"\\)"
- "\\s*"
+ "\\s*(?:BOOST[_A-Z]+\\s*)?"
"\\{" // start of definition
")" // end branch reset
);
@@ -135,7 +135,7 @@ static void init_library_scanner(const fs::path& p, bool cvs_mode, const std::st
"\\<(?!return)\\w+\\>[^:;{}#=<>!~%.\\w]*(";
// List of function names goes here...
const char* e3 =
- ")\\s*\\([^;()]*\\)\\s*;)";
+ ")\\s*\\([^;()]*\\)\\s*(?:BOOST[_A-Z]+\\s*)?;)";
std::string class_name_list;
std::set<std::string>::const_iterator i = class_names[libname].begin(), j = class_names[libname].end();
diff --git a/tools/bcp/copy_path.cpp b/tools/bcp/copy_path.cpp
index 497dcd5c86..ded5d181cc 100644
--- a/tools/bcp/copy_path.cpp
+++ b/tools/bcp/copy_path.cpp
@@ -1,8 +1,8 @@
/*
*
* Copyright (c) 2003 Dr John Maddock
- * Use, modification and distribution is subject to the
- * Boost Software License, Version 1.0. (See accompanying file
+ * Use, modification and distribution is subject to the
+ * Boost Software License, Version 1.0. (See accompanying file
* LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*
* This file implements the following:
@@ -13,6 +13,7 @@
#include "bcp_imp.hpp"
#include "fileview.hpp"
#include <boost/filesystem/operations.hpp>
+#include <boost/filesystem/fstream.hpp>
#include <boost/regex.hpp>
#include <fstream>
#include <iterator>
@@ -20,7 +21,7 @@
#include <iostream>
struct get_new_library_name
-{
+{
get_new_library_name(const std::string& n) : m_new_name(n) {}
template <class I>
std::string operator()(const boost::match_results<I>& what)
@@ -63,7 +64,7 @@ void bcp_implementation::copy_path(const fs::path& p)
static std::vector<char> v1, v2;
v1.clear();
v2.clear();
- std::ifstream is((m_boost_path / p).c_str());
+ boost::filesystem::ifstream is((m_boost_path / p));
std::copy(std::istreambuf_iterator<char>(is), std::istreambuf_iterator<char>(), std::back_inserter(v1));
static boost::regex libname_matcher;
@@ -76,11 +77,11 @@ void bcp_implementation::copy_path(const fs::path& p)
std::swap(v1, v2);
v2.clear();
- std::ofstream os;
+ boost::filesystem::ofstream os;
if(m_unix_lines)
- os.open((m_dest_path / p).c_str(), std::ios_base::binary | std::ios_base::out);
+ os.open((m_dest_path / p), std::ios_base::binary | std::ios_base::out);
else
- os.open((m_dest_path / p).c_str(), std::ios_base::out);
+ os.open((m_dest_path / p), std::ios_base::out);
os.write(&*v1.begin(), v1.size());
os.close();
}
@@ -89,7 +90,7 @@ void bcp_implementation::copy_path(const fs::path& p)
static std::vector<char> v1, v2;
v1.clear();
v2.clear();
- std::ifstream is((m_boost_path / p).c_str());
+ boost::filesystem::ifstream is((m_boost_path / p));
std::copy(std::istreambuf_iterator<char>(is), std::istreambuf_iterator<char>(), std::back_inserter(v1));
static boost::regex libname_matcher;
@@ -109,11 +110,11 @@ void bcp_implementation::copy_path(const fs::path& p)
std::swap(v1, v2);
v2.clear();
- std::ofstream os;
+ boost::filesystem::ofstream os;
if(m_unix_lines)
- os.open((m_dest_path / p).c_str(), std::ios_base::binary | std::ios_base::out);
+ os.open((m_dest_path / p), std::ios_base::binary | std::ios_base::out);
else
- os.open((m_dest_path / p).c_str(), std::ios_base::out);
+ os.open((m_dest_path / p), std::ios_base::out);
os.write(&*v1.begin(), v1.size());
os.close();
}
@@ -121,13 +122,13 @@ void bcp_implementation::copy_path(const fs::path& p)
{
//
// v1 hold the current content, v2 is temp buffer.
- // Each time we do a search and replace the new content
+ // Each time we do a search and replace the new content
// ends up in v2: we then swap v1 and v2, and clear v2.
//
static std::vector<char> v1, v2;
v1.clear();
v2.clear();
- std::ifstream is((m_boost_path / p).c_str());
+ boost::filesystem::ifstream is((m_boost_path / p));
std::copy(std::istreambuf_iterator<char>(is), std::istreambuf_iterator<char>(), std::back_inserter(v1));
static const boost::regex namespace_matcher(
@@ -170,7 +171,7 @@ void bcp_implementation::copy_path(const fs::path& p)
{
static const boost::regex namespace_alias(
/*
- "namespace\\s+" + m_namespace_name +
+ "namespace\\s+" + m_namespace_name +
"\\s*"
"("
"\\{"
@@ -191,35 +192,35 @@ void bcp_implementation::copy_path(const fs::path& p)
")"
*/
/*
- "(namespace\\s+" + m_namespace_name +
+ "(namespace\\s+" + m_namespace_name +
"\\s*\\{.*"
"\\})([^\\{\\};]*)\\z"
*/
"(namespace)(\\s+)(" + m_namespace_name + ")"
"(adstl|phoenix|rapidxml)?(\\s*\\{)"
);
- regex_replace(std::back_inserter(v2), v1.begin(), v1.end(), namespace_alias,
+ regex_replace(std::back_inserter(v2), v1.begin(), v1.end(), namespace_alias,
"$1 $3$4 {} $1 (?4$4:boost) = $3$4; $1$2$3$4$5", boost::regex_constants::format_all);
std::swap(v1, v2);
v2.clear();
}
- std::ofstream os;
+ boost::filesystem::ofstream os;
if(m_unix_lines)
- os.open((m_dest_path / p).c_str(), std::ios_base::binary | std::ios_base::out);
+ os.open((m_dest_path / p), std::ios_base::binary | std::ios_base::out);
else
- os.open((m_dest_path / p).c_str(), std::ios_base::out);
+ os.open((m_dest_path / p), std::ios_base::out);
if(v1.size())
os.write(&*v1.begin(), v1.size());
os.close();
}
else if(m_unix_lines && !is_binary_file(p))
{
- std::ifstream is((m_boost_path / p).c_str());
+ boost::filesystem::ifstream is((m_boost_path / p));
std::istreambuf_iterator<char> isi(is);
std::istreambuf_iterator<char> end;
- std::ofstream os((m_dest_path / p).c_str(), std::ios_base::binary | std::ios_base::out);
+ boost::filesystem::ofstream os((m_dest_path / p), std::ios_base::binary | std::ios_base::out);
std::ostreambuf_iterator<char> osi(os);
std::copy(isi, end, osi);
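
Throughout this file the std::ifstream/std::ofstream objects opened via path::c_str() are replaced by boost::filesystem::ifstream and boost::filesystem::ofstream, which accept a fs::path directly and so avoid the c_str() conversion whose character type differs between platforms; the same substitution appears in fileview.cpp further down. A minimal sketch of the pattern, with an illustrative helper name:

#include <boost/filesystem/fstream.hpp>
#include <boost/filesystem/path.hpp>
#include <iterator>
#include <vector>

std::vector<char> read_whole_file(const boost::filesystem::path& p)
{
    // Opens from the path object itself; no explicit c_str() call needed.
    boost::filesystem::ifstream is(p);
    return std::vector<char>(std::istreambuf_iterator<char>(is),
                             std::istreambuf_iterator<char>());
}
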
diff --git a/tools/bcp/doc/bcp.qbk b/tools/bcp/doc/bcp.qbk
index 905a201ada..2d6b1cfedf 100644
--- a/tools/bcp/doc/bcp.qbk
+++ b/tools/bcp/doc/bcp.qbk
@@ -175,6 +175,7 @@ their dependencies and so on.
* HTML files are scanned for immediate dependencies (images and style sheets, but not links).
It should be noted that in practice bcp can produce a rather "fat" list of dependencies, reasons for this include:
+
* It searches for library names first, so using "regex" as a name will give you everything in the
libs/regex directory and everything that depends on. This can be a long list as all the regex test and example
programs will get scanned for their dependencies. If you want a more minimal list, then try using the
diff --git a/tools/bcp/doc/html/index.html b/tools/bcp/doc/html/index.html
index c9530bfa23..bf59356f0b 100644
--- a/tools/bcp/doc/html/index.html
+++ b/tools/bcp/doc/html/index.html
@@ -3,7 +3,7 @@
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>BCP</title>
<link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css">
-<meta name="generator" content="DocBook XSL Stylesheets V1.76.1">
+<meta name="generator" content="DocBook XSL Stylesheets V1.77.1">
<link rel="home" href="index.html" title="BCP">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
@@ -25,9 +25,9 @@
<div><div class="authorgroup"><div class="author"><h3 class="author">
<span class="firstname">John</span> <span class="surname">Maddock</span>
</h3></div></div></div>
-<div><p class="copyright">Copyright &#169; 2209 John Maddock</p></div>
+<div><p class="copyright">Copyright &#169; 2009 John Maddock</p></div>
<div><div class="legalnotice">
-<a name="id917214"></a><p>
+<a name="bcp.legal"></a><p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
@@ -50,7 +50,7 @@
</dl></dd>
</dl>
</div>
-<div class="section">
+<div class="section bcp_overview">
<div class="titlepage"><div><div><h2 class="title" style="clear: both">
<a name="bcp.overview"></a><a class="link" href="index.html#bcp.overview" title="Overview">Overview</a>
</h2></div></div></div>
@@ -64,7 +64,7 @@
what licences are used by those dependencies.
</p>
</div>
-<div class="section">
+<div class="section bcp_examples">
<div class="titlepage"><div><div><h2 class="title" style="clear: both">
<a name="bcp.examples"></a><a class="link" href="index.html#bcp.examples" title="Examples">Examples</a>
</h2></div></div></div>
@@ -111,7 +111,7 @@
The report contains license information, author details, and file dependencies.
</p>
</div>
-<div class="section">
+<div class="section bcp_syntax">
<div class="titlepage"><div><div><h2 class="title" style="clear: both">
<a name="bcp.syntax"></a><a class="link" href="index.html#bcp.syntax" title="Syntax">Syntax</a>
</h2></div></div></div>
@@ -122,7 +122,7 @@
<dt><span class="section"><a href="index.html#bcp.syntax.output">output-path</a></span></dt>
<dt><span class="section"><a href="index.html#bcp.syntax.dependencies">Dependencies</a></span></dt>
</dl></div>
-<div class="section">
+<div class="section bcp_syntax_main">
<div class="titlepage"><div><div><h3 class="title">
<a name="bcp.syntax.main"></a><a class="link" href="index.html#bcp.syntax.main" title="Behaviour Selection">Behaviour Selection</a>
</h3></div></div></div>
@@ -141,7 +141,7 @@
<p>
Outputs a html report file containing:
</p>
-<div class="itemizedlist"><ul class="itemizedlist" type="disc">
+<div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; ">
<li class="listitem">
All the licenses in effect, plus the files using each license, and the
copyright holders using each license.
@@ -163,7 +163,7 @@
</li>
</ul></div>
</div>
-<div class="section">
+<div class="section bcp_syntax_options">
<div class="titlepage"><div><div><h3 class="title">
<a name="bcp.syntax.options"></a><a class="link" href="index.html#bcp.syntax.options" title="Options">Options</a>
</h3></div></div></div>
@@ -211,7 +211,7 @@
Make sure that all copied files use Unix style line endings.
</p>
</div>
-<div class="section">
+<div class="section bcp_syntax_module">
<div class="titlepage"><div><div><h3 class="title">
<a name="bcp.syntax.module"></a><a class="link" href="index.html#bcp.syntax.module" title="module-list">module-list</a>
</h3></div></div></div>
@@ -243,7 +243,7 @@
copied/listed.
</p>
</div>
-<div class="section">
+<div class="section bcp_syntax_output">
<div class="titlepage"><div><div><h3 class="title">
<a name="bcp.syntax.output"></a><a class="link" href="index.html#bcp.syntax.output" title="output-path">output-path</a>
</h3></div></div></div>
@@ -251,14 +251,14 @@
The path to which files will be copied (this path must exist).
</p>
</div>
-<div class="section">
+<div class="section bcp_syntax_dependencies">
<div class="titlepage"><div><div><h3 class="title">
<a name="bcp.syntax.dependencies"></a><a class="link" href="index.html#bcp.syntax.dependencies" title="Dependencies">Dependencies</a>
</h3></div></div></div>
<p>
File dependencies are found as follows:
</p>
-<div class="itemizedlist"><ul class="itemizedlist" type="disc">
+<div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; ">
<li class="listitem">
C++ source files are scanned for #includes, all #includes present in
the boost source tree will then be scanned for their dependencies and
@@ -280,27 +280,36 @@
</ul></div>
<p>
It should be noted that in practice bcp can produce a rather "fat"
- list of dependencies, reasons for this include: * It searches for library
- names first, so using "regex" as a name will give you everything
- in the libs/regex directory and everything that depends on. This can be a
- long list as all the regex test and example programs will get scanned for
- their dependencies. If you want a more minimal list, then try using the names
- of the headers you are actually including, or use the --scan option to scan
- your source code. * If you include the header of a library with separate
- source, then you get that libraries source and all it's dependencies. This
- is deliberate and in general those extra dependencies are needed. * When
- you include a header, bcp doesn't know what compiler you're using, so it
- follows all possible preprocessor paths. If you're distributing a subset
- of Boost with you're application then that is what you want to have happen
- in general.
+ list of dependencies, reasons for this include:
</p>
+<div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; ">
+<li class="listitem">
+ It searches for library names first, so using "regex" as a
+ name will give you everything in the libs/regex directory and everything
+          that depends on it. This can be a long list as all the regex test and example
+ programs will get scanned for their dependencies. If you want a more
+ minimal list, then try using the names of the headers you are actually
+ including, or use the --scan option to scan your source code.
+ </li>
+<li class="listitem">
+ If you include the header of a library with separate source, then you
+          get that library's source and all its dependencies. This is deliberate
+ and in general those extra dependencies are needed.
+ </li>
+<li class="listitem">
+ When you include a header, bcp doesn't know what compiler you're using,
+ so it follows all possible preprocessor paths. If you're distributing
+          a subset of Boost with your application then that is what you want
+ to have happen in general.
+ </li>
+</ul></div>
<p>
The last point above can result in a substantial increase in the number of
headers found compared to most peoples expectations. For example bcp finds
274 header dependencies for boost/shared_ptr.hpp: by running bcp in report
mode we can see why all these headers have been found as dependencies:
</p>
-<div class="itemizedlist"><ul class="itemizedlist" type="disc">
+<div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; ">
<li class="listitem">
All of the Config library headers get included (52 headers, would be
about 6 for one compiler only).
@@ -338,7 +347,7 @@
</div>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
-<td align="left"><p><small>Last revised: August 14, 2011 at 16:17:07 GMT</small></p></td>
+<td align="left"><p><small>Last revised: April 18, 2013 at 12:30:13 GMT</small></p></td>
<td align="right"><div class="copyright-footer"></div></td>
</tr></table>
<hr>
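
The rewritten page above folds the reasons for bcp's "fat" dependency lists back into a proper bulleted list; the key technical point is the last one: every #include is followed on every preprocessor path, because bcp cannot know which compiler will consume the copied subset. A rough sketch of that rule, not bcp's actual scanner, with an illustrative find_includes() helper:

#include <boost/regex.hpp>
#include <iostream>
#include <set>
#include <string>

std::set<std::string> find_includes(const std::string& source)
{
    // Collect every #include, with no attempt to evaluate #if/#ifdef branches.
    static const boost::regex inc("#\\s*include\\s*[<\"]([^>\"]+)[>\"]");
    std::set<std::string> headers;
    for(boost::sregex_iterator i(source.begin(), source.end(), inc), e; i != e; ++i)
        headers.insert((*i)[1].str());
    return headers;
}

int main()
{
    const std::string src =
        "#include <boost/config.hpp>\n"
        "#ifdef _MSC_VER\n"
        "#include <intrin.h>\n"
        "#endif\n";
    for(const std::string& h : find_includes(src))
        std::cout << h << "\n";   // both headers are reported: no compiler-specific pruning
    return 0;
}
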
diff --git a/tools/bcp/fileview.cpp b/tools/bcp/fileview.cpp
index 54b3758551..36e785062c 100644
--- a/tools/bcp/fileview.cpp
+++ b/tools/bcp/fileview.cpp
@@ -1,14 +1,15 @@
/*
*
* Copyright (c) 2003 Dr John Maddock
- * Use, modification and distribution is subject to the
- * Boost Software License, Version 1.0. (See accompanying file
+ * Use, modification and distribution is subject to the
+ * Boost Software License, Version 1.0. (See accompanying file
* LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*
* This file implements the fileview class
*/
#include "fileview.hpp"
+#include <boost/filesystem/fstream.hpp>
#include <vector>
#include <algorithm>
#include <string>
@@ -57,7 +58,7 @@ void fileview::close()
void fileview::open(const boost::filesystem::path& p)
{
cow();
- std::ifstream is(p.c_str());
+ boost::filesystem::ifstream is(p);
if(!is)
{
std::string msg("Bad file name: ");
diff --git a/tools/boostbook/doc/Jamfile.v2 b/tools/boostbook/doc/Jamfile.v2
index 6c468cb883..d09d3de63f 100644
--- a/tools/boostbook/doc/Jamfile.v2
+++ b/tools/boostbook/doc/Jamfile.v2
@@ -4,7 +4,7 @@
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
-project boost/doc ;
+project boostbook/doc ;
import boostbook : boostbook ;
boostbook boostbook : boostbook.xml :
diff --git a/tools/boostbook/doc/boostbook.xml b/tools/boostbook/doc/boostbook.xml
index 1e57966fd5..c0afdd393f 100644
--- a/tools/boostbook/doc/boostbook.xml
+++ b/tools/boostbook/doc/boostbook.xml
@@ -9,7 +9,7 @@
<!DOCTYPE part PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
"http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
<part xmlns:xi="http://www.w3.org/2001/XInclude" id="boostbook"
- last-revision="$Date: 2010-07-19 16:29:09 -0700 (Mon, 19 Jul 2010) $">
+ last-revision="$Date$">
<partinfo>
<author>
<firstname>Douglas</firstname>
@@ -140,9 +140,9 @@ std::cout &lt;&lt; f(5, 3) &gt;&gt; std::endl;
happen in the BBv2 user configuration file,
<filename>user-config.jam</filename>. If you do not have a copy
of this file in your home directory, you should copy the one
- that resides in <code>tools/build/v2</code> to your home
+ that resides in <code>tools/build/</code> to your home
directory. Alternatively, you can edit
- <filename>tools/build/v2/user-config.jam</filename> directly or
+ <filename>tools/build/user-config.jam</filename> directly or
a site-wide <filename>site-config.jam</filename> file.</para>
<section id="boostbook.setup.xsltproc">
diff --git a/tools/boostbook/doc/documenting.xml b/tools/boostbook/doc/documenting.xml
index 04552e5f5a..30e4cd88b9 100644
--- a/tools/boostbook/doc/documenting.xml
+++ b/tools/boostbook/doc/documenting.xml
@@ -9,7 +9,7 @@
<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
"http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
<chapter xmlns:xi="http://www.w3.org/2001/XInclude" id="boostbook.documenting"
- last-revision="$Date: 2008-07-12 12:30:45 -0700 (Sat, 12 Jul 2008) $">
+ last-revision="$Date$">
<title>Documenting libraries</title>
<para>BoostBook is an extension to <ulink
@@ -40,7 +40,7 @@
&lt;!DOCTYPE library PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
"http://www.boost.org/tools/boostbook/dtd/boostbook.dtd"&gt;
&lt;library name="Any" dirname="any" xmlns:xi="http://www.w3.org/2001/XInclude"
- id="any" last-revision="$Date: 2008-07-12 12:30:45 -0700 (Sat, 12 Jul 2008) $"&gt;
+ id="any" last-revision="$Date$"&gt;
&lt;libraryinfo&gt;
&lt;author&gt;
&lt;firstname&gt;Kevlin&lt;/firstname&gt;
@@ -99,7 +99,7 @@
<varlistentry>
<term><code>last-revision</code></term>
<listitem>
- <simpara>Always set to <code>$Date: 2008-07-12 12:30:45 -0700 (Sat, 12 Jul 2008) $</code>, which is
+ <simpara>Always set to <code>$Date$</code>, which is
expanded by CVS to include the date and time that the file
was last modified.</simpara>
</listitem>
diff --git a/tools/boostbook/doc/reference.xml b/tools/boostbook/doc/reference.xml
index 67aa7913a3..d4c56a5ef4 100644
--- a/tools/boostbook/doc/reference.xml
+++ b/tools/boostbook/doc/reference.xml
@@ -36,7 +36,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -140,7 +140,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -176,7 +176,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -218,7 +218,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -258,7 +258,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -294,7 +294,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>pack</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to '1' if the parameter is a parameter pack.</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
@@ -340,7 +340,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -378,7 +378,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -441,7 +441,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -481,7 +481,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -516,7 +516,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -555,7 +555,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>access</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The access specifier ("public", "private", or "protected") of the inheritance.</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>pack</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to '1' if this is a pack exapansion.</entry></row>
@@ -599,7 +599,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -717,7 +717,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
@@ -754,7 +754,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>alt</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry/></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
</tbody>
@@ -789,7 +789,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -863,7 +863,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>cv</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>cv-qualifiers for this method, e.g., const volatile</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
@@ -939,7 +939,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -991,7 +991,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1050,7 +1050,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>alt</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry/></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
</tbody>
@@ -1085,7 +1085,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1129,7 +1129,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>alt</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry/></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1165,7 +1165,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1201,7 +1201,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1236,7 +1236,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
@@ -1273,7 +1273,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1308,7 +1308,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1344,7 +1344,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1379,7 +1379,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>pack</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to '1' if this is a pack exapansion.</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1415,7 +1415,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>alt</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry/></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
</tbody>
@@ -1450,7 +1450,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1528,7 +1528,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1563,7 +1563,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1598,7 +1598,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -1668,7 +1668,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1712,7 +1712,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>cv</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>cv-qualifiers for this method, e.g., const volatile</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
@@ -1783,7 +1783,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1852,7 +1852,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1887,7 +1887,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -1965,7 +1965,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>cv</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>cv-qualifiers for this method, e.g., const volatile</entry></row>
<row><entry>specifiers</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>The specifiers for this function, e.g., inline, static, etc.</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
@@ -2007,7 +2007,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2046,7 +2046,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the access specification, e.g. "public", "private", or "protected".</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
</tbody>
@@ -2126,7 +2126,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2165,7 +2165,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2201,7 +2201,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2236,7 +2236,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2277,7 +2277,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2346,7 +2346,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2381,7 +2381,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2416,7 +2416,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>pack</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to '1' if the parameter is a parameter pack.</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
@@ -2453,7 +2453,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>dirname</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry/></row>
<row><entry>url</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry/></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
@@ -2497,7 +2497,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2532,7 +2532,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared to referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2568,7 +2568,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>alt</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry/></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
</tbody>
@@ -2603,7 +2603,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared or referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2639,7 +2639,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared or referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2692,7 +2692,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared or referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2728,7 +2728,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2763,7 +2763,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2798,7 +2798,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2833,7 +2833,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2868,7 +2868,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
@@ -2903,7 +2903,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>name</entry><entry>#REQUIRED</entry><entry>CDATA</entry><entry>The name of the element being declared or referenced</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
@@ -2939,7 +2939,7 @@
</thead>
<tbody>
-<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date: 2009-10-10 07:53:46 -0700 (Sat, 10 Oct 2009) $ to keep "last revised" information in sync with CVS changes</entry></row>
+<row><entry>last-revision</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Set to $Date$ to keep "last revised" information in sync with CVS changes</entry></row>
<row><entry>id</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>A global identifier for this element</entry></row>
<row><entry>xml:base</entry><entry>#IMPLIED</entry><entry>CDATA</entry><entry>Implementation detail used by XIncludes</entry></row>
</tbody>
diff --git a/tools/boostbook/doc/together.xml b/tools/boostbook/doc/together.xml
index ca93fc53b7..47269b7d64 100644
--- a/tools/boostbook/doc/together.xml
+++ b/tools/boostbook/doc/together.xml
@@ -9,7 +9,7 @@
<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
"http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
<chapter xmlns:xi="http://www.w3.org/2001/XInclude" id="boostbook.together"
- last-revision="$Date: 2009-04-15 00:37:45 -0700 (Wed, 15 Apr 2009) $">
+ last-revision="$Date$">
<title>Bringing Together a BoostBook Document</title>
<section id="boostbook.linking">
diff --git a/tools/boostbook/dtd/1.1/boostbook.dtd b/tools/boostbook/dtd/1.1/boostbook.dtd
index ec2086aebf..9805ec7064 100644
--- a/tools/boostbook/dtd/1.1/boostbook.dtd
+++ b/tools/boostbook/dtd/1.1/boostbook.dtd
@@ -16,8 +16,8 @@
PUBLIC "-//Boost//DTD BoostBook XML V1.1//EN"
SYSTEM "http://www.boost.org/tools/boostbook/dtd/1.1/boostbook.dtd"
- $Revision: 55188 $
- $Date: 2009-07-26 13:11:03 -0700 (Sun, 26 Jul 2009) $
+ $Revision$
+ $Date$
-->
<!--========== Define XInclude features. ==========-->
diff --git a/tools/boostbook/dtd/boostbook.dtd b/tools/boostbook/dtd/boostbook.dtd
index 4718abe39c..bd4c3f871e 100644
--- a/tools/boostbook/dtd/boostbook.dtd
+++ b/tools/boostbook/dtd/boostbook.dtd
@@ -16,8 +16,8 @@
PUBLIC "-//Boost//DTD BoostBook XML V1.1//EN"
SYSTEM "http://www.boost.org/tools/boostbook/dtd/1.1/boostbook.dtd"
- $Revision: 51774 $
- $Date: 2009-03-14 04:42:38 -0700 (Sat, 14 Mar 2009) $
+ $Revision$
+ $Date$
-->
<!--========== Define XInclude features. ==========-->
diff --git a/tools/boostbook/setup_boostbook.py b/tools/boostbook/setup_boostbook.py
index b06736afc1..032587d8a9 100644
--- a/tools/boostbook/setup_boostbook.py
+++ b/tools/boostbook/setup_boostbook.py
@@ -242,8 +242,8 @@ def find_user_config():
JAM_CONFIG_IN_TEMP="yes"
print " Updating Boost.Jam configuration in %s... " % JAM_CONFIG_OUT
return JAM_CONFIG_OUT
- elif os.environ.has_key( "BOOST_ROOT" ) and os.path.exists( os.path.join( os.environ[ "BOOST_ROOT" ], "tools/build/v2/user-config.jam" ) ):
- JAM_CONFIG_IN=os.path.join( os.environ[ "BOOST_ROOT" ], "tools/build/v2/user-config.jam" )
+ elif os.environ.has_key( "BOOST_ROOT" ) and os.path.exists( os.path.join( os.environ[ "BOOST_ROOT" ], "tools/build/user-config.jam" ) ):
+ JAM_CONFIG_IN=os.path.join( os.environ[ "BOOST_ROOT" ], "tools/build/user-config.jam" )
print " Found user-config.jam in BOOST_ROOT directory (%s)" % JAM_CONFIG_IN
JAM_CONFIG_IN_TEMP="no"
print " Writing Boost.Jam configuration to %s... " % JAM_CONFIG_OUT
@@ -286,7 +286,7 @@ def setup_boostbook( tools_directory ):
print "done."
- print "Done! Execute \"bjam --v2\" in a documentation directory to generate"
+ print "Done! Execute \"b2\" in a documentation directory to generate"
print "documentation with BoostBook. If you have not already, you will need"
print "to compile Boost.Jam."
diff --git a/tools/boostbook/setup_boostbook.sh b/tools/boostbook/setup_boostbook.sh
index 0e79031573..54348361fa 100644..100755
--- a/tools/boostbook/setup_boostbook.sh
+++ b/tools/boostbook/setup_boostbook.sh
@@ -129,8 +129,8 @@ if test -r "$HOME/user-config.jam"; then
JAM_CONFIG_IN_TEMP="yes"
echo -n "Updating Boost.Jam configuration in $JAM_CONFIG_OUT... "
-elif test -r "$BOOST_ROOT/tools/build/v2/user-config.jam"; then
- JAM_CONFIG_IN="$BOOST_ROOT/tools/build/v2/user-config.jam";
+elif test -r "$BOOST_ROOT/tools/build/user-config.jam"; then
+ JAM_CONFIG_IN="$BOOST_ROOT/tools/build/user-config.jam";
JAM_CONFIG_IN_TEMP="no"
echo -n "Writing Boost.Jam configuration to $JAM_CONFIG_OUT... "
else
@@ -176,6 +176,6 @@ awk -f setup_boostbook.awk $JAM_CONFIG_IN > $JAM_CONFIG_OUT
rm -f setup_boostbook.awk
echo "done."
-echo "Done! Execute \"bjam --v2\" in a documentation directory to generate"
+echo "Done! Execute \"b2\" in a documentation directory to generate"
echo "documentation with BoostBook. If you have not already, you will need"
echo "to compile Boost.Jam."
diff --git a/tools/boostbook/test/Jamfile.v2 b/tools/boostbook/test/Jamfile.v2
index 4699cd15c1..23aff82796 100644
--- a/tools/boostbook/test/Jamfile.v2
+++ b/tools/boostbook/test/Jamfile.v2
@@ -4,7 +4,7 @@
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
-project boost/test ;
+project /boost/boostbook/test ;
import boostbook : boostbook ;
boostbook alt : alt.xml :
diff --git a/tools/boostbook/test/doxygen/autodoc.gold b/tools/boostbook/test/doxygen/autodoc.gold
index 8ff920bd0d..95c818114a 100644
--- a/tools/boostbook/test/doxygen/autodoc.gold
+++ b/tools/boostbook/test/doxygen/autodoc.gold
@@ -1,12 +1,60 @@
<?xml version="1.0" standalone="yes"?>
-<library-reference id="example_reference"><title>Example Reference</title><header name="boost/example.hpp"><namespace name="example"><class name="example"><purpose>Documentation for class example. </purpose><class name="inner_class"><data-member name="x"><type>int</type></data-member></class><enum name="class_enum"><enumvalue name="enumerator"/></enum><typedef name="documented_type1"><description><para>This type has documentation. </para></description><type>int</type></typedef><typedef name="documented_type2"><purpose>This type has documentation. </purpose><type>long</type></typedef><typedef name="documented_type3"><description><para>This type has documentation. </para></description><type>long double</type></typedef><typedef name="undocumented_type1"><type>short</type></typedef><typedef name="undocumented_type2"><type>double</type></typedef><data-member name="integer"><type>int</type></data-member><data-member name="mutable_integer" specifiers="mutable"><type>int</type></data-member><data-member name="const_integer"><type>const int</type></data-member><data-member name="static_integer" specifiers="static"><type>int</type></data-member><data-member name="static_const_integer" specifiers="static"><type>const int</type></data-member><method-group name="public member functions"><method name="conversion-operator" cv="" specifiers="explicit"><type>int</type></method></method-group></class><struct name="example_template"><template>
+<library-reference id="example_reference"><title>Example Reference</title><header name="boost/example.hpp">
+<namespace name="example">
+<class name="example"><purpose>Documentation for class example. </purpose><description><para>Detailed documentation</para><para><programlisting language="c++">void class_code_sample();
+</programlisting> </para></description><class name="inner_class"><data-member name="x"><type>int</type></data-member>
+</class><enum name="class_enum"><enumvalue name="enumerator"/></enum>
+<typedef name="documented_type1"><description><para>This type has documentation. </para></description><type>int</type></typedef>
+<typedef name="documented_type2"><purpose>This type has documentation. </purpose><type>long</type></typedef>
+<typedef name="documented_type3"><description><para>This type has documentation. </para></description><type>long double</type></typedef>
+<typedef name="undocumented_type1"><type>short</type></typedef>
+<typedef name="undocumented_type2"><type>double</type></typedef>
+<data-member name="integer"><type>int</type></data-member>
+<data-member name="mutable_integer" specifiers="mutable"><type>int</type></data-member>
+<data-member name="const_integer"><type>const int</type></data-member>
+<data-member name="static_integer" specifiers="static"><type>int</type></data-member>
+<data-member name="static_const_integer" specifiers="static"><type>const int</type></data-member>
+<method-group name="public member functions">
+<method name="virtual_method" specifiers="virtual"><type>int</type></method>
+<method name="virtual_abstract_method" cv="= 0" specifiers="virtual"><type>int</type></method>
+<method name="virtual_const_method" cv="const" specifiers="virtual"><type>int</type></method>
+<method name="method_with_default_value"><type>int</type><parameter name=""><paramtype>int</paramtype><default>default_value</default></parameter></method>
+<method name="method_with_fp"><type>int</type><parameter name="fp"><paramtype>int(*)()</paramtype></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="method_with_string_default1"><type>int</type><parameter name=""><paramtype>char *</paramtype><default>")"</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="method_with_string_default2"><type>int</type><parameter name=""><paramtype>char *</paramtype><default>"("</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="method_with_char_default1"><type>int</type><parameter name=""><paramtype>char</paramtype><default>'('</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="method_with_char_default2"><type>int</type><parameter name=""><paramtype>char</paramtype><default>')'</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="volatile_method_with_fp" cv="volatile"><type>int</type><parameter name="fp"><paramtype>int(*)()</paramtype></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="volatile_method_with_string_default1" cv="volatile"><type>int</type><parameter name=""><paramtype>char *</paramtype><default>")"</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="volatile_method_with_string_default2" cv="volatile"><type>int</type><parameter name=""><paramtype>char *</paramtype><default>"("</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="volatile_method_with_char_default1" cv="volatile"><type>int</type><parameter name=""><paramtype>char</paramtype><default>'('</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="volatile_method_with_char_default2" cv="volatile"><type>int</type><parameter name=""><paramtype>char</paramtype><default>')'</default></parameter><parameter name=""><paramtype>volatile char</paramtype></parameter></method>
+<method name="const_method" cv="const"><type>void</type></method>
+<method name="volatile_method" cv="volatile"><type>void</type></method>
+<method name="trad_noexcept" cv="noexcept"><type>void</type></method>
+<method name="trad_noexcept_if" cv="noexcept(a==b &amp;&amp;(c||d)))"><type>void</type></method>
+<method name="boost_noexcept" cv="noexcept"><type>void</type></method>
+<method name="boost_noexcept_if" cv="noexcept(condition)"><type>void</type></method>
+<method name="trad_constexpr" cv="constexpr"><type>void</type></method>
+<method name="boost_constexpr" cv="constexpr"><type>void</type></method>
+<method name="boost_constexpr_or_const" cv="constexpr"><type>void</type></method>
+<method name="constexpr_noexcept" cv="constexpr noexcept"><type>void</type></method>
+<method name="conversion-operator" specifiers="explicit"><type>int</type></method>
+</method-group>
+<constructor cv="= default"><parameter name=""><paramtype><classname>example</classname> const &amp;</paramtype></parameter></constructor>
+<copy-assignment cv="= delete"><type><classname>example</classname> &amp;</type><parameter name=""><paramtype><classname>example</classname> const &amp;</paramtype></parameter></copy-assignment>
+<method-group name="public static functions">
+<method name="static_method" specifiers="static"><type>int</type></method>
+<method name="static_constexpr" cv="constexpr" specifiers="static"><type>int</type></method>
+</method-group>
+</class><struct name="example_template"><template>
<template-type-parameter name="TypeParameter"><purpose><para>A template parameter </para></purpose></template-type-parameter>
<template-nontype-parameter name="NonTypeParameter"><type>int</type><purpose><para>This is a non-type template parameter </para></purpose></template-nontype-parameter>
<template-type-parameter name="TypeParameterWithDefault"><default>int</default><purpose><para>This is a template parameter with a default argument </para></purpose></template-type-parameter>
</template><description><para>Test some doxygen markup</para><para><warning><para>This is just an example.</para></warning>
Embedded docbook list:</para><para>
- <orderedlist><listitem><simpara>1</simpara></listitem><listitem><simpara>2</simpara></listitem></orderedlist>
- </para><para><emphasis>Special</emphasis> <emphasis role="bold">Bold</emphasis> <computeroutput>Typewriter</computeroutput> <emphasis>Italics</emphasis> <emphasis>emphasis</emphasis> <computeroutput>parameter</computeroutput> </para><para><itemizedlist>
+<orderedlist><listitem><simpara>1</simpara></listitem><listitem><simpara>2</simpara></listitem></orderedlist>
+</para><para><emphasis>Special</emphasis> <emphasis role="bold">Bold</emphasis> <computeroutput>Typewriter</computeroutput> <emphasis>Italics</emphasis> <emphasis>emphasis</emphasis> <computeroutput>parameter</computeroutput> </para><para><itemizedlist>
<listitem><para>Arg1 first argument. </para></listitem>
<listitem><para>Arg2 second argument.</para></listitem>
</itemizedlist>
@@ -15,15 +63,37 @@ Embedded docbook list:</para><para>
<listitem><para>Second list item</para></listitem>
</itemizedlist>
Line 1<sbr/>
- Line 2</para><para><programlisting> void foo() {}
+ Line 2</para><para><programlisting language="c++">void foo() {}
</programlisting></para><para>
-</para></description></struct><enum name="namespace_enum"><enumvalue name="enumerator"/></enum><data-member name="namespace_integer"><type>int</type></data-member><data-member name="namespace_static_integer" specifiers="static"><type>int</type></data-member><data-member name="namespace_const_integer"><type>const int</type></data-member><data-member name="namespace_static_const_integer" specifiers="static"><type>const int</type></data-member><function name="namespace_func"><type>int</type><parameter name="i"><paramtype>int</paramtype><description><para>A function parameter </para></description></parameter><parameter name="j"><paramtype>int</paramtype><description><para>Another </para></description></parameter><description><para>
+</para></description></struct><struct name="specialization_test"><template>
+ <template-type-parameter name="T"/>
+ </template></struct><struct-specialization name="specialization_test"><template>
+ <template-type-parameter name="T"/>
+ </template><specialization><template-arg>T *</template-arg></specialization><method-group name="public member functions">
+</method-group>
+<constructor><description><para>A constructor. </para></description></constructor>
+<destructor><description><para>A destructor. </para></description></destructor>
+<copy-assignment><type><emphasis>unspecified</emphasis></type><parameter name=""><paramtype>const <classname>specialization_test</classname> &amp;</paramtype></parameter><description><para>An assignment operator. </para></description></copy-assignment>
+</struct-specialization><enum name="namespace_enum"><enumvalue name="enumerator"/></enum>
+<data-member name="namespace_integer"><type>int</type></data-member>
+<data-member name="namespace_static_integer" specifiers="static"><type>int</type></data-member>
+<data-member name="namespace_const_integer"><type>const int</type></data-member>
+<data-member name="namespace_static_const_integer" specifiers="static"><type>const int</type></data-member>
+<function name="free_function"><type>void</type><parameter name="x"><paramtype>int</paramtype><description><para>Parameter description.</para></description></parameter><description><para>
+<programlisting language="c++">void function_code_sample();
+</programlisting> </para></description></function>
+<function name="namespace_func"><type>int</type><parameter name="i"><paramtype>int</paramtype><description><para>A function parameter </para></description></parameter><parameter name="j"><paramtype>int</paramtype><description><para>Another </para></description></parameter><description><para>
This is a test function. <classname alt="example::example">Link to class</classname> <classname alt="example::example_template">Link to class template</classname> <note><para>This is a note.</para></note>
<para><emphasis role="bold">See Also:</emphasis><para><classname alt="example::example">example::example</classname> and <classname alt="example::example_template">example_template</classname> </para></para>
-</para></description><requires><para>i &gt; j</para></requires><returns><para>The answer </para></returns></function><function name="namespace_func_template"><type>void</type><template>
+</para></description><requires><para>i &gt; j</para></requires><returns><para>The answer </para></returns></function>
+<function name="namespace_func_template"><type>void</type><template>
<template-type-parameter name="TypeParameter"><purpose><para>A template parameter </para></purpose></template-type-parameter>
<template-nontype-parameter name="NonTypeParameter"><type>int</type><purpose><para>This is a non-type template parameter </para></purpose></template-nontype-parameter>
</template><description><para>Testing a function template.
-</para></description></function></namespace><macro name="EXAMPLE" kind="functionlike"><macro-parameter name="m"/><purpose>Documentation for macro example. </purpose></macro></header></library-reference>
+</para></description></function>
+</namespace>
+<macro name="EXAMPLE" kind="functionlike"><macro-parameter name="m"/><purpose>Documentation for macro example. </purpose></macro>
+</header>
+</library-reference>
\ No newline at end of file
diff --git a/tools/boostbook/test/doxygen/boost/example.hpp b/tools/boostbook/test/doxygen/boost/example.hpp
index da5d36d85c..629a6d9a4b 100644
--- a/tools/boostbook/test/doxygen/boost/example.hpp
+++ b/tools/boostbook/test/doxygen/boost/example.hpp
@@ -7,6 +7,12 @@
\class example::example
\brief Documentation for class example
+
+ Detailed documentation
+
+ \code{.cpp}
+ void class_code_sample();
+ \endcode
*/
/*!
@@ -23,6 +29,16 @@ enum global_enum { enumerator1 = 1, enumerator2 };
namespace example
{
+ /*!
+
+ \param x Parameter description.
+
+ \code{.cpp}
+ void function_code_sample();
+ \endcode
+ */
+ void free_function(int x);
+
int namespace_integer;
static int namespace_static_integer;
const int namespace_const_integer = 1;
@@ -32,6 +48,42 @@ namespace example
class example
{
public:
+ example(example const&) = default;
+ example& operator=(example const&) = delete;
+ virtual int virtual_method();
+ virtual int virtual_abstract_method() = 0;
+ virtual int virtual_const_method() const;
+ int method_with_default_value(int = default_value);
+
+ int method_with_fp(int (*fp)(), volatile char);
+ int method_with_string_default1(char* = ")", volatile char);
+ int method_with_string_default2(char* = "(", volatile char);
+ int method_with_char_default1(char = '(', volatile char);
+ int method_with_char_default2(char = ')', volatile char);
+
+ int volatile_method_with_fp(int (*fp)(), volatile char) volatile;
+ int volatile_method_with_string_default1(char* = ")", volatile char) volatile;
+ int volatile_method_with_string_default2(char* = "(", volatile char) volatile;
+ int volatile_method_with_char_default1(char = '(', volatile char) volatile;
+ int volatile_method_with_char_default2(char = ')', volatile char) volatile;
+
+ void const_method() const;
+ void volatile_method() volatile;
+
+ void trad_noexcept() noexcept;
+ void trad_noexcept_if() noexcept(a == b && (c || d));
+ void boost_noexcept() BOOST_NOEXCEPT;
+ void boost_noexcept_if() BOOST_NOEXCEPT_IF(a == b && (c || d));
+
+ void trad_constexpr() constexpr;
+ void boost_constexpr() BOOST_CONSTEXPR;
+ void boost_constexpr_or_const() BOOST_CONSTEXPR_OR_CONST;
+
+ void constexpr_noexcept() constexpr noexcept;
+
+ static int static_method();
+ static int static_constexpr() constexpr;
+
int integer;
static int static_integer;
mutable int mutable_integer;
@@ -134,6 +186,20 @@ namespace example
*/
template <typename TypeParameter, int NonTypeParameter>
void namespace_func_template();
+
+ template<class T>
+ struct specialization_test {
+ };
+
+ template<class T>
+ struct specialization_test<T*> {
+ /** A constructor. */
+ specialization_test();
+ /** A destructor. */
+ ~specialization_test();
+ /** An assignment operator. */
+ detail::unspecified& operator=(const specialization_test&);
+ };
}
#define EXAMPLE(m) The macro
diff --git a/tools/boostbook/test/more/run-tests.py b/tools/boostbook/test/more/run-tests.py
index 2a56795426..393e8cc2ed 100755
--- a/tools/boostbook/test/more/run-tests.py
+++ b/tools/boostbook/test/more/run-tests.py
@@ -12,6 +12,7 @@ Usage: python build_docs.py [--generate-gold]
import difflib, getopt, os, re, sys
import lxml.ElementInclude
from lxml import etree
+from collections import defaultdict
# Globals
@@ -73,7 +74,7 @@ def main(argv):
gold_text = file.read()
finally:
file.close()
- compare_xml(filename, doc_text, gold_text)
+ compare_xml(src_path, doc_text, gold_text)
def run_boostbook(parser, boostbook_xsl, file):
doc = boostbook_xsl(etree.parse(file, parser))
@@ -82,7 +83,7 @@ def run_boostbook(parser, boostbook_xsl, file):
def normalize_boostbook_ids(doc):
ids = {}
- id_bases = {}
+ id_bases = defaultdict(int)
for node in doc.xpath("//*[starts-with(@id, 'id') or contains(@id, '_id')]"):
id = node.get('id')
@@ -90,13 +91,14 @@ def normalize_boostbook_ids(doc):
if(id in ids):
print 'Duplicate id: ' + id
- match = re.match("(id|.+_id)(\d+)((?:-bb)?)", id)
+ match = re.match("(.+_id|id)([mp]?\d+)((?:-bb)?)", id)
if(match):
- count = 1
- if(match.group(1) in id_bases):
- count = id_bases[match.group(1)] + 1
- id_bases[match.group(1)] = count
- ids[id] = match.group(1) + str(count) + match.group(3)
+ # Truncate id name, as it sometimes has different lengths...
+ match2 = re.match("(.*?)([^.]*?)(_?id)", match.group(1))
+ base = match2.group(1) + match2.group(2)[:14] + match2.group(3)
+ count = id_bases[base] + 1
+ id_bases[base] = count
+ ids[id] = base + str(count) + match.group(3)
for node in doc.xpath("//*[@linkend or @id]"):
x = node.get('linkend')
@@ -117,6 +119,8 @@ def compare_xml(file, doc_text, gold_text):
doc_text.splitlines(True)
)
)
+ print
+ print
if __name__ == "__main__":
- main(sys.argv[1:])
\ No newline at end of file
+ main(sys.argv[1:])
diff --git a/tools/boostbook/test/more/tests/libs/hash-ref.gold b/tools/boostbook/test/more/tests/libs/hash-ref.gold
index e88658044c..6404700c3a 100644
--- a/tools/boostbook/test/more/tests/libs/hash-ref.gold
+++ b/tools/boostbook/test/more/tests/libs/hash-ref.gold
@@ -15,22 +15,22 @@
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_bool_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">bool</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_char_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_signed_char_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">signed</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_char_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_signed_ch_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">signed</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_wchar_t_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">wchar_t</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_short_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">short</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_short_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">short</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id2">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">short</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_int_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">int</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_int_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">int</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id3">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">int</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_long_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_long_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id4">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_long_long_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_long_long_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id5">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_float_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">float</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_double_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">double</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_long_double_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">double</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_string_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">string</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
- <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_wstring_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">wstring</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_long_doub_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">double</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_strin_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">string</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_wstri_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">wstring</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="keyword">template</phrase><phrase role="special">&lt;</phrase><phrase role="keyword">typename</phrase> T<phrase role="special">&gt;</phrase> <phrase role="keyword">struct</phrase> <link linkend="boost.hash_T_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">T</phrase><phrase role="special">*</phrase><phrase role="special">&gt;</phrase><phrase role="special">;</phrase>
<phrase role="comment">// <link linkend="id1-bb">Support functions (Boost extension).</link></phrase>
@@ -273,17 +273,17 @@
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id36-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">char</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id36-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">char</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_signed_char_id1"><refmeta><refentrytitle>Struct hash&lt;signed char&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;signed char&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_signed_ch_id1"><refmeta><refentrytitle>Struct hash&lt;signed char&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;signed char&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_signed_char_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">signed</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_signed_ch_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">signed</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id37-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">signed</phrase> <phrase role="keyword">char</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id37-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">signed</phrase> <phrase role="keyword">char</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned_char_id1"><refmeta><refentrytitle>Struct hash&lt;unsigned char&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned char&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned__id1"><refmeta><refentrytitle>Struct hash&lt;unsigned char&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned char&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_char_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">char</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id38-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">char</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id38-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">char</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
@@ -301,10 +301,10 @@
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id40-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">short</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id40-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">short</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned_short_id1"><refmeta><refentrytitle>Struct hash&lt;unsigned short&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned short&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned__id2"><refmeta><refentrytitle>Struct hash&lt;unsigned short&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned short&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_short_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">short</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id2">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">short</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id41-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">short</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id41-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">short</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
@@ -315,10 +315,10 @@
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id42-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">int</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id42-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">int</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned_int_id1"><refmeta><refentrytitle>Struct hash&lt;unsigned int&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned int&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned__id3"><refmeta><refentrytitle>Struct hash&lt;unsigned int&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned int&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_int_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">int</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id3">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">int</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id43-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">int</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id43-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">int</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
@@ -329,10 +329,10 @@
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id44-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">long</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id44-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">long</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned_long_id1"><refmeta><refentrytitle>Struct hash&lt;unsigned long&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned long&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned__id4"><refmeta><refentrytitle>Struct hash&lt;unsigned long&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned long&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_long_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id4">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id45-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id45-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
@@ -343,10 +343,10 @@
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id46-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id46-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned_long_long_id1"><refmeta><refentrytitle>Struct hash&lt;unsigned long long&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned long long&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_unsigned__id5"><refmeta><refentrytitle>Struct hash&lt;unsigned long long&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;unsigned long long&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned_long_long_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_unsigned__id5">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id47-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id47-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">unsigned</phrase> <phrase role="keyword">long</phrase> <phrase role="keyword">long</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
@@ -364,24 +364,24 @@
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id49-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">double</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id49-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">double</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_long_double_id1"><refmeta><refentrytitle>Struct hash&lt;long double&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;long double&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_long_doub_id1"><refmeta><refentrytitle>Struct hash&lt;long double&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;long double&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_long_double_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">double</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_long_doub_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">double</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id50-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">double</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id50-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">long</phrase> <phrase role="keyword">double</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_std_string_id1"><refmeta><refentrytitle>Struct hash&lt;std::string&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;std::string&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_std_strin_id1"><refmeta><refentrytitle>Struct hash&lt;std::string&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;std::string&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_string_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">string</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_strin_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">string</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id51-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">string</phrase> <phrase role="keyword">const</phrase><phrase role="special">&amp;</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id51-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">string</phrase> <phrase role="keyword">const</phrase><phrase role="special">&amp;</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
- ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_std_wstring_id1"><refmeta><refentrytitle>Struct hash&lt;std::wstring&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;std::wstring&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
+ ?><varlistentry><term>Returns:</term><listitem><para>Unspecified in TR1, except that equal arguments yield the same result.</para><para><link linkend="boost.hash_value">hash_value</link>(val) in Boost.</para></listitem></varlistentry><varlistentry><term>Throws:</term><listitem><para>Doesn't throw</para></listitem></varlistentry></variablelist></refsect1></refentry><refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="boost.hash_std_wstri_id1"><refmeta><refentrytitle>Struct hash&lt;std::wstring&gt;</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>boost::hash&lt;std::wstring&gt;</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.functional.hash_hpp">boost/functional/hash.hpp</link>&gt;
</phrase>
-<phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_wstring_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">wstring</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
+<phrase role="keyword">struct</phrase> <link linkend="boost.hash_std_wstri_id1">hash</link><phrase role="special">&lt;</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">wstring</phrase><phrase role="special">&gt;</phrase> <phrase role="special">{</phrase>
<phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <link linkend="id52-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">wstring</phrase> <phrase role="keyword">const</phrase><phrase role="special">&amp;</phrase><phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase>
<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">size_t</phrase> <anchor id="id52-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">wstring</phrase> <phrase role="keyword">const</phrase><phrase role="special">&amp;</phrase> val<phrase role="special">)</phrase> <phrase role="keyword">const</phrase><phrase role="special">;</phrase></literallayout></para><variablelist spacing="compact"><?dbhtml
list-presentation="table"
diff --git a/tools/boostbook/test/more/tests/libs/unordered-ref.gold b/tools/boostbook/test/more/tests/libs/unordered-ref.gold
index 3498c4153f..8a0793a396 100644
--- a/tools/boostbook/test/more/tests/libs/unordered-ref.gold
+++ b/tools/boostbook/test/more/tests/libs/unordered-ref.gold
@@ -47,12 +47,12 @@
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_pointer</phrase> <anchor id="boost.unordered_set.const_pointer"/><phrase role="identifier">const_pointer</phrase><phrase role="special">;</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">reference</phrase> <anchor id="boost.unordered_set.reference"/><phrase role="identifier">reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// lvalue of value_type.</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_reference</phrase> <anchor id="boost.unordered_set.const_reference"/><phrase role="identifier">const_reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// const lvalue of value_type.</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_set.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
<phrase role="comment">// <link linkend="boost.unordered_setconstruct-copy-destruct">construct/copy/destruct</link></phrase>
<phrase role="keyword">explicit</phrase> <link linkend="id1-bb"><phrase role="identifier">unordered_set</phrase></link><phrase role="special">(</phrase><phrase role="identifier">size_type</phrase> <phrase role="special">=</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis><phrase role="special">,</phrase>
@@ -399,12 +399,12 @@
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_pointer</phrase> <anchor id="boost.unordered_multiset.const_pointer"/><phrase role="identifier">const_pointer</phrase><phrase role="special">;</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">reference</phrase> <anchor id="boost.unordered_multiset.reference"/><phrase role="identifier">reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// lvalue of value_type.</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_reference</phrase> <anchor id="boost.unordered_multiset.const_reference"/><phrase role="identifier">const_reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// const lvalue of value_type.</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multiset.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
<phrase role="comment">// <link linkend="boost.unordered_multisetconstruct-copy-destruct">construct/copy/destruct</link></phrase>
<phrase role="keyword">explicit</phrase> <link linkend="id70-bb"><phrase role="identifier">unordered_multiset</phrase></link><phrase role="special">(</phrase><phrase role="identifier">size_type</phrase> <phrase role="special">=</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis><phrase role="special">,</phrase>
@@ -789,12 +789,12 @@
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_pointer</phrase> <anchor id="boost.unordered_map.const_pointer"/><phrase role="identifier">const_pointer</phrase><phrase role="special">;</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">reference</phrase> <anchor id="boost.unordered_map.reference"/><phrase role="identifier">reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// lvalue of value_type.</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_reference</phrase> <anchor id="boost.unordered_map.const_reference"/><phrase role="identifier">const_reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// const lvalue of value_type.</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_map.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
<phrase role="comment">// <link linkend="boost.unordered_mapconstruct-copy-destruct">construct/copy/destruct</link></phrase>
<phrase role="keyword">explicit</phrase> <link linkend="id139-bb"><phrase role="identifier">unordered_map</phrase></link><phrase role="special">(</phrase><phrase role="identifier">size_type</phrase> <phrase role="special">=</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis><phrase role="special">,</phrase>
@@ -1160,12 +1160,12 @@
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_pointer</phrase> <anchor id="boost.unordered_multimap.const_pointer"/><phrase role="identifier">const_pointer</phrase><phrase role="special">;</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">reference</phrase> <anchor id="boost.unordered_multimap.reference"/><phrase role="identifier">reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// lvalue of value_type.</phrase>
<phrase role="keyword">typedef</phrase> <phrase role="keyword">typename</phrase> <phrase role="identifier">allocator_type</phrase><phrase role="special">::</phrase><phrase role="identifier">const_reference</phrase> <anchor id="boost.unordered_multimap.const_reference"/><phrase role="identifier">const_reference</phrase><phrase role="special">;</phrase> <phrase role="comment">// const lvalue of value_type.</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
- <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.size_type"><phrase role="identifier">size_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.difference_type"><phrase role="identifier">difference_type</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.iterator"><phrase role="identifier">iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.const_iterator"><phrase role="identifier">const_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.local_iterator"><phrase role="identifier">local_iterator</phrase></link><phrase role="special">;</phrase>
+ <phrase role="keyword">typedef</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis> <link linkend="boost.unordered_multimap.const_local_iterator"><phrase role="identifier">const_local_iterator</phrase></link><phrase role="special">;</phrase>
<phrase role="comment">// <link linkend="boost.unordered_multimapconstruct-copy-destruct">construct/copy/destruct</link></phrase>
<phrase role="keyword">explicit</phrase> <link linkend="id212-bb"><phrase role="identifier">unordered_multimap</phrase></link><phrase role="special">(</phrase><phrase role="identifier">size_type</phrase> <phrase role="special">=</phrase> <emphasis><phrase role="identifier">implementation</phrase><phrase role="special">-</phrase><phrase role="identifier">defined</phrase></emphasis><phrase role="special">,</phrase>
diff --git a/tools/boostbook/test/more/tests/ref/methodname.gold b/tools/boostbook/test/more/tests/ref/methodname.gold
new file mode 100644
index 0000000000..51cb112d01
--- /dev/null
+++ b/tools/boostbook/test/more/tests/ref/methodname.gold
@@ -0,0 +1,26 @@
+<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN" "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+<chapter id="method">
+ <title>Methodname Test</title>
+ <section id="links">
+ <para>
+ <computeroutput xmlns:xi="http://www.w3.org/2001/XInclude"><link linkend="id1-bb">thing::foo::bar</link></computeroutput>,
+ <computeroutput xmlns:xi="http://www.w3.org/2001/XInclude"><link linkend="id1-bb">thing::foo::bar(int x)</link></computeroutput>,
+ <computeroutput xmlns:xi="http://www.w3.org/2001/XInclude"><link linkend="id2-bb">thing::foo::operator()</link></computeroutput>,
+ <computeroutput xmlns:xi="http://www.w3.org/2001/XInclude"><link linkend="id2-bb">thing::foo::operator()(int x)</link></computeroutput>.
+ </para>
+ </section>
+ <section id="method.reference"><title>Reference</title>
+ <section id="header.boost.unordered_set_hpp"><title>Header &lt;<ulink url="../../boost/unordered_set.hpp">boost/unordered_set.hpp</ulink>&gt;</title><synopsis xmlns:xi="http://www.w3.org/2001/XInclude"><phrase role="keyword">namespace</phrase> <phrase role="identifier">thing</phrase> <phrase role="special">{</phrase>
+ <phrase role="keyword">class</phrase> <link linkend="thing.foo">foo</link><phrase role="special">;</phrase>
+<phrase role="special">}</phrase></synopsis>
+ <refentry xmlns:xi="http://www.w3.org/2001/XInclude" id="thing.foo"><refmeta><refentrytitle>Class foo</refentrytitle><manvolnum>3</manvolnum></refmeta><refnamediv><refname>thing::foo</refname><refpurpose/></refnamediv><refsynopsisdiv><synopsis><phrase role="comment">// In header: &lt;<link linkend="header.boost.unordered_set_hpp">boost/unordered_set.hpp</link>&gt;
+
+</phrase>
+<phrase role="keyword">class</phrase> <link linkend="thing.foo">foo</link> <phrase role="special">{</phrase>
+<phrase role="keyword">public</phrase><phrase role="special">:</phrase>
+ <phrase role="keyword">void</phrase> <link linkend="id1-bb"><phrase role="identifier">bar</phrase></link><phrase role="special">(</phrase><phrase role="keyword">int</phrase><phrase role="special">)</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">void</phrase> <link linkend="id2-bb"><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase></link><phrase role="special">(</phrase><phrase role="keyword">int</phrase><phrase role="special">)</phrase><phrase role="special">;</phrase>
+<phrase role="special">}</phrase><phrase role="special">;</phrase></synopsis></refsynopsisdiv><refsect1><title>Description</title><para><literallayout class="monospaced"><phrase role="keyword">void</phrase> <anchor id="id1-bb"/><phrase role="identifier">bar</phrase><phrase role="special">(</phrase><phrase role="keyword">int</phrase><phrase role="special">)</phrase><phrase role="special">;</phrase></literallayout></para><para><literallayout class="monospaced"><phrase role="keyword">void</phrase> <anchor id="id2-bb"/><phrase role="keyword">operator</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase><phrase role="special">(</phrase><phrase role="keyword">int</phrase><phrase role="special">)</phrase><phrase role="special">;</phrase></literallayout></para></refsect1></refentry>
+ </section>
+ </section>
+</chapter>
\ No newline at end of file
diff --git a/tools/boostbook/test/more/tests/ref/methodname.xml b/tools/boostbook/test/more/tests/ref/methodname.xml
new file mode 100644
index 0000000000..337a3ff51e
--- /dev/null
+++ b/tools/boostbook/test/more/tests/ref/methodname.xml
@@ -0,0 +1,36 @@
+<!--
+Copyright Daniel James 2012
+Distributed under the Boost Software License, Version 1.0. (See accompanying
+file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+-->
+<library name="Methodname Test" id="method">
+ <title>Methodname Test</title>
+ <section id="links">
+ <para>
+ <methodname>thing::foo::bar</methodname>,
+ <methodname>thing::foo::bar(int x)</methodname>,
+ <methodname>thing::foo::operator()</methodname>,
+ <methodname>thing::foo::operator()(int x)</methodname>.
+ </para>
+ </section>
+ <library-reference>
+ <header name="boost/unordered_set.hpp">
+ <namespace name="thing">
+ <class name="foo">
+ <method name="bar">
+ <parameter>
+ <paramtype>int</paramtype>
+ </parameter>
+ <type>void</type>
+ </method>
+ <method name="operator()">
+ <parameter>
+ <paramtype>int</paramtype>
+ </parameter>
+ <type>void</type>
+ </method>
+ </class>
+ </namespace>
+ </header>
+ </library-reference>
+</library>
\ No newline at end of file
diff --git a/tools/boostbook/test/more/tests/syntax-highlight/language-attribute.gold b/tools/boostbook/test/more/tests/syntax-highlight/language-attribute.gold
new file mode 100644
index 0000000000..9722d10388
--- /dev/null
+++ b/tools/boostbook/test/more/tests/syntax-highlight/language-attribute.gold
@@ -0,0 +1,13 @@
+<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN" "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
+<book xmlns:xi="http://www.w3.org/2001/XInclude" id="test" lang="en">
+ <title>Test language attribute</title>
+ <programlisting>plain text</programlisting>
+ <programlisting><phrase role="keyword">void</phrase> <phrase role="identifier">main</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase> <phrase role="special">{</phrase><phrase role="special">}</phrase></programlisting>
+ <programlisting><phrase role="keyword">actions</phrase> something <phrase role="special">;</phrase></programlisting>
+
+ <para>
+ <computeroutput>plain text</computeroutput>
+ <computeroutput><phrase role="keyword">void</phrase> <phrase role="identifier">main</phrase><phrase role="special">(</phrase><phrase role="special">)</phrase> <phrase role="special">{</phrase><phrase role="special">}</phrase></computeroutput>
+ <computeroutput><phrase role="keyword">actions</phrase> something <phrase role="special">;</phrase></computeroutput>
+ </para>
+</book>
\ No newline at end of file
diff --git a/tools/boostbook/test/more/tests/syntax-highlight/language-attribute.xml b/tools/boostbook/test/more/tests/syntax-highlight/language-attribute.xml
new file mode 100644
index 0000000000..e5891b43d4
--- /dev/null
+++ b/tools/boostbook/test/more/tests/syntax-highlight/language-attribute.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Copyright 2013 Daniel James.
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+-->
+<!DOCTYPE boostbook PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<boostbook id="test" xmlns:xi="http://www.w3.org/2001/XInclude"
+ last-revision="$Date: 2010-10-30 15:29:27 +0100 (Sat, 30 Oct 2010) $"
+ lang="en">
+ <title>Test language attribute</title>
+ <programlisting>plain text</programlisting>
+ <programlisting language="c++">void main() {}</programlisting>
+ <programlisting language="jam">actions something ;</programlisting>
+
+ <para>
+ <code>plain text</code>
+ <code language="c++">void main() {}</code>
+ <code language="jam">actions something ;</code>
+ </para>
+</boostbook>
+
diff --git a/tools/boostbook/xsl/annotation.xsl b/tools/boostbook/xsl/annotation.xsl
index ff847d3aac..1d9b79e60e 100644
--- a/tools/boostbook/xsl/annotation.xsl
+++ b/tools/boostbook/xsl/annotation.xsl
@@ -20,7 +20,9 @@
<xsl:key name="macros" match="macro" use="@name"/>
<xsl:key name="headers" match="header" use="@name"/>
<xsl:key name="globals" match="namespace/data-member|header/data-member" use="@name"/>
- <xsl:key name="named-entities" match="class|struct|union|concept|function|overloaded-function|macro|library|namespace/data-member|header/data-member|*[attribute::id]" use="translate(@name|@id, $uppercase-letters, $lowercase-letters)"/>
+ <xsl:key name="named-entities"
+ match="class|struct|union|concept|function|overloaded-function|macro|library|namespace/data-member|header/data-member|*[attribute::id]"
+ use="translate(@name|@id, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', 'abcdefghijklmnopqrstuvwxyz')"/>
<xsl:template match="function|overloaded-function" mode="generate.id">
<xsl:call-template name="fully-qualified-id">
@@ -116,6 +118,10 @@
<!-- Strip off any call -->
<xsl:variable name="name">
<xsl:choose>
+ <xsl:when test="contains($fullname, 'operator()')">
+ <xsl:value-of select="substring-before($fullname, 'operator()')"/>
+ <xsl:value-of select="'operator()'"/>
+ </xsl:when>
<xsl:when test="contains($fullname, '(')">
<xsl:value-of select="substring-before($fullname, '(')"/>
</xsl:when>
@@ -394,6 +400,14 @@
</computeroutput>
</xsl:template>
+ <xsl:template match="code[@language='c++']" mode="annotation">
+ <computeroutput>
+ <xsl:apply-templates mode="annotation">
+ <xsl:with-param name="highlight" select="true()"/>
+ </xsl:apply-templates>
+ </computeroutput>
+ </xsl:template>
+
<xsl:template match="bold" mode="annotation">
<emphasis role="bold">
<xsl:apply-templates mode="annotation"/>
@@ -416,11 +430,7 @@
<xsl:param name="highlight" select="false()"/>
<xsl:element name="{name(.)}">
- <xsl:for-each select="./@*">
- <xsl:attribute name="{name(.)}">
- <xsl:value-of select="."/>
- </xsl:attribute>
- </xsl:for-each>
+ <xsl:copy-of select="./@*"/>
<xsl:apply-templates select="./*|./text()" mode="annotation">
<xsl:with-param name="highlight" select="$highlight"/>
</xsl:apply-templates>
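The attribute-copying simplification above swaps a for-each/xsl:attribute loop for a single xsl:copy-of over the attribute axis. A minimal standalone sketch of the same idiom (the template mode name here is invented for illustration and is not part of the patch):

<xsl:template match="code" mode="copy-attrs-sketch">
  <computeroutput>
    <!-- copy every attribute of the source element (e.g. language="c++")
         onto the result in one step, equivalent to the removed
         xsl:for-each / xsl:attribute loop -->
    <xsl:copy-of select="./@*"/>
    <xsl:apply-templates mode="annotation"/>
  </computeroutput>
</xsl:template>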
diff --git a/tools/boostbook/xsl/docbook-layout.xsl b/tools/boostbook/xsl/docbook-layout.xsl
index dfd5afd0af..9a382ad5be 100644
--- a/tools/boostbook/xsl/docbook-layout.xsl
+++ b/tools/boostbook/xsl/docbook-layout.xsl
@@ -10,11 +10,16 @@
<xsl:stylesheet version = "1.0"
xmlns:xsl = "http://www.w3.org/1999/XSL/Transform"
>
+
<!-- needed for calsTable template -->
<xsl:import
href="http://docbook.sourceforge.net/release/xsl/current/html/formal.xsl"/>
+ <!-- Optionally add the section id to each section's class.
+ This is useful if you want to style individual sections differently. -->
+ <xsl:param name="boost.section.class.add.id" select="0"/>
+
<!--
Override the behaviour of some DocBook elements for better
integration with the new look & feel.
@@ -208,4 +213,50 @@
<xsl:with-param name="allow-anchors" select="$allow-anchors"/>
</xsl:apply-templates>
</xsl:template>
+
+
+  <!-- Adds a role class to the div generated for each section element, so
+       that we can style it in the resulting HTML.
+       Also adds the section id, if boost.section.class.add.id = 1.
+       This can be used to style individual sections differently. -->
+ <xsl:template match="section" mode="class.value">
+ <xsl:param name="class" select="local-name(.)"/>
+ <xsl:param name="node" select="."/>
+ <xsl:variable name="id">
+ <xsl:if test="$boost.section.class.add.id">
+ <xsl:call-template name="object.id">
+ <xsl:with-param name="object" select="$node"/>
+ </xsl:call-template>
+ </xsl:if>
+ </xsl:variable>
+ <xsl:value-of select="normalize-space(concat($class, ' ',
+ @role, ' ', translate($id, '.', '_')))"/>
+ </xsl:template>
+
+  <!-- Adds a role class to the div generated for each simplesect element,
+       so that we can style it in the resulting HTML. -->
+ <xsl:template match="simplesect" mode="class.value">
+ <xsl:param name="class" select="local-name(.)"/>
+ <xsl:param name="node" select="."/>
+ <xsl:value-of select="normalize-space(concat($class,' ',@role))"/>
+ </xsl:template>
+
+  <!-- Allow for specifying that a section should not include the parent's
+       labeling. This allows us to start clean numbering of a sub-section. -->
+ <xsl:template match="section[@label-style='no-parent']" mode="label.markup">
+ <xsl:choose>
+ <xsl:when test="@label">
+ <xsl:value-of select="@label"/>
+ </xsl:when>
+ <xsl:when test="$label != 0">
+ <xsl:variable name="format">
+ <xsl:call-template name="autolabel.format">
+ <xsl:with-param name="format" select="$section.autolabel"/>
+ </xsl:call-template>
+ </xsl:variable>
+ <xsl:number format="{$format}" count="section"/>
+ </xsl:when>
+ </xsl:choose>
+ </xsl:template>
+
</xsl:stylesheet>
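If the new boost.section.class.add.id parameter needs to be switched on, one plausible way (not shown in this patch; the import href below is only a placeholder for the local BoostBook stylesheet path) is a small customization layer that imports the stylesheets and overrides the parameter:

<?xml version="1.0"?>
<xsl:stylesheet version="1.0"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <!-- hypothetical customization layer; point href at the local
       BoostBook xsl/html.xsl before use -->
  <xsl:import href="path/to/boostbook/xsl/html.xsl"/>
  <!-- emit each section's id in its div class, as added above -->
  <xsl:param name="boost.section.class.add.id" select="1"/>
</xsl:stylesheet>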
diff --git a/tools/boostbook/xsl/docbook.xsl b/tools/boostbook/xsl/docbook.xsl
index c4d50c5608..12bef90857 100644
--- a/tools/boostbook/xsl/docbook.xsl
+++ b/tools/boostbook/xsl/docbook.xsl
@@ -398,6 +398,10 @@ Error: XSL template 'link-or-anchor' called with invalid link-type '<xsl:value-o
</computeroutput>
</xsl:template>
+ <xsl:template match="code[@language='c++']">
+ <xsl:apply-templates select="." mode="annotation"/>
+ </xsl:template>
+
<xsl:template match="bold">
<emphasis role="bold">
<xsl:apply-templates mode="annotation"/>
@@ -431,12 +435,7 @@ Error: XSL template 'link-or-anchor' called with invalid link-type '<xsl:value-o
<xsl:template match="chapter">
<xsl:if test="$boost.include.libraries=''">
<chapter>
- <xsl:for-each select="./@*">
- <xsl:attribute name="{name(.)}">
- <xsl:value-of select="."/>
- </xsl:attribute>
- </xsl:for-each>
-
+ <xsl:copy-of select="./@*" />
<xsl:apply-templates/>
</chapter>
</xsl:if>
@@ -459,6 +458,10 @@ Error: XSL template 'link-or-anchor' called with invalid link-type '<xsl:value-o
</programlisting>
</xsl:template>
+ <xsl:template match="programlisting[@language='c++']">
+ <xsl:apply-templates select="." mode="annotation"/>
+ </xsl:template>
+
<!-- These DocBook elements have special meaning. Use the annotation mode -->
<xsl:template match="classname|methodname|functionname|enumname|
macroname|headername|globalname">
@@ -490,11 +493,7 @@ Error: XSL template 'link-or-anchor' called with invalid link-type '<xsl:value-o
       chapters within chapters into sections. -->
<xsl:template match="part/part|part/article">
<chapter>
- <xsl:for-each select="./@*">
- <xsl:attribute name="{name(.)}">
- <xsl:value-of select="."/>
- </xsl:attribute>
- </xsl:for-each>
+ <xsl:copy-of select="./@*"/>
<xsl:apply-templates/>
</chapter>
</xsl:template>
@@ -503,11 +502,7 @@ Error: XSL template 'link-or-anchor' called with invalid link-type '<xsl:value-o
</xsl:template>
<xsl:template match="part/part/chapter|part/part/appendix">
<section>
- <xsl:for-each select="./@*">
- <xsl:attribute name="{name(.)}">
- <xsl:value-of select="."/>
- </xsl:attribute>
- </xsl:for-each>
+ <xsl:copy-of select="./@*"/>
<xsl:apply-templates/>
</section>
</xsl:template>
diff --git a/tools/boostbook/xsl/doxygen/doxygen2boostbook.xsl b/tools/boostbook/xsl/doxygen/doxygen2boostbook.xsl
index da5a996af6..d3a21cf3a8 100644
--- a/tools/boostbook/xsl/doxygen/doxygen2boostbook.xsl
+++ b/tools/boostbook/xsl/doxygen/doxygen2boostbook.xsl
@@ -51,6 +51,19 @@
<xsl:key name="compounds-by-id" match="compounddef" use="@id"/>
<xsl:key name="members-by-id" match="memberdef" use="@id" />
+ <!-- Add trailing slash to formuladir if missing -->
+
+ <xsl:variable name="boost.doxygen.formuladir.fixed">
+ <xsl:choose>
+      <xsl:when test="substring($boost.doxygen.formuladir, string-length($boost.doxygen.formuladir)) = '/'">
+ <xsl:value-of select="$boost.doxygen.formuladir" />
+ </xsl:when>
+ <xsl:otherwise>
+ <xsl:value-of select="concat($boost.doxygen.formuladir, '/')" />
+ </xsl:otherwise>
+ </xsl:choose>
+ </xsl:variable>
+
<xsl:strip-space elements="briefdescription detaileddescription inbodydescription"/>
<xsl:template name="kind-error-message">
@@ -189,6 +202,7 @@
<xsl:attribute name="name">
<xsl:value-of select="$name"/>
</xsl:attribute>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="with-namespace-refs"
@@ -196,6 +210,7 @@
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</namespace>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
</xsl:template>
@@ -288,6 +303,7 @@
<xsl:apply-templates select="detaileddescription" mode="passthrough"/>
<xsl:apply-templates select="inbodydescription" mode="passthrough"/>
</enum>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
</xsl:template>
@@ -374,6 +390,7 @@
<xsl:with-param name="header" select="location/attribute::file"/>
</xsl:call-template>
</xsl:attribute>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:if test="briefdescription/*|detaileddescription/*|inbodydescription/*">
<xsl:apply-templates select="briefdescription/*" mode="passthrough"/>
@@ -387,6 +404,7 @@
<xsl:with-param name="in-file" select="location/attribute::file"/>
</xsl:apply-templates>
</header>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
</xsl:template>
@@ -484,10 +502,20 @@
</xsl:if>
<xsl:for-each select="param">
+ <xsl:variable name="name" select="defname/text()"/>
<macro-parameter>
<xsl:attribute name="name">
<xsl:value-of select="defname/text()"/>
</xsl:attribute>
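+          <!-- Look up this parameter's documentation in the Doxygen output:
+               find the parameteritem whose parametername equals $name and
+               take its parameterdescription. -->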
+ <xsl:variable name="params"
+ select="../detaileddescription/para/parameterlist"/>
+ <xsl:variable name="description" select="$params/parameteritem/
+ parameternamelist/parametername[text() = $name]/../../parameterdescription/para"/>
+ <xsl:if test="$description">
+ <description>
+ <xsl:apply-templates select="$description" mode="passthrough"/>
+ </description>
+ </xsl:if>
</macro-parameter>
</xsl:for-each>
@@ -495,6 +523,7 @@
<xsl:apply-templates select="detaileddescription" mode="passthrough"/>
<xsl:apply-templates select="inbodydescription" mode="passthrough"/>
</macro>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:when>
<xsl:when test="@kind='function'">
@@ -694,29 +723,35 @@
<xsl:when test="@kind='public-static-func'">
<!-- TBD: pass on the fact that these are static functions -->
<method-group name="public static functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:when>
<xsl:when test="@kind='protected-static-func'">
<!-- TBD: pass on the fact that these are static functions -->
<method-group name="protected static functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:when>
<xsl:when test="@kind='private-static-func'">
<!-- TBD: pass on the fact that these are static functions -->
<method-group name="private static functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:when>
<xsl:when test="@kind='public-func'">
<xsl:variable name="members" select="./memberdef"/>
@@ -727,21 +762,25 @@
</xsl:variable>
<xsl:if test="$num-internal-only &lt; count($members)">
<method-group name="public member functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates/>
</xsl:if>
</xsl:when>
<xsl:when test="@kind='protected-func'">
<method-group name="protected member functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates/>
</xsl:when>
<xsl:when test="@kind='private-func'">
@@ -753,22 +792,26 @@
</xsl:variable>
<xsl:if test="$num-internal-only &lt; count($members)">
<method-group name="private member functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
<xsl:apply-templates/>
</xsl:when>
<xsl:when test="@kind='friend'">
<xsl:if test="./memberdef/detaileddescription/para or ./memberdef/briefdescription/para">
<method-group name="friend functions">
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
<xsl:apply-templates>
<xsl:with-param name="in-section" select="true()"/>
<xsl:with-param name="in-file" select="$in-file"/>
</xsl:apply-templates>
</method-group>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
</xsl:when>
<xsl:when test="@kind='public-static-attrib' or @kind='public-attrib'">
@@ -855,12 +898,23 @@
<xsl:otherwise>
<!-- We are in a class -->
<!-- The name of the class we are in -->
- <xsl:variable name="in-class">
+ <xsl:variable name="in-class-full">
<xsl:call-template name="strip-qualifiers">
<xsl:with-param name="name"
select="string(ancestor::compounddef/compoundname/text())"/>
</xsl:call-template>
</xsl:variable>
+
+ <xsl:variable name ="in-class">
+ <xsl:choose>
+ <xsl:when test="contains($in-class-full, '&lt;')">
+ <xsl:value-of select="substring-before($in-class-full, '&lt;')"/>
+ </xsl:when>
+ <xsl:otherwise>
+ <xsl:value-of select="$in-class-full"/>
+ </xsl:otherwise>
+ </xsl:choose>
+ </xsl:variable>
<xsl:choose>
<xsl:when test="string(name/text())=$in-class">
@@ -934,6 +988,7 @@
<type><xsl:apply-templates select="type"/></type>
</typedef>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
</xsl:template>
@@ -1004,6 +1059,162 @@
</xsl:if>
</xsl:template>
+ <xsl:template name="function.attributes">
+
+    <!-- argsstring = '(arguments) [= delete] [= default] [constexpr]' -->
+ <xsl:variable name="extra-qualifiers-a">
+ <xsl:if test="contains(argsstring/text(), '(')">
+ <xsl:call-template name="strip-brackets">
+ <xsl:with-param name="text" select="substring-after(argsstring/text(), '(')" />
+ </xsl:call-template>
+ </xsl:if>
+ </xsl:variable>
+ <xsl:variable name="extra-qualifiers">
+ <xsl:if test="$extra-qualifiers-a">
+ <xsl:value-of select="concat(' ', normalize-space($extra-qualifiers-a), ' ')" />
+ </xsl:if>
+ </xsl:variable>
+
+ <!-- CV Qualifiers -->
+ <!-- Plus deleted and defaulted function markers as they're not properly
+ supported in boostbook -->
+
+    <!-- noexcept is complicated because it can have parameters.
+ TODO: should really remove the noexcept parameters before doing
+ anything else. -->
+ <xsl:variable name="noexcept">
+ <xsl:choose>
+ <xsl:when test="contains($extra-qualifiers, ' noexcept(')">
+ <xsl:call-template name="noexcept-if">
+ <xsl:with-param name="condition" select="substring-after($extra-qualifiers, ' noexcept(')" />
+ </xsl:call-template>
+ </xsl:when>
+
+ <xsl:when test="contains($extra-qualifiers, ' BOOST_NOEXCEPT_IF(')">
+ <xsl:call-template name="noexcept-if">
+ <xsl:with-param name="condition" select="substring-after($extra-qualifiers, ' BOOST_NOEXCEPT_IF(')" />
+ </xsl:call-template>
+ </xsl:when>
+
+ <xsl:when test="contains($extra-qualifiers, ' noexcept ') or contains($extra-qualifiers, ' BOOST_NOEXCEPT ')">
+ <xsl:value-of select="'noexcept '" />
+ </xsl:when>
+ </xsl:choose>
+ </xsl:variable>
+
+ <!-- Calculate constexpr now, so that we can avoid it getting confused
+ with const -->
+ <xsl:variable name="constexpr" select="
+        contains($extra-qualifiers, ' constexpr ') or
+ contains($extra-qualifiers, ' BOOST_CONSTEXPR ') or
+ contains($extra-qualifiers, ' BOOST_CONSTEXPR_OR_CONST ')" />
+
+    <!-- The 'substring' trick includes the string only when the condition is
+         true: 999 * true() = 999, long enough to take the whole string, while
+         999 * false() = 0 yields an empty substring. -->
+ <xsl:variable name="cv-qualifiers" select="normalize-space(concat(
+ substring('constexpr ', 1, 999 * $constexpr),
+ substring('const ', 1, 999 * (not($constexpr) and @const='yes')),
+ substring('volatile ', 1, 999 * (@volatile='yes' or contains($extra-qualifiers, ' volatile '))),
+ $noexcept,
+ substring('= delete ', 1, 999 * contains($extra-qualifiers, ' =delete ')),
+ substring('= default ', 1, 999 * contains($extra-qualifiers, ' =default ')),
+ substring('= 0 ', 1, 999 * (@virt = 'pure-virtual')),
+ ''))" />
+
+ <!-- Specifiers -->
+ <xsl:variable name="specifiers" select="normalize-space(concat(
+ substring('explicit ', 1, 999 * (@explicit = 'yes')),
+ substring('virtual ', 1, 999 * (
+ @virtual='yes' or @virt='virtual' or @virt='pure-virtual')),
+ substring('static ', 1, 999 * (@static = 'yes')),
+ ''))" />
+
+ <xsl:if test="$cv-qualifiers">
+ <xsl:attribute name="cv">
+ <xsl:value-of select="$cv-qualifiers" />
+ </xsl:attribute>
+ </xsl:if>
+
+ <xsl:if test="$specifiers">
+ <xsl:attribute name="specifiers">
+ <xsl:value-of select="$specifiers" />
+ </xsl:attribute>
+ </xsl:if>
+
+ </xsl:template>
+
+ <!-- $condition = string after the opening bracket of the condition -->
+ <xsl:template name="noexcept-if">
+ <xsl:param name="condition"/>
+
+ <xsl:variable name="trailing">
+ <xsl:call-template name="strip-brackets">
+ <xsl:with-param name="text" select="$condition" />
+ </xsl:call-template>
+ </xsl:variable>
+
+ <xsl:choose>
+ <xsl:when test="string-length($trailing)">
+ <xsl:value-of select="concat(
+ 'noexcept(',
+ substring($condition, 1, string-length($condition) - string-length($trailing)),
+ ') ')" />
+ </xsl:when>
+ <xsl:otherwise>
+ <!-- Something has gone wrong so: -->
+ <xsl:value-of select="'noexcept(condition) '" />
+ </xsl:otherwise>
+ </xsl:choose>
+ </xsl:template>
+
+ <!-- $text = substring after the opening bracket -->
+ <xsl:template name="strip-brackets">
+ <xsl:param name="text"/>
+
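+    <!-- Sketch of the approach: each prefixN cuts $text at the first of ')',
+         '(', double quote or single quote (the 999*not() trick keeps the
+         string whole when a delimiter is absent). $char is then the delimiter
+         hit first, and we either return the tail after the matching ')',
+         recurse into a nested '(', or skip a quoted string and keep going. -->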
+ <xsl:if test="contains($text, ')')">
+ <xsl:variable name="prefix1" select="substring-before($text, ')')" />
+ <xsl:variable name="prefix2" select="substring($prefix1, 1,
+ string-length(substring-before($prefix1, '(')) +
+ 999 * not(contains($prefix1, '(')))" />
+ <xsl:variable name="prefix3" select="substring($prefix2, 1,
+ string-length(substring-before($prefix2, '&quot;')) +
+ 999 * not(contains($prefix2, '&quot;')))" />
+ <xsl:variable name="prefix" select="substring($prefix3, 1,
+ string-length(substring-before($prefix3, &quot;'&quot;)) +
+ 999 * not(contains($prefix3, &quot;'&quot;)))" />
+
+ <xsl:variable name="prefix-length" select="string-length($prefix)" />
+ <xsl:variable name="char" select="substring($text, $prefix-length + 1, 1)" />
+
+ <xsl:choose>
+ <xsl:when test="$char=')'">
+ <xsl:value-of select="substring($text, $prefix-length + 2)" />
+ </xsl:when>
+ <xsl:when test="$char='('">
+ <xsl:variable name="text2">
+ <xsl:call-template name="strip-brackets">
+ <xsl:with-param name="text" select="substring($text, $prefix-length + 2)" />
+ </xsl:call-template>
+ </xsl:variable>
+ <xsl:call-template name="strip-brackets">
+ <xsl:with-param name="text" select="$text2" />
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:when test="$char=&quot;'&quot;">
+ <!-- Not bothering with escapes, because this is crazy enough as it is -->
+ <xsl:call-template name="strip-brackets">
+ <xsl:with-param name="text" select="substring-after(substring($text, $prefix-length + 2), &quot;'&quot;)" />
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:when test="$char='&quot;'">
+ <!-- Not bothering with escapes, because this is crazy enough as it is -->
+ <xsl:call-template name="strip-brackets">
+ <xsl:with-param name="text" select="substring-after(substring($text, $prefix-length + 2), '&quot;')" />
+ </xsl:call-template>
+ </xsl:when>
+ </xsl:choose>
+ </xsl:if>
+ </xsl:template>
+
<!-- Handle function children -->
<xsl:template name="function.children">
<xsl:param name="is-overloaded" select="false()"/>
@@ -1091,6 +1302,7 @@
</xsl:otherwise>
</xsl:choose>
</xsl:if>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:template>
<!-- Emit overload signatures -->
@@ -1130,8 +1342,10 @@
<xsl:if test="@explicit = 'yes'">
<xsl:attribute name="specifiers">explicit</xsl:attribute>
</xsl:if>
+ <xsl:call-template name="function.attributes"/>
<xsl:call-template name="function.children"/>
</constructor>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:template>
<!-- Handle Destructors -->
@@ -1139,13 +1353,21 @@
<destructor>
<xsl:call-template name="function.children"/>
</destructor>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:template>
<!-- Handle Copy Assignment -->
<xsl:template name="copy-assignment">
<copy-assignment>
+ <xsl:call-template name="function.attributes"/>
+ <!-- Return type -->
+ <xsl:element name="type">
+ <xsl:apply-templates select="type"/>
+ </xsl:element>
+
<xsl:call-template name="function.children"/>
</copy-assignment>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:template>
<!-- Handle conversion operator -->
@@ -1154,25 +1376,7 @@
<xsl:attribute name="name">
<xsl:text>conversion-operator</xsl:text>
</xsl:attribute>
-
- <!-- CV Qualifiers -->
- <xsl:if test="not (@const='no' and @volatile='no')">
- <xsl:attribute name="cv">
- <xsl:if test="@const='yes'">
- <xsl:text>const</xsl:text>
- </xsl:if>
- <xsl:if test="@volatile='yes'">
- <xsl:if test="@const='yes'">
- <xsl:text> </xsl:text>
- </xsl:if>
- <xsl:text>volatile</xsl:text>
- </xsl:if>
- </xsl:attribute>
- </xsl:if>
-
- <xsl:if test="@explicit = 'yes'">
- <xsl:attribute name="specifiers">explicit</xsl:attribute>
- </xsl:if>
+ <xsl:call-template name="function.attributes"/>
<!-- Conversion type -->
<type>
@@ -1181,6 +1385,7 @@
<xsl:call-template name="function.children"/>
</method>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:template>
<!-- Handle methods -->
@@ -1189,38 +1394,16 @@
<xsl:attribute name="name">
<xsl:value-of select="name/text()"/>
</xsl:attribute>
-
- <!-- CV Qualifiers -->
- <xsl:if test="not (@const='no' and @volatile='no')">
- <xsl:attribute name="cv">
- <xsl:if test="@const='yes'">
- <xsl:text>const</xsl:text>
- </xsl:if>
- <xsl:if test="@volatile='yes'">
- <xsl:if test="@const='yes'">
- <xsl:text> </xsl:text>
- </xsl:if>
- <xsl:text>volatile</xsl:text>
- </xsl:if>
- </xsl:attribute>
- </xsl:if>
+ <xsl:call-template name="function.attributes"/>
<!-- Return type -->
<xsl:element name="type">
- <!-- Cheat on virtual and static by dropping them into the type -->
- <xsl:if test="@static='yes'">
- <xsl:text>static </xsl:text>
- </xsl:if>
-
- <xsl:if test="@virtual='yes'">
- <xsl:text>virtual </xsl:text>
- </xsl:if>
-
<xsl:apply-templates select="type"/>
</xsl:element>
<xsl:call-template name="function.children"/>
</method>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:template>
<!-- Handle member variables -->
@@ -1248,6 +1431,7 @@
<xsl:apply-templates select="detaileddescription" mode="passthrough"/>
<xsl:apply-templates select="inbodydescription" mode="passthrough"/>
</data-member>
+ <xsl:text>&#10;</xsl:text><!-- Newline -->
</xsl:if>
</xsl:template>
@@ -1524,7 +1708,7 @@
<!-- Handle program listings -->
<xsl:template match="programlisting" mode="passthrough">
- <programlisting>
+ <programlisting language="c++">
<xsl:apply-templates mode="programlisting"/>
</programlisting>
</xsl:template>
@@ -1582,7 +1766,7 @@
<imageobject role="html">
<imagedata format="PNG" align="center">
<xsl:attribute name="fileref">
- <xsl:value-of select="concat(concat(concat($boost.doxygen.formuladir, 'form_'), @id), '.png')"/>
+ <xsl:value-of select="concat(concat(concat($boost.doxygen.formuladir.fixed, 'form_'), @id), '.png')"/>
</xsl:attribute>
</imagedata>
</imageobject>
@@ -1603,7 +1787,7 @@
<imageobject role="html">
<imagedata format="PNG">
<xsl:attribute name="fileref">
- <xsl:value-of select="concat(concat(concat($boost.doxygen.formuladir, 'form_'), @id), '.png')"/>
+ <xsl:value-of select="concat(concat(concat($boost.doxygen.formuladir.fixed, 'form_'), @id), '.png')"/>
</xsl:attribute>
</imagedata>
</imageobject>
diff --git a/tools/boostbook/xsl/fo.xsl b/tools/boostbook/xsl/fo.xsl
index 422811cb80..622e01cf6e 100644
--- a/tools/boostbook/xsl/fo.xsl
+++ b/tools/boostbook/xsl/fo.xsl
@@ -221,8 +221,133 @@
</fo:list-item>
</xsl:template>
+
<!--
+ The following rules apply text coloring to Quickbook items like
+
+ [role blue Some blue text]
+
+ These correspond to an arbitrary list of colors added to the CSS file
+
+ $(BOOST-ROOT)\doc\src\boostbook.css
+
+ and are required for building pdf documentation.
+
+ A more elegant way of doing this is probably possible.
+ Other colors can be added simply by copying these examples.
+-->
+
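+<!-- A hypothetical further example: to support [role orange ...], copy the
+     pattern below and add a matching .orange rule to boostbook.css. -->
+<xsl:template match="phrase[@role='orange']">
+  <fo:inline color="orange">
+    <xsl:apply-templates/>
+  </fo:inline>
+</xsl:template>
+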
+<xsl:template match="phrase[@role='red']">
+ <fo:inline color="red">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='blue']">
+ <fo:inline color="blue">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='green']">
+ <fo:inline color="green">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+<xsl:template match="phrase[@role='lime']">
+ <fo:inline color="lime">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+<xsl:template match="phrase[@role='navy']">
+ <fo:inline color="navy">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+<xsl:template match="phrase[@role='yellow']">
+ <fo:inline color="yellow">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+<xsl:template match="phrase[@role='magenta']">
+ <fo:inline color="magenta">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='indigo']">
+ <fo:inline color="indigo">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='cyan']">
+ <fo:inline color="cyan">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='purple']">
+ <fo:inline color="purple">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='gold']">
+ <fo:inline color="gold">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='silver']">
+ <fo:inline color="silver">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='gray']">
+ <fo:inline color="gray">
+ <xsl:apply-templates/>
+ </fo:inline>
+</xsl:template>
+
+<!-- alignment -->
+
+<xsl:template match="phrase[@role='aligncenter']">
+ <fo:inline>
+ <fo:block text-align="center">
+ <xsl:apply-templates/>
+ </fo:block>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='alignleft']">
+ <fo:inline>
+ <fo:block text-align="left">
+ <xsl:apply-templates/>
+ </fo:block>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='alignright']">
+ <fo:inline>
+ <fo:block text-align="right">
+ <xsl:apply-templates/>
+ </fo:block>
+ </fo:inline>
+</xsl:template>
+
+<xsl:template match="phrase[@role='alignjustify']">
+ <fo:inline>
+ <fo:block text-align="justify">
+ <xsl:apply-templates/>
+ </fo:block>
+ </fo:inline>
+</xsl:template>
+
+ <!--
+
The following rules apply syntax highlighting to phrases
that have been appropriately marked up, the highlighting
used is the same as that used by our CSS style sheets,
diff --git a/tools/boostbook/xsl/html-base.xsl b/tools/boostbook/xsl/html-base.xsl
index 30442b1f64..022a2242ff 100644
--- a/tools/boostbook/xsl/html-base.xsl
+++ b/tools/boostbook/xsl/html-base.xsl
@@ -35,6 +35,8 @@
<xsl:param name="boost.mathjax" select="0"/>
<xsl:param name="boost.mathjax.script"
select="'http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML'"/>
+ <!--See usage below for explanation of this param-->
+ <xsl:param name="boost.noexpand.chapter.toc" select="0"/>
<xsl:param name="admon.style"/>
<xsl:param name="admon.graphics">1</xsl:param>
@@ -138,9 +140,9 @@ set toc,title
<xsl:template name="format.svn.revision">
<xsl:param name="text"/>
- <!-- Remove the "$Date: " -->
+ <!-- Remove the "$Date: " or "$Date:: " -->
<xsl:variable name="text.noprefix"
- select="substring-after($text, '$Date: ')"/>
+ select="substring-after($text, ': ')"/>
<!-- Grab the year -->
<xsl:variable name="year" select="substring-before($text.noprefix, '-')"/>
@@ -182,8 +184,10 @@ set toc,title
</xsl:choose>
</xsl:variable>
- <xsl:value-of select="concat($month.name, ' ', $day, ', ', $year, ' at ',
- $time, ' ', $timezone)"/>
+ <xsl:value-of select="concat($month.name, ' ', $day, ', ', $year)"/>
+ <xsl:if test="$time != ''">
+ <xsl:value-of select="concat(' at ', $time, ' ', $timezone)"/>
+ </xsl:if>
</xsl:template>
<!-- Footer Copyright -->
@@ -228,11 +232,14 @@ set toc,title
<xsl:value-of
select="normalize-space($revision-node/attribute::rev:last-revision)"/>
</xsl:variable>
- <xsl:if test="string-length($revision-text) &gt; 0">
+ <xsl:if test="string-length($revision-text) &gt; 0 and not($revision-text = '$Date$')">
<p>
<small>
<xsl:text>Last revised: </xsl:text>
<xsl:choose>
+ <xsl:when test="not(contains($revision-text, '$Date: ')) and not(contains($revision-text, '$Date:: '))">
+ <xsl:value-of select="$revision-text"/>
+ </xsl:when>
<xsl:when test="contains($revision-text, '/')">
<xsl:call-template name="format.cvs.revision">
<xsl:with-param name="text" select="$revision-text"/>
@@ -380,4 +387,33 @@ set toc,title
<xsl:value-of select="'index'"/>
</xsl:template>
+<xsl:template match="preface|chapter|appendix|article" mode="toc">
+ <xsl:param name="toc-context" select="."/>
+
+ <!--
+    When boost.noexpand.chapter.toc is set to 1, the TOC for
+    chapters is only one level deep (i.e. toc.max.depth has no effect)
+    and nested sections within chapters are not shown; this is done by
+    handing subtoc an empty node-set. TOCs and LOCs at other levels are
+    not affected and respond to toc.max.depth as normal.
+ -->
+ <xsl:choose>
+ <xsl:when test="local-name($toc-context) = 'book' and $boost.noexpand.chapter.toc = 1">
+ <xsl:call-template name="subtoc">
+ <xsl:with-param name="toc-context" select="$toc-context"/>
+ <xsl:with-param name="nodes" select="foo"/>
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:otherwise>
+ <xsl:call-template name="subtoc">
+ <xsl:with-param name="toc-context" select="$toc-context"/>
+ <xsl:with-param name="nodes"
+ select="section|sect1|glossary|bibliography|index
+ |bridgehead[$bridgehead.in.toc != 0]"/>
+ </xsl:call-template>
+ </xsl:otherwise>
+ </xsl:choose>
+</xsl:template>
+
</xsl:stylesheet>
+
+
diff --git a/tools/boostbook/xsl/html-help.xsl b/tools/boostbook/xsl/html-help.xsl
index 9b9978943b..2234af36f6 100644
--- a/tools/boostbook/xsl/html-help.xsl
+++ b/tools/boostbook/xsl/html-help.xsl
@@ -28,104 +28,6 @@
<xsl:param name="draft.mode">no</xsl:param>
<xsl:param name="admon.graphics" select="1"/>
- <xsl:template name="format.cvs.revision">
- <xsl:param name="text"/>
-
- <!-- Remove the "$Date: " -->
- <xsl:variable name="text.noprefix"
- select="substring-after($text, '$Date: ')"/>
-
- <!-- Grab the year -->
- <xsl:variable name="year" select="substring-before($text.noprefix, '/')"/>
- <xsl:variable name="text.noyear"
- select="substring-after($text.noprefix, '/')"/>
-
- <!-- Grab the month -->
- <xsl:variable name="month" select="substring-before($text.noyear, '/')"/>
- <xsl:variable name="text.nomonth"
- select="substring-after($text.noyear, '/')"/>
-
- <!-- Grab the year -->
- <xsl:variable name="day" select="substring-before($text.nomonth, ' ')"/>
- <xsl:variable name="text.noday"
- select="substring-after($text.nomonth, ' ')"/>
-
- <!-- Get the time -->
- <xsl:variable name="time" select="substring-before($text.noday, ' ')"/>
-
- <xsl:variable name="month.name">
- <xsl:choose>
- <xsl:when test="$month=1">January</xsl:when>
- <xsl:when test="$month=2">February</xsl:when>
- <xsl:when test="$month=3">March</xsl:when>
- <xsl:when test="$month=4">April</xsl:when>
- <xsl:when test="$month=5">May</xsl:when>
- <xsl:when test="$month=6">June</xsl:when>
- <xsl:when test="$month=7">July</xsl:when>
- <xsl:when test="$month=8">August</xsl:when>
- <xsl:when test="$month=9">September</xsl:when>
- <xsl:when test="$month=10">October</xsl:when>
- <xsl:when test="$month=11">November</xsl:when>
- <xsl:when test="$month=12">December</xsl:when>
- </xsl:choose>
- </xsl:variable>
-
- <xsl:value-of select="concat($month.name, ' ', $day, ', ', $year, ' at ',
- $time, ' GMT')"/>
- </xsl:template>
-
-
- <xsl:template name="format.svn.revision">
- <xsl:param name="text"/>
-
- <!-- Remove the "$Date: " -->
- <xsl:variable name="text.noprefix"
- select="substring-after($text, '$Date: ')"/>
-
- <!-- Grab the year -->
- <xsl:variable name="year" select="substring-before($text.noprefix, '-')"/>
- <xsl:variable name="text.noyear"
- select="substring-after($text.noprefix, '-')"/>
-
- <!-- Grab the month -->
- <xsl:variable name="month" select="substring-before($text.noyear, '-')"/>
- <xsl:variable name="text.nomonth"
- select="substring-after($text.noyear, '-')"/>
-
- <!-- Grab the year -->
- <xsl:variable name="day" select="substring-before($text.nomonth, ' ')"/>
- <xsl:variable name="text.noday"
- select="substring-after($text.nomonth, ' ')"/>
-
- <!-- Get the time -->
- <xsl:variable name="time" select="substring-before($text.noday, ' ')"/>
- <xsl:variable name="text.notime"
- select="substring-after($text.noday, ' ')"/>
-
- <!-- Get the timezone -->
- <xsl:variable name="timezone" select="substring-before($text.notime, ' ')"/>
-
- <xsl:variable name="month.name">
- <xsl:choose>
- <xsl:when test="$month=1">January</xsl:when>
- <xsl:when test="$month=2">February</xsl:when>
- <xsl:when test="$month=3">March</xsl:when>
- <xsl:when test="$month=4">April</xsl:when>
- <xsl:when test="$month=5">May</xsl:when>
- <xsl:when test="$month=6">June</xsl:when>
- <xsl:when test="$month=7">July</xsl:when>
- <xsl:when test="$month=8">August</xsl:when>
- <xsl:when test="$month=9">September</xsl:when>
- <xsl:when test="$month=10">October</xsl:when>
- <xsl:when test="$month=11">November</xsl:when>
- <xsl:when test="$month=12">December</xsl:when>
- </xsl:choose>
- </xsl:variable>
-
- <xsl:value-of select="concat($month.name, ' ', $day, ', ', $year, ' at ',
- $time, ' ', $timezone)"/>
- </xsl:template>
-
<!-- We don't want refentry's to show up in the TOC because they
will merely be redundant with the synopsis. -->
<xsl:template match="refentry" mode="toc"/>
diff --git a/tools/boostbook/xsl/html-single.xsl b/tools/boostbook/xsl/html-single.xsl
index d453a84cca..6291f25cea 100644
--- a/tools/boostbook/xsl/html-single.xsl
+++ b/tools/boostbook/xsl/html-single.xsl
@@ -13,8 +13,20 @@
<!-- Import the HTML stylesheet -->
<xsl:import
href="http://docbook.sourceforge.net/release/xsl/current/html/docbook.xsl"/>
- <xsl:import href="admon.xsl"/>
- <xsl:import href="relative-href.xsl"/>
+ <xsl:import
+ href="http://docbook.sourceforge.net/release/xsl/current/html/math.xsl"/>
+
+ <xsl:output method="html" encoding="US-ASCII" indent="no"/>
+
+ <!-- We have to make sure that our templates override all
+ docbook templates. Therefore, we include our own templates
+ instead of importing them. In order for this to work,
+ the stylesheets included here cannot also include each other -->
+ <xsl:include href="docbook-layout.xsl"/>
+ <xsl:include href="admon.xsl"/>
+ <xsl:include href="xref.xsl"/>
+ <xsl:include href="relative-href.xsl"/>
+ <xsl:include href="callout.xsl"/>
<xsl:param name="admon.style"/>
<xsl:param name="admon.graphics">1</xsl:param>
diff --git a/tools/boostbook/xsl/macro.xsl b/tools/boostbook/xsl/macro.xsl
index e3962954b2..fcd97a25d5 100644
--- a/tools/boostbook/xsl/macro.xsl
+++ b/tools/boostbook/xsl/macro.xsl
@@ -45,6 +45,44 @@
</xsl:with-param>
<xsl:with-param name="text">
<xsl:apply-templates select="description"/>
+
+ <xsl:if test="macro-parameter/description">
+ <variablelist spacing="compact">
+ <xsl:processing-instruction name="dbhtml">
+ list-presentation="table"
+ </xsl:processing-instruction>
+
+ <!-- Document parameters -->
+ <xsl:if test="macro-parameter/description">
+ <varlistentry>
+ <term>Parameters:</term>
+ <listitem>
+ <variablelist spacing="compact">
+ <xsl:processing-instruction name="dbhtml">
+ list-presentation="table"
+ </xsl:processing-instruction>
+ <xsl:for-each select="macro-parameter">
+ <xsl:sort select="attribute::name"/>
+ <xsl:if test="description">
+ <varlistentry>
+ <term>
+ <xsl:call-template name="monospaced">
+ <xsl:with-param name="text" select="@name"/>
+ </xsl:call-template>
+ </term>
+ <listitem>
+ <xsl:apply-templates select="description/*"/>
+ </listitem>
+ </varlistentry>
+ </xsl:if>
+ </xsl:for-each>
+ </variablelist>
+ </listitem>
+ </varlistentry>
+ </xsl:if>
+ </variablelist>
+ </xsl:if>
+
</xsl:with-param>
</xsl:call-template>
</xsl:template>
diff --git a/tools/boostbook/xsl/source-highlight.xsl b/tools/boostbook/xsl/source-highlight.xsl
index 4945e38ab1..7a34b8fbca 100644
--- a/tools/boostbook/xsl/source-highlight.xsl
+++ b/tools/boostbook/xsl/source-highlight.xsl
@@ -29,7 +29,7 @@
<xsl:variable name="digits" select="'1234567890'"/>
<xsl:variable name="number-chars" select="'1234567890abcdefABCDEFxX.'"/>
<xsl:variable name="keywords"
- select="' asm auto bool break case catch char class const const_cast continue default delete do double dynamic_cast else enum explicit export extern false float for friend goto if inline int long mutable namespace new operator private protected public register reinterpret_cast return short signed sizeof static static_cast struct switch template this throw true try typedef typeid typename union unsigned using virtual void volatile wchar_t while '"/>
+    select="' alignas alignof asm auto bool break case catch char char16_t char32_t class const const_cast constexpr continue decltype default delete do double dynamic_cast else enum explicit export extern false float for friend goto if inline int long mutable namespace new noexcept nullptr operator private protected public register reinterpret_cast return short signed sizeof static static_cast struct switch template this thread_local throw true try typedef typeid typename union unsigned using virtual void volatile wchar_t while '"/>
<xsl:variable name="operators4" select="'%:%:'"/>
<xsl:variable name="operators3" select="'&gt;&gt;= &lt;&lt;= -&gt;* ...'"/>
<xsl:variable name="operators2" select="'.* :: ## &lt;: :&gt; &lt;% %&gt; %: += -= *= /= %= ^= &amp;= |= &lt;&lt; &gt;&gt; == != &lt;= &gt;= &amp;&amp; || ++ -- -&gt;'"/>
@@ -542,9 +542,7 @@
</xsl:attribute>
</xsl:when>
<xsl:otherwise>
- <xsl:attribute name="{name(.)}">
- <xsl:value-of select="."/>
- </xsl:attribute>
+ <xsl:copy-of select="."/>
</xsl:otherwise>
</xsl:choose>
</xsl:for-each>
diff --git a/tools/boostdep/README.md b/tools/boostdep/README.md
new file mode 100644
index 0000000000..ff87dfb859
--- /dev/null
+++ b/tools/boostdep/README.md
@@ -0,0 +1,4 @@
+boostdep
+========
+
+A tool to create Boost module dependency reports
diff --git a/tools/boostdep/examples/report.bat b/tools/boostdep/examples/report.bat
new file mode 100644
index 0000000000..5690adde55
--- /dev/null
+++ b/tools/boostdep/examples/report.bat
@@ -0,0 +1,26 @@
+REM This is an example cmd.exe batch script
+REM that uses boostdep.exe to generate a
+REM complete Boost dependency report.
+REM It needs to be run from the Boost root.
+
+REM git pull
+REM git submodule update --init
+
+SET BOOSTDEP=bin.v2\tools\boostdep\src\msvc-8.0\release\link-static\threading-multi\boostdep.exe
+
+FOR /f %%i IN ('git rev-parse HEAD') DO @SET REV=%%i
+
+FOR /f %%i IN ('git rev-parse --short HEAD') DO @SET SHREV=%%i
+
+SET FOOTER=Generated on %DATE% %TIME% from revision %REV%
+
+SET OUTDIR=..\report-%SHREV%
+
+mkdir %OUTDIR%
+
+%BOOSTDEP% --list-modules > %OUTDIR%\list-modules.txt
+
+%BOOSTDEP% --footer "%FOOTER%" --html --module-overview > %OUTDIR%\module-overview.html
+%BOOSTDEP% --footer "%FOOTER%" --html --module-levels > %OUTDIR%\module-levels.html
+
+FOR /f %%i IN (%OUTDIR%\list-modules.txt) DO %BOOSTDEP% --title "Dependency Report for %%i" --footer "%FOOTER%" --html --primary %%i --secondary %%i --reverse %%i > %OUTDIR%\%%i.html
diff --git a/tools/boostdep/src/Jamfile.v2 b/tools/boostdep/src/Jamfile.v2
new file mode 100644
index 0000000000..4cd3de3f48
--- /dev/null
+++ b/tools/boostdep/src/Jamfile.v2
@@ -0,0 +1,17 @@
+# Copyright 2014 Peter Dimov
+#
+# Distributed under the Boost Software License, Version 1.0.
+# See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt
+
+project boostdep ;
+
+exe boostdep :
+ # sources
+ boostdep.cpp
+ /boost//filesystem :
+ # requirements
+ :
+ # default build
+ <variant>release
+ ;
diff --git a/tools/boostdep/src/boostdep.cpp b/tools/boostdep/src/boostdep.cpp
new file mode 100644
index 0000000000..5b0b9032af
--- /dev/null
+++ b/tools/boostdep/src/boostdep.cpp
@@ -0,0 +1,1349 @@
+
+// boostdep - a tool to generate Boost dependency reports
+//
+// Copyright 2014 Peter Dimov
+//
+// Distributed under the Boost Software License, Version 1.0.
+// See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt
+
+
+#include <boost/filesystem.hpp>
+#include <boost/filesystem/fstream.hpp>
+#include <string>
+#include <iostream>
+#include <fstream>
+#include <vector>
+#include <map>
+#include <set>
+#include <algorithm>
+#include <climits>
+
+namespace fs = boost::filesystem;
+
+// header -> module
+static std::map< std::string, std::string > s_header_map;
+
+static std::set< std::string > s_modules;
+
+static void scan_module_headers( fs::path const & path )
+{
+ try
+ {
+ std::string module = path.generic_string().substr( 5 ); // strip "libs/"
+
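+        // Sub-libraries keep their path in the module name with '/' replaced
+        // by '~' (e.g. "numeric/conversion" becomes "numeric~conversion");
+        // module_include_path performs the reverse mapping.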
+ std::replace( module.begin(), module.end(), '/', '~' );
+
+ s_modules.insert( module );
+
+ fs::path dir = path / "include";
+ size_t n = dir.generic_string().size();
+
+ fs::recursive_directory_iterator it( dir ), last;
+
+ for( ; it != last; ++it )
+ {
+ fs::directory_entry const & e = *it;
+
+ std::string p2 = e.path().generic_string();
+ p2 = p2.substr( n+1 );
+
+ // std::cout << module << ": " << p2 << std::endl;
+
+ s_header_map[ p2 ] = module;
+ }
+ }
+ catch( fs::filesystem_error const & x )
+ {
+ std::cout << x.what() << std::endl;
+ }
+}
+
+static void scan_submodules( fs::path const & path )
+{
+ fs::directory_iterator it( path ), last;
+
+ for( ; it != last; ++it )
+ {
+ fs::directory_entry const & e = *it;
+
+ if( e.status().type() != fs::directory_file )
+ {
+ continue;
+ }
+
+ fs::path path = e.path();
+
+ if( fs::exists( path / "include" ) )
+ {
+ scan_module_headers( path );
+ }
+
+ if( fs::exists( path / "sublibs" ) )
+ {
+ scan_submodules( path );
+ }
+ }
+}
+
+static void build_header_map()
+{
+ scan_submodules( "libs" );
+}
+
+static void scan_header_dependencies( std::string const & header, std::istream & is, std::map< std::string, std::set< std::string > > & deps, std::map< std::string, std::set< std::string > > & from )
+{
+ std::string line;
+
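+    // A minimal preprocessor scan: strip leading whitespace, require '#',
+    // then "include", then take the header name between <...> or "..." and
+    // map it to a module via s_header_map; unrecognized boost/ headers are
+    // filed under "(unknown)".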
+ while( std::getline( is, line ) )
+ {
+ while( !line.empty() && ( line[0] == ' ' || line[0] == '\t' ) )
+ {
+ line.erase( 0, 1 );
+ }
+
+ if( line.empty() || line[0] != '#' ) continue;
+
+ line.erase( 0, 1 );
+
+ while( !line.empty() && ( line[0] == ' ' || line[0] == '\t' ) )
+ {
+ line.erase( 0, 1 );
+ }
+
+ if( line.substr( 0, 7 ) != "include" ) continue;
+
+ line.erase( 0, 7 );
+
+ while( !line.empty() && ( line[0] == ' ' || line[0] == '\t' ) )
+ {
+ line.erase( 0, 1 );
+ }
+
+ if( line.size() < 2 ) continue;
+
+ char ch = line[0];
+
+ if( ch != '<' && ch != '"' ) continue;
+
+ if( ch == '<' )
+ {
+ ch = '>';
+ }
+
+ line.erase( 0, 1 );
+
+ std::string::size_type k = line.find_first_of( ch );
+
+ if( k != std::string::npos )
+ {
+ line.erase( k );
+ }
+
+ std::map< std::string, std::string >::const_iterator i = s_header_map.find( line );
+
+ if( i != s_header_map.end() )
+ {
+ deps[ i->second ].insert( line );
+ from[ line ].insert( header );
+ }
+ else if( line.substr( 0, 6 ) == "boost/" )
+ {
+ deps[ "(unknown)" ].insert( line );
+ from[ line ].insert( header );
+ }
+ }
+}
+
+struct module_primary_actions
+{
+ virtual void heading( std::string const & module ) = 0;
+
+ virtual void module_start( std::string const & module ) = 0;
+ virtual void module_end( std::string const & module ) = 0;
+
+ virtual void header_start( std::string const & header ) = 0;
+ virtual void header_end( std::string const & header ) = 0;
+
+ virtual void from_header( std::string const & header ) = 0;
+};
+
+static fs::path module_include_path( std::string module )
+{
+ std::replace( module.begin(), module.end(), '~', '/' );
+ return fs::path( "libs" ) / module / "include";
+}
+
+static void scan_module_dependencies( std::string const & module, module_primary_actions & actions )
+{
+ // module -> [ header, header... ]
+ std::map< std::string, std::set< std::string > > deps;
+
+ // header -> included from [ header, header... ]
+ std::map< std::string, std::set< std::string > > from;
+
+ fs::path dir = module_include_path( module );
+ size_t n = dir.generic_string().size();
+
+ fs::recursive_directory_iterator it( dir ), last;
+
+ for( ; it != last; ++it )
+ {
+ std::string header = it->path().generic_string().substr( n+1 );
+
+ fs::ifstream is( it->path() );
+
+ scan_header_dependencies( header, is, deps, from );
+ }
+
+ actions.heading( module );
+
+ for( std::map< std::string, std::set< std::string > >::iterator i = deps.begin(); i != deps.end(); ++i )
+ {
+ if( i->first == module ) continue;
+
+ actions.module_start( i->first );
+
+ for( std::set< std::string >::iterator j = i->second.begin(); j != i->second.end(); ++j )
+ {
+ actions.header_start( *j );
+
+ std::set< std::string > const & f = from[ *j ];
+
+ for( std::set< std::string >::const_iterator k = f.begin(); k != f.end(); ++k )
+ {
+ actions.from_header( *k );
+ }
+
+ actions.header_end( *j );
+ }
+
+ actions.module_end( i->first );
+ }
+}
+
+// module depends on [ module, module... ]
+static std::map< std::string, std::set< std::string > > s_module_deps;
+
+// header is included by [header, header...]
+static std::map< std::string, std::set< std::string > > s_header_deps;
+
+// [ module, module... ] depend on module
+static std::map< std::string, std::set< std::string > > s_reverse_deps;
+
+struct build_mdmap_actions: public module_primary_actions
+{
+ std::string module_;
+ std::string header_;
+
+ void heading( std::string const & module )
+ {
+ module_ = module;
+ }
+
+ void module_start( std::string const & module )
+ {
+ s_module_deps[ module_ ].insert( module );
+ s_reverse_deps[ module ].insert( module_ );
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ }
+
+ void header_start( std::string const & header )
+ {
+ header_ = header;
+ }
+
+ void header_end( std::string const & /*header*/ )
+ {
+ }
+
+ void from_header( std::string const & header )
+ {
+ s_header_deps[ header_ ].insert( header );
+ }
+};
+
+static void build_module_dependency_map()
+{
+ for( std::set< std::string >::iterator i = s_modules.begin(); i != s_modules.end(); ++i )
+ {
+ build_mdmap_actions actions;
+ scan_module_dependencies( *i, actions );
+ }
+}
+
+static void output_module_primary_report( std::string const & module, module_primary_actions & actions )
+{
+ try
+ {
+ scan_module_dependencies( module, actions );
+ }
+ catch( fs::filesystem_error const & x )
+ {
+ std::cout << x.what() << std::endl;
+ }
+}
+
+struct module_secondary_actions
+{
+ virtual void heading( std::string const & module ) = 0;
+
+ virtual void module_start( std::string const & module ) = 0;
+ virtual void module_end( std::string const & module ) = 0;
+
+ virtual void module_adds( std::string const & module ) = 0;
+};
+
+static void exclude( std::set< std::string > & x, std::set< std::string > const & y )
+{
+ for( std::set< std::string >::const_iterator i = y.begin(); i != y.end(); ++i )
+ {
+ x.erase( *i );
+ }
+}
+
+static void output_module_secondary_report( std::string const & module, module_secondary_actions & actions )
+{
+ actions.heading( module );
+
+ std::set< std::string > deps = s_module_deps[ module ];
+
+ deps.insert( module );
+
+ // build transitive closure
+
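+    // Keep adding the dependencies of dependencies until the set stops
+    // growing; each pass reports, for every module that contributes something
+    // new, the modules it adds (module_adds).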
+ for( ;; )
+ {
+ std::set< std::string > deps2( deps );
+
+ for( std::set< std::string >::iterator i = deps.begin(); i != deps.end(); ++i )
+ {
+ std::set< std::string > deps3 = s_module_deps[ *i ];
+
+ exclude( deps3, deps );
+
+ if( deps3.empty() )
+ {
+ continue;
+ }
+
+ actions.module_start( *i );
+
+ for( std::set< std::string >::iterator j = deps3.begin(); j != deps3.end(); ++j )
+ {
+ actions.module_adds( *j );
+ }
+
+ actions.module_end( *i );
+
+ deps2.insert( deps3.begin(), deps3.end() );
+ }
+
+ if( deps == deps2 )
+ {
+ break;
+ }
+ else
+ {
+ deps = deps2;
+ }
+ }
+}
+
+struct header_inclusion_actions
+{
+ virtual void heading( std::string const & header, std::string const & module ) = 0;
+
+ virtual void module_start( std::string const & module ) = 0;
+ virtual void module_end( std::string const & module ) = 0;
+
+ virtual void header( std::string const & header ) = 0;
+};
+
+static void output_header_inclusion_report( std::string const & header, header_inclusion_actions & actions )
+{
+ std::string module = s_header_map[ header ];
+
+ actions.heading( header, module );
+
+ std::set< std::string > from = s_header_deps[ header ];
+
+ // classify 'from' dependencies by module
+
+ // module -> [header, header...]
+ std::map< std::string, std::set< std::string > > from2;
+
+ for( std::set< std::string >::iterator i = from.begin(); i != from.end(); ++i )
+ {
+ from2[ s_header_map[ *i ] ].insert( *i );
+ }
+
+ for( std::map< std::string, std::set< std::string > >::iterator i = from2.begin(); i != from2.end(); ++i )
+ {
+ actions.module_start( i->first );
+
+ for( std::set< std::string >::iterator j = i->second.begin(); j != i->second.end(); ++j )
+ {
+ actions.header( *j );
+ }
+
+ actions.module_end( i->first );
+ }
+}
+
+// output_module_primary_report
+
+struct module_primary_txt_actions: public module_primary_actions
+{
+ void heading( std::string const & module )
+ {
+ std::cout << "Primary dependencies for " << module << ":\n\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << module << ":\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void header_start( std::string const & header )
+ {
+ std::cout << " <" << header << ">\n";
+ }
+
+ void header_end( std::string const & /*header*/ )
+ {
+ }
+
+ void from_header( std::string const & header )
+ {
+ std::cout << " from <" << header << ">\n";
+ }
+};
+
+struct module_primary_html_actions: public module_primary_actions
+{
+ void heading( std::string const & module )
+ {
+ std::cout << "\n\n<h1 id=\"primary-dependencies\">Primary dependencies for <em>" << module << "</em></h1>\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " <h2 id=\"" << module << "\"><a href=\"" << module << ".html\"><em>" << module << "</em></a></h2>\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ }
+
+ void header_start( std::string const & header )
+ {
+ std::cout << " <h3><code>&lt;" << header << "&gt;</code></h3><ul>\n";
+ }
+
+ void header_end( std::string const & /*header*/ )
+ {
+ std::cout << " </ul>\n";
+ }
+
+ void from_header( std::string const & header )
+ {
+ std::cout << " <li>from <code>&lt;" << header << "&gt;</code></li>\n";
+ }
+};
+
+static void output_module_primary_report( std::string const & module, bool html )
+{
+ if( html )
+ {
+ module_primary_html_actions actions;
+ output_module_primary_report( module, actions );
+ }
+ else
+ {
+ module_primary_txt_actions actions;
+ output_module_primary_report( module, actions );
+ }
+}
+
+// output_module_secondary_report
+
+struct module_secondary_txt_actions: public module_secondary_actions
+{
+ void heading( std::string const & module )
+ {
+ std::cout << "Secondary dependencies for " << module << ":\n\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << module << ":\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void module_adds( std::string const & module )
+ {
+ std::cout << " adds " << module << "\n";
+ }
+};
+
+struct module_secondary_html_actions: public module_secondary_actions
+{
+ std::string m2_;
+
+ void heading( std::string const & module )
+ {
+ std::cout << "\n\n<h1 id=\"secondary-dependencies\">Secondary dependencies for <em>" << module << "</em></h1>\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " <h2><a href=\"" << module << ".html\"><em>" << module << "</em></a></h2><ul>\n";
+ m2_ = module;
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << " </ul>\n";
+ }
+
+ void module_adds( std::string const & module )
+ {
+ std::cout << " <li><a href=\"" << m2_ << ".html#" << module << "\">adds <em>" << module << "</em></a></li>\n";
+ }
+};
+
+static void output_module_secondary_report( std::string const & module, bool html )
+{
+ if( html )
+ {
+ module_secondary_html_actions actions;
+ output_module_secondary_report( module, actions );
+ }
+ else
+ {
+ module_secondary_txt_actions actions;
+ output_module_secondary_report( module, actions );
+ }
+}
+
+// output_header_report
+
+struct header_inclusion_txt_actions: public header_inclusion_actions
+{
+ void heading( std::string const & header, std::string const & module )
+ {
+ std::cout << "Inclusion report for <" << header << "> (in module " << module << "):\n\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " from " << module << ":\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void header( std::string const & header )
+ {
+ std::cout << " <" << header << ">\n";
+ }
+};
+
+struct header_inclusion_html_actions: public header_inclusion_actions
+{
+ void heading( std::string const & header, std::string const & module )
+ {
+ std::cout << "<h1>Inclusion report for <code>&lt;" << header << "&gt;</code> (in module <em>" << module << "</em>)</h1>\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " <h2>From <a href=\"" << module << ".html\"><em>" << module << "</em></a></h2><ul>\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << " </ul>\n";
+ }
+
+ void header( std::string const & header )
+ {
+ std::cout << " <li><code>&lt;" << header << "&gt;</code></li>\n";
+ }
+};
+
+static void output_header_report( std::string const & header, bool html )
+{
+ if( html )
+ {
+ header_inclusion_html_actions actions;
+ output_header_inclusion_report( header, actions );
+ }
+ else
+ {
+ header_inclusion_txt_actions actions;
+ output_header_inclusion_report( header, actions );
+ }
+}
+
+// output_module_reverse_report
+
+struct module_reverse_actions
+{
+ virtual void heading( std::string const & module ) = 0;
+
+ virtual void module_start( std::string const & module ) = 0;
+ virtual void module_end( std::string const & module ) = 0;
+
+ virtual void header_start( std::string const & header ) = 0;
+ virtual void header_end( std::string const & header ) = 0;
+
+ virtual void from_header( std::string const & header ) = 0;
+};
+
+static void output_module_reverse_report( std::string const & module, module_reverse_actions & actions )
+{
+ actions.heading( module );
+
+ std::set< std::string > const from = s_reverse_deps[ module ];
+
+ for( std::set< std::string >::const_iterator i = from.begin(); i != from.end(); ++i )
+ {
+ actions.module_start( *i );
+
+ for( std::map< std::string, std::set< std::string > >::iterator j = s_header_deps.begin(); j != s_header_deps.end(); ++j )
+ {
+ if( s_header_map[ j->first ] == module )
+ {
+ bool header_started = false;
+
+ for( std::set< std::string >::iterator k = j->second.begin(); k != j->second.end(); ++k )
+ {
+ if( s_header_map[ *k ] == *i )
+ {
+ if( !header_started )
+ {
+ actions.header_start( j->first );
+
+ header_started = true;
+ }
+
+ actions.from_header( *k );
+ }
+ }
+
+ if( header_started )
+ {
+ actions.header_end( j->first );
+ }
+ }
+ }
+
+ actions.module_end( *i );
+ }
+}
+
+struct module_reverse_txt_actions: public module_reverse_actions
+{
+ void heading( std::string const & module )
+ {
+ std::cout << "Reverse dependencies for " << module << ":\n\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << module << ":\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void header_start( std::string const & header )
+ {
+ std::cout << " <" << header << ">\n";
+ }
+
+ void header_end( std::string const & /*header*/ )
+ {
+ }
+
+ void from_header( std::string const & header )
+ {
+ std::cout << " from <" << header << ">\n";
+ }
+};
+
+struct module_reverse_html_actions: public module_reverse_actions
+{
+ void heading( std::string const & module )
+ {
+ std::cout << "\n\n<h1 id=\"reverse-dependencies\">Reverse dependencies for <em>" << module << "</em></h1>\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " <h2 id=\"reverse-" << module << "\"><a href=\"" << module << ".html\"><em>" << module << "</em></a></h2>\n";
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ }
+
+ void header_start( std::string const & header )
+ {
+ std::cout << " <h3><code>&lt;" << header << "&gt;</code></h3><ul>\n";
+ }
+
+ void header_end( std::string const & /*header*/ )
+ {
+ std::cout << " </ul>\n";
+ }
+
+ void from_header( std::string const & header )
+ {
+ std::cout << " <li>from <code>&lt;" << header << "&gt;</code></li>\n";
+ }
+};
+
+static void output_module_reverse_report( std::string const & module, bool html )
+{
+ if( html )
+ {
+ module_reverse_html_actions actions;
+ output_module_reverse_report( module, actions );
+ }
+ else
+ {
+ module_reverse_txt_actions actions;
+ output_module_reverse_report( module, actions );
+ }
+}
+
+// module_level_report
+
+int const unknown_level = INT_MAX / 2;
+
+struct module_level_actions
+{
+ virtual void heading() = 0;
+
+ virtual void level_start( int level ) = 0;
+ virtual void level_end( int level ) = 0;
+
+ virtual void module_start( std::string const & module ) = 0;
+ virtual void module_end( std::string const & module ) = 0;
+
+ virtual void module2( std::string const & module, int level ) = 0;
+};
+
+static void output_module_level_report( module_level_actions & actions )
+{
+ // build module level map
+
+ std::map< std::string, int > level_map;
+
+ for( std::set< std::string >::iterator i = s_modules.begin(); i != s_modules.end(); ++i )
+ {
+ if( s_module_deps[ *i ].empty() )
+ {
+ level_map[ *i ] = 0;
+ // std::cerr << *i << ": " << 0 << std::endl;
+ }
+ else
+ {
+ level_map[ *i ] = unknown_level;
+ }
+ }
+
+ // build transitive closure to see through cycles
+
+ std::map< std::string, std::set< std::string > > deps2 = s_module_deps;
+
+ {
+ bool done;
+
+ do
+ {
+ done = true;
+
+ for( std::map< std::string, std::set< std::string > >::iterator i = deps2.begin(); i != deps2.end(); ++i )
+ {
+ std::set< std::string > tmp = i->second;
+
+ for( std::set< std::string >::iterator j = i->second.begin(); j != i->second.end(); ++j )
+ {
+ std::set< std::string > tmp2 = deps2[ *j ];
+ tmp.insert( tmp2.begin(), tmp2.end() );
+ }
+
+ if( tmp.size() != i->second.size() )
+ {
+ i->second = tmp;
+ done = false;
+ }
+ }
+ }
+ while( !done );
+ }
+
+ // compute acyclic levels
+
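+    // A module's level is 1 + the maximum level of its dependencies. Levels
+    // are assigned in passes: pass k only fixes modules whose level works out
+    // to exactly k, so values propagate upward through the acyclic part of
+    // the dependency graph.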
+ for( int k = 1, n = s_modules.size(); k < n; ++k )
+ {
+ for( std::map< std::string, std::set< std::string > >::iterator i = s_module_deps.begin(); i != s_module_deps.end(); ++i )
+ {
+ // i->first depends on i->second
+
+ if( level_map[ i->first ] >= unknown_level )
+ {
+ int level = 0;
+
+ for( std::set< std::string >::iterator j = i->second.begin(); j != i->second.end(); ++j )
+ {
+ level = std::max( level, level_map[ *j ] + 1 );
+ }
+
+ if( level == k )
+ {
+ level_map[ i->first ] = level;
+ // std::cerr << i->first << ": " << level << std::endl;
+ }
+ }
+ }
+ }
+
+ // min_level_map[ M ] == L means the level is unknown, but at least L
+ std::map< std::string, int > min_level_map;
+
+ // initialize min_level_map for acyclic dependencies
+
+ for( std::map< std::string, int >::iterator i = level_map.begin(); i != level_map.end(); ++i )
+ {
+ if( i->second < unknown_level )
+ {
+ min_level_map[ i->first ] = i->second;
+ }
+ }
+
+ // compute levels for cyclic modules
+
+ for( int k = 1, n = s_modules.size(); k < n; ++k )
+ {
+ for( std::map< std::string, std::set< std::string > >::iterator i = s_module_deps.begin(); i != s_module_deps.end(); ++i )
+ {
+ if( level_map[ i->first ] >= unknown_level )
+ {
+ int level = 0;
+
+ for( std::set< std::string >::iterator j = i->second.begin(); j != i->second.end(); ++j )
+ {
+ int jl = level_map[ *j ];
+
+ if( jl < unknown_level )
+ {
+ level = std::max( level, jl + 1 );
+ }
+ else
+ {
+ int ml = min_level_map[ *j ];
+
+ if( deps2[ *j ].count( i->first ) == 0 )
+ {
+ // *j does not depend on i->first, so
+ // the level of i->first is at least
+ // 1 + the minimum level of *j
+
+ ++ml;
+ }
+
+ level = std::max( level, ml );
+ }
+ }
+
+ min_level_map[ i->first ] = level;
+ }
+ }
+ }
+
+ // reverse level map
+
+ std::map< int, std::set< std::string > > reverse_level_map;
+
+ for( std::map< std::string, int >::iterator i = level_map.begin(); i != level_map.end(); ++i )
+ {
+ int level = i->second;
+
+ if( level >= unknown_level )
+ {
+ int min_level = min_level_map[ i->first ];
+
+ if( min_level != 0 )
+ {
+ level = min_level;
+ }
+ }
+
+ reverse_level_map[ level ].insert( i->first );
+ }
+
+ // output report
+
+ actions.heading();
+
+ for( std::map< int, std::set< std::string > >::iterator i = reverse_level_map.begin(); i != reverse_level_map.end(); ++i )
+ {
+ actions.level_start( i->first );
+
+ for( std::set< std::string >::iterator j = i->second.begin(); j != i->second.end(); ++j )
+ {
+ actions.module_start( *j );
+
+ std::set< std::string > mdeps = s_module_deps[ *j ];
+
+ for( std::set< std::string >::iterator k = mdeps.begin(); k != mdeps.end(); ++k )
+ {
+ int level = level_map[ *k ];
+
+ if( level >= unknown_level )
+ {
+ int min_level = min_level_map[ *k ];
+
+ if( min_level != 0 )
+ {
+ level = min_level;
+ }
+ }
+
+ actions.module2( *k, level );
+ }
+
+ actions.module_end( *j );
+ }
+
+ actions.level_end( i->first );
+ }
+}
+
+struct module_level_txt_actions: public module_level_actions
+{
+ int level_;
+
+ void heading()
+ {
+ std::cout << "Module Levels:\n\n";
+ }
+
+ void level_start( int level )
+ {
+ if( level >= unknown_level )
+ {
+ std::cout << "Level (undetermined):\n";
+ }
+ else
+ {
+ std::cout << "Level " << level << ":\n";
+ }
+
+ level_ = level;
+ }
+
+ void level_end( int /*level*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " " << module;
+
+ if( level_ > 0 )
+ {
+ std::cout << " ->";
+ }
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void module2( std::string const & module, int level )
+ {
+ std::cout << " " << module << "(";
+
+ if( level >= unknown_level )
+ {
+ std::cout << "-";
+ }
+ else
+ {
+ std::cout << level;
+ }
+
+ std::cout << ")";
+ }
+};
+
+struct module_level_html_actions: public module_level_actions
+{
+ int level_;
+
+ void heading()
+ {
+ std::cout << "<h1>Module Levels</h1>\n";
+ }
+
+ void level_start( int level )
+ {
+ std::cout << " <h2>Level ";
+
+ if( level >= unknown_level )
+ {
+ std::cout << "<em>undetermined</em>";
+ }
+ else
+ {
+ std::cout << level;
+ }
+
+ std::cout << "</h2><ul>\n";
+
+ level_ = level;
+ }
+
+ void level_end( int /*level*/ )
+ {
+ std::cout << " </ul>\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " <li><a href =\"" << module << ".html\">" << module << "</a><small>";
+
+ if( level_ > 0 )
+ {
+ std::cout << "<br />&#8674;";
+ }
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "</small></li>\n";
+ }
+
+ void module2( std::string const & module, int level )
+ {
+ std::cout << " " << module;
+
+ if( level < unknown_level )
+ {
+ std::cout << "<sup>" << level << "</sup>";
+ }
+ }
+};
+
+static void output_module_level_report( bool html )
+{
+ if( html )
+ {
+ module_level_html_actions actions;
+ output_module_level_report( actions );
+ }
+ else
+ {
+ module_level_txt_actions actions;
+ output_module_level_report( actions );
+ }
+}
+
+// module_overview_report
+
+struct module_overview_actions
+{
+ virtual void heading() = 0;
+
+ virtual void module_start( std::string const & module ) = 0;
+ virtual void module_end( std::string const & module ) = 0;
+
+ virtual void module2( std::string const & module ) = 0;
+};
+
+static void output_module_overview_report( module_overview_actions & actions )
+{
+ actions.heading();
+
+ for( std::set< std::string >::iterator i = s_modules.begin(); i != s_modules.end(); ++i )
+ {
+ actions.module_start( *i );
+
+ std::set< std::string > const mdeps = s_module_deps[ *i ];
+
+ for( std::set< std::string >::const_iterator j = mdeps.begin(); j != mdeps.end(); ++j )
+ {
+ actions.module2( *j );
+ }
+
+ actions.module_end( *i );
+ }
+}
+
+struct module_overview_txt_actions: public module_overview_actions
+{
+ bool deps_;
+
+ void heading()
+ {
+ std::cout << "Module Overview:\n\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << module;
+ deps_ = false;
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "\n";
+ }
+
+ void module2( std::string const & module )
+ {
+ if( !deps_ )
+ {
+ std::cout << " ->";
+ deps_ = true;
+ }
+
+ std::cout << " " << module;
+ }
+};
+
+struct module_overview_html_actions: public module_overview_actions
+{
+ bool deps_;
+
+ void heading()
+ {
+ std::cout << "<h1>Module Overview</h1>\n";
+ }
+
+ void module_start( std::string const & module )
+ {
+ std::cout << " <h2><a href =\"" << module << ".html\"><em>" << module << "</em></a></h2><p><small>";
+ deps_ = false;
+ }
+
+ void module_end( std::string const & /*module*/ )
+ {
+ std::cout << "</small></p>\n";
+ }
+
+ void module2( std::string const & module )
+ {
+ if( !deps_ )
+ {
+ std::cout << "&#8674;";
+ deps_ = true;
+ }
+
+ std::cout << " " << module;
+ }
+};
+
+static void output_module_overview_report( bool html )
+{
+ if( html )
+ {
+ module_overview_html_actions actions;
+ output_module_overview_report( actions );
+ }
+ else
+ {
+ module_overview_txt_actions actions;
+ output_module_overview_report( actions );
+ }
+}
+
+//
+
+static void output_html_header( std::string const & title )
+{
+ std::cout << "<html>\n";
+ std::cout << "<head>\n";
+ std::cout << "<title>" << title << "</title>\n";
+ std::cout << "</head>\n";
+ std::cout << "<body>\n";
+}
+
+static void output_html_footer( std::string const & footer )
+{
+ std::cout << "<hr />\n";
+ std::cout << "<p><small>" << footer << "</small></p>\n";
+ std::cout << "</body>\n";
+ std::cout << "</html>\n";
+}
+
+static void enable_secondary( bool & secondary )
+{
+ if( !secondary )
+ {
+ try
+ {
+ build_module_dependency_map();
+ }
+ catch( fs::filesystem_error const & x )
+ {
+ std::cout << x.what() << std::endl;
+ }
+
+ secondary = true;
+ }
+}
+
+int main( int argc, char const* argv[] )
+{
+ if( argc < 2 )
+ {
+ std::cerr << "Usage:\n\n";
+ std::cerr << " boostdep --list-modules\n";
+ std::cerr << "\n";
+ std::cerr << " boostdep [options] --module-overview\n";
+ std::cerr << " boostdep [options] --module-levels\n";
+ std::cerr << "\n";
+ std::cerr << " boostdep [options] [--primary] <module>\n";
+ std::cerr << " boostdep [options] --secondary <module>\n";
+ std::cerr << " boostdep [options] --reverse <module>\n";
+ std::cerr << " boostdep [options] [--header] <header>\n";
+ std::cerr << "\n";
+ std::cerr << " where [options] can be [--title <title>] [--footer <footer>] [--html]\n";
+
+ return -1;
+ }
+
+ try
+ {
+ build_header_map();
+ }
+ catch( fs::filesystem_error const & x )
+ {
+ std::cout << x.what() << std::endl;
+ }
+
+ bool html = false;
+ bool secondary = false;
+
+ std::string title = "Boost Dependency Report";
+ std::string footer;
+
+ for( int i = 1; i < argc; ++i )
+ {
+ std::string option = argv[ i ];
+
+ if( option == "--list-modules" )
+ {
+ for( std::set< std::string >::iterator i = s_modules.begin(); i != s_modules.end(); ++i )
+ {
+ std::cout << *i << "\n";
+ }
+ }
+ else if( option == "--title" )
+ {
+ if( i + 1 < argc )
+ {
+ title = argv[ ++i ];
+ }
+ }
+ else if( option == "--footer" )
+ {
+ if( i + 1 < argc )
+ {
+ footer = argv[ ++i ];
+ }
+ }
+ else if( option == "--html" )
+ {
+ if( !html )
+ {
+ html = true;
+ output_html_header( title );
+ }
+ }
+ else if( option == "--primary" )
+ {
+ if( i + 1 < argc )
+ {
+ output_module_primary_report( argv[ ++i ], html );
+ }
+ }
+ else if( option == "--secondary" )
+ {
+ if( i + 1 < argc )
+ {
+ enable_secondary( secondary );
+ output_module_secondary_report( argv[ ++i ], html );
+ }
+ }
+ else if( option == "--reverse" )
+ {
+ if( i + 1 < argc )
+ {
+ enable_secondary( secondary );
+ output_module_reverse_report( argv[ ++i ], html );
+ }
+ }
+ else if( option == "--header" )
+ {
+ if( i + 1 < argc )
+ {
+ enable_secondary( secondary );
+ output_header_report( argv[ ++i ], html );
+ }
+ }
+ else if( option == "--module-levels" )
+ {
+ enable_secondary( secondary );
+ output_module_level_report( html );
+ }
+ else if( option == "--module-overview" )
+ {
+ enable_secondary( secondary );
+ output_module_overview_report( html );
+ }
+ else if( s_modules.count( option ) )
+ {
+ output_module_primary_report( option, html );
+ }
+ else if( s_header_map.count( option ) )
+ {
+ enable_secondary( secondary );
+ output_header_report( option, html );
+ }
+ else
+ {
+ std::cerr << "'" << option << "': not an option, module or header.\n";
+ }
+ }
+
+ if( html )
+ {
+ output_html_footer( footer );
+ }
+}
diff --git a/tools/build/Jamroot.jam b/tools/build/Jamroot.jam
new file mode 100644
index 0000000000..54e9cc8399
--- /dev/null
+++ b/tools/build/Jamroot.jam
@@ -0,0 +1,47 @@
+
+path-constant SELF : . ;
+
+import path ;
+import package ;
+import os ;
+
+local ext = "" ;
+if [ os.on-windows ]
+{
+ ext = ".exe" ;
+}
+
+
+package.install boost-build-engine boost-build
+ : # properties
+ : # binaries
+ b2$(ext) bjam$(ext)
+ ;
+
+local e1 = [ path.glob-tree $(SELF)/example : * : . .svn ] ;
+local e2 ;
+for e in $(e1)
+{
+ e = [ path.native $(e) ] ;
+ if [ CHECK_IF_FILE $(e) ]
+ {
+ e2 += $(e) ;
+ }
+}
+
+package.install-data boost-build-core
+ : # Which subdir of $prefix/share
+ boost-build
+ : # What to install
+ $(SELF)/boost-build.jam
+ $(SELF)/src/build-system.jam
+ [ path.glob-tree $(SELF)/src/build : *.jam *.py ]
+ [ path.glob-tree $(SELF)/src/kernel : *.jam *.py ]
+ [ path.glob-tree $(SELF)/src/util : *.jam *.py ]
+ [ path.glob-tree $(SELF)/src/tools : *.jam *.py *.xml *.xsl *.doxyfile *.hpp ]
+ $(e2)
+ : # What is the root of the directory
+ <install-source-root>.
+ ;
+
+alias install : boost-build-engine boost-build-core ;
diff --git a/tools/build/boost-build.jam b/tools/build/boost-build.jam
new file mode 100644
index 0000000000..a1240fdf75
--- /dev/null
+++ b/tools/build/boost-build.jam
@@ -0,0 +1,8 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+boost-build src/kernel ;
diff --git a/tools/build/boost.css b/tools/build/boost.css
deleted file mode 100644
index d0a30762dd..0000000000
--- a/tools/build/boost.css
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-Copyright 2002 David Abrahams.
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or copy at
-http://www.boost.org/LICENSE_1_0.txt)
- */
-H1
-{
- FONT-SIZE: 200%
- COLOR: #00007f
-}
-H2
-{
- FONT-SIZE: 150%;
-}
-H3
-{
- FONT-SIZE: 125%;
-}
-H4
-{
- FONT-SIZE: 108%;
-}
-BODY
-{
- FONT-SIZE: 100%;
- BACKGROUND-COLOR: #ffffff
-}
-PRE
-{
- MARGIN-LEFT: 2pc;
- FONT-SIZE: 80%;
- BACKGROUND-COLOR: #dfffff
-}
-CODE
-{
- FONT-SIZE: 95%;
- white-space: pre
-}
-.index
-{
- TEXT-ALIGN: left
-}
-.page-index
-{
- TEXT-ALIGN: left
-}
-.definition
-{
- TEXT-ALIGN: left
-}
-.footnote
-{
- FONT-SIZE: 66%;
- VERTICAL-ALIGN: super;
- TEXT-DECORATION: none
-}
-.function-semantics
-{
- CLEAR: left
-}
-.metafunction-semantics
-{
- CLEAR: left
-}
diff --git a/tools/build/bootstrap.bat b/tools/build/bootstrap.bat
new file mode 100644
index 0000000000..1229499cc2
--- /dev/null
+++ b/tools/build/bootstrap.bat
@@ -0,0 +1,49 @@
+@ECHO OFF
+
+REM Copyright (C) 2009 Vladimir Prus
+REM
+REM Distributed under the Boost Software License, Version 1.0.
+REM (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+ECHO Bootstrapping the build engine
+if exist ".\src\engine\bin.ntx86\bjam.exe" del src\engine\bin.ntx86\bjam.exe
+if exist ".\src\engine\bin.ntx86_64\bjam.exe" del src\engine\bin.ntx86_64\bjam.exe
+
+pushd src\engine
+call .\build.bat %* > ..\..\bootstrap.log
+@ECHO OFF
+popd
+
+if exist ".\src\engine\bin.ntx86\b2.exe" (
+ copy .\src\engine\bin.ntx86\b2.exe . > nul
+ copy .\src\engine\bin.ntx86\bjam.exe . > nul
+ goto :bjam_built)
+
+if exist ".\src\engine\bin.ntx86_64\b2.exe" (
+ copy .\src\engine\bin.ntx86_64\b2.exe . > nul
+ copy .\src\engine\bin.ntx86_64\bjam.exe . > nul
+ goto :bjam_built)
+
+goto :bjam_failure
+
+:bjam_built
+
+ECHO.
+ECHO Bootstrapping is done. To build, run:
+ECHO.
+ECHO .\b2 --prefix=DIR install
+ECHO.
+
+goto :end
+
+:bjam_failure
+
+ECHO.
+ECHO Failed to bootstrap the build engine
+ECHO Please consult bootstrap.log for further diagnostics.
+ECHO.
+
+
+goto :end
+
+:end
diff --git a/tools/build/bootstrap.sh b/tools/build/bootstrap.sh
new file mode 100755
index 0000000000..8523c3db3e
--- /dev/null
+++ b/tools/build/bootstrap.sh
@@ -0,0 +1,120 @@
+#!/bin/sh
+# Copyright (C) 2005, 2006 Douglas Gregor.
+# Copyright (C) 2006 The Trustees of Indiana University
+# Copyright (C) 2010 Bryce Lelbach
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# boostinspect:notab - Tabs are required for the Makefile.
+
+B2=""
+TOOLSET=""
+B2_CONFIG=""
+
+for option
+do
+ case $option in
+
+ -help | --help | -h)
+ want_help=yes ;;
+
+ -with-toolset=* | --with-toolset=* )
+ TOOLSET=`expr "x$option" : "x-*with-toolset=\(.*\)"`
+ ;;
+
+ -*)
+ { echo "error: unrecognized option: $option
+Try \`$0 --help' for more information." >&2
+ { (exit 1); exit 1; }; }
+ ;;
+
+ esac
+done
+
+if test "x$want_help" = xyes; then
+ cat <<EOF
+`./bootstrap.sh' creates a minimal Boost.Build, which can install itself.
+
+Usage: $0 [OPTION]...
+
+Defaults for the options are specified in brackets.
+
+Configuration:
+ -h, --help display this help and exit
+ --with-b2=B2 use existing Boost.Build executable (b2)
+ [automatically built]
+ --with-toolset=TOOLSET use specific Boost.Build toolset
+ [automatically detected]
+EOF
+fi
+test -n "$want_help" && exit 0
+
+# TBD: Determine where the script is located
+my_dir="."
+
+# Determine the toolset, if not already decided
+if test "x$TOOLSET" = x; then
+ guessed_toolset=`$my_dir/src/engine/build.sh --guess-toolset`
+ case $guessed_toolset in
+ acc | darwin | gcc | como | mipspro | pathscale | pgi | qcc | vacpp )
+ TOOLSET=$guessed_toolset
+ ;;
+
+ intel-* )
+ TOOLSET=intel
+ ;;
+
+ mingw )
+ TOOLSET=gcc
+ ;;
+
+ clang* )
+ TOOLSET=clang
+ ;;
+
+ sun* )
+ TOOLSET=sun
+ ;;
+
+ * )
+ # Not supported by Boost.Build
+ ;;
+ esac
+fi
+
+case $TOOLSET in
+ clang*)
+ TOOLSET=clang
+ ;;
+esac
+
+
+rm -f config.log
+
+# Build b2
+if test "x$B2" = x; then
+ echo -n "Bootstrapping the build engine with toolset $TOOLSET... "
+ pwd=`pwd`
+ (cd "$my_dir/src/engine" && ./build.sh "$TOOLSET") > bootstrap.log 2>&1
+ if [ $? -ne 0 ]; then
+ echo
+ echo "Failed to bootstrap the build engine"
+ echo "Consult 'bootstrap.log' for more details"
+ exit 1
+ fi
+ cd "$pwd"
+ arch=`cd $my_dir/src/engine && ./bootstrap/jam0 -d0 -f build.jam --toolset=$TOOLSET --toolset-root= --show-locate-target && cd ..`
+ B2="$my_dir/src/engine/$arch/b2"
+ echo "engine/$arch/b2"
+ cp "$B2" .
+ cp "$my_dir/src/engine/$arch/bjam" .
+fi
+
+cat << EOF
+
+Bootstrapping is done. To build and install, run:
+
+ ./b2 install --prefix=<DIR>
+
+EOF
diff --git a/tools/build/doc/bjam.qbk b/tools/build/doc/bjam.qbk
new file mode 100644
index 0000000000..a57a44021e
--- /dev/null
+++ b/tools/build/doc/bjam.qbk
@@ -0,0 +1,1696 @@
+[article Boost.Jam
+ [quickbook 1.3]
+ [version: 3.1.19]
+ [authors [Rivera, Rene], [Abrahams, David], [Prus, Vladimir]]
+ [copyright 2003 2004 2005 2006 2007 Rene Rivera, David Abrahams, Vladimir Prus]
+ [category tool-build]
+ [id jam]
+ [dirname jam]
+ [purpose
+ Jam is a make(1) replacement that makes building simple things simple
+ and building complicated things manageable.
+ ]
+ [license
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ [@http://www.boost.org/LICENSE_1_0.txt])
+ ]
+]
+
+[/ QuickBook Document version 1.3 ]
+
+[/ Shortcuts ]
+
+[def :version: 3.1.19]
+
+[/ Images ]
+
+[def :NOTE: [$images/note.png]]
+[def :ALERT: [$images/caution.png]]
+[def :DETAIL: [$images/note.png]]
+[def :TIP: [$images/tip.png]]
+
+[/ Links ]
+
+[def :Boost: [@http://www.boost.org Boost]]
+[def :Perforce_Jam: [@http://www.perforce.com/jam/jam.html Perforce Jam]]
+
+[/ Templates ]
+
+[template literal[text]'''<literallayout><literal>'''[text]'''</literal></literallayout>''']
+[template list[items]'''<itemizedlist>'''[items]'''</itemizedlist>''']
+[template orderedlist[items]'''<orderedlist>'''[items]'''</orderedlist>''']
+[template li[text]'''<listitem>'''[text]'''</listitem>''']
+[template lines[items]'''<simplelist type='vert' columns='1'>'''[items]'''</simplelist>''']
+[template line[text]'''<member>'''[text]'''</member>''']
+
+[section:building Building B2]
+
+Installing =B2= after building it is simply a matter of copying the
+generated executables someplace in your =PATH=. For building the executables
+there is a set of =build= bootstrap scripts to accommodate particular
+environments. The scripts take one optional argument, the name of the toolset
+to build with. When the toolset is not given, an attempt is made to detect an
+available toolset and use that. The build scripts accept these arguments:
+
+[pre
+/build/ \[/toolset/\]
+]
+
+Running the scripts without arguments will give you the best chance of success. On Windows platforms, from a command console, do:
+
+[pre
+cd /jam source location/
+.\\build.bat
+]
+
+On Unix type platforms do:
+
+[pre
+cd /jam source location/
+sh ./build.sh
+]
+
+For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/v2/engine=.
+
+If the scripts fail to detect an appropriate toolset to build with, your particular toolset may not be auto-detectable. In that case, you can specify the toolset as the first argument; this assumes that the toolset is readily available in the =PATH=.
+
+[note
+The toolset used to build Boost.Jam is independent of the toolsets used for Boost.Build. Only one version of Boost.Jam is needed to use Boost.Build.
+]
+
+The supported toolsets, and whether they are auto-detected, are:
+
+[table Supported Toolsets
+
+[[Script] [Platform] [Toolset] [Detection and Notes]]
+
+[ [=build.bat=] [Windows NT, 2000, and XP]
+ [[lines
+ [line [@http://www.codegear.com/downloads/free/cppbuilder =borland=]]
+ [line [@http://www.borland.com/ Borland] C++Builder (BCC 5.5)]
+ ]]
+ [[list
+ [li Common install location: "=C:\Borland\BCC55="]
+ [li =BCC32.EXE= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.comeaucomputing.com/ =como=]]
+ [line Comeau Computing C/C++]
+ ]]
+ []
+]
+
+[ [] []
+ [[lines
+ [line [@http://gcc.gnu.org/ =gcc=]]
+ [line GNU GCC]
+ ]]
+ []
+]
+
+[ [] []
+ [[lines
+ [line [@http://gcc.gnu.org/ =gcc-nocygwin=]]
+ [line GNU GCC]
+ ]]
+ []
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.intel.com/software/products/compilers/c60 =intel-win32=]]
+ [line Intel C++ Compiler for Windows]
+ ]]
+ [[list
+ [li =ICL.EXE= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.metrowerks.com/ =metrowerks=]]
+ [line MetroWerks CodeWarrior C/C++ 7.x, 8.x, 9.x]
+ ]]
+ [[list
+ [li =CWFolder= variable configured]
+ [li =MWCC.EXE= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.mingw.org/ =mingw=]]
+ [line GNU [@http://gcc.gnu.org/ GCC] as the [@http://www.mingw.org/ MinGW] configuration]
+ ]]
+ [[list
+ [li Common install location: "=C:\MinGW="]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://msdn.microsoft.com/visualc/ =msvc=]]
+ [line Microsoft Visual C++ 6.x]
+ ]]
+ [[list
+ [li =VCVARS32.BAT= already configured]
+ [li =%MSVCDir%= is present in environment]
+ [li Common install locations: "=%ProgramFiles%\Microsoft Visual Studio=", "=%ProgramFiles%\Microsoft Visual C++="]
+ [li =CL.EXE= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://msdn.microsoft.com/visualc/ =vc7=]]
+ [line Microsoft Visual C++ 7.x]
+ ]]
+ [[list
+ [li =VCVARS32.BAT= or =VSVARS32.BAT= already configured]
+ [li =%VS71COMNTOOLS%= is present in environment]
+ [li =%VCINSTALLDIR%= is present in environment]
+ [li Common install locations: "=%ProgramFiles%\Microsoft Visual Studio .NET=", "=%ProgramFiles%\Microsoft Visual Studio .NET 2003="]
+ [li =CL.EXE= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://msdn.microsoft.com/visualc/ =vc8= and =vc9=]]
+ [line Microsoft Visual C++ 8.x and 9.x]
+ ]]
+ [Detection:
+ [list
+ [li =VCVARSALL.BAT= already configured]
+ [li =%VS90COMNTOOLS%= is present in environment]
+ [li Common install location: "=%ProgramFiles%\Microsoft Visual Studio 9="]
+ [li =%VS80COMNTOOLS%= is present in environment]
+ [li Common install location: "=%ProgramFiles%\Microsoft Visual Studio 8="]
+ [li =CL.EXE= in =PATH=]
+ ]
+
+ Notes:
+ [list
+ [li If =VCVARSALL.BAT= is called to set up the toolset, it is passed all the extra arguments, see below for what those arguments are. This can be used to build, for example, a Win64 specific version of =b2=. Consult the VisualStudio documentation for what the possible argument values to the =VCVARSALL.BAT= are.]
+ ]
+ ]
+]
+
+[ [=build.sh=] [Unix, Linux, Cygwin, etc.]
+ [[lines
+ [line [@http://www.hp.com/go/c++ =acc=]]
+ [line HP-UX aCC]
+ ]]
+ [[list
+ [li =aCC= in =PATH=]
+ [li =uname= is "HP-UX"]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.comeaucomputing.com/ =como=]]
+ [line Comeau Computing C/C++]
+ ]]
+ [[list
+ [li como in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://gcc.gnu.org/ =gcc=]]
+ [line GNU GCC]
+ ]]
+ [[list
+ [li gcc in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.intel.com/software/products/compilers/c60l/ =intel-linux=]]
+ [line Intel C++ for Linux]
+ ]]
+ [[list
+ [li =icc= in =PATH=]
+ [li Common install locations: "=/opt/intel/cc/9.0=", "=/opt/intel_cc_80=", "=/opt/intel/compiler70=", "=/opt/intel/compiler60=", "=/opt/intel/compiler50="]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line =kcc=]
+ [line Intel KAI C++]
+ ]]
+ [[list
+ [li =KCC= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.codegear.com/downloads/free/cppbuilder =kylix=]]
+ [line [@http://www.borland.com/ Borland] C++Builder]
+ ]]
+ [[list
+ [li bc++ in PATH]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.sgi.com/developers/devtools/languages/mipspro.html =mipspro=]]
+ [line SGI MIPSpro C]
+ ]]
+ [[list
+ [li =uname= is "=IRIX=" or "=IRIX64="]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line =sunpro=]
+ [line Sun Workshop 6 C++]
+ ]]
+ [[list
+ [li Standard install location: "=/opt/SUNWspro="]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line =qcc=]
+ [line [@http://www.qnx.com/ QNX Neutrino]]
+ ]]
+ [[list
+ [li =uname= is "=QNX=" and =qcc= in =PATH=]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.tru64unix.compaq.com/cplus/ =true64cxx=]]
+ [line Compaq C++ Compiler for True64 UNIX]
+ ]]
+ [[list
+ [li =uname= is "=OSF1="]
+ ]]
+]
+
+[ [] []
+ [[lines
+ [line [@http://www.ibm.com/software/awdtools/vacpp/ =vacpp=]]
+ [line IBM VisualAge C++]
+ ]]
+ [[list
+ [li =xlc= in =PATH=]
+ ]]
+]
+
+[ [] [MacOS X]
+ [[lines
+ [line [@http://developer.apple.com/tools/compilers.html =darwin=]]
+ [line Apple MacOS X GCC]
+ ]]
+ [[list
+ [li =uname= is "=Darwin="]
+ ]]
+]
+
+[ [] [Windows NT, 2000, and XP]
+ [[lines
+ [line [@http://www.mingw.org/ =mingw=]]
+ [line GNU [@http://gcc.gnu.org/ GCC] as the [@http://www.mingw.org/ MinGW] configuration with the MSYS shell]
+ ]]
+ [[list
+ [li Common install location: "=/mingw="]
+ ]]
+]
+
+]
+
+The built executables are placed in a subdirectory specific to your platform. For example, in Linux running on an Intel x86 compatible chip, the executables are placed in: "=bin.linuxx86=". The =b2[.exe]= executable can be used to invoke Boost.Build.
+
+The build scripts support additional invocation arguments for use by developers of Boost.Jam and for additional setup of the toolset. The extra arguments come after the toolset:
+
+* Arguments not in the form of an option, before any option arguments, are used for extra setup of the toolset configuration scripts.
+* Arguments of the form "=--option=" are passed to the =build.jam= build script.
+* Arguments not in the form of an option, after the options, are used as targets for the =build.jam= script.
+
+[pre
+/build/ \[/toolset/\] \[/setup/\*\] \[--/option/+ /target/\*\]
+]
+
+The arguments immediately after the toolset are passed directly to the setup script of the toolset, if available and if it needs to be invoked. This allows one to configure the toolset as needed to do non-default builds of =b2=, for example to build a Win64 version with =vc8=. See the toolset descriptions above for when particular toolsets support this.
+
+The arguments starting with the "=--option=" forms are passed to the =build.jam= script and are used to further customize what gets built. Options and targets supported by the =build.jam= script:
+
+[variablelist
+ [[[literal ---]]
+ [Empty option when one wants to only specify a target.]]
+ [[[literal --release]]
+ [The default, builds the optimized executable.]]
+ [[[literal --debug]]
+ [Builds debugging versions of the executable. When built they are placed in their own directory "=bin./platform/.debug=".]]
+ [[[literal --grammar]]
+ [Normally the Jam language grammar parsing files are not regenerated. This forces building of the grammar, although it may not force the regeneration of the grammar parser. If the parser is out of date it will be regenerated and subsequently built.]]
+ [[[literal --with-python=/path/]]
+ [Enables Python integration, given a path to the Python libraries.]]
+ [[[literal --gc]]
+ [Enables use of the Boehm Garbage Collector. The build will look for the Boehm-GC source in a "boehm_gc" subdirectory from the =b2= sources.]]
+ [[[literal --duma]]
+ [Enables use of the DUMA (Detect Unintended Memory Access) debugging memory allocator. The build expects to find the DUMA source files in a "duma" subdirectory from the =b2= sources.]]
+ [[[literal --toolset-root=/path/]]
+ [Indicates where the toolset used to build is located. This option is passed in by the bootstrap (=build.bat= or =build.sh=) script.]]
+ [[[literal --show-locate-target]]
+ [For information, prints out where it will put the built executable.]]
+ [[[literal --noassert]]
+ [Disable debug assertions, even if building the debug version of the executable.]]
+ [[[literal dist]]
+ [Generate packages (compressed archives) as appropriate for distribution in the platform, if possible.]]
+ [[[literal clean]]
+ [Remove all the built executables and objects.]]
+]
+
+[endsect]
+
+[section:language Language]
+
+=B2= has an interpreted, procedural language. Statements in =b2= are rule (procedure) definitions, rule invocations, flow-of-control structures, variable assignments, and sundry language support.
+
+[section:lexical Lexical Features]
+
+=B2= treats its input files as whitespace-separated tokens, with two exceptions: double quotes (") can enclose whitespace to embed it into a token, and everything between the matching curly braces ({}) in the definition of a rule action is treated as a single string. A backslash (\\) can escape a double quote, or any single whitespace character.
+
+=B2= requires whitespace (blanks, tabs, or newlines) to surround all tokens, including the colon (:) and semicolon (;) tokens.
+
+=B2= keywords (as mentioned in this document) are reserved and generally
+must be quoted with double quotes (") to be used as arbitrary tokens, such as
+variable or target names.
+
+Comments start with the [^#] character and extend until the end of line.
+
+[endsect]
+
+[section:target Targets]
+
+The essential =b2= data entity is a target. Build targets are files to be updated. Source targets are the files used in updating built targets. Built targets and source targets are collectively referred to as file targets, and frequently built targets are source targets for other built targets. Pseudotargets are symbols representing dependencies on other targets, but which are not themselves associated with any real file.
+
+A file target's identifier is generally the file's name, which can be absolutely rooted, relative to the directory of =b2='s invocation, or simply local (no directory). Most often it is the last case, and the actual file path is bound using the =$(SEARCH)= and =$(LOCATE)= special variables. See [link jam.language.variables.builtins.search SEARCH and LOCATE Variables] below. A local filename is optionally qualified with grist, a string value used to assure uniqueness. A file target with an identifier of the form /file(member)/ is a library member (usually an =ar=(1) archive on Unix).
+
+[section Binding Detection]
+
+Whenever a target is bound to a location in the filesystem, Boost Jam will look for a variable called =BINDRULE= (first "on" the target being bound, then in the global module). If non-empty, =$(BINDRULE[1])= names a rule which is called with the name of the target and the path it is being bound to. The signature of the rule named by =$(BINDRULE[1])= should match the following:
+
+[pre
+rule /bind-rule/ ( /target/ : /path/ )
+]
+
+This facility is useful for correct header file scanning, since many compilers will search for `#include` files first in the directory containing the file doing the `#include` directive. =$(BINDRULE)= can be used to make a record of that directory.
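+
+As a minimal, illustrative sketch (the rule name =note-binding= is made up here; the signature matches the one shown above):
+
+[pre
+rule note-binding ( target : path )
+{
+    ECHO $(target) "is bound to" $(path) ;
+}
+BINDRULE = note-binding ;
+]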
+
+[endsect]
+
+[endsect]
+
+[section:rules Rules]
+
+The basic =b2= language entity is called a rule. A rule is defined in two parts: the procedure and the actions. The procedure is a body of jam statements to be run when the rule is invoked; the actions are the OS shell commands to execute when updating the built targets of the rule.
+
+Rules can return values, which can be expanded into a list with "[ /rule/ /args/ ... ]". A rule's value is the value of its last statement, though only the following statements have values: 'if' (value of the leg chosen), 'switch' (value of the case chosen), set (value of the resulting variable), and 'return' (value of its arguments). Note that 'return' doesn't actually cause a return; i.e., it is a no-op unless it is the last statement of the last block executed within the rule body.
+
+The =b2= statements for defining and invoking rules are as follows:
+
+Define a rule's procedure, replacing any previous definition.
+
+[pre
+rule /rulename/ { /statements/ }
+]
+
+Define a rule's updating actions, replacing any previous definition.
+
+[pre
+actions \[ /modifiers/ \] /rulename/ { /commands/ }
+]
+
+Invoke a rule.
+
+[pre
+/rulename/ /field1/ : /field2/ : /.../ : /fieldN/ ;
+]
+
+Invoke a rule under the influence of a target's specific variables.
+
+[pre
+on /target/ /rulename/ /field1/ : /field2/ : /.../ : /fieldN/ ;
+]
+
+Used as an argument, expands to the return value of the rule invoked.
+
+[pre
+\[ /rulename/ /field1/ : /field2/ : /.../ : /fieldN/ \]
+\[ on /target/ /rulename/ /field1/ : /field2/ : /.../ : /fieldN/ \]
+]
+
+A rule is invoked with values in /field1/ through /fieldN/. They may be referenced in the procedure's statements as [^$(1)] through [^$(['N])] (9 max), and the first two only may be referenced in the action's /commands/ as [^$(1)] and [^$(2)]. [^$(<)] and [^$(>)] are synonymous with [^$(1)] and [^$(2)].
+
+Rules fall into two categories: updating rules (with actions), and pure procedure rules (without actions). Updating rules treat arguments [^$(1)] and [^$(2)] as built targets and sources, respectively, while pure procedure rules can take arbitrary arguments.
+
+When an updating rule is invoked, its updating actions are added to those associated with its built targets ([^$(1)]) before the rule's procedure is run. Later, to build the targets in the updating phase, /commands/ are passed to the OS command shell, with [^$(1)] and [^$(2)] replaced by bound versions of the target names. See Binding above.
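+
+As a minimal, illustrative sketch of an updating rule (the rule name, file names, and compiler command are only examples; =DEPENDS= is described under Built-in Rules below):
+
+[pre
+rule Object ( obj : src )
+{
+    DEPENDS $(obj) : $(src) ;
+}
+actions Object
+{
+    cc -c -o $(<) $(>)
+}
+Object hello.o : hello.c ;
+]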
+
+Rule invocation may be indirected through a variable:
+
+[pre
+$(/var/) /field1/ : /field2/ : /.../ : /fieldN/ ;
+
+on /target/ $(/var/) /field1/ : /field2/ : /.../ : /fieldN/ ;
+
+\[ $(/var/) /field1/ : /field2/ : /.../ : /fieldN/ \]
+\[ on /target/ $(/var/) /field1/ : /field2/ : /.../ : /fieldN/ \]
+]
+
+The variable's value names the rule (or rules) to be invoked. A rule is
+invoked for each element in the list of [^$(/var/)]'s values. The fields
+[^/field1/ : /field2/ : /.../] are passed as arguments for each
+invocation. For the [ ... ] forms, the return value is the concatenation of
+the return values for all of the invocations.
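+
+For example, a small sketch of invoking a rule indirectly through a variable:
+
+[pre
+to-invoke = ECHO ;
+$(to-invoke) "Hello, world" ;
+]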
+
+[section Action Modifiers]
+
+The following action modifiers are understood:
+
+[variablelist
+
+[[[^actions bind /vars/]]
+ [[^$(/vars/)] will be replaced with bound values.]]
+
+[[[^actions existing]]
+ [[^$(>)] includes only source targets currently existing.]]
+
+[[[^actions ignore]]
+ [The return status of the commands is ignored.]]
+
+[[[^actions piecemeal]]
+ [Commands are repeatedly invoked with a subset of [^$(>)] small enough to fit in the command buffer on this OS.]]
+
+[[[^actions quietly]]
+ [The action is not echoed to the standard output.]]
+
+[[[^actions together]]
+ [The [^$(>)] from multiple invocations of the same action on the same built target are glommed together.]]
+
+[[[^actions updated]]
+ [[^$(>)] includes only source targets themselves marked for updating.]]
+
+]
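+
+As an illustrative sketch combining several of these modifiers (the rule name and command are only examples):
+
+[pre
+actions quietly together piecemeal Clean
+{
+    rm -f $(>)
+}
+]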
+
+[endsect]
+
+[section Argument lists]
+
+You can describe the arguments accepted by a rule, and refer to them by name within the rule. For example, the following prints "I'm sorry, Dave" to the console:
+
+[pre
+rule report ( pronoun index ? : state : names + )
+{
+ local he.suffix she.suffix it.suffix = s ;
+ local I.suffix = m ;
+ local they.suffix you.suffix = re ;
+ ECHO $(pronoun)'$($(pronoun).suffix) $(state), $(names\[$(index)\]) ;
+}
+report I 2 : sorry : Joe Dave Pete ;
+]
+
+Each name in a list of formal arguments (separated by "=:=" in the rule declaration) is bound to a single element of the corresponding actual argument unless followed by one of these modifiers:
+
+[table
+[[Symbol] [Semantics of preceding symbol]]
+[[=?=] [optional]]
+[[=*=] [Bind to zero or more unbound elements of the actual argument. When =*= appears where an argument name is expected, any number of additional arguments are accepted. This feature can be used to implement "varargs" rules.]]
+[[=+=] [Bind to one or more unbound elements of the actual argument.]]
+]
+
+The actual and formal arguments are checked for inconsistencies, which cause =b2= to exit with an error code:
+
+[pre
+### argument error
+# rule report ( pronoun index ? : state : names + )
+# called with: ( I 2 foo : sorry : Joe Dave Pete )
+# extra argument foo
+### argument error
+# rule report ( pronoun index ? : state : names + )
+# called with: ( I 2 : sorry )
+# missing argument names
+]
+
+If you omit the list of formal arguments, all checking is bypassed as in "classic" Jam. Argument lists drastically improve the reliability and readability of your rules, however, and are *strongly recommended* for any new Jam code you write.
+
+[endsect]
+
+[section:builtins Built-in Rules]
+
+=B2= has a growing set of built-in rules, all of which are pure procedure rules without updating actions. They are in three groups: the first builds the dependency graph; the second modifies it; and the third are just utility rules.
+
+[section Dependency Building]
+
+[section =DEPENDS= ]
+
+[pre
+rule DEPENDS ( /targets1/ * : /targets2/ * )
+]
+
+Builds a direct dependency: makes each of /targets1/ depend on each of /targets2/. Generally, /targets1/ will be rebuilt if /targets2/ are themselves rebuilt or are newer than /targets1/.
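+
+For example, assuming the usual compile-and-link targets:
+
+[pre
+DEPENDS hello : hello.o ;
+DEPENDS hello.o : hello.c ;
+]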
+
+[endsect]
+
+[section =INCLUDES= ]
+
+[pre
+rule INCLUDES ( /targets1/ * : /targets2/ * )
+]
+
+Builds a sibling dependency: makes any target that depends on any of /targets1/ also depend on each of /targets2/. This reflects the dependencies that arise when one source file includes another: the object built from the source file depends on both the original and the included source file, but the two source files don't depend on each other. For example:
+
+[pre
+DEPENDS foo.o : foo.c ;
+INCLUDES foo.c : foo.h ;
+]
+
+"=foo.o=" depends on "=foo.c=" and "=foo.h=" in this example.
+
+[endsect]
+
+[endsect]
+
+[section Modifying Binding]
+
+The six rules =ALWAYS=, =LEAVES=, =NOCARE=, =NOTFILE=, =NOUPDATE=, and =TEMPORARY= modify the dependency graph so that =b2= treats the targets differently during its target binding phase. See Binding above. Normally, =b2= updates a target if it is missing, if its filesystem modification time is older than any of its dependencies (recursively), or if any of its dependencies are being updated. This basic behavior can be changed by invoking the following rules:
+
+[section =ALWAYS= ]
+
+[pre
+rule ALWAYS ( /targets/ * )
+]
+
+Causes /targets/ to be rebuilt regardless of whether they are up-to-date (they must still be in the dependency graph). This is used for the clean and uninstall targets, as they have no dependencies and would otherwise appear never to need building. It is best applied to targets that are also =NOTFILE= targets, but it can also be used to force a real file to be updated as well.
+
+[endsect]
+
+[section =LEAVES= ]
+
+[pre
+rule LEAVES ( /targets/ * )
+]
+
+Makes each of /targets/ depend only on its leaf sources, and not on any intermediate targets. This makes it immune to its dependencies being updated, as the "leaf" dependencies are those without their own dependencies and without updating actions. This allows a target to be updated only if original source files change.
+
+[endsect]
+
+[section =NOCARE= ]
+
+[pre
+rule NOCARE ( /targets/ * )
+]
+
+Causes =b2= to ignore /targets/ that neither can be found nor have updating actions to build them. Normally for such targets =b2= issues a warning and then skips other targets that depend on these missing targets. The =HdrRule= in =Jambase= uses =NOCARE= on the header file names found during header file scanning, to let =b2= know that the included files may not exist. For example, if an `#include` is within an `#ifdef`, the included file may not actually be around.
+
+[warning For targets with build actions: if their build actions exit with a nonzero return code, dependent targets will still be built.]
+
+[endsect]
+
+[section =NOTFILE= ]
+
+[pre
+rule NOTFILE ( /targets/ * )
+]
+
+Marks /targets/ as pseudotargets and not real files. No timestamp is checked, and so the actions on such a target are only executed if the target's dependencies are updated, or if the target is also marked with =ALWAYS=. The default =b2= target "=all=" is a pseudotarget. In =Jambase=, =NOTFILE= is used to define several additional convenient pseudotargets.
+
+[endsect]
+
+[section =NOUPDATE= ]
+
+[pre
+rule NOUPDATE ( /targets/ * )
+]
+
+Causes the timestamps on /targets/ to be ignored. This has two effects: first, once the target has been created it will never be updated; second, manually updating a target will not cause other targets to be updated. In =Jambase=, for example, this rule is applied to directories by the =MkDir= rule, because =MkDir= only cares that the target directory exists, not when it was last updated.
+
+[endsect]
+
+[section =TEMPORARY= ]
+
+[pre
+rule TEMPORARY ( /targets/ * )
+]
+
+Marks /targets/ as temporary, allowing them to be removed after other targets that depend upon them have been updated. If a =TEMPORARY= target is missing, =b2= uses the timestamp of the target's parent. =Jambase= uses =TEMPORARY= to mark object files that are archived in a library after they are built, so that they can be deleted after they are archived.
+
+[endsect]
+
+[section =FAIL_EXPECTED= ]
+
+[pre
+rule FAIL_EXPECTED ( /targets/ * )
+]
+
+For handling targets whose build actions are expected to fail (e.g. when testing
+that assertions or compile-time type checking work properly), Boost Jam supplies
+the =FAIL_EXPECTED= rule in the same style as =NOCARE=, et al. During target
+updating, the return code of the build actions for arguments to =FAIL_EXPECTED=
+is inverted: if it fails, building of dependent targets continues as though it
+succeeded. If it succeeds, dependent targets are skipped.
+
+[endsect]
+
+[section =RMOLD= ]
+
+[pre
+rule RMOLD ( /targets/ * )
+]
+
+=B2= removes any target files that may exist on disk when the rule used to build those targets fails. However, targets whose dependencies fail to build are not removed by default. The =RMOLD= rule causes its arguments to be removed if any of their dependencies fail to build.
+
+[endsect]
+
+[section =ISFILE= ]
+
+[pre
+rule ISFILE ( /targets/ * )
+]
+
+=ISFILE= marks targets as required to be files. This changes the way =b2= searches for the target such that it ignores matches for file system items that are not files, like directories. This makes it possible to avoid `#include "exception"` matching if one happens to have a directory named exception in the header search path.
+
+[warning This is currently not fully implemented.]
+
+[endsect]
+
+[endsect]
+
+[section Utility]
+
+The two rules =ECHO= and =EXIT= are utility rules, used only in =b2='s parsing phase.
+
+[section =ECHO= ]
+
+[pre
+rule ECHO ( /args/ * )
+]
+
+Blurts out the message /args/ to stdout.
+
+[endsect]
+
+[section =EXIT= ]
+
+[pre
+rule EXIT ( /message/ * : /result-value/ ? )
+]
+
+Blurts out the /message/ to stdout and then exits with a failure status if no /result-value/ is given, otherwise it exits with the given /result-value/.
+
+"=Echo=", "=echo=", "=Exit=", and "=exit=" are accepted as aliases for =ECHO= and =EXIT=, since it is hard to tell that these are built-in rules and not part of the language, like "=include=".
+
+[endsect]
+
+[section =GLOB= ]
+
+The =GLOB= rule does filename globbing.
+
+[pre
+rule GLOB ( /directories/ * : /patterns/ * : /downcase-opt/ ? )
+]
+
+It uses the same wildcards as the patterns in the switch statement. It is invoked by being used as an argument to a rule invocation inside of "=[ ]=". For example: "[^FILES = \[ GLOB dir1 dir2 : *.c *.h \]]" sets =FILES= to the list of C source and header files in =dir1= and =dir2=. The resulting filenames are the full pathnames, including the directory, but the pattern is applied only to the file name without the directory.
+
+If /downcase-opt/ is supplied, filenames are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT and Cygwin, filenames are always downcased before matching.
+
+[endsect]
+
+[section =MATCH= ]
+
+The =MATCH= rule does pattern matching.
+
+[pre
+rule MATCH ( /regexps/ + : /list/ * )
+]
+
+Matches the =egrep=(1) style regular expressions /regexps/ against the strings in /list/. The result is a list of matching =()= subexpressions for each string in /list/, and for each regular expression in /regexps/.
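+
+For example, a minimal, illustrative sketch (the option strings are made up):
+
+[pre
+local parts = \[ MATCH "--with-(.*)" : --with-python --with-icu \] ;
+ECHO $(parts) ; # prints "python icu"
+]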
+
+[endsect]
+
+[section =BACKTRACE= ]
+
+[pre
+rule BACKTRACE ( )
+]
+
+Returns a list of quadruples: /filename/ /line/ /module/ /rulename/..., describing each shallower level of the call stack. This rule can be used to generate useful diagnostic messages from Jam rules.
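+
+For example, a minimal sketch that dumps the call stack when a rule is entered:
+
+[pre
+rule where-am-i ( )
+{
+    ECHO \[ BACKTRACE \] ;
+}
+]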
+
+[endsect]
+
+[section =UPDATE= ]
+
+[pre
+rule UPDATE ( /targets/ * )
+]
+
+Classic Jam treats any non-option element of the command line as the name of a target to be updated. This prevented more sophisticated handling of the command line. This behavior is now enabled again, but with additional changes to the =UPDATE= rule that allow the list of targets to update to be changed. The =UPDATE= rule has two effects:
+
+# It clears the list of targets to update, and
+# Causes the specified targets to be updated.
+
+If no target was specified with the =UPDATE= rule, no targets will be updated. To support changing of the update list in more useful ways, the rule also returns the targets previously in the update list. This makes it possible to add targets as follows:
+
+[pre
+local previous-updates = \[ UPDATE \] ;
+UPDATE $(previous-updates) a-new-target ;
+]
+
+[endsect]
+
+[section =W32_GETREG= ]
+
+[pre
+rule W32_GETREG ( /path/ : /data/ ? )
+]
+
+Defined only for the win32 platform. It reads the Windows registry. '/path/' is the location of the information, and '/data/' is the name of the value we want to get. If '/data/' is omitted, the default value of '/path/' will be returned. The '/path/' value must conform to the MS key path format and must be prefixed with one of the predefined root keys. As usual,
+
+* '=HKLM=' is equivalent to '=HKEY_LOCAL_MACHINE='.
+* '=HKCU=' is equivalent to '=HKEY_CURRENT_USER='.
+* '=HKCR=' is equivalent to '=HKEY_CLASSES_ROOT='.
+
+Other predefined root keys are not supported.
+
+The currently supported data types are '=REG_DWORD=', '=REG_SZ=', '=REG_EXPAND_SZ=', and '=REG_MULTI_SZ='. Data with the '=REG_DWORD=' type will be turned into a string, '=REG_MULTI_SZ=' into a list of strings, and for data with the '=REG_EXPAND_SZ=' type, environment variables in it will be replaced with their defined values. Data with the '=REG_SZ=' type and other unsupported types will be put into a string without modification. If the value of the data cannot be retrieved, an empty list is returned. For example,
+
+[pre
+local PSDK-location =
+ \[ W32_GETREG HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\MicrosoftSDK\\\\Directories : "Install Dir" \] ;
+]
+
+[endsect]
+
+[section =W32_GETREGNAMES= ]
+
+[pre
+rule W32_GETREGNAMES ( /path/ : /result-type/ )
+]
+
+Defined only for the win32 platform. It reads the Windows registry. '/path/' is the location of the information, and '/result-type/' is either '=subkeys=' or '=values='. For more information on the '/path/' format and constraints, please see =W32_GETREG=.
+
+Depending on '/result-type/', the rule returns one of the following:
+
+[variablelist
+ [[=subkeys=] [Names of all direct subkeys of '/path/'.]]
+ [[=values=] [Names of values contained in registry key given by '/path/'. The "default" value of the key appears in the returned list only if its value has been set in the registry.]]
+]
+
+If '/result-type/' is not recognized, or requested data cannot be retrieved, the rule returns an empty list.
+Example:
+
+[pre
+local key = "HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\App Paths" ;
+local subkeys = \[ W32_GETREGNAMES "$(key)" : subkeys \] ;
+for local subkey in $(subkeys)
+{
+ local values = \[ W32_GETREGNAMES "$(key)\\\\$(subkey)" : values \] ;
+ for local value in $(values)
+ {
+ local data = \[ W32_GETREG "$(key)\\\\$(subkey)" : "$(value)" \] ;
+ ECHO "Registry path: " $(key)\\\\$(subkey) ":" $(value) "=" $(data) ;
+ }
+}
+]
+
+[endsect]
+
+[section =SHELL= ]
+
+[pre
+rule SHELL ( /command/ : * )
+]
+
+=SHELL= executes /command/, and then returns the standard output of /command/. =SHELL= only works on platforms with a =popen()= function in the C library. On platforms without a working =popen()= function, =SHELL= is implemented as a no-op. =SHELL= works on Unix, MacOS X, and most Windows compilers. =SHELL= is a no-op on Metrowerks compilers under Windows. There is a variable set of allowed options as additional arguments:
+
+[variablelist
+ [[=exit-status=] [In addition to the output the result status of the executed command is returned as a second element of the result.]]
+ [[=no-output=] [Don't capture the output of the command. Instead an empty ("") string value is returned in place of the output.]]
+]
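+
+For example, a minimal, illustrative sketch (the commands shown are Unix-specific):
+
+[pre
+local machine = \[ SHELL "uname -m" \] ;
+local result = \[ SHELL "ls no-such-file" : exit-status \] ;
+ECHO "exit status was" $(result\[2\]) ;
+]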
+
+Because the Perforce/Jambase defines a =SHELL= rule which hides the
+builtin rule, =COMMAND= can be used as an alias for =SHELL= in such a case.
+
+[endsect]
+
+[section =MD5= ]
+
+[pre
+rule MD5 ( /string/ )
+]
+
+=MD5= computes the MD5 hash of the string passed as a parameter and returns it.
+
+[endsect]
+
+[section =SPLIT_BY_CHARACTERS= ]
+
+[pre
+rule SPLIT_BY_CHARACTERS ( /string/ : /delimiters/ )
+]
+
+=SPLIT_BY_CHARACTERS= splits the specified /string/ on any delimiter character
+present in /delimiters/ and returns the resulting list.
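+
+As a short illustration of the behaviour described above:
+
+[pre
+local parts = \[ SPLIT_BY_CHARACTERS "a,b;c" : ",;" \] ;
+ECHO $(parts) ; # prints "a b c"
+]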
+
+[endsect]
+
+[section =PRECIOUS= ]
+
+[pre
+rule PRECIOUS ( /targets/ * )
+]
+
+The =PRECIOUS= rule specifies that each of the targets passed as the arguments
+should not be removed even if the command updating that target fails.
+
+[endsect]
+
+[section =PAD= ]
+
+[pre
+rule PAD ( /string/ : /width/ )
+]
+
+If /string/ is shorter than /width/ characters, pads it with whitespace
+characters on the right, and returns the result. Otherwise, returns
+/string/ unmodified.
+
+[endsect]
+
+[section =FILE_OPEN= ]
+
+[pre
+rule FILE_OPEN ( /filename/ : /mode/ )
+]
+
+The =FILE_OPEN= rule opens the specified file and returns a file
+descriptor. The /mode/ parameter can be either "w" or "r". Note
+that at present, only the =UPDATE_NOW= rule can use the resulting
+file descriptor number.
+
+[endsect]
+
+[section =UPDATE_NOW= ]
+
+[pre
+rule UPDATE_NOW ( /targets/ * : /log/ ? : /ignore-minus-n/ ? )
+]
+
+The =UPDATE_NOW= rule causes the specified targets to be updated immediately.
+If the update was successful, a non-empty string is returned. The /log/ parameter,
+if present, specifies a descriptor of a file where all output from building
+is redirected. If the /ignore-minus-n/ parameter is specified, the targets
+are updated even if the =-n= parameter is specified on the command line.
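+
+For example, an illustrative sketch that builds one target immediately and redirects its build output (the target and log file names are made up):
+
+[pre
+local log = \[ FILE_OPEN build.log : "w" \] ;
+local ok = \[ UPDATE_NOW my-target : $(log) \] ;
+if $(ok) { ECHO "my-target was updated" ; }
+]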
+
+[endsect]
+
+[endsect]
+
+[endsect]
+
+[endsect]
+
+[section Flow-of-Control]
+
+=B2= has several simple flow-of-control statements:
+
+[pre
+for /var/ in /list/ { /statements/ }
+]
+
+Executes /statements/ for each element in /list/, setting the variable /var/ to the element value.
+
+[pre
+if /cond/ { /statements/ }
+\[ else { /statements/ } \]
+]
+
+Does the obvious; the =else= clause is optional. /cond/ is built of:
+
+[variablelist
+
+[[[^['a]]]
+ [true if any ['a] element is a non-zero-length string]]
+
+[[[^['a] = ['b]]]
+ [list ['a] matches list ['b] string-for-string]]
+
+[[[^['a] != ['b]]]
+ [list ['a] does not match list ['b]]]
+
+[[[^['a] < ['b]]]
+ [['a\[i\]] string is less than ['b\[i\]] string, where ['i] is first mismatched element in lists ['a] and ['b]]]
+
+[[[^['a] <= ['b]]]
+ [every ['a] string is less than or equal to its ['b] counterpart]]
+
+[[[^['a] > ['b]]]
+ [['a\[i\]] string is greater than ['b\[i\]] string, where ['i] is first mismatched element]]
+
+[[[^['a] >= ['b]]]
+ [every ['a] string is greater than or equal to its ['b] counterpart]]
+
+[[[^['a] in ['b]]]
+ [true if all elements of ['a] can be found in ['b], or if ['a] has no elements]]
+
+[[[^! ['cond]]]
+ [condition not true]]
+
+[[[^['cond] && ['cond]]]
+ [conjunction]]
+
+[[[^['cond] || ['cond]]]
+ [disjunction]]
+
+[[[^( ['cond] )]]
+ [precedence grouping]]
+
+]
+
+[pre
+include /file/ ;
+]
+
+Causes =b2= to read the named /file/. The /file/ is bound like a regular target (see Binding above) but unlike a regular target the include /file/ cannot be built.
+
+The include /file/ is inserted into the input stream during the parsing phase. The primary input file and all the included file(s) are treated as a single file; that is, =b2= infers no scope boundaries from included files.
+
+[pre
+local /vars/ \[ = /values/ \] ;
+]
+
+Creates new /vars/ inside the enclosing ={}= block, obscuring any previous values they might have. The previous values for /vars/ are restored when the current block ends. Any rule called or file included will see the local and not the previous value (this is sometimes called Dynamic Scoping). The local statement may appear anywhere, even outside of a block (in which case the previous value is restored when the input ends). The /vars/ are initialized to /values/ if present, or left uninitialized otherwise.
+
+[pre
+return /values/ ;
+]
+
+Within a rule body, the return statement sets the return value for an invocation of the rule. It does *not* cause the rule to return; a rule's value is actually the value of the last statement executed, so a return should be the last statement executed before the rule "naturally" returns.
+
+[pre
+switch /value/
+{
+ case /pattern1/ : /statements/ ;
+ case /pattern2/ : /statements/ ;
+ ...
+}
+]
+
+The switch statement executes zero or one of the enclosed /statements/, depending on which, if any, is the first case whose /pattern/ matches /value/. The /pattern/ values are not variable-expanded. The pattern values may include the following wildcards:
+
+[variablelist
+
+[[[^?]]
+ [match any single character]]
+
+[[[^*]]
+ [match zero or more characters]]
+
+[[[^\[/chars/\]]]
+ [match any single character in /chars/]]
+
+[[[^\[\^/chars/\]]]
+ [match any single character not in /chars/]]
+
+[[[^\\/x/]]
+ [match /x/ (escapes the other wildcards)]]
+
+]
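+
+For example:
+
+[pre
+local platform = windows ;
+switch $(platform)
+{
+    case win* : ECHO "a Windows platform" ;
+    case * : ECHO "something else" ;
+}
+]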
+
+[pre
+while /cond/ { /statements/ }
+]
+
+Repeatedly execute /statements/ while /cond/ remains true upon entry. (See the description of /cond/ expression syntax under if, above).
+
+[endsect]
+
+[section Variables]
+
+=B2= variables are lists of zero or more elements, with each element being a string value. An undefined variable is indistinguishable from a variable with an empty list; however, a defined variable may have one or more elements which are null strings. All variables are referenced as [^$(/variable/)].
+
+Variables are either global or target-specific. In the latter case, the variable takes on the given value only during the updating of the specific target.
+
+A variable is defined with:
+
+[pre
+/variable/ = /elements/ ;
+/variable/ += /elements/ ;
+/variable/ on /targets/ = /elements/ ;
+/variable/ on /targets/ += /elements/ ;
+/variable/ default = /elements/ ;
+/variable/ ?= /elements/ ;
+]
+
+The first two forms set /variable/ globally. The third and fourth forms set a target-specific variable. The [^\=] operator replaces any previous elements of /variable/ with /elements/; the [^+=] operation adds /elements/ to /variable/'s list of elements. The final two forms are synonymous: they set /variable/ globally, but only if it was previously unset.
+
+Variables referenced in updating commands will be replaced with their values; target-specific values take precedence over global values. Variables passed as arguments (=$(1)= and =$(2)=) to actions are replaced with their bound values; the "=bind=" modifier can be used on actions to cause other variables to be replaced with bound values. See Action Modifiers above.
+
+=B2= variables are not re-exported to the environment of the shell that executes the updating actions, but the updating actions can reference =b2= variables with [^$(/variable/)].
+
+[section:expansion Variable Expansion]
+
+During parsing, =b2= performs variable expansion on each token that is not a keyword or rule name. Such tokens with embedded variable references are replaced with zero or more tokens. Variable references are of the form [^$(/v/)] or [^$(/vm/)], where ['v] is the variable name, and ['m] are optional modifiers.
+
+Variable expansion in a rule's actions is similar to variable expansion in statements, except that the action string is tokenized at whitespace regardless of quoting.
+
+The result of a token after variable expansion is the /product/ of the components of the token, where each component is a literal substring or a list substituting a variable reference. For example:
+
+[pre
+$(X) -> a b c
+t$(X) -> ta tb tc
+$(X)z -> az bz cz
+$(X)-$(X) -> a-a a-b a-c b-a b-b b-c c-a c-b c-c
+]
+
+The variable name and modifiers can themselves contain a variable reference, and this partakes of the product as well:
+
+[pre
+$(X) -> a b c
+$(Y) -> 1 2
+$(Z) -> X Y
+$($(Z)) -> a b c 1 2
+]
+
+Because of this product expansion, if any variable reference in a token is undefined, the result of the expansion is an empty list. If any variable element is a null string, the result propagates the non-null elements:
+
+[pre
+$(X) -> a ""
+$(Y) -> "" 1
+$(Z) ->
+-$(X)$(Y)- -> -a- -a1- -- -1-
+-$(X)$(Z)- ->
+]
+
+A variable element's string value can be parsed into grist and filename-related components. Modifiers to a variable are used to select elements, select components, and replace components. The modifiers are:
+
+[variablelist
+
+[[[^\[['n]\]]] [Select element number ['n] (starting at 1). If the variable
+ contains fewer than ['n] elements, the result is a zero-element list. ['n]
+ can be negative, in which case element number ['n], counting backward from the
+ end, is returned.]]
+
+[[[^\[['n]-['m]\]]]
+ [Select elements number ['n] through ['m]. ['n] and ['m] can be negative, in which case they refer to elements counting backward from the end.]]
+
+[[[^\[['n]-\]]]
+ [Select elements number ['n] through the last. ['n] can be negative, in which case it refers to an element counting backward from the end.]]
+
+[[[^:B]]
+ [Select filename base.]]
+
+[[[^:S]]
+ [Select (last) filename suffix.]]
+
+[[[^:M]]
+ [Select archive member name.]]
+
+[[[^:D]]
+ [Select directory path.]]
+
+[[[^:P]]
+ [Select parent directory.]]
+
+[[[^:G]]
+ [Select grist.]]
+
+[[[^:U]]
+ [Replace lowercase characters with uppercase.]]
+
+[[[^:L]]
+ [Replace uppercase characters with lowercase.]]
+
+[[[^:T]]
+ [Converts all back-slashes ("\\") to forward slashes ("/"). For example
+``
+ x = "C:\\Program Files\\Borland" ; ECHO $(x:T) ;
+``
+prints [^"C:/Program Files/Borland"]
+]]
+
+[[[^:W]]
+ [When invoking Windows-based tools from [@http://www.cygwin.com/ Cygwin]
+ it can be important to pass them true windows-style paths. The =:W=
+ modifier, *under Cygwin only*, turns a cygwin path into a Win32 path using
+ the [@http://www.cygwin.com/cygwin-api/func-cygwin-conv-to-win32-path.html
+ =cygwin_conv_to_win32_path=] function. On other platforms, the string is
+ unchanged. For example
+``
+ x = "/cygdrive/c/Program Files/Borland" ; ECHO $(x:W) ;
+``
+prints [^"C:\\Program Files\\Borland"] on Cygwin
+]]
+
+[[[^:['chars]]]
+ [Select the components listed in ['chars].]]
+
+[[[^:G=['grist]]]
+ [Replace grist with ['grist].]]
+
+[[[^:D=['path]]]
+ [Replace directory with ['path].]]
+
+[[[^:B=['base]]]
+ [Replace the base part of file name with ['base].]]
+
+[[[^:S=['suf]]]
+ [Replace the suffix of file name with ['suf].]]
+
+[[[^:M=['mem]]]
+ [Replace the archive member name with ['mem].]]
+
+[[[^:R=['root]]]
+ [Prepend ['root] to the whole file name, if not already rooted.]]
+
+[[[^:E=['value]]]
+ [Assign ['value] to the variable if it is unset.]]
+
+[[[^:J=['joinval]]]
+  [Concatenate list elements into a single element, separated by ['joinval].]]
+
+]
+
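+For example, given a made-up gristed file name and a simple list:
+
+[pre
+x = <app>hello.cpp ;
+ECHO $(x:G) ;      # prints "<app>"
+ECHO $(x:B) ;      # prints "hello"
+ECHO $(x:S) ;      # prints ".cpp"
+ECHO $(x:S=.o) ;   # prints "<app>hello.o"
+y = a b c ;
+ECHO $(y\[2\]) ;    # prints "b"
+ECHO $(y:J=-) ;    # prints "a-b-c"
+]
+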
+On VMS, [^$(var:P)] is the parent directory of [^$(var:D)].
+
+[endsect]
+
+[section Local For Loop Variables]
+
+Boost Jam allows you to declare a local for loop control variable right in the loop:
+
+[pre
+x = 1 2 3 ;
+y = 4 5 6 ;
+for *local* y in $(x)
+{
+ ECHO $(y) ; # prints "1", "2", or "3"
+}
+ECHO $(y) ; # prints "4 5 6"
+]
+
+[endsect]
+
+[section:atfile Generated File Expansion]
+
+During expansion of expressions =b2= also looks for subexpressions of the form
+=@(filename:E=filecontents)= and replaces the expression with =filename= after
+creating the given file with the contents set to =filecontents=. This is useful
+for creating compiler response files, and other "internal" files. The expansion
+works both during parsing and action execution. Hence it is possible to create
+files during any of the three build phases.
+
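+As a minimal sketch (the file name and contents are arbitrary):
+
+[pre
+x = @(hello.rsp:E=-O2) ;
+ECHO $(x) ;   # prints "hello.rsp"; the file now exists and contains "-O2"
+]
+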
+[endsect]
+
+[section:builtins Built-in Variables]
+
+This section discusses variables that have special meaning to =b2=. All of
+these must be defined or used in the global module -- using those variables
+inside a named module will not have the desired effect.
+See [link jam.language.modules Modules].
+
+[section:search SEARCH and LOCATE]
+
+These two variables control the binding of file target names to locations in
+the file system. Generally, =$(SEARCH)= is used to find existing sources
+while =$(LOCATE)= is used to fix the location for built targets.
+
+Rooted (absolute path) file targets are bound as is. Unrooted file target names are also normally bound as is, and thus relative to the current directory, but the settings of =$(LOCATE)= and =$(SEARCH)= alter this:
+
+* If =$(LOCATE)= is set then the target is bound relative to the first directory in =$(LOCATE)=. Only the first element is used for binding.
+* If =$(SEARCH)= is set then the target is bound to the first directory in =$(SEARCH)= where the target file already exists.
+* If the =$(SEARCH)= search fails, the target is bound relative to the current directory anyhow.
+
+Both =$(SEARCH)= and =$(LOCATE)= should be set target-specific and not globally. If they were set globally, =b2= would use the same paths for all file binding, which is not likely to produce sane results. When writing your own rules, especially ones not built upon those in Jambase, you may need to set =$(SEARCH)= or =$(LOCATE)= directly. Almost all of the rules defined in Jambase set =$(SEARCH)= and =$(LOCATE)= to sensible values for sources they are looking for and targets they create, respectively.
+
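+For illustration, a minimal target-specific setting might look like this (the file and directory names are arbitrary):
+
+[pre
+SEARCH on hello.c = src ;   # look for the existing source in the src directory
+LOCATE on hello.o = bin ;   # place the built target in the bin directory
+]
+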
+[endsect]
+
+[section:hdrscan HDRSCAN and HDRRULE]
+
+These two variables control header file scanning. =$(HDRSCAN)= is an
+=egrep(1)= pattern, with ()'s surrounding the file name, used to find file
+inclusion statements in source files. =Jambase= uses =$(HDRPATTERN)= as the
+pattern for =$(HDRSCAN)=. =$(HDRRULE)= is the name of a rule to invoke with
+the results of the scan: the scanned file is the target, the found files are
+the sources. This is the only place where =b2= invokes a rule through a
+variable setting.
+
+Both =$(HDRSCAN)= and =$(HDRRULE)= must be set for header file scanning to take place, and they should be set target-specific and not globally. If they were set globally, all files, including executables and libraries, would be scanned for header file include statements.
+
+The scanning for header file inclusions is not exact, but it is at least dynamic, so there is no need to run something like =makedepend(GNU)= to create a static dependency file. The scanning mechanism errs on the side of inclusion (i.e., it is more likely to return filenames that are not actually used by the compiler than to miss include files) because it can't tell if `#include` lines are inside `#ifdefs` or other conditional logic. In =Jambase=, =HdrRule= applies the =NOCARE= rule to each header file found during scanning, so that if a scanned header file is missing and its absence does not cause the compilation to fail, =b2= won't care.
+
+Also, scanning for regular expressions only works where the included file name is literally in the source file. It can't handle languages that allow including files using variable names (as the =Jam= language itself does).
+
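+As a sketch of a target-specific setup (the rule name is a placeholder, and the pattern is deliberately simplified to match only a single angle-bracket include form):
+
+[pre
+rule my-hdr-rule ( target : matches * )
+{
+    INCLUDES $(target) : $(matches) ;   # the scanned file depends on the headers found in it
+}
+HDRSCAN on hello.c = "^#include <(.+)>" ;
+HDRRULE on hello.c = my-hdr-rule ;
+]
+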
+[endsect]
+
+[section Semaphores]
+
+It is sometimes desirable to disallow parallel execution of some actions. For example:
+
+* Old versions of yacc use files with fixed names. So, running two yacc actions is dangerous.
+* One might want to perform parallel compiling, but not parallel linking, because linking is I/O bound and running several links at once only makes them slower.
+
+Craig McPeeters has extended Perforce Jam to solve such problems, and that extension was integrated into Boost.Jam.
+
+Any target can be assigned a /semaphore/, by setting a variable called =SEMAPHORE= on that target. The value of the variable is the semaphore name. It must be different from names of any declared target, but is arbitrary otherwise.
+
+The semantics of semaphores are that in a group of targets which have the same semaphore, only one can be updated at a time, regardless of the "=-j=" option.
+
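+For example, to keep two link actions from ever running at the same time, one might write (the target and semaphore names are arbitrary):
+
+[pre
+SEMAPHORE on app1 = linking-semaphore ;
+SEMAPHORE on app2 = linking-semaphore ;
+]
+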
+[endsect]
+
+[section Platform Identifier]
+
+A number of Jam built-in variables can be used to identify runtime platform:
+
+[variablelist
+[[=OS=] [OS identifier string]]
+[[=OSPLAT=] [Underlying architecture, when applicable]]
+[[=MAC=] [true on MAC platform]]
+[[=NT=] [true on NT platform]]
+[[=OS2=] [true on OS2 platform]]
+[[=UNIX=] [true on Unix platforms]]
+[[=VMS=] [true on VMS platform]]
+]
+
+[endsect]
+
+[section Jam Version]
+
+[variablelist
+[[=JAMDATE=] [Time and date at =b2= start-up as an ISO-8601 UTC value.]]
+[[=JAMUNAME=] [Output of the uname(1) command (Unix only)]]
+[[=JAMVERSION=] [=b2= version, currently ":version:"]]
+[[=JAM_VERSION=] [A predefined global variable with two elements that indicates the version number of Boost Jam. Boost Jam versions start at "=03=" "=00=". Earlier versions of =Jam= do not automatically define =JAM_VERSION=.]]
+]
+
+[endsect]
+
+[section JAMSHELL]
+
+When =b2= executes a rule's action block, it forks and execs a shell, passing the action block as an argument to the shell. The invocation of the shell can be controlled by =$(JAMSHELL)=. The default on Unix is, for example:
+
+[pre
+JAMSHELL = /bin/sh -c % ;
+]
+
+The =%= is replaced with the text of the action block.
+
+=B2= does not directly support building in parallel across multiple hosts, since that is heavily dependent on the local environment. To build in parallel across multiple hosts, you need to write your own shell that provides access to the multiple hosts. You then reset =$(JAMSHELL)= to reference it.
+
+Just as =b2= expands a =%= to be the text of the rule's action block, it expands a =!= to be the multi-process slot number. The slot number varies between 1 and the number of concurrent jobs permitted by the =-j= flag given on the command line. Armed with this, it is possible to write a multiple host shell. For example:
+
+[pre
+#!/bin/sh
+
+# This sample JAMSHELL uses the SunOS on(1) command to execute a
+# command string with an identical environment on another host.
+
+# Set JAMSHELL = jamshell ! %
+#
+# where jamshell is the name of this shell file.
+#
+# This version handles up to -j6; after that they get executed
+# locally.
+
+case $1 in
+1|4) on winken sh -c "$2";;
+2|5) on blinken sh -c "$2";;
+3|6) on nod sh -c "$2";;
+*) eval "$2";;
+esac
+]
+
+[endsect]
+
+[section:actionrule =__TIMING_RULE__= and =__ACTION_RULE__=]
+
+The =__TIMING_RULE__= and =__ACTION_RULE__= variables can be set to the name of a rule
+for =b2= to call *after* an action completes for a target. They both give
+diagnostic information about the action that completed. For =__TIMING_RULE__=
+the rule is called as:
+
+ rule timing-rule ( args * : target : start end user system )
+
+And =__ACTION_RULE__= is called as:
+
+ rule action-rule ( args * : target : command status start end user system : output ? )
+
+The arguments for both are:
+
+[variablelist
+ [[[^args]]
+ [Any values following the rule name in the =__TIMING_RULE__= or =__ACTION_RULE__=
+ are passed along here.]]
+ [[[^target]]
+ [The =b2= target that was built.]]
+ [[[^command]]
+ [The text of the executed command in the action body.]]
+ [[[^status]]
+ [The integer result of the executed command.]]
+ [[[^start]]
+    [The starting timestamp of the executed command as an ISO-8601 UTC value.]]
+ [[[^end]]
+    [The completion timestamp of the executed command as an ISO-8601 UTC value.]]
+ [[[^user]]
+ [The number of user CPU seconds the executed command spent as a floating
+ point value.]]
+ [[[^system]]
+ [The number of system CPU seconds the executed command spent as a floating
+ point value.]]
+ [[[^output]]
+ [The output of the command as a single string. The content of the output
+ reflects the use of the =-pX= option.]]
+]
+
+[note
+ If both variables are set for a target both are called, first =__TIMING_RULE__=
+ then =__ACTION_RULE__=. ]
+
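+As a sketch, assuming a target-specific setting as implied by the note above (the rule name and the extra argument are placeholders):
+
+[pre
+rule report-timing ( args * : target : start end user system )
+{
+    ECHO $(args) $(target) ": user" $(user) "system" $(system) ;
+}
+__TIMING_RULE__ on hello.o = report-timing compile-stage ;
+]
+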
+[endsect]
+
+[endsect]
+
+[endsect]
+
+[section Modules]
+
+Boost Jam introduces support for modules, which provide some rudimentary namespace protection for rules and variables. A new keyword, "=module=", was also introduced. The features described in this section are primitives, meaning that they are meant to provide the operations needed to write Jam rules which provide a more elegant module interface.
+
+[section Declaration]
+
+[pre
+module /expression/ { ... }
+]
+
+Code within the [^{ ... }] executes within the module named by evaluating expression. Rule definitions can be found in the module's own namespace, and in the namespace of the global module as /module-name/./rule-name/, so within a module, other rules in that module may always be invoked without qualification:
+
+[pre
+*module my_module*
+*{*
+ rule salute ( x ) { ECHO $(x), world ; }
+ rule greet ( ) { salute hello ; }
+ greet ;
+*}*
+*my_module.salute* goodbye ;
+]
+
+When an invoked rule is not found in the current module's namespace, it is looked up in the namespace of the global module, so qualified calls work across modules:
+
+[pre
+module your_module
+{
+ rule bedtime ( ) { *my_module.salute* goodnight ; }
+}
+]
+
+[endsect]
+
+[section Variable Scope]
+
+Each module has its own set of dynamically nested variable scopes. When execution passes from module A to module B, all the variable bindings from A become unavailable, and are replaced by the bindings that belong to B. This applies equally to local and global variables:
+
+[pre
+module A
+{
+ x = 1 ;
+ rule f ( )
+ {
+ local y = 999 ; # becomes visible again when B.f calls A.g
+ B.f ;
+ }
+ rule g ( )
+ {
+ ECHO $(y) ; # prints "999"
+ }
+}
+module B
+{
+ y = 2 ;
+ rule f ( )
+ {
+ ECHO $(y) ; # always prints "2"
+ A.g ;
+ }
+}
+]
+
+The only way to access another module's variables is by entering that module:
+
+[pre
+rule peek ( module-name ? : variables + )
+{
+ module $(module-name)
+ {
+ return $($(>)) ;
+ }
+}
+]
+
+Note that because existing variable bindings change whenever a new module scope is entered, argument bindings become unavailable. That explains the use of "=$(>)=" in the peek rule above.
+
+[endsect]
+
+[section Local Rules]
+
+[pre
+local rule /rulename/...
+]
+
+The rule is declared locally to the current module. It is not entered in the global module with qualification, and its name will not appear in the result of:
+
+[pre
+\[ RULENAMES /module-name/ \]
+]
+
+[endsect]
+
+[section The =RULENAMES= Rule]
+
+[pre
+rule RULENAMES ( /module/ ? )
+]
+
+Returns a list of the names of all non-local rules in the given module. If /module/ is omitted, the names of all non-local rules in the global module are returned.
+
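+For example, with the =my_module= definition from the Declaration section above:
+
+[pre
+ECHO \[ RULENAMES my_module \] ;   # prints something like "salute greet"
+]
+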
+[endsect]
+
+[section The =VARNAMES= Rule]
+
+[pre
+rule VARNAMES ( /module/ ? )
+]
+
+Returns a list of the names of all variable bindings in the given module. If /module/ is omitted, the names of all variable bindings in the global module are returned.
+
+[note This includes any local variables in rules from the call stack which have not returned at the time of the =VARNAMES= invocation.]
+
+[endsect]
+
+[section The =IMPORT= Rule]
+
+=IMPORT= allows rule name aliasing across modules:
+
+[pre
+rule IMPORT ( /source_module/ ? : /source_rules/ *
+ : /target_module/ ? : /target_rules/ * )
+]
+
+The =IMPORT= rule copies rules from the /source_module/ into the /target_module/ as local rules. If either /source_module/ or /target_module/ is not supplied, it refers to the global module. /source_rules/ specifies which rules from the /source_module/ to import; /target_rules/ specifies the names to give those rules in /target_module/. If /source_rules/ contains a name which doesn't correspond to a rule in /source_module/, or if it contains a different number of items than /target_rules/, an error is issued. For example,
+
+[pre
+# import m1.rule1 into m2 as local rule m1-rule1.
+IMPORT m1 : rule1 : m2 : m1-rule1 ;
+# import all non-local rules from m1 into m2
+IMPORT m1 : \[ RULENAMES m1 \] : m2 : \[ RULENAMES m1 \] ;
+]
+
+[endsect]
+
+[section The =EXPORT= Rule]
+
+=EXPORT= marks rules in a module as non-local, making them available for import by other modules:
+
+[pre
+rule EXPORT ( /module/ ? : /rules/ * )
+]
+
+The =EXPORT= rule marks /rules/ from /module/ as non-local (and thus exportable). If an element of /rules/ does not name a rule in /module/, an error is issued. For example,
+
+[pre
+module X {
+ local rule r { ECHO X.r ; }
+}
+IMPORT X : r : : r ; # error - r is local in X
+EXPORT X : r ;
+IMPORT X : r : : r ; # OK.
+]
+
+[endsect]
+
+[section The =CALLER_MODULE= Rule]
+
+[pre
+rule CALLER_MODULE ( /levels/ ? )
+]
+
+=CALLER_MODULE= returns the name of the module scope enclosing the call to its caller (if levels is supplied, it is interpreted as an integer number of additional levels of call stack to traverse to locate the module). If the scope belongs to the global module, or if no such module exists, returns the empty list. For example, the following prints "{Y} {X}":
+
+[pre
+module X {
+ rule get-caller { return \[ CALLER_MODULE \] ; }
+ rule get-caller's-caller { return \[ CALLER_MODULE 1 \] ; }
+ rule call-Y { return Y.call-X2 ; }
+}
+module Y {
+ rule call-X { return X.get-caller ; }
+ rule call-X2 { return X.get-caller's-caller ; }
+}
+callers = \[ X.get-caller \] \[ Y.call-X \] \[ X.call-Y \] ;
+ECHO {$(callers)} ;
+]
+
+[endsect]
+
+[section The =DELETE_MODULE= Rule]
+
+[pre
+rule DELETE_MODULE ( /module/ ? )
+]
+
+=DELETE_MODULE= removes all of the variable bindings and otherwise-unreferenced rules from the given module (or the global module, if no module is supplied), and returns their memory to the system.
+
+[note Though it won't affect rules that are currently executing until they complete, =DELETE_MODULE= should be used with extreme care because it will wipe out any other rules and all variables (including locals in that module) immediately. Because of the way dynamic binding works, variables which are shadowed by locals will not be destroyed, so the results can be really unpredictable.]
+
+[endsect]
+
+[endsect]
+
+[endsect]
+
+[section Miscellaneous]
+
+[section Diagnostics]
+
+In addition to generic error messages, =b2= may emit one of the following:
+
+[pre warning: unknown rule X]
+
+A rule was invoked that has not been defined with an "=actions=" or "=rule=" statement.
+
+[pre using N temp target(s)]
+
+Targets marked as being temporary (but nonetheless present) have been found.
+
+[pre updating N target(s)]
+
+Targets are out-of-date and will be updated.
+
+[pre can't find N target(s)]
+
+Source files can't be found and there are no actions to create them.
+
+[pre can't make N target(s)]
+
+Due to sources not being found, other targets cannot be made.
+
+[pre warning: X depends on itself]
+
+A target depends on itself either directly or through its sources.
+
+[pre don't know how to make X]
+
+A target is not present and no actions have been defined to create it.
+
+[pre X skipped for lack of Y]
+
+A source failed to build, and thus a target cannot be built.
+
+[pre warning: using independent target X]
+
+A target that is not a dependency of any other target is being referenced with =$(<)= or =$(>)=.
+
+[pre X removed]
+
+=B2= removed a partially built target after being interrupted.
+
+[endsect]
+
+[section Bugs, Limitations]
+
+For parallel building to be successful, the dependencies among files must be properly spelled out, as targets tend to get built in a quickest-first ordering. Also, beware of un-parallelizable commands that drop fixed-named files into the current directory, like =yacc(1)= does.
+
+A poorly set =$(JAMSHELL)= is likely to result in silent failure.
+
+[endsect]
+
+[section Fundamentals]
+
+This section is derived from the official Jam documentation and from experience using it and reading the Jambase rules. We repeat the information here mostly because it is essential to understanding and using Jam, but is not consolidated in a single place. Some of it is missing from the official documentation altogether. We hope it will be useful to anyone wishing to become familiar with Jam and the Boost build system.
+
+* Jam "=rules=" are actually simple procedural entities. Think of them as functions. Arguments are separated by colons.
+
+* A Jam *target* is an abstract entity identified by an arbitrary string. The built-in =DEPENDS= rule creates a link in the dependency graph between the named targets.
+
+* Note that the original Jam documentation for the built-in =INCLUDES= rule is incorrect: [^INCLUDES ['targets1] : ['targets2]] causes everything that depends on a member of /targets1/ to depend on all members of /targets2/. It does this in an odd way, by tacking /targets2/ onto a special tail section in the dependency list of everything in /targets1/. It seems to be OK to create circular dependencies this way; in fact, it appears to be the "right thing to do" when a single build action produces both /targets1/ and /targets2/.
+
+* When a rule is invoked, if there are =actions= declared with the same name as the rule, the actions are added to the updating actions for the target identified by the rule's first argument. It is actually possible to invoke an undeclared rule if corresponding actions are declared: the rule is treated as empty.
+
+* Targets (other than =NOTFILE= targets) are associated with paths in the file system through a process called binding. Binding is a process of searching for a file with the same name as the target (sans grist), based on the settings of the target-specific =SEARCH= and =LOCATE= variables.
+
+* In addition to local and global variables, Jam allows you to set a variable =on= a target. Target-specific variable values usually cannot be read, and take effect only in the following contexts:
+
+ * In updating actions, variable values are first looked up =on= the target named by the first argument (the target being updated). Because Jam builds its entire dependency tree before executing actions, Jam rules make target-specific variable settings as a way of supplying parameters to the corresponding actions.
+ * Binding is controlled /entirely/ by the target-specific setting of the =SEARCH= and =LOCATE= variables, as described here.
+ * In the special rule used for header file scanning, variable values are first looked up =on= the target named by the rule's first argument (the source file being scanned).
+
+* The "bound value" of a variable is the path associated with the target named by the variable. In build actions, the first two arguments are automatically replaced with their bound values. Target-specific variables can be selectively replaced by their bound values using the =bind= action modifier.
+
+* Note that the term "binding" as used in the Jam documentation indicates a phase of processing that includes three sub-phases: /binding/ (yes!), update determination, and header file scanning. The repetition of the term "binding" can lead to some confusion. In particular, the Modifying Binding section in the Jam documentation should probably be titled "Modifying Update Determination".
+
+* "Grist" is just a string prefix of the form </characters/>. It is used in Jam to create unique target names based on simpler names. For example, the file name "=test.exe=" may be used by targets in separate subprojects, or for the debug and release variants of the "same" abstract target. Each distinct target bound to a file called "test.exe" has its own unique grist prefix. The Boost build system also takes full advantage of Jam's ability to divide strings on grist boundaries, sometimes concatenating multiple gristed elements at the beginning of a string. Grist is used instead of identifying targets with absolute paths for two reasons:
+
+ # The location of targets cannot always be derived solely from what the user puts in a Jamfile, but sometimes depends also on the binding process. Some mechanism to distinctly identify targets with the same name is still needed.
+ # Grist allows us to use a uniform abstract identifier for each built target, regardless of target file location (as allowed by setting ALL_LOCATE_TARGET).
+
+* When grist is extracted from a name with $(var:G), the result includes the leading and trailing angle brackets. When grist is added to a name with $(var:G=expr), existing grist is first stripped. Then, if expr is non-empty, leading <s and trailing >s are added if necessary to form an expression of the form <expr2>; <expr2> is then prepended. (See the example at the end of this list.)
+
+* When Jam is invoked it imports all environment variable settings into corresponding Jam variables, followed by all command-line (-s...) variable settings. Variables whose name ends in PATH, Path, or path are split into string lists on OS-specific path-list separator boundaries (e.g. ":" for UNIX and ";" for Windows). All other variables are split on space (" ") boundaries. Boost Jam modifies that behavior by allowing variables to be quoted.
+
+* A variable whose value is an empty list or which consists entirely of empty
+ strings has a negative logical value. Thus, for example, code like the
+ following allows a sensible non-empty default which can easily be overridden
+ by the user:
+ ``
+MESSAGE ?\= starting jam... ;
+if $(MESSAGE) { ECHO The message is: $(MESSAGE) ; }
+``
+ If the user wants a specific message, he invokes jam with [^"-sMESSAGE\=message text"]. If he wants no message, he invokes jam with [^-sMESSAGE\=] and nothing at all is printed.
+
+* The parsing of command line options in Jam can be rather unintuitive compared with how other Unix programs accept options. There are two variants accepted as valid for an option:
+
+ # =-xvalue=, and
+ # =-x value=.
+
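+Here is the grist example referred to above; the values are arbitrary:
+
+[pre
+t = <app>test.exe ;
+ECHO $(t:G) ;       # prints "<app>", brackets included
+ECHO $(t:G=) ;      # prints "test.exe", existing grist stripped
+ECHO $(t:G=gui) ;   # prints "<gui>test.exe"
+]
+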
+[endsect]
+
+[endsect]
+
+
+[section History]
+[include history.qbk]
+[endsect]
diff --git a/tools/build/v2/doc/development_plan.html b/tools/build/doc/development_plan.html
index 598c23dae7..598c23dae7 100644
--- a/tools/build/v2/doc/development_plan.html
+++ b/tools/build/doc/development_plan.html
diff --git a/tools/build/v2/doc/history.qbk b/tools/build/doc/history.qbk
index f5a89b0c16..f5a89b0c16 100644
--- a/tools/build/v2/doc/history.qbk
+++ b/tools/build/doc/history.qbk
diff --git a/tools/build/doc/jamfile.jam b/tools/build/doc/jamfile.jam
new file mode 100644
index 0000000000..91dcce9b1c
--- /dev/null
+++ b/tools/build/doc/jamfile.jam
@@ -0,0 +1,26 @@
+# Copyright 2004,2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import quickbook ;
+using boostbook ;
+
+project tools/build/v2/doc
+ ;
+
+boostbook userman : src/standalone.xml
+ : <xsl:param>toc.section.depth=1
+ <xsl:param>doc.standalone=true
+ <xsl:param>nav.layout=none
+ <implicit-dependency>jam_docs
+ <dependency>jam_docs
+ <xsl:param>boost.root=../../../..
+ <xsl:param>boost.defaults=Boost
+ ;
+
+xml jam_docs : bjam.qbk ;
+
+if ! $(BOOST_ROOT)
+{
+ BOOST_ROOT = [ modules.peek : BOOST_ROOT ] ;
+}
diff --git a/tools/build/v2/doc/src/abstract-target.xml b/tools/build/doc/src/abstract-target.xml
index dad53380c4..dad53380c4 100644
--- a/tools/build/v2/doc/src/abstract-target.xml
+++ b/tools/build/doc/src/abstract-target.xml
diff --git a/tools/build/doc/src/architecture.xml b/tools/build/doc/src/architecture.xml
new file mode 100644
index 0000000000..0b22defef9
--- /dev/null
+++ b/tools/build/doc/src/architecture.xml
@@ -0,0 +1,668 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+ <appendix id="bbv2.arch">
+ <title>Boost.Build v2 architecture</title>
+
+ <sidebar>
+ <para>
+      This document is a work in progress. Do not expect much from it yet.
+ </para>
+ </sidebar>
+
+ <section id="bbv2.arch.overview">
+ <title>Overview</title>
+
+ <!-- FIXME: the below does not mention engine at all, making rest of the
+ text confusing. Things like 'kernel' and 'util' don't have to be
+ mentioned at all. -->
+ <para>
+      The Boost.Build implementation is structured into four components:
+      "kernel", "util", "build" and "tools". The first two are relatively
+      uninteresting, so we will focus on the remaining pair. The "build"
+      component provides the classes necessary to declare targets, determine
+      which properties should be used to build them, and create the
+      dependency graph. The "tools" component provides user-visible
+      functionality. It mostly allows declaring specific kinds of main
+      targets, as well as registering available tools, which are then used
+      when creating the dependency graph.
+ </para>
+ </section>
+
+ <section id="bbv2.arch.build">
+ <title>The build layer</title>
+
+ <para>
+ The build layer has just four main parts -- metatargets (abstract
+ targets), virtual targets, generators and properties.
+
+ <itemizedlist>
+ <listitem><para>
+ Metatargets (see the "targets.jam" module) represent all the
+ user-defined entities that can be built. The "meta" prefix signifies
+ that they do not need to correspond to exact files or even files at all
+ -- they can produce a different set of files depending on the build
+ request. Metatargets are created when Jamfiles are loaded. Each has a
+ <code>generate</code> method which is given a property set and produces
+ virtual targets for the passed properties.
+ </para></listitem>
+ <listitem><para>
+ Virtual targets (see the "virtual-targets.jam" module) correspond to
+ actual atomic updatable entities -- most typically files.
+ </para></listitem>
+ <listitem><para>
+ Properties are just (name, value) pairs, specified by the user and
+ describing how targets should be built. Properties are stored using the
+ <code>property-set</code> class.
+ </para></listitem>
+ <listitem><para>
+ Generators are objects that encapsulate specific tools -- they can
+ take a list of source virtual targets and produce new virtual targets
+ from them.
+ </para></listitem>
+ </itemizedlist>
+
+ </para>
+
+ <para>
+ The build process includes the following steps:
+
+ <orderedlist>
+ <listitem><para>
+ Top-level code calls the <code>generate</code> method of a metatarget
+ with some properties.
+ </para></listitem>
+
+ <listitem><para>
+ The metatarget combines the requested properties with its requirements
+ and passes the result, together with the list of sources, to the
+ <code>generators.construct</code> function.
+ </para></listitem>
+
+ <listitem><para>
+ A generator appropriate for the build properties is selected and its
+ <code>run</code> method is called. The method returns a list of virtual
+ targets.
+ </para></listitem>
+
+ <listitem><para>
+          The virtual targets are returned to the top-level code, and for each instance
+          the <literal>actualize</literal> method is called to set up nodes and updating
+          actions in the dependency graph kept inside the Boost.Build engine. This dependency
+          graph is then updated, which runs the necessary commands.
+ </para></listitem>
+ </orderedlist>
+ </para>
+
+ <section id="bbv2.arch.build.metatargets">
+ <title>Metatargets</title>
+
+ <para>
+ There are several classes derived from "abstract-target". The
+ "main-target" class represents a top-level main target, the
+ "project-target" class acts like a container holding multiple main
+ targets, and "basic-target" class is a base class for all further target
+ types.
+ </para>
+
+ <para>
+ Since each main target can have several alternatives, all top-level
+ target objects are actually containers, referring to "real" main target
+ classes. The type of that container is "main-target". For example, given:
+<programlisting>
+alias a ;
+lib a : a.cpp : &lt;toolset&gt;gcc ;
+</programlisting>
+      we would have one top-level "main-target" instance, containing one
+      "alias-target" and one "lib-target" instance. "main-target"'s "generate"
+      method decides which of the alternatives should be used, and calls
+      "generate" on the corresponding instance.
+ </para>
+
+ <para>
+ Each alternative is an instance of a class derived from "basic-target".
+ "basic-target.generate" does several things that should always be done:
+
+ <itemizedlist>
+ <listitem><para>
+ Determines what properties should be used for building the target.
+ This includes looking at requested properties, requirements, and usage
+ requirements of all sources.
+ </para></listitem>
+
+ <listitem><para>
+ Builds all sources.
+ </para></listitem>
+
+ <listitem><para>
+ Computes usage requirements that should be passed back to targets
+ depending on this one.
+ </para></listitem>
+ </itemizedlist>
+
+ For the real work of constructing a virtual target, a new method
+ "construct" is called.
+ </para>
+
+ <para>
+ The "construct" method can be implemented in any way by classes derived
+ from "basic-target", but one specific derived class plays the central role
+ -- "typed-target". That class holds the desired type of file to be
+ produced, and its "construct" method uses the generators module to do the
+ actual work.
+ </para>
+
+ <para>
+ This means that a specific metatarget subclass may avoid using
+      generators altogether. However, this is deprecated and we are trying to
+ eliminate all such subclasses at the moment.
+ </para>
+
+ <para>
+      Note that the <filename>build/targets.jam</filename> file contains a
+      UML diagram which might help.
+ </para>
+ </section>
+
+ <section id="bbv2.arch.build.virtual">
+ <title>Virtual targets</title>
+
+ <para>
+ Virtual targets are atomic updatable entities. Each virtual
+      target can be assigned an updating action -- an instance of the
+      <code>action</code> class. The action class, in turn, contains a list of
+      source targets, properties, and the name of an action which
+ should be executed.
+ </para>
+
+ <para>
+ We try hard to never create equal instances of the
+ <code>virtual-target</code> class. Code creating virtual targets passes
+      them through the <code>virtual-target.register</code> function, which
+ detects if a target with the same name, sources, and properties has
+ already been created. In that case, the preexisting target is returned.
+ </para>
+
+ <!-- FIXME: the below 2 para are rubbish, must be totally rewritten. -->
+ <para>
+ When all virtual targets are produced, they are "actualized". This means
+ that the real file names are computed, and the commands that should be run
+ are generated. This is done by the <code>virtual-target.actualize</code>
+ and <code>action.actualize</code> methods. The first is conceptually
+ simple, while the second needs additional explanation. Commands in Boost.Build
+ are generated in a two-stage process. First, a rule with an appropriate
+ name (for example "gcc.compile") is called and is given a list of target
+ names. The rule sets some variables, like "OPTIONS". After that, the
+      command string is taken and variables are substituted, so uses of OPTIONS
+      inside the command string get transformed into actual compile options.
+ </para>
+
+ <para>
+ Boost.Build added a third stage to simplify things. It is now possible
+ to automatically convert properties to appropriate variable assignments.
+ For example, &lt;debug-symbols&gt;on would add "-g" to the OPTIONS
+      variable, without requiring this logic to be added manually to gcc.compile.
+ This functionality is part of the "toolset" module.
+ </para>
+
+ <para>
+ Note that the <filename>build/virtual-targets.jam</filename> file
+      contains a UML diagram which might help.
+ </para>
+ </section>
+
+ <section id="bbv2.arch.build.properties">
+ <title>Properties</title>
+
+ <para>
+ Above, we noted that metatargets are built with a set of properties.
+ That set is represented by the <code>property-set</code> class. An
+ important point is that handling of property sets can get very expensive.
+ For that reason, we make sure that for each set of (name, value) pairs
+ only one <code>property-set</code> instance is created. The
+ <code>property-set</code> uses extensive caching for all operations, so
+ most work is avoided. The <code>property-set.create</code> is the factory
+ function used to create instances of the <code>property-set</code> class.
+ </para>
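+
+    <para>
+      As a rough sketch of how this looks from Jam code inside Boost.Build
+      (the property values are arbitrary, and the <code>raw</code> method
+      name is assumed here):
+<programlisting>
+import property-set ;
+
+local ps = [ property-set.create &lt;variant&gt;debug &lt;link&gt;shared ] ;
+ECHO [ $(ps).raw ] ;  # echoes the stored properties
+</programlisting>
+      Because of the caching described above, calling
+      <code>property-set.create</code> again with the same properties returns
+      the same instance.
+    </para>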
+ </section>
+ </section>
+
+ <section id="bbv2.arch.tools">
+ <title>The tools layer</title>
+
+ <para>Write me!</para>
+ </section>
+
+ <section id="bbv2.arch.targets">
+ <title>Targets</title>
+
+ <para>NOTE: THIS SECTION IS NOT EXPECTED TO BE READ!
+ There are two user-visible kinds of targets in Boost.Build. First are
+ "abstract" &#x2014; they correspond to things declared by the user, e.g.
+ projects and executable files. The primary thing about abstract targets is
+ that it is possible to request them to be built with a particular set of
+ properties. Each property combination may possibly yield different built
+    files, so abstract targets do not have a direct correspondence to built
+ files.
+ </para>
+
+ <para>
+ File targets, on the other hand, are associated with concrete files.
+ Dependency graphs for abstract targets with specific properties are
+    constructed from file targets. The user has no way to create file targets, but
+ can specify rules for detecting source file types, as well as rules for
+ transforming between file targets of different types. That information is
+ used in constructing the final dependency graph, as described in the <link
+ linkend="bbv2.arch.depends">next section</link>.
+ <emphasis role="bold">Note:</emphasis>File targets are not the same entities
+ as Jam targets; the latter are created from file targets at the latest
+ possible moment.
+ <emphasis role="bold">Note:</emphasis>"File target" is an originally
+ proposed name for what we now call virtual targets. It is more
+ understandable by users, but has one problem: virtual targets can
+ potentially be "phony", and not correspond to any file.
+ </para>
+ </section>
+
+ <section id="bbv2.arch.depends">
+ <title>Dependency scanning</title>
+
+ <para>
+ Dependency scanning is the process of finding implicit dependencies, like
+ "#include" statements in C++. The requirements for correct dependency
+      "#include" statements in C++. The requirements for a correct dependency
+ </para>
+
+ <itemizedlist>
+ <listitem><simpara>
+ <link linkend="bbv2.arch.depends.different-scanning-algorithms">Support
+ for different scanning algorithms</link>. C++ and XML have quite different
+ syntax for includes and rules for looking up the included files.
+ </simpara></listitem>
+
+ <listitem><simpara>
+ <link linkend="bbv2.arch.depends.same-file-different-scanners">Ability
+ to scan the same file several times</link>. For example, a single C++ file
+ may be compiled using different include paths.
+ </simpara></listitem>
+
+ <listitem><simpara>
+ <link linkend="bbv2.arch.depends.dependencies-on-generated-files">Proper
+ detection of dependencies on generated files.</link>
+ </simpara></listitem>
+
+ <listitem><simpara>
+ <link
+        linkend="bbv2.arch.depends.dependencies-from-generated-files">Proper
+ detection of dependencies from a generated file.</link>
+ </simpara></listitem>
+ </itemizedlist>
+
+ <section id="bbv2.arch.depends.different-scanning-algorithms">
+ <title>Support for different scanning algorithms</title>
+
+ <para>
+      Different scanning algorithms are encapsulated by objects called
+ "scanners". Please see the "scanner" module documentation for more
+ details.
+ </para>
+ </section>
+
+ <section id="bbv2.arch.depends.same-file-different-scanners">
+ <title>Ability to scan the same file several times</title>
+
+ <para>
+ As stated above, it is possible to compile a C++ file multiple times,
+ using different include paths. Therefore, include dependencies for those
+      compilations can be different. The problem is that the Boost.Build engine does
+ not allow multiple scans of the same target. To solve that, we pass the
+ scanner object when calling <literal>virtual-target.actualize</literal>
+ and it creates different engine targets for different scanners.
+ </para>
+
+ <para>
+ For each engine target created with a specified scanner, a
+ corresponding one is created without it. The updating action is
+ associated with the scanner-less target, and the target with the scanner
+ is made to depend on it. That way if sources for that action are touched,
+      all targets &#x2014; with and without the scanner &#x2014; are considered outdated.
+ </para>
+
+ <para>
+ Consider the following example: "a.cpp" prepared from "a.verbatim",
+ compiled by two compilers using different include paths and copied into
+ some install location. The dependency graph would look like:
+ </para>
+
+<programlisting>
+a.o (&lt;toolset&gt;gcc) &lt;--(compile)-- a.cpp (scanner1) ----+
+a.o (&lt;toolset&gt;msvc) &lt;--(compile)-- a.cpp (scanner2) ----|
+a.cpp (installed copy) &lt;--(copy) ----------------------- a.cpp (no scanner)
+ ^
+ |
+                                                     a.verbatim --------------------------------+
+</programlisting>
+ </section>
+
+ <section id="bbv2.arch.depends.dependencies-on-generated-files">
+ <title>Proper detection of dependencies on generated files.</title>
+
+ <para>
+ This requirement breaks down to the following ones.
+ </para>
+
+ <orderedlist>
+ <listitem><simpara>
+ If when compiling "a.cpp" there is an include of "a.h", the "dir"
+ directory is on the include path, and a target called "a.h" will be
+ generated in "dir", then Boost.Build should discover the include, and create
+ "a.h" before compiling "a.cpp".
+ </simpara></listitem>
+
+ <listitem><simpara>
+ Since Boost.Build almost always generates targets under the "bin"
+ directory, this should be supported as well. I.e. in the scenario above,
+          the Jamfile in "dir" might create a main target, which generates "a.h". The
+          file will be generated into the "dir/bin" directory, but we still have to
+ recognize the dependency.
+ </simpara></listitem>
+ </orderedlist>
+
+ <para>
+ The first requirement means that when determining what "a.h" means when
+ found in "a.cpp", we have to iterate over all directories in include
+ paths, checking for each one:
+ </para>
+
+ <orderedlist>
+ <listitem><simpara>
+ If there is a file named "a.h" in that directory, or
+ </simpara></listitem>
+
+ <listitem><simpara>
+ If there is a target called "a.h", which will be generated in that
+          directory.
+ </simpara></listitem>
+ </orderedlist>
+
+ <para>
+ Classic Jam has built-in facilities for point (1) above, but that is not
+ enough. It is hard to implement the right semantics without builtin
+ support. For example, we could try to check if there exists a target
+ called "a.h" somewhere in the dependency graph, and add a dependency to
+ it. The problem is that without a file search in the include path, the
+ semantics may be incorrect. For example, one can have an action that
+      generates some "dummy" header, for systems which do not have a native one.
+ Naturally, we do not want to depend on that generated header on platforms
+ where a native one is included.
+ </para>
+
+ <para>
+ There are two design choices for builtin support. Suppose we have files
+ a.cpp and b.cpp, and each one includes header.h, generated by some action.
+ Dependency graph created by classic Jam would look like:
+
+<programlisting>
+a.cpp -----&gt; &lt;scanner1&gt;header.h [search path: d1, d2, d3]
+
+ &lt;d2&gt;header.h --------&gt; header.y
+ [generated in d2]
+
+b.cpp -----&gt; &lt;scanner2&gt;header.h [search path: d1, d2, d4]
+</programlisting>
+ </para>
+
+ <para>
+      In this case, Jam thinks all header.h targets are unrelated. The
+ correct dependency graph might be:
+
+<programlisting>
+a.cpp ----
+ \
+ &gt;----&gt; &lt;d2&gt;header.h --------&gt; header.y
+ / [generated in d2]
+b.cpp ----
+</programlisting>
+
+ or
+
+<programlisting>
+a.cpp -----&gt; &lt;scanner1&gt;header.h [search path: d1, d2, d3]
+ |
+ (includes)
+ V
+ &lt;d2&gt;header.h --------&gt; header.y
+ [generated in d2]
+ ^
+ (includes)
+ |
+b.cpp -----&gt; &lt;scanner2&gt;header.h [ search path: d1, d2, d4]
+</programlisting>
+ </para>
+
+ <para>
+ The first alternative was used for some time. The problem however is:
+ what include paths should be used when scanning header.h? The second
+ alternative was suggested by Matt Armstrong. It has a similar effect: Any
+ target depending on &lt;scanner1&gt;header.h will also depend on
+ &lt;d2&gt;header.h. This way though we now have two different targets with
+ two different scanners, so those targets can be scanned independently. The
+ first alternative's problem is avoided, so the second alternative is
+ implemented now.
+ </para>
+
+ <para>
+      The second sub-requirement is that targets generated under the "bin"
+ directory are handled as well. Boost.Build implements a semi-automatic
+ approach. When compiling C++ files the process is:
+ </para>
+
+ <orderedlist>
+ <listitem><simpara>
+          The main target to which the compiled file belongs is found.
+ </simpara></listitem>
+
+ <listitem><simpara>
+ All other main targets that the found one depends on are found. These
+ include: main targets used as sources as well as those specified as
+ "dependency" properties.
+ </simpara></listitem>
+
+ <listitem><simpara>
+ All directories where files belonging to those main targets will be
+ generated are added to the include path.
+ </simpara></listitem>
+ </orderedlist>
+
+ <para>
+ After this is done, dependencies are found by the approach explained
+ previously.
+ </para>
+
+ <para>
+ Note that if a target uses generated headers from another main target,
+ that main target should be explicitly specified using the dependency
+ property. It would be better to lift this requirement, but it does not
+ seem to be causing any problems in practice.
+ </para>
+
+ <para>
+ For target types other than C++, adding of include paths must be
+ implemented anew.
+ </para>
+ </section>
+
+ <section id="bbv2.arch.depends.dependencies-from-generated-files">
+ <title>Proper detection of dependencies from generated files</title>
+
+ <para>
+ Suppose file "a.cpp" includes "a.h" and both are generated by some
+ action. Note that classic Jam has two stages. In the first stage the
+ dependency graph is built and actions to be run are determined. In the
+ second stage the actions are executed. Initially, neither file exists, so
+      the include is not found. As a result, Jam might attempt to compile
+ a.cpp before creating a.h, causing the compilation to fail.
+ </para>
+
+ <para>
+ The solution in Boost.Jam is to perform additional dependency scans
+ after targets are updated. This breaks separation between build stages in
+ Jam &#x2014; which some people consider a good thing &#x2014; but I am not
+ aware of any better solution.
+ </para>
+
+ <para>
+      In order to understand the rest of this section, you should first read some
+ details about Jam's dependency scanning, available at <ulink url=
+ "http://public.perforce.com:8080/@md=d&amp;cd=//public/jam/src/&amp;ra=s&amp;c=kVu@//2614?ac=10">
+ this link</ulink>.
+ </para>
+
+ <para>
+ Whenever a target is updated, Boost.Jam rescans it for includes.
+ Consider this graph, created before any actions are run.
+<programlisting>
+A -------&gt; C ----&gt; C.pro
+ /
+B --/ C-includes ---&gt; D
+</programlisting>
+ </para>
+
+ <para>
+      Both A and B have a dependency on C and C-includes (the latter dependency
+ is not shown). Say during building we have tried to create A, then tried
+ to create C and successfully created C.
+ </para>
+
+ <para>
+ In that case, the set of includes in C might well have changed. We do
+ not bother to detect precisely which includes were added or removed.
+ Instead we create another internal node C-includes-2. Then we determine
+ what actions should be run to update the target. In fact this means that
+ we perform the first stage logic when already in the execution stage.
+ </para>
+
+ <para>
+ After actions for C-includes-2 are determined, we add C-includes-2 to
+ the list of A's dependents, and stage 2 proceeds as usual. Unfortunately,
+      the list of A's dependencies, and stage 2 proceeds as usual. Unfortunately,
+      we cannot do the same with target B: until B is visited, the C
+      target does not know that B depends on it. So, we add a flag to C marking it as
+ C-includes-2 is added to the list of B's dependencies as well.
+ </para>
+
+ <para>
+ Note also that internal nodes are sometimes updated too. Consider this
+ dependency graph:
+<programlisting>
+a.o ---&gt; a.cpp
+ a.cpp-includes --&gt; a.h (scanned)
+ a.h-includes ------&gt; a.h (generated)
+ |
+ |
+ a.pro &lt;-------------------------------------------+
+</programlisting>
+ </para>
+
+ <para>
+      Here, our handling of generated headers comes into play. Say that a.h
+ exists but is out of date with respect to "a.pro", then "a.h (generated)"
+ and "a.h-includes" will be marked for updating, but "a.h (scanned)" will
+ not. We have to rescan "a.h" after it has been created, but since "a.h
+ (generated)" has no associated scanner, it is only possible to rescan
+ "a.h" after "a.h-includes" target has been updated.
+ </para>
+
+ <para>
+      The above considerations led to the decision to rescan a target whenever
+      it is updated, no matter whether it is internal or not.
+ </para>
+
+ </section>
+ </section>
+
+ <warning>
+ <para>
+ The remainder of this document is not intended to be read at all. This
+ will be rearranged in the future.
+ </para>
+ </warning>
+
+ <section>
+ <title>File targets</title>
+
+ <para>
+ As described above, file targets correspond to files that Boost.Build
+ manages. Users may be concerned about file targets in three ways: when
+ declaring file target types, when declaring transformations between types
+ and when determining where a file target is to be placed. File targets can
+ also be connected to actions that determine how the target is to be created.
+ Both file targets and actions are implemented in the
+ <literal>virtual-target</literal> module.
+ </para>
+
+ <section>
+ <title>Types</title>
+
+ <para>
+ A file target can be given a type, which determines what transformations
+ can be applied to the file. The <literal>type.register</literal> rule
+        declares new types. A file type can also be assigned a scanner, which is
+ then used to find implicit dependencies. See "<link
+ linkend="bbv2.arch.depends">dependency scanning</link>".
+ </para>
+ </section>
+
+ <section>
+ <title>Target paths</title>
+
+ <para>
+      To distinguish targets built with different properties, they are put in
+ different directories. Rules for determining target paths are given below:
+ </para>
+
+ <orderedlist>
+ <listitem><simpara>
+ All targets are placed under a directory corresponding to the project
+ where they are defined.
+ </simpara></listitem>
+
+ <listitem><simpara>
+          Each non-free, non-incidental property causes an additional element to
+          be added to the target path. That element has the form
+ <literal>&lt;feature-name&gt;-&lt;feature-value&gt;</literal> for
+ ordinary features and <literal>&lt;feature-value&gt;</literal> for
+ implicit ones. [TODO: Add note about composite features].
+ </simpara></listitem>
+
+ <listitem><simpara>
+          If the set of free, non-incidental properties is different from the
+          set of free, non-incidental properties for the project in which the main
+ target that uses the target is defined, a part of the form
+ <literal>main_target-&lt;name&gt;</literal> is added to the target path.
+ <emphasis role="bold">Note:</emphasis>It would be nice to completely
+ track free features also, but this appears to be complex and not
+ extremely needed.
+ </simpara></listitem>
+ </orderedlist>
+
+ <para>
+ For example, we might have these paths:
+<programlisting>
+debug/optimization-off
+debug/main-target-a
+</programlisting>
+ </para>
+ </section>
+ </section>
+
+ </appendix>
+
+<!--
+ Local Variables:
+ mode: xml
+ sgml-indent-data: t
+ sgml-parent-document: ("userman.xml" "chapter")
+ sgml-set-face: t
+ End:
+-->
diff --git a/tools/build/v2/doc/src/basic-target.xml b/tools/build/doc/src/basic-target.xml
index ae7c2795c2..ae7c2795c2 100644
--- a/tools/build/v2/doc/src/basic-target.xml
+++ b/tools/build/doc/src/basic-target.xml
diff --git a/tools/build/doc/src/extending.xml b/tools/build/doc/src/extending.xml
new file mode 100644
index 0000000000..e03a22c2a3
--- /dev/null
+++ b/tools/build/doc/src/extending.xml
@@ -0,0 +1,1216 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+ <chapter id="bbv2.extender">
+ <title>Extender Manual</title>
+
+ <section id="bbv2.extender.intro">
+ <title>Introduction</title>
+
+ <para>
+      This section explains how to extend Boost.Build to accommodate your
+      local requirements&mdash;primarily to add support for non-standard
+      tools you have. Before we start, be sure you have read and understood
+      the concept of a metatarget, <xref linkend="bbv2.overview.concepts"/>,
+ which is critical to understanding the remaining material.
+ </para>
+
+ <para>
+ The current version of Boost.Build has three levels of targets, listed
+ below.
+ </para>
+
+ <variablelist>
+
+ <varlistentry>
+ <term>metatarget</term>
+ <listitem>
+ <para>
+ Object that is created from declarations in Jamfiles. May
+ be called with a set of properties to produce concrete
+ targets.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>concrete target</term>
+ <listitem>
+ <para>
+ Object that corresponds to a file or an action.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>jam target</term>
+ <listitem>
+ <para>
+ Low-level concrete target that is specific to Boost.Jam build
+            engine. Essentially a string&mdash;most often the name of a file.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ <para>
+ In most cases, you will only have to deal with concrete targets and
+ the process that creates concrete targets from
+      metatargets. Extending the metatarget level is rarely required. The jam
+ targets are typically only used inside the command line patterns.
+ </para>
+
+ <warning>
+ <para>All of the Boost.Jam target-related builtin functions, like
+ <code>DEPENDS</code> or <code>ALWAYS</code> operate on jam
+ targets. Applying them to metatargets or concrete targets has no
+ effect.</para>
+ </warning>
+
+ <section id="bbv2.extender.overview.metatargets">
+ <title>Metatargets</title>
+
+      <para>A metatarget is an object that records information specified
+      in a Jamfile, such as metatarget kind, name, sources and properties,
+      and can be called with specific properties to generate concrete
+      targets. At the code level it is represented by an instance of a
+      class derived from <link linkend="bbv2.reference.class.abstract-target">abstract-target</link>.
+      <footnote><para>This name is historic, and will eventually be changed to
+ <code>metatarget</code></para></footnote>
+ </para>
+
+ <para>The <link linkend="bbv2.reference.class.abstract-target.generate">generate</link>
+ method takes the build properties
+ (as an instance of the <link linkend="bbv2.reference.class.property-set">
+ property-set</link> class) and returns
+ a list containing:</para>
+ <itemizedlist>
+        <listitem><para>As the front element&mdash;usage requirements from this invocation
+ (an instance of <link linkend="bbv2.reference.class.property-set">
+ property-set</link>)</para></listitem>
+        <listitem><para>As subsequent elements&mdash;the created concrete targets (
+        instances of the <classname>virtual-target</classname> class).</para></listitem>
+ </itemizedlist>
+
+      <para>It is possible to look up a metatarget by target-id using the
+      <code>targets.resolve-reference</code> function, and the
+      <code>targets.generate-from-reference</code> function can both
+      look up and generate a metatarget.</para>
+
+ <para>The <link linkend="bbv2.reference.class.abstract-target">abstract-target</link>
+ class has three immediate derived classes:</para>
+ <itemizedlist>
+
+ <listitem><para><link linkend="bbv2.reference.class.project-target">project-target</link> that
+ corresponds to a project and is not intended for further
+ subclassing. The <link linkend="bbv2.reference.class.project-target.generate">
+ generate</link> method of this
+ class builds all targets in the project that are not marked as
+ explicit.</para></listitem>
+
+ <listitem><para><link linkend="bbv2.reference.class.main-target">main-target</link>
+ corresponds to a target in a project
+ and contains one or more target alternatives. This class also should not be
+ subclassed. The <link linkend="bbv2.reference.class.main-target.generate">generate</link>
+ method of this class selects an alternative to build, and calls the
+ <link linkend="bbv2.reference.class.basic-target.generate">generate</link>
+ method of that alternative.</para></listitem>
+
+ <listitem><para><link linkend="bbv2.reference.class.basic-target">basic-target</link>
+      corresponds to a specific target alternative. This is a base class,
+ with a number of derived classes. The
+ <link linkend="bbv2.reference.class.basic-target.generate">generate</link> method
+ processes the target requirements and requested build properties to
+ determine final properties for the target, builds all sources, and
+ finally calls the abstract
+ <link linkend="bbv2.reference.class.basic-target.construct">construct</link>
+ method with the list of source virtual targets, and the final properties.
+ </para></listitem>
+
+ </itemizedlist>
+
+ <para>The instances of the <link linkend="bbv2.reference.class.project-target">project-target</link> and
+ <link linkend="bbv2.reference.class.main-target">main-target</link> classes are created
+      implicitly&mdash;when loading a new Jamfile, or when a new target
+      alternative with an as-yet unknown name is created. The instances of the
+ classes derived from <link linkend="bbv2.reference.class.basic-target">basic-target</link>
+      are typically created when a Jamfile calls a <firstterm>metatarget rule</firstterm>,
+      such as <code>exe</code>.
+ </para>
+
+      <para>It is permissible to create a custom class derived from
+      <link linkend="bbv2.reference.class.basic-target">basic-target</link> and a new metatarget rule
+      that creates instances of such a target. However, in the majority
+      of cases, a specific subclass of <link linkend="bbv2.reference.class.basic-target">basic-target</link>&mdash;
+      <link linkend="bbv2.reference.class.typed-target">typed-target</link>&mdash;is used. That class is associated
+      with a <firstterm>type</firstterm> and relays to <firstterm>generators</firstterm>
+      to construct concrete targets of that type. This process will be explained below.
+      When a new type is declared, a new metatarget rule is automatically defined.
+      That rule creates a new instance of typed-target, associated with that type.
+ </para>
+
+ </section>
+
+ <section id="bbv2.extender.overview.targets">
+ <title>Concrete targets</title>
+
+      <para>Concrete targets are represented by instances of classes derived
+ from <classname>virtual-target</classname>. The most commonly used
+ subclass is <classname>file-target</classname>. A file target is associated
+ with an action that creates it&mdash; an instance of the <classname>action</classname>
+      class. The action, in turn, holds a list of source targets. It also holds the
+ <link linkend="bbv2.reference.class.property-set">property-set</link>
+ instance with the build properties that should be used for the action.</para>
+
+    <para>Here's an example of creating a target from another target, <code>source</code>:</para>
+<programlisting>
+local a = [ new action $(source) : common.copy : $(property-set) ] ;
+local t = [ new file-target $(name) : CPP : $(project) : $(a) ] ;
+</programlisting>
+ <para>The first line creates an instance of the <classname>action</classname> class.
+    The first parameter is the list of sources. The second parameter is the name of
+    a jam-level <link linkend="bbv2.overview.jam_language.actions">action</link>.
+    The third parameter is the property-set applying to this action. The second line
+    creates a target. We specify a name, a type and a project. We also pass the
+ action object created earlier. If the action creates several targets, we can repeat
+ the second line several times.</para>
+
+ <para>In some cases, code that creates concrete targets may be invoked more than
+    once with the same properties. Returning different instances of <classname>file-target</classname>
+    that correspond to the same file will clearly result in problems. Therefore, whenever
+    returning targets you should pass them through the <code>virtual-target.register</code>
+    function. Besides allowing Boost.Build to track which virtual targets
+    were created for each metatarget, this also replaces targets with previously created identical
+    ones, as necessary.<footnote><para>This create-then-register pattern is caused by limitations
+    of the Boost.Jam language. The Python port is likely never to create duplicate targets.</para></footnote>
+ Here are a couple of examples:
+<programlisting>
+return [ virtual-target.register $(t) ] ;
+return [ sequence.transform virtual-target.register : $(targets) ] ;
+</programlisting>
+ </para>
+
+ </section>
+
+ <section id="bbv2.extender.overview.generators">
+ <title>Generators</title>
+
+ <para>In theory, every kind of metatarget in Boost.Build (like <code>exe</code>,
+ <code>lib</code> or <code>obj</code>) could be implemented
+    by writing a new metatarget class that, independently of the other code, figures
+    out what files to produce and what commands to use. However, that would be rather inflexible.
+ For example, adding support for a new compiler would require editing several metatargets.
+ </para>
+
+    <para>In practice, most files have specific types, and most tools
+    consume and produce files of specific types. To take advantage of this
+    fact, Boost.Build defines the concepts of target types and
+    <indexterm><primary>generators</primary></indexterm>
+    <firstterm>generators</firstterm>, and has a special metatarget class,
+    <link linkend="bbv2.reference.class.typed-target">typed-target</link>. A target type is merely an
+    identifier. It is associated with a set of file extensions that
+    correspond to that type. A generator is an abstraction of a tool. It advertises
+    the types it produces and, if called with a set of input targets, tries to construct
+    output targets of the advertised types. Finally,
+    <link linkend="bbv2.reference.class.typed-target">typed-target</link>
+    is associated with a specific target type, and relays to the generator (or generators)
+    for that type.
+ </para>
+
+ <para>A generator is an instance of a class derived from <classname>generator</classname>.
+ The <classname>generator</classname> class itself is suitable for common cases.
+ You can define derived classes for custom scenarios.</para>
+
+ <!--
+ <para>Given a set of generators, the fundamental operation is to
+ construct a target of a given type, with given properties, from a
+ set of targets. That operation is performed by rule
+ <literal>generators.construct</literal> and the used algorithm is described
+ below.</para>
+
+ <section>
+ <title>Selecting and ranking viable generators</title>
+
+ <para>Each generator, in addition to target types that it can
+ produce, have attribute that affects its applicability in
+ particular sitiation. Those attributes are:</para>
+
+ <orderedlist>
+ <listitem>
+ <simpara>
+ Required properties, which are properties absolutely
+ necessary for the generator to work. For example, generator
+ encapsulating the gcc compiler would have &lt;toolset&gt;gcc as
+ required property.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ Optional properties, which increase the generators
+ suitability for a particual build.
+ </simpara>
+ </listitem>
+ </orderedlist>
+
+ <para>
+ Generator's required and optional properties may not include
+ either free or incidental properties. (Allowing this would
+ greatly complicate caching targets).
+ </para>
+
+ <para>When trying to construct a target, the first step is to select
+ all possible generators for the requested target type, which
+ required properties are a subset of requested properties.
+ Generators that were already selected up the call stack are
+ excluded. In addition, if any composing generators were selected
+ up the call stack, all other composing generators are ignored
+ (TODO: define composing generators). The found generators
+ are assigned a rank, which is the number of optional properties
+ present in requested properties. Finally, generators with highest
+ rank are selected for futher processing.</para>
+
+ </section>
+ <section>
+ <title>Running generators</title>
+
+ <para>When generators are selected, each is run to produce a list of
+ created targets. This list might include targets that are not of
+ requested types, because generators create the same targets as
+ some tool, and tool's behaviour is fixed. (Note: should specify
+ that in some cases we actually want extra targets). If generator
+ fails, it returns an empty list. Generator is free to call
+ 'construct' again, to convert sources to the types it can handle.
+ It also can pass modified properties to 'construct'. However, a
+ generator is not allowed to modify any propagated properties,
+ otherwise when actually consuming properties we might discover
+ that the set of propagated properties is different from what was
+ used for building sources.</para>
+
+ <para>For all targets that are not of requested types, we try to
+ convert them to requested type, using a second call to
+ <literal>construct</literal>. This is done in order to support
+ transformation sequences where single source file expands to
+ several later. See <ulink url=
+ "http://groups.yahoo.com/group/jamboost/message/1667">this
+ message</ulink> for details.</para>
+
+ </section>
+
+ -->
+
+ <!-- FIXME: review the below content. Maybe, some of it is
+ still useful.
+ <section>
+ <title>Property adjustment</title>
+
+ <para>Because target location is determined by the build system, it
+ is sometimes necessary to adjust properties, in order to not
+ break actions. For example, if there's an action that generates
+ a header, say "a_parser.h", and a source file "a.cpp" which
+ includes that file, we must make everything work as if a_parser.h
+ is generated in the same directory where it would be generated
+ without any subvariants.</para>
+
+ <para>Correct property adjustment can be done only after all targets
+ are created, so the approach taken is:</para>
+
+ <orderedlist>
+ <listitem>
+ <para>
+ When dependency graph is constructed, each action can be
+ assigned a rule for property adjustment.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ When virtual target is actualized, that rule is run and
+ return the final set of properties. At this stage it can use
+ information of all created virtual targets.
+ </para>
+ </listitem>
+ </orderedlist>
+
+ <para>In case of quoted includes, no adjustment can give 100% correct
+ results. If target dirs are not changed by build system, quoted
+ includes are searched in "." and then in include path, while angle
+ includes are searched only in include path. When target dirs are
+ changed, we'd want to make quoted includes to be search in "." then in
+ additional dirs and then in the include path and make angle includes
+ be searched in include path, probably with additional paths added at
+ some position. Unless, include path already has "." as the first
+ element, this is not possible. So, either generated headers should not
+ be included with quotes, or first element of include path should be
+ ".", which essentially erases the difference between quoted and angle
+ includes. <emphasis role="bold">Note:</emphasis> the only way to get
+ "." as include path into compiler command line is via verbatim
+ compiler option. In all other case, Boost.Build will convert "." into
+ directory where it occurs.</para>
+
+ </section>
+
+ -->
+
+ </section>
+
+ </section>
+
+ <section id="bbv2.extender.example">
+ <title>Example: 1-to-1 generator</title>
+
+ <para>Say you're writing an application that generates C++ code. If
+ you ever did this, you know that it's not nice. Embedding large
+ portions of C++ code in string literals is very awkward. A much
+ better solution is:</para>
+
+ <orderedlist>
+ <listitem>
+ <simpara>
+ Write the template of the code to be generated, leaving
+ placeholders at the points that will change
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ Access the template in your application and replace
+ placeholders with appropriate text.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>Write the result.</simpara>
+ </listitem>
+ </orderedlist>
+
+ <para>It's quite easy to achieve. You write special verbatim files that are
+ just C++, except that the very first line of the file contains the name of a
+ variable that should be generated. A simple tool is created that takes a
+ verbatim file and creates a cpp file with a single <code>char*</code> variable
+ whose name is taken from the first line of the verbatim file and whose value
+ is the file's properly quoted content.</para>
+
+ <para>Let's see what Boost.Build can do.</para>
+
+ <para>First off, Boost.Build has no idea about "verbatim files". So, you must
+ register a new target type. The following code does it:</para>
+
+<programlisting>
+import type ;
+type.register VERBATIM : verbatim ;
+</programlisting>
+
+ <para>The first parameter to <link linkend="bbv2.reference.modules.type.register">type.register</link> gives
+ the name of the declared type. By convention, it's uppercase. The second
+ parameter is the suffix for files of this type. So, if Boost.Build sees
+ <filename>code.verbatim</filename> in a list of sources, it knows that it's of
+ type <code>VERBATIM</code>.</para>
+
+ <para>Next, you tell Boost.Build that the verbatim files can be
+ transformed into C++ files in one build step. A
+ <firstterm>generator</firstterm> is a template for a build step that
+ transforms targets of one type (or set of types) into another. Our
+ generator will be called <code>verbatim.inline-file</code>; it
+ transforms <code>VERBATIM</code> files into <code>CPP</code> files:
+
+<programlisting>
+import generators ;
+generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
+</programlisting>
+ </para>
+
+ <para>Lastly, you have to inform Boost.Build about the shell
+ commands used to make that transformation. That's done with an
+ <code>actions</code> declaration.
+
+<programlisting>
+actions inline-file
+{
+ "./inline-file.py" $(&lt;) $(&gt;)
+}
+</programlisting>
+
+<!-- You need to explain all the parameters to an "actions" and
+ describe the accompanying rule declaration: the user has no clue
+ what $(<) and $(>) are, and doesn't know about the third
+ parameter that gets passed to the rule. -->
+
+<!-- We use verbatim.inline-file in one place and just inline-file in
+ another. Is this confusing for user?
+ -->
+</para>
+
+ <para>
+ Now, we're ready to tie it all together. Put all the code above in file
+ <filename>verbatim.jam</filename>, add <code>import verbatim ;</code> to
+ <filename>Jamroot.jam</filename>, and it's possible to write the following
+ in your Jamfile:
+ </para>
+
+<programlisting>
+exe codegen : codegen.cpp class_template.verbatim usage.verbatim ;
+</programlisting>
+
+ <para>
+ The listed verbatim files will be automatically converted into C++ source
+ files, compiled and then linked to the codegen executable.
+ </para>
+
+ <para>
+ In subsequent sections, we will extend this example, and review all the
+ mechanisms in detail. The complete code is available in the
+ <filename>example/customization</filename> directory.
+ </para>
+ </section>
+
+ <section id="bbv2.extending.targets">
+ <title>Target types</title>
+ <para>The first thing we did in the <link
+ linkend="bbv2.extender.intro">introduction</link> was declaring a
+ new target type:
+<programlisting>
+import type ;
+type.register VERBATIM : verbatim ;
+</programlisting>
+ The type is the most important property of a target. Boost.Build can
+ automatically generate necessary build actions only because you
+ specify the desired type (using the different main target rules), and
+ because Boost.Build can guess the type of sources from their
+ extensions.
+ </para>
+
+ <para>The first two parameters for the <code>type.register</code> rule
+    are the name of the new type and the list of extensions associated with
+ it. A file with an extension from the list will have the given target
+ type. In the case where a target of the declared type is generated
+ from other sources, the first specified extension will be used.
+ </para>
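+
+    <para>For instance, a type may be registered with more than one extension;
+    in the following sketch the second extension, <literal>vtm</literal>, is purely
+    hypothetical, and generated <code>VERBATIM</code> targets would get the first
+    listed extension, <literal>verbatim</literal>:
+<programlisting>
+import type ;
+# Files ending in ".verbatim" or ".vtm" are treated as VERBATIM;
+# generated VERBATIM targets use the first listed extension, ".verbatim".
+type.register VERBATIM : verbatim vtm ;
+</programlisting>
+    </para>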
+
+ <para>Sometimes you want to change the suffix used for generated targets
+    depending on build properties, such as the toolset. For example, some compilers
+    use the extension <literal>elf</literal> for executable files. You can use the
+ <code>type.set-generated-target-suffix</code> rule:
+<programlisting>
+type.set-generated-target-suffix EXE : &lt;toolset&gt;elf : elf ;
+</programlisting>
+ </para>
+
+ <para>A new target type can be inherited from an existing one.
+<programlisting>
+type.register PLUGIN : : SHARED_LIB ;
+</programlisting>
+ The above code defines a new type derived from
+ <code>SHARED_LIB</code>. Initially, the new type inherits all the
+ properties of the base type - in particular generators and suffix.
+ Typically, you'll change the new type in some way. For example, using
+ <code>type.set-generated-target-suffix</code> you can set the suffix for
+    the new type. Or you can write a special generator for the new type. For
+    example, it can generate additional metainformation for the plugin.
+    Either way, the <code>PLUGIN</code> type can be used whenever
+ <code>SHARED_LIB</code> can. For example, you can directly link plugins
+ to an application.
+ </para>
+
+ <para>A type can be defined as "main", in which case Boost.Build will
+ automatically declare a main target rule for building targets of that
+ type. More details can be found <link
+ linkend="bbv2.extending.rules.main-type">later</link>.
+ </para>
+
+ <section id="bbv2.extending.scanners">
+ <title>Scanners</title>
+ <para>
+ Sometimes, a file can refer to other files via some include system. To
+ make Boost.Build track dependencies between included files, you need
+ to provide a scanner. The primary limitation is that only one scanner
+ can be assigned to a target type.
+ </para>
+
+ <para>First, we need to declare a new class for the scanner:
+<programlisting>
+class verbatim-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "//###include[ ]*\"([^\"]*)\"" ;
+ }
+}
+</programlisting>
+ All the complex logic is in the <code>common-scanner</code>
+ class, and you only need to override the method that returns
+ the regular expression to be used for scanning. The
+ parentheses in the regular expression indicate which part
+ of the string is the name of the included file. Only the
+ first parenthesized group in the regular expression will be
+ recognized; if you can't express everything you want that
+ way, you can return multiple regular expressions, each of
+ which contains a parenthesized group to be matched.
+ </para>
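+
+    <para>For example, with the pattern above the scanner would recognize an
+    include directive written in a verbatim file like this (the included file
+    name is, of course, only an illustration):
+<programlisting>
+//###include "common.verbatim"
+</programlisting>
+    </para>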
+
+ <para>After that, we need to register our scanner class:
+<programlisting>
+scanner.register verbatim-scanner : include ;
+</programlisting>
+ The value of the second parameter, in this case
+ <code>include</code>, specifies the properties that contain the list
+ of paths that should be searched for the included files.
+ </para>
+
+ <para>Finally, we assign the new scanner to the <code>VERBATIM</code>
+ target type:
+<programlisting>
+type.set-scanner VERBATIM : verbatim-scanner ;
+</programlisting>
+ That's enough for scanning include dependencies.
+ </para>
+
+ </section>
+
+ </section>
+
+ <section id="bbv2.extending.tools">
+ <title>Tools and generators</title>
+ <para>
+ This section will describe how Boost.Build can be extended to support
+ new tools.
+ </para>
+
+ <para>For each additional tool, a Boost.Build object called generator
+ must be created. That object has specific types of targets that it
+ accepts and produces. Using that information, Boost.Build is able
+ to automatically invoke the generator. For example, if you declare a
+ generator that takes a target of the type <literal>D</literal> and
+    produces a target of the type <literal>OBJ</literal>, then placing a
+    file with the extension <literal>.d</literal> in a list of sources will
+    cause Boost.Build to invoke your generator, and then to link the
+ resulting object file into an application. (Of course, this requires
+ that you specify that the <literal>.d</literal> extension corresponds
+ to the <literal>D</literal> type.)
+ </para>
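+
+    <para>Expressed in code, the setup described above might look like the
+    following sketch; the module prefix <code>d.compile</code> is a hypothetical
+    name, and an <code>actions</code> block with the actual compiler command line
+    would still have to be written:
+<programlisting>
+import type ;
+type.register D : d ;
+
+import generators ;
+generators.register-standard d.compile : D : OBJ ;
+</programlisting>
+    </para>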
+
+ <para>Each generator should be an instance of a class derived from the
+ <code>generator</code> class. In the simplest case, you don't need to
+ create a derived class, but simply create an instance of the
+ <code>generator</code> class. Let's review the example we've seen in the
+ <link linkend="bbv2.extender.intro">introduction</link>.
+ <!-- Is the following supposed to be verbatim.jam? Tell the
+ user so. You also need to describe the meanings of $(<)
+ and $(>); this is the first time they're encountered. -->
+<programlisting>
+import generators ;
+generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
+actions inline-file
+{
+ "./inline-file.py" $(&lt;) $(&gt;)
+}
+</programlisting>
+ </para>
+
+ <para>We declare a standard generator, specifying its id, the source type
+ and the target type. When invoked, the generator will create a target
+ of type <literal>CPP</literal> with a source target of
+ type <literal>VERBATIM</literal> as the only source. But what command
+ will be used to actually generate the file? In Boost.Build, actions are
+ specified using named "actions" blocks and the name of the action
+ block should be specified when creating targets. By convention,
+    generators use the same name for the action block as their own id. So,
+    in the above example, the "inline-file" actions block will be used to
+ convert the source into the target.
+ </para>
+
+ <para>
+ There are two primary kinds of generators: standard and composing,
+ which are registered with the
+ <code>generators.register-standard</code> and the
+ <code>generators.register-composing</code> rules, respectively. For
+ example:
+<programlisting>
+generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
+generators.register-composing mex.mex : CPP LIB : MEX ;
+</programlisting>
+ The first (standard) generator takes a <emphasis>single</emphasis>
+ source of type <code>VERBATIM</code> and produces a result. The second
+ (composing) generator takes any number of sources, which can have either
+ the <code>CPP</code> or the <code>LIB</code> type. Composing generators
+    are typically used for generating top-level target types. For example,
+ the first generator invoked when building an <code>exe</code> target is
+ a composing generator corresponding to the proper linker.
+ </para>
+
+ <para>You should also know about two specific functions for registering
+ generators: <code>generators.register-c-compiler</code> and
+ <code>generators.register-linker</code>. The first sets up header
+    dependency scanning for C files, and the second handles various
+ complexities like searched libraries. For that reason, you should always
+ use those functions when adding support for compilers and linkers.
+ </para>
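+
+    <para>For illustration, registering a C++ compiler generator for a
+    hypothetical <code>mycc</code> toolset could look roughly like this; the
+    exact argument list shown is an assumption modelled on the standard form,
+    so consult <filename>tools/gcc.jam</filename> for a real-world example:
+<programlisting>
+# Illustrative only: sets up a C++ compiler generator with header
+# dependency scanning for a hypothetical "mycc" toolset.
+generators.register-c-compiler mycc.compile.c++ : CPP : OBJ : &lt;toolset&gt;mycc ;
+</programlisting>
+    </para>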
+
+ <para>(Need a note about UNIX)</para>
+ <!-- What kind of note? Either write the note or don't, but remove this dross. -->
+ <bridgehead>Custom generator classes</bridgehead>
+
+    <para>The standard generators allow you to specify source and target
+ types, an action, and a set of flags. If you need anything more complex,
+ <!-- What sort of flags? Command-line flags? What does the system do with them? -->
+ you need to create a new generator class with your own logic. Then,
+ you have to create an instance of that class and register it. Here's
+    an example of how you can create your own generator class:
+<programlisting>
+class custom-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+<!-- What is the point of this __init__ function?? -->
+}
+
+generators.register
+ [ new custom-generator verbatim.inline-file : VERBATIM : CPP ] ;
+</programlisting>
+ This generator will work exactly like the
+ <code>verbatim.inline-file</code> generator we've defined above, but
+ it's possible to customize the behaviour by overriding methods of the
+ <code>generator</code> class.
+ </para>
+
+ <para>There are two methods of interest. The <code>run</code> method is
+ responsible for the overall process - it takes a number of source targets,
+ converts them to the right types, and creates the result. The
+ <code>generated-targets</code> method is called when all sources are
+ converted to the right types to actually create the result.
+ </para>
+
+ <para>The <code>generated-targets</code> method can be overridden when you
+ want to add additional properties to the generated targets or use
+ additional sources. For a real-life example, suppose you have a program
+    analysis tool that should be given the name of an executable and the list of
+ all sources. Naturally, you don't want to list all source files
+ manually. Here's how the <code>generated-targets</code> method can find
+ the list of sources automatically:
+<programlisting>
+class itrace-generator : generator {
+....
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local leaves ;
+ local temp = [ virtual-target.traverse $(sources[1]) : : include-sources ] ;<!-- You must explain include-sources! -->
+ for local t in $(temp)
+ {
+ if ! [ $(t).action<!-- In what namespace is this evaluated? --> ]
+ {
+ leaves += $(t) ;
+ }
+ }
+        return [ generator.generated-targets $(sources) $(leaves)
+ : $(property-set) : $(project) $(name) ] ;
+ }
+}
+generators.register [ new itrace-generator nm.itrace : EXE : ITRACE ] ;
+</programlisting>
+ The <code>generated-targets</code> method will be called with a single
+ source target of type <literal>EXE</literal>. The call to
+ <code>virtual-target.traverse</code> will return all targets the
+ executable depends on, and we further find files that are not
+ produced from anything. <!-- What does "not produced from anything" mean? -->
+ The found targets are added to the sources.
+ </para>
+
+    <para>The <code>run</code> method can be overridden to completely
+ customize the way the generator works. In particular, the conversion of
+ sources to the desired types can be completely customized. Here's
+ another real example. Tests for the Boost Python library usually
+ consist of two parts: a Python program and a C++ file. The C++ file is
+    compiled to a Python extension that is loaded by the Python
+ program. But in the likely case that both files have the same name,
+ the created Python extension must be renamed. Otherwise, the Python
+ program will import itself, not the extension. Here's how it can be
+ done:
+<programlisting>
+rule run ( project name ? : property-set : sources * )
+{
+ local python ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PY
+ {
+ python = $(s) ;
+ }
+ }
+ <!-- This is horrible code. Use a filter function, or at _least_ consolidate the two loops! -->
+ local libs ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] LIB ]
+ {
+ libs += $(s) ;
+ }
+ }
+
+ local new-sources ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] CPP ]
+ {
+ local name = [ $(s).name ] ; # get the target's basename
+ if $(name) = [ $(python).name ]
+ {
+ name = $(name)_ext ; # rename the target
+ }
+ new-sources += [ generators.construct $(project) $(name) :
+ PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
+ }
+ }
+
+    return [ construct-result $(python) $(new-sources) : $(project) $(name)
+ : $(property-set) ] ;
+}
+</programlisting>
+ <!-- Why are we doing this with a generator??? It seems
+ insane. We could just use a nice front-end rule that
+ calls some normal target-creation rules. No? -->
+
+    First, we separate all sources into Python files, libraries and C++
+ sources. For each C++ source we create a separate Python extension by
+ calling <code>generators.construct</code> and passing the C++ source
+ and the libraries. At this point, we also change the extension's name,
+ if necessary.
+ </para>
+
+
+ </section>
+
+ <section id="bbv2.extending.features">
+ <title>Features</title>
+ <para>
+      Often, we need to control the options passed to the invoked tools. This
+ is done with features. Consider an example:
+<programlisting>
+# Declare a new free feature
+import feature : feature ;
+feature verbatim-options : : free ;
+
+# Cause the value of the 'verbatim-options' feature to be
+# available as 'OPTIONS' variable inside verbatim.inline-file
+import toolset : flags ;
+flags verbatim.inline-file OPTIONS &lt;verbatim-options&gt; ;<!-- You must tell the reader what the syntax of the flags rule is -->
+
+# Use the "OPTIONS" variable
+actions inline-file
+{
+ "./inline-file.py" $(OPTIONS) $(&lt;) $(&gt;)
+}
+</programlisting>
+ We first define a new feature. Then, the <code>flags</code> invocation
+    says that whenever the verbatim.inline-file action is run, the value of
+ the <code>verbatim-options</code> feature will be added to the
+ <code>OPTIONS</code> variable, and can be used inside the action body.
+ You'd need to consult online help (--help) to find all the features of
+ the <code>toolset.flags</code> rule.
+ <!-- It's been a while since I wrote these notes, so I don't
+ remember what I meant. But right here, I wrote "bad" and
+ circled it. Maybe you can figure out what I meant. ;-)
+ -->
+ </para>
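+
+    <para>On the user side, a Jamfile could then pass a value of the new feature
+    in a target's requirements; the option string below is, of course, only an
+    illustration:
+<programlisting>
+exe codegen : codegen.cpp class_template.verbatim
+    : &lt;verbatim-options&gt;--trace ;
+</programlisting>
+    </para>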
+
+ <para>
+ Although you can define any set of features and interpret their values
+ in any way, Boost.Build suggests the following coding standard for
+ designing features.
+ </para>
+
+ <para>Most features should have a fixed set of values that is portable
+ (tool neutral) across the class of tools they are designed to work
+    with. The user does not have to adjust the values for an exact tool. For
+ example, <code>&lt;optimization&gt;speed</code> has the same meaning for
+ all C++ compilers and the user does not have to worry about the exact
+ options passed to the compiler's command line.
+ </para>
+
+ <para>
+ Besides such portable features there are special 'raw' features that
+ allow the user to pass any value to the command line parameters for a
+ particular tool, if so desired. For example, the
+ <code>&lt;cxxflags&gt;</code> feature allows you to pass any command line
+ options to a C++ compiler. The <code>&lt;include&gt;</code> feature
+ allows you to pass any string preceded by <code>-I</code> and the interpretation
+ is tool-specific. <!-- It's really tool-specific? That surprises me --> (See <xref
+ linkend="bbv2.faq.external"/> for an example of very smart usage of that
+ feature). Of course one should always strive to use portable
+    features, but these are still provided as a backdoor just to make
+ sure Boost.Build does not take away any control from the user.
+ </para>
+
+ <para>
+ Using portable features is a good idea because:
+ <itemizedlist>
+ <listitem>
+ <para>When a portable feature is given a fixed set of
+ values, you can build your project with two different
+ settings of the feature and Boost.Build will automatically
+ use two different directories for generated files.
+ Boost.Build does not try to separate targets built with
+ different raw options.
+ <!-- It's a computer program. It doesn't "care" about options -->
+ </para>
+ </listitem>
+
+ <listitem>
+          <para>Unlike with “raw” features, you don't need to use
+          specific command-line flags in your Jamfile, and your project will be
+          more likely to work with other tools.
+ </para>
+ </listitem>
+ </itemizedlist>
+ </para>
+
+    <bridgehead>Steps for adding a feature</bridgehead>
+ <!-- This section is redundant with the previous one -->
+ <para>Adding a feature requires three steps:
+
+ <orderedlist>
+ <listitem><para>Declaring a feature. For that, the "feature.feature"
+ rule is used. You have to decide on the set of <link
+ linkend="bbv2.reference.features.attributes">feature
+ attributes</link>:
+
+ <itemizedlist>
+ <listitem><para>if you want a feature value set for one target
+          to automatically propagate to its dependent targets then make it
+          “propagated”. <!-- Examples needed. --></para></listitem>
+
+ <listitem><para>if a feature does not have a fixed list of
+          values, it must be “free.” For example, the <code>include
+ </code> feature is a free feature.</para></listitem>
+
+ <listitem><para>if a feature is used to refer to a path relative
+          to the Jamfile, it must be a “path” feature. Such features will
+ also get their values automatically converted to Boost.Build's
+ internal path representation. For example, <code>include</code>
+ is a path feature.</para></listitem>
+
+          <listitem><para>if a feature is used to refer to some target, it
+          must be a “dependency” feature. <!-- for example? --></para>
+
+ <!-- Any other feature attributes? -->
+ </listitem>
+ </itemizedlist>
+ </para>
+ </listitem>
+
+ <listitem><para>Representing the feature value in a
+ target-specific variable. Build actions are command
+ templates modified by Boost.Jam variable expansions. The
+ <code>toolset.flags</code> rule sets a target-specific
+ variable to the value of a feature.</para></listitem>
+
+ <listitem><para>Using the variable. The variable set in step 2 can
+ be used in a build action to form command parameters or
+ files.</para></listitem>
+
+ </orderedlist>
+ </para>
+
+ <bridgehead>Another example</bridgehead>
+
+ <para>Here's another example.
+ Let's see how we can make a feature that refers to a target. For example,
+ when linking dynamic libraries on Windows, one sometimes needs to
+ specify a "DEF file", telling what functions should be exported. It
+ would be nice to use this file like this:
+<programlisting>
+ lib a : a.cpp : &lt;def-file&gt;a.def ;
+</programlisting>
+<!-- Why would that be nice? It seems to me that having a.def in the sources is the obvious and much nicer thing to do:
+
+ lib a : a.cpp a.def ;
+-->
+ Actually, this feature is already supported, but anyway...
+ <!-- Something about saying that is very off-putting. I'm
+ sorry that I can't put my finger on it -->
+ </para>
+
+ <orderedlist>
+ <listitem>
+ <para>Since the feature refers to a target, it must be "dependency".
+<programlisting>
+feature def-file : : free dependency ;
+</programlisting>
+ </para></listitem>
+
+ <listitem><para>One of the toolsets that cares about
+ <!-- The toolset doesn't "care." What do your really mean? -->
+ DEF files is msvc. The following line should be added to it.
+ <!-- Are you saying the msvc toolset is broken (or that it
+ doesn't use DEF files) as-shipped and the reader needs to
+ fix it? -->
+
+<programlisting>
+flags msvc.link DEF_FILE &lt;def-file&gt; ;
+</programlisting>
+ <!-- And that line does... what? -->
+ </para></listitem>
+
+ <listitem><para>Since the DEF_FILE variable is not used by the
+msvc.link action,
+<!-- It's not? You just told us that MSVC "cares" about DEF files. I
+ presume that means that it uses them in some appropriate way? -->
+we need to modify it to be:
+
+<programlisting>
+actions link bind DEF_FILE
+{
+ $(.LD) .... /DEF:$(DEF_FILE) ....
+}
+</programlisting>
+ </para>
+
+
+ <para> Note the <code>bind DEF_FILE</code> part. It tells
+ Boost.Build to translate the internal target name in
+ <varname>DEF_FILE</varname> to a corresponding filename in
+ the <code>link</code> action. Without it the expansion of
+ <code>$(DEF_FILE)</code> would be a strange symbol that is
+ not likely to make sense for the linker.
+ </para>
+
+ <!-- I have a note here that says: "none of this works for
+ targets in general, only source files." I'm not sure
+ what I meant by that; maybe you can figure it out. -->
+ <para>
+      We are almost done, except for adding the following code to <filename>msvc.jam</filename>:
+
+<programlisting>
+rule link
+{
+ DEPENDS $(&lt;) : [ on $(&lt;) return $(DEF_FILE) ] ;
+}
+</programlisting>
+<!-- You *must* explain the part in [...] above. It's completely opaque to the casual reader -->
+
+ This is a workaround for a bug in Boost.Build engine, which will hopefully
+ be fixed one day.
+ <!-- This is *NOT* a bug!! Anyway, BBv2 shouild handle this automatically. Why doesn't it? -->
+</para></listitem>
+
+ </orderedlist>
+
+ <bridgehead>Variants and composite features.</bridgehead>
+
+ <para>Sometimes you want to create a shortcut for some set of
+ features. For example, <code>release</code> is a value of
+ <code>&lt;variant&gt;</code> and is a shortcut for a set of features.
+ </para>
+
+ <para>It is possible to define your own build variants. For example:
+<programlisting>
+variant crazy : &lt;optimization&gt;speed &lt;inlining&gt;off
+ &lt;debug-symbols&gt;on &lt;profiling&gt;on ;
+</programlisting>
+ will define a new variant with the specified set of properties. You
+ can also extend an existing variant:
+<programlisting>
+variant super_release : release : &lt;define&gt;USE_ASM ;
+</programlisting>
+ In this case, <code>super_release</code> will expand to all properties
+ specified by <code>release</code>, and the additional one you've specified.
+ </para>
+
+ <para>You are not restricted to using the <code>variant</code> feature
+ only.
+ <!-- What do you mean by that? How is defining a new feature related to what came before? -->
+    Here's an example that defines a brand new feature:
+<programlisting>
+feature parallelism : mpi fake none : composite link-incompatible ;
+feature.compose &lt;parallelism&gt;mpi : &lt;library&gt;/mpi//mpi/&lt;parallelism&gt;none ;
+feature.compose &lt;parallelism&gt;fake : &lt;library&gt;/mpi//fake/&lt;parallelism&gt;none ;
+</programlisting>
+<!-- The use of the <library>/mpi//mpi/<parallelism>none construct
+ above is at best confusing and unexplained -->
+ This will allow you to specify the value of feature
+ <code>parallelism</code>, which will expand to link to the necessary
+ library.
+ </para>
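+
+    <para>A target could then request MPI support simply by setting the feature
+    in its requirements; the target and source names here are illustrative:
+<programlisting>
+exe solver : solver.cpp : &lt;parallelism&gt;mpi ;
+</programlisting>
+    </para>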
+
+ </section>
+
+ <section id="bbv2.extending.rules">
+ <title>Main target rules</title>
+ <para>
+      A main target rule (e.g. “<link linkend="bbv2.tasks.programs">exe</link>”
+      or “<link linkend="bbv2.tasks.libraries">lib</link>”) creates a top-level target. It is quite
+      likely that you will want to declare your own, and there are two ways to do that.
+ <!-- Why did "that" get changed to "this" above? -->
+ </para>
+
+ <para id="bbv2.extending.rules.main-type">The first way applies when
+<!-- This is not a "way of defining a main target rule." Rephrase this and the previous sentence. -->
+    your target rule should just produce a target of a specific type. In that case, a
+ rule is already defined for you! When you define a new type, Boost.Build
+ automatically defines a corresponding rule. The name of the rule is
+ obtained from the name of the type, by downcasing all letters and
+ replacing underscores with dashes.
+ <!-- This strikes me as needless complexity, and confusing. Why
+ do we have the uppercase-underscore convention for target
+ types? If we just dropped that, the rule names could be
+ the same as the type names. -->
+ For example, if you create a module
+ <filename>obfuscate.jam</filename> containing:
+
+<programlisting>
+import type ;
+type.register OBFUSCATED_CPP : ocpp ;
+
+import generators ;
+generators.register-standard obfuscate.file : CPP : OBFUSCATED_CPP ;
+</programlisting>
+ and import that module, you'll be able to use the rule "obfuscated-cpp"
+    in Jamfiles, which will convert sources to the OBFUSCATED_CPP type.
+ </para>
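+
+    <para>A Jamfile could then contain, for example (the target and file names
+    are illustrative):
+<programlisting>
+obfuscated-cpp hidden : hidden.cpp ;
+</programlisting>
+    </para>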
+
+ <para>
+ The second way is to write a wrapper rule that calls any of the existing
+ rules. For example, suppose you have only one library per directory and
+ want all cpp files in the directory to be compiled into that library. You
+ can achieve this effect using:
+<programlisting>
+lib codegen : [ glob *.cpp ] ;
+</programlisting>
+ If you want to make it even simpler, you could add the following
+ definition to the <filename>Jamroot.jam</filename> file:
+<programlisting>
+rule glib ( name : extra-sources * : requirements * )
+{
+ lib $(name) : [ glob *.cpp ] $(extra-sources) : $(requirements) ;
+}
+</programlisting>
+ allowing you to reduce the Jamfile to just
+<programlisting>
+glib codegen ;
+</programlisting>
+ </para>
+
+ <para>
+ Note that because you can associate a custom generator with a target type,
+ the logic of building can be rather complicated. For example, the
+ <code>boostbook</code> module declares a target type
+ <code>BOOSTBOOK_MAIN</code> and a custom generator for that type. You can
+ use that as example if your main target rule is non-trivial.
+ </para>
+ </section>
+
+ <section id="bbv2.extending.toolset_modules">
+
+ <title>Toolset modules</title>
+
+ <para>
+ If your extensions will be used only on one project, they can be placed in
+ a separate <filename>.jam</filename> file and imported by your
+      <filename>Jamroot.jam</filename>. If the extensions will be used on many
+      projects, users will thank you for the finishing touch of packaging them
+      as a proper toolset module, as described below.
+ </para>
+
+ <para>The <code>using</code> rule provides a standard mechanism
+ for loading and configuring extensions. To make it work, your module
+ <!-- "module" hasn't been defined yet. Furthermore you haven't
+ said anything about where that module file must be
+ placed. -->
+ should provide an <code>init</code> rule. The rule will be called
+ with the same parameters that were passed to the
+ <code>using</code> rule. The set of allowed parameters is
+ determined by you. For example, you can allow the user to specify
+ paths, tool versions, and other options.
+ <!-- But it's not entirely arbitrary. We have a standard
+ parameter order which you should describe here for
+ context. -->
+ </para>
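+
+    <para>A minimal module skeleton might look like the following sketch; the
+    module name and the (version, command, options) parameter convention shown
+    here are assumptions, not requirements:
+<programlisting>
+# Sketch of a hypothetical toolset module, e.g. yfc.jam
+rule init ( version ? : command * : options * )
+{
+    # validate the parameters and record the configuration here
+}
+</programlisting>
+    Users would then configure it with, for example, <code>using yfc : 3.3 ;</code>.
+    </para>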
+
+ <para>Here are some guidelines that help to make Boost.Build more
+ consistent:
+ <itemizedlist>
+ <listitem><para>The <code>init</code> rule should never fail. Even if
+ the user provided an incorrect path, you should emit a warning and go
+ on. Configuration may be shared between different machines, and
+ wrong values on one machine can be OK on another.
+ <!-- So why shouldn't init fail on machines where it's wrong?? -->
+ </para></listitem>
+
+ <listitem><para>Prefer specifying the command to be executed
+ to specifying the tool's installation path. First of all, this
+ gives more control: it's possible to specify
+<programlisting>
+/usr/bin/g++-snapshot
+time g++
+<!-- Is this meant to be a single command? If not, insert "or" -->
+</programlisting>
+ as the command. Second, while some tools have a logical
+ "installation root", it's better if the user doesn't have to remember whether
+ a specific tool requires a full command or a path.
+ <!-- But many tools are really collections: e.g. a
+ compiler, a linker, and others. The idea that the
+ "command to invoke" has any significance may be
+ completely bogus. Plus if you want to allow "time
+ /usr/bin/g++" the toolset may need to somehow parse
+ the command and find the path when it needs to invoke
+ some related executable. And in that case, will the
+ command be ignored? This scheme doesn't scale and
+ should be fixed. -->
+ </para></listitem>
+
+ <listitem><para>Check for multiple initialization. A user can try to
+ initialize the module several times. You need to check for this
+ and decide what to do. Typically, unless you support several
+ versions of a tool, duplicate initialization is a user error.
+ <!-- Why should that be typical? -->
+ If the
+ tool's version can be specified during initialization, make sure the
+ version is either always specified, or never specified (in which
+      case the tool is initialized only once). For example, if you allow:
+<programlisting>
+using yfc ;
+using yfc : 3.3 ;
+using yfc : 3.4 ;
+</programlisting>
+ Then it's not clear if the first initialization corresponds to
+ version 3.3 of the tool, version 3.4 of the tool, or some other
+ version. This can lead to building twice with the same version.
+ <!-- That would not be so terrible, and is much less harmful
+ than this restriction, IMO. It makes site-config
+ harder to maintain than necessary. -->
+ </para></listitem>
+
+    <listitem><para>If possible, <code>init</code> should be callable
+    with no parameters, in which case it should try to autodetect all
+ the necessary information, for example, by looking for a tool in
+ <envar>PATH</envar> or in common installation locations. Often this
+ is possible and allows the user to simply write:
+<programlisting>
+using yfc ;
+</programlisting>
+ </para></listitem>
+
+ <listitem><para>Consider using facilities in the
+ <code>tools/common</code> module. You can take a look at how
+ <code>tools/gcc.jam</code> uses that module in the <code>init</code> rule.
+ </para></listitem>
+
+ </itemizedlist>
+ </para>
+
+
+
+
+ </section>
+
+ </chapter>
+
+<!--
+ Local Variables:
+ sgml-indent-data: t
+ sgml-parent-document: ("userman.xml" "chapter")
+ sgml-set-face: t
+ End:
+-->
diff --git a/tools/build/doc/src/faq.xml b/tools/build/doc/src/faq.xml
new file mode 100644
index 0000000000..9a7f9c4586
--- /dev/null
+++ b/tools/build/doc/src/faq.xml
@@ -0,0 +1,489 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<chapter id="bbv2.faq">
+ <title>Frequently Asked Questions</title>
+
+ <section id="bbv2.faq.featurevalue">
+ <title>
+      How do I get the current value of a feature in a Jamfile?
+ </title>
+
+ <para>
+      This is not possible, since a Jamfile does not have a "current" value of any
+      feature, be it toolset, build variant or anything else. For a single
+      run of Boost.Build, any given main target can be
+      built with several property sets. For example, a user can request two build
+      variants on the command line, or one library may be built as shared when used
+      from one application and as static when used from another. Each Jamfile
+      is read only once, so generally there is no single value of a feature you
+      could access in a Jamfile.
+ </para>
+
+ <para>
+ A feature has a specific value only when building a target, and there are
+ two ways you can use that value:
+ </para>
+
+ <itemizedlist>
+ <listitem>
+ <simpara>
+        Use conditional requirements or indirect conditional requirements
+        (a small example follows this list). See
+        <xref linkend="bbv2.overview.targets.requirements.conditional"/>.
+ </simpara>
+ </listitem>
+ <listitem>
+      <simpara>
+        Define a custom generator and a custom main target type. The custom
+        generator can do arbitrary processing of properties. See the <xref
+        linkend="bbv2.extender">extender manual</xref>.
+      </simpara>
+ </listitem>
+ </itemizedlist>
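+
+    <para>
+      As an illustration of the first approach, a conditional requirement makes
+      a property depend on the value another feature takes for a particular
+      build; the target and compiler flag below are illustrative:
+<programlisting>
+exe app : app.cpp : &lt;toolset&gt;gcc:&lt;cxxflags&gt;-fno-strict-aliasing ;
+</programlisting>
+    </para>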
+ </section>
+
+ <section id="bbv2.faq.duplicate">
+ <title>
+ I am getting a "Duplicate name of actual target" error. What does that
+ mean?
+ </title>
+
+ <para>
+ The most likely case is that you are trying to compile the same file
+ twice, with almost the same, but differing properties. For example:
+<programlisting>
+exe a : a.cpp : &lt;include&gt;/usr/local/include ;
+exe b : a.cpp ;
+</programlisting>
+ </para>
+
+ <para>
+ The above snippet requires two different compilations of
+ <code>a.cpp</code>, which differ only in their <literal>include</literal>
+ property. Since the <literal>include</literal> feature is declared as
+ <literal>free</literal> Boost.Build does not create a separate build
+ directory for each of its values and those two builds would both produce
+ object files generated in the same build directory. Ignoring this and
+ compiling the file only once would be dangerous as different includes
+ could potentially cause completely different code to be compiled.
+ </para>
+
+ <para>
+ To solve this issue, you need to decide if the file should be compiled
+ once or twice.
+ </para>
+
+ <orderedlist>
+ <listitem>
+ <para>
+ To compile the file only once, make sure that properties are the same
+ for both target requests:
+<programlisting>
+exe a : a.cpp : &lt;include&gt;/usr/local/include ;
+exe b : a.cpp : &lt;include&gt;/usr/local/include ;
+</programlisting>
+ or:
+<programlisting>
+alias a-with-include : a.cpp : &lt;include&gt;/usr/local/include ;
+exe a : a-with-include ;
+exe b : a-with-include ;
+</programlisting>
+      or if you want the <literal>include</literal> property not to affect
+ how any other sources added for the built <code>a</code> and
+ <code>b</code> executables would be compiled:
+<programlisting>
+obj a-obj : a.cpp : &lt;include&gt;/usr/local/include ;
+exe a : a-obj ;
+exe b : a-obj ;
+</programlisting>
+ </para>
+ <para>
+ Note that in both of these cases the <literal>include</literal>
+ property will be applied only for building these object files and not
+ any other sources that might be added for targets <code>a</code> and
+ <code>b</code>.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ To compile the file twice, you can tell Boost.Build to compile it to
+ two separate object files like so:
+<programlisting>
+ obj a_obj : a.cpp : &lt;include&gt;/usr/local/include ;
+ obj b_obj : a.cpp ;
+ exe a : a_obj ;
+ exe b : b_obj ;
+</programlisting>
+ or you can make the object file targets local to the main target:
+<programlisting>
+ exe a : [ obj a_obj : a.cpp : &lt;include&gt;/usr/local/include ] ;
+ exe b : [ obj a_obj : a.cpp ] ;
+</programlisting>
+ which will cause Boost.Build to actually change the generated object
+ file names a bit for you and thus avoid any conflicts.
+ </para>
+ <para>
+ Note that in both of these cases the <literal>include</literal>
+ property will be applied only for building these object files and not
+ any other sources that might be added for targets <code>a</code> and
+ <code>b</code>.
+ </para>
+ </listitem>
+ </orderedlist>
+
+ <para>
+      A good question is why Boost.Build cannot use some of the above
+ approaches automatically. The problem is that such magic would only help
+ in half of the cases, while in the other half it would be silently doing
+ the wrong thing. It is simpler and safer to ask the user to clarify his
+ intention in such cases.
+ </para>
+ </section>
+
+ <section id="bbv2.faq.envar">
+ <title>
+ Accessing environment variables
+ </title>
+
+ <para>
+ Many users would like to use environment variables in Jamfiles, for
+ example, to control the location of external libraries. In many cases it
+ is better to declare those external libraries in the site-config.jam file,
+ as documented in the <link linkend="bbv2.recipies.site-config">recipes
+ section</link>. However, if the users already have the environment
+ variables set up, it may not be convenient for them to set up their
+ site-config.jam files as well and using the environment variables might be
+ reasonable.
+ </para>
+
+ <para>
+ Boost.Jam automatically imports all environment variables into its
+      built-in .ENVIRON module so users can read them from there directly or by
+ using the helper os.environ rule. For example:
+<programlisting>
+import os ;
+local unga-unga = [ os.environ UNGA_UNGA ] ;
+ECHO $(unga-unga) ;
+</programlisting>
+ or a bit more realistic:
+<programlisting>
+import os ;
+local SOME_LIBRARY_PATH = [ os.environ SOME_LIBRARY_PATH ] ;
+exe a : a.cpp : &lt;include&gt;$(SOME_LIBRARY_PATH) ;
+</programlisting>
+ </para>
+ </section>
+
+ <section id="bbv2.faq.proporder">
+ <title>
+ How to control properties order?
+ </title>
+
+ <para>
+ For internal reasons, Boost.Build sorts all the properties alphabetically.
+ This means that if you write:
+<programlisting>
+exe a : a.cpp : &lt;include&gt;b &lt;include&gt;a ;
+</programlisting>
+      then the command line will first mention the <code>a</code> include
+ directory, and then <code>b</code>, even though they are specified in the
+ opposite order. In most cases, the user does not care. But sometimes the
+ order of includes, or other properties, is important. For such cases, a
+ special syntax is provided:
+<programlisting>
+exe a : a.cpp : &lt;include&gt;a&amp;&amp;b ;
+</programlisting>
+ </para>
+
+ <para>
+ The <code>&amp;&amp;</code> symbols separate property values and specify
+ that their order should be preserved. You are advised to use this feature
+ only when the order of properties really matters and not as a convenient
+ shortcut. Using it everywhere might negatively affect performance.
+ </para>
+ </section>
+
+ <section id="bbv2.faq.liborder">
+ <title>
+ How to control the library linking order on Unix?
+ </title>
+
+ <para>
+ On Unix-like operating systems, the order in which static libraries are
+ specified when invoking the linker is important, because by default, the
+      linker uses one pass through the libraries list. Passing the libraries in
+ the incorrect order will lead to a link error. Further, this behaviour is
+ often used to make one library override symbols from another. So,
+ sometimes it is necessary to force specific library linking order.
+ </para>
+
+ <para>
+ Boost.Build tries to automatically compute the right order. The primary
+ rule is that if library <code>a</code> "uses" library <code>b</code>, then
+ library <code>a</code> will appear on the command line before library
+ <code>b</code>. Library <code>a</code> is considered to use <code>b</code>
+      if <code>b</code> is present either in the sources of library <code>a</code>,
+      or the use of <code>b</code> is listed in the requirements of <code>a</code>. To explicitly specify
+ the <literal>use</literal> relationship one can use the
+ <literal>&lt;use&gt;</literal> feature. For example, both of the following
+ lines will cause <code>a</code> to appear before <code>b</code> on the
+ command line:
+<programlisting>
+lib a : a.cpp b ;
+lib a : a.cpp : &lt;use&gt;b ;
+</programlisting>
+ </para>
+
+ <para>
+ The same approach works for searched libraries as well:
+<programlisting>
+lib z ;
+lib png : : &lt;use&gt;z ;
+exe viewer : viewer png z ;
+</programlisting>
+ </para>
+ </section>
+
+ <section id="bbv2.faq.external">
+ <title>
+      Can I capture external program output using a Boost.Jam variable?
+ </title>
+
+ <para>
+ The <literal>SHELL</literal> builtin rule may be used for this purpose:
+<programlisting>
+local gtk_includes = [ SHELL "gtk-config --cflags" ] ;
+</programlisting>
+ </para>
+ </section>
+
+ <section id="bbv2.faq.projectroot">
+ <title>
+ How to get the project root (a.k.a. Jamroot) location?
+ </title>
+
+ <para>
+ You might want to use your project's root location in your Jamfiles. To
+ access it just declare a path constant in your Jamroot.jam file using:
+<programlisting>
+path-constant TOP : . ;
+</programlisting>
+ After that, the <code>TOP</code> variable can be used in every Jamfile.
+ </para>
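+
+    <para>
+      For example, a Jamfile anywhere in the project could then refer to a
+      directory relative to the project root; the include directory below is
+      illustrative:
+<programlisting>
+exe app : app.cpp : &lt;include&gt;$(TOP)/include ;
+</programlisting>
+    </para>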
+ </section>
+
+ <section id="bbv2.faq.flags">
+ <title>
+ How to change compilation flags for one file?
+ </title>
+
+ <para>
+ If one file must be compiled with special options, you need to explicitly
+ declare an <code>obj</code> target for that file and then use that target
+ in your <code>exe</code> or <code>lib</code> target:
+<programlisting>
+exe a : a.cpp b ;
+obj b : b.cpp : &lt;optimization&gt;off ;
+</programlisting>
+ Of course you can use other properties, for example to specify specific
+ C/C++ compiler options:
+<programlisting>
+exe a : a.cpp b ;
+obj b : b.cpp : &lt;cflags&gt;-g ;
+</programlisting>
+ You can also use <link linkend="bbv2.tutorial.conditions">conditional
+ properties</link> for finer control:
+<programlisting>
+exe a : a.cpp b ;
+obj b : b.cpp : &lt;variant&gt;release:&lt;optimization&gt;off ;
+</programlisting>
+ </para>
+ </section>
+
+ <section id="bbv2.faq.dll-path">
+ <title>
+ Why are the <literal>dll-path</literal> and <literal>hardcode-dll-paths
+ </literal> properties useful?
+ </title>
+ <note>
+ <para>
+ This entry is specific to Unix systems.
+ </para>
+ </note>
+ <para>
+ Before answering the questions, let us recall a few points about shared
+ libraries. Shared libraries can be used by several applications, or other
+      libraries, without physically including the library in the application,
+      which can greatly decrease the total application size. It is also possible
+ to upgrade a shared library when the application is already installed.
+ </para>
+
+ <para>
+      However, in order for an application that depends on shared libraries to
+      be started, the OS needs to find those shared libraries when the
+      application is launched. The dynamic linker will search a system-defined list of paths,
+      load the library and resolve the symbols. This means that you should
+ either change the system-defined list, given by the <envar>LD_LIBRARY_PATH
+ </envar> environment variable, or install the libraries to a system
+ location. This can be inconvenient when developing, since the libraries
+ are not yet ready to be installed, and cluttering system paths may be
+ undesirable. Luckily, on Unix there is another way.
+ </para>
+
+ <para>
+ An executable can include a list of additional library paths, which will
+ be searched before system paths. This is excellent for development because
+ the build system knows the paths to all libraries and can include them in
+ the executables. That is done when the <literal>hardcode-dll-paths
+ </literal> feature has the <literal>true</literal> value, which is the
+ default. When the executables should be installed, the story is different.
+ </para>
+
+ <para>
+      Obviously, an installed executable should not contain hardcoded paths to your
+ development tree. <!-- Make the following parenthised sentence a footer
+ note --> (The <literal>install</literal> rule explicitly disables the
+ <literal>hardcode-dll-paths</literal> feature for that reason.) However,
+ you can use the <literal>dll-path</literal> feature to add explicit paths
+ manually. For example:
+<programlisting>
+install installed : application : &lt;dll-path&gt;/usr/lib/snake
+ &lt;location&gt;/usr/bin ;
+</programlisting>
+ will allow the application to find libraries placed in the <filename>
+ /usr/lib/snake</filename> directory.
+ </para>
+
+ <para>
+ If you install libraries to a nonstandard location and add an explicit
+ path, you get more control over libraries which will be used. A library of
+ the same name in a system location will not be inadvertently used. If you
+ install libraries to a system location and do not add any paths, the
+ system administrator will have more control. Each library can be
+ individually upgraded, and all applications will use the new library.
+ </para>
+
+ <para>
+ Which approach is best depends on your situation. If the libraries are
+ relatively standalone and can be used by third party applications, they
+ should be installed in the system location. If you have lots of libraries
+ which can be used only by your application, it makes sense to install them
+ to a nonstandard directory and add an explicit path, like the example
+ above shows. Please also note that guidelines for different systems differ
+ in this respect. For example, the Debian GNU guidelines prohibit any
+ additional search paths while Solaris guidelines suggest that they should
+ always be used.
+ </para>
+ </section>
+
+ <section id="bbv2.recipies.site-config">
+ <title>Targets in site-config.jam</title>
+
+ <para>
+ It is desirable to declare standard libraries available on a given system.
+      Putting such target declarations in a specific project's Jamfile is not
+      ideal, since locations of the libraries can vary between different
+ development machines and then such declarations would need to be
+ duplicated in different projects. The solution is to declare the targets
+ in Boost.Build's <filename>site-config.jam</filename> configuration file:
+<programlisting>
+project site-config ;
+lib zlib : : &lt;name&gt;z ;
+</programlisting>
+ </para>
+
+ <para>
+ Recall that both <filename>site-config.jam</filename> and
+ <filename>user-config.jam</filename> are projects, and everything you can
+ do in a Jamfile you can do in those files as well. So, you declare a
+ project id and a target. Now, one can write:
+<programlisting>
+exe hello : hello.cpp /site-config//zlib ;
+</programlisting>
+ in any Jamfile.
+ </para>
+ </section>
+
+ <section id="bbv2.faq.header-only-libraries">
+ <title>Header-only libraries</title>
+
+ <para>
+ In modern C++, libraries often consist of just header files, without any
+ source files to compile. To use such libraries, you need to add proper
+ includes and possibly defines to your project. But with a large number of
+ external libraries it becomes problematic to remember which libraries are
+ header only, and which ones you have to link to. However, with Boost.Build
+ a header-only library can be declared as a Boost.Build target and all
+ dependents can use such a library without having to remember whether it is
+ a header-only library or not.
+ </para>
+
+ <para>
+ Header-only libraries may be declared using the <code>alias</code> rule,
+ specifying their include path as part of their usage requirements, for
+ example:
+<programlisting>
+alias my-lib
+ : # no sources
+ : # no build requirements
+ : # no default build
+ : &lt;include&gt;whatever ;
+</programlisting>
+ The includes specified in the usage requirements of <code>my-lib</code> are
+ automatically added to all of its dependants' build properties. The
+ dependants need not care whether <code>my-lib</code> is header-only or not,
+ and it is possible to later make <code>my-lib</code> into a regular
+ compiled library without having to change its dependants' declarations.
+ </para>
+
+ <para>
+ If you already have proper usage requirements declared for a project where
+ a header-only library is defined, you do not need to duplicate them for
+ the <code>alias</code> target:
+<programlisting>
+project my : usage-requirements &lt;include&gt;whatever ;
+alias mylib ;
+</programlisting>
+ </para>
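+
+ <para>
+ As an illustrative sketch (reusing the project id <code>my</code> and the
+ <code>mylib</code> alias from the example above), a Jamfile in another
+ project can then use the header-only library through its project id:
+<programlisting>
+exe app : app.cpp /my//mylib ;
+</programlisting>
+ and will pick up the <code>&lt;include&gt;whatever</code> usage requirement
+ automatically.
+ </para>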
+ </section>
+
+ <section id="bbv2.faq.names">
+ <title>
+ What is the difference between Boost.Build,
+ <filename>b2</filename>, <filename>bjam</filename> and Perforce Jam?
+ </title>
+
+ <para>
+ Boost.Build is the name of the complete build system. The executable that runs
+ it is <filename>b2</filename>. That executable is written in C and implements
+ performance-critical algorithms, like traversal of the dependency graph and execution
+ of commands. It also implements the interpreted language in which the rest of
+ Boost.Build is written. This executable is formally called the "Boost.Build engine".
+ </para>
+
+ <para>
+ The Boost.Build engine is derived from an earlier build tool called Perforce Jam. Originally,
+ there were just minor changes, and the filename was <filename>bjam</filename>. Later on,
+ with more and more changes, the similarity of names became a disservice to users, and as of
+ Boost 1.47.0, the official name of the executable was changed to <filename>b2</filename>.
+ A copy named <filename>bjam</filename> is still created for compatibility,
+ but you are encouraged to use the new name in all cases.
+ </para>
+
+ <para>
+ Perforce Jam was an important foundation, and we gratefully acknowledge its influence,
+ but for users today, these tools share only some basics of the interpreted language.
+ </para>
+
+ </section>
+
+</chapter>
+
+<!--
+ Local Variables:
+ mode: nxml
+ sgml-indent-data: t
+ sgml-parent-document: ("userman.xml" "chapter")
+ sgml-set-face: t
+ End:
+-->
diff --git a/tools/build/v2/doc/src/fragments.xml b/tools/build/doc/src/fragments.xml
index 78438a0c10..78438a0c10 100644
--- a/tools/build/v2/doc/src/fragments.xml
+++ b/tools/build/doc/src/fragments.xml
diff --git a/tools/build/v2/doc/src/howto.xml b/tools/build/doc/src/howto.xml
index de17a286a8..de17a286a8 100644
--- a/tools/build/v2/doc/src/howto.xml
+++ b/tools/build/doc/src/howto.xml
diff --git a/tools/build/doc/src/install.xml b/tools/build/doc/src/install.xml
new file mode 100644
index 0000000000..40c8b5ccbd
--- /dev/null
+++ b/tools/build/doc/src/install.xml
@@ -0,0 +1,150 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+ <chapter id="bbv2.installation">
+ <title>Installation</title>
+
+ <para>
+ To install Boost.Build from an official release or a nightly build, as
+ available on the <ulink url="http://boost.org/boost-build2">official web site</ulink>,
+ follow these steps:
+ </para>
+
+ <orderedlist>
+ <listitem>
+ <simpara>
+ Unpack the release. On the command line, go to the root of the
+ unpacked tree.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ Run either <command>.\bootstrap.bat</command> (on Windows), or
+ <command>./bootstrap.sh</command> (on other operating systems).
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ Run
+ <screen>./b2 install --prefix=<replaceable>PREFIX</replaceable></screen>
+ where <replaceable>PREFIX</replaceable> is a directory where you
+ want Boost.Build to be installed.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ Optionally, add <filename><replaceable>PREFIX</replaceable>/bin</filename>
+ to your <envar>PATH</envar> environment variable.
+ </simpara>
+ </listitem>
+ </orderedlist>
+
+ <para>If you are not using a Boost.Build package, but rather the version
+ bundled with the Boost C++ Libraries, the above commands should be run
+ in the <filename>tools/build/v2</filename> directory.</para>
+
+ <para>
+ Now that Boost.Build is installed, you can try some of the examples. Copy
+ <filename><replaceable>PREFIX</replaceable>/share/boost-build/examples/hello</filename>
+ to a different directory, then change to that directory and run:
+<screen><filename><replaceable>PREFIX</replaceable>/bin/b2</filename></screen>
+ A simple executable should be built.
+ </para>
+
+ <!--
+ <simpara>
+ Configure Boost.Build to recognize the build resources (such
+ as compilers and libraries) you have installed on your
+ system. Open the
+ <filename>user-config.jam</filename> file in the Boost.Build
+ root directory and follow the instructions there to describe
+ your toolsets and libraries, and, if necessary, where they
+ are located.
+ </simpara>
+ </listitem>
+ -->
+
+ <!-- This part should not go into introduction docs, but we need to
+ place it somewhere.
+
+ <para>A slightly better way is to copy
+ <filename>new/user-config.jam</filename> into one of the locations
+ where it can be found (given in <link linkend=
+ "bbv2.reference.init.config">this table</link>). This prevents you
+ from accidentally overwriting your config when updating.</para>
+
+ -->
+
+ <!--
+ <bridgehead>Information for distributors</bridgehead>
+
+ <para>
+ If you're planning to package Boost.Build for a Linux distribution,
+ please follow these guidelines:
+
+ <itemizedlist>
+ <listitem><para>Create a separate package for Boost.Jam.</para></listitem>
+
+ <listitem>
+ <para>Create another package for Boost.Build, and make
+ this package install all Boost.Build files to
+ <filename>/usr/share/boost-build</filename> directory. After
+ install, that directory should contain everything you see in
+ Boost.Build release package, except for
+ <filename>jam_src</filename> directory. If you're using Boost CVS
+ to obtain Boost.Build, as opposed to release package, take
+ everything from the <filename>tools/build/v2</filename> directory.
+ For a check, make sure that
+ <filename>/usr/share/boost-build/boost-build.jam</filename> is installed.
+ </para>
+
+ <para>Placing Boost.Build into <filename>/usr/share/boost-build</filename>
+ will make sure that <command>b2</command> will find Boost.Build
+ without any additional setup.</para>
+ </listitem>
+
+ <listitem><para>Provide a
+ <filename>/etc/site-config.jam</filename> configuration file that will
+ contain:
+<programlisting>
+using gcc ;
+</programlisting>
+ You might want to add dependency from Boost.Build package to gcc,
+ to make sure that users can always build Boost.Build examples.
+ </para></listitem>
+ </itemizedlist>
+ </para>
+
+ <para>If those guidelines are met, users will be able to invoke
+ <command>b2</command> without any explicit configuration.
+ </para>
+
+
+ -->
+
+
+ </chapter>
+
+<!--
+ Local Variables:
+ mode: nxml
+ sgml-indent-data:t
+ sgml-parent-document:("userman.xml" "chapter")
+ sgml-set-face: t
+ sgml-omittag:nil
+ sgml-shorttag:nil
+ sgml-namecase-general:t
+ sgml-general-insert-case:lower
+ sgml-minimize-attributes:nil
+ sgml-always-quote-attributes:t
+ sgml-indent-step:2
+ sgml-exposed-tags:nil
+ sgml-local-catalogs:nil
+ sgml-local-ecat-files:nil
+ End:
+-->
+
diff --git a/tools/build/v2/doc/src/main-target.xml b/tools/build/doc/src/main-target.xml
index 84afac14f8..84afac14f8 100644
--- a/tools/build/v2/doc/src/main-target.xml
+++ b/tools/build/doc/src/main-target.xml
diff --git a/tools/build/doc/src/overview.xml b/tools/build/doc/src/overview.xml
new file mode 100644
index 0000000000..6dbb38a84a
--- /dev/null
+++ b/tools/build/doc/src/overview.xml
@@ -0,0 +1,1700 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+ <chapter id="bbv2.overview">
+ <title>Overview</title>
+
+ <para>
+ This section will provide the information necessary to create your own
+ projects using Boost.Build. The information provided here is relatively
+ high-level, and <xref linkend="bbv2.reference"/> as well as the on-line
+ help system must be used to obtain low-level documentation (see <xref
+ linkend="bbv2.reference.init.options.help"/>).
+ </para>
+
+ <para>
+ Boost.Build has two parts&mdash;a build engine
+ with its own interpreted language, and Boost.Build itself, implemented in
+ that language. The chain of events when you type
+ <command>b2</command> on the command line is as follows:
+ <orderedlist>
+ <listitem>
+ <para>
+ The Boost.Build executable tries to find Boost.Build modules and
+ loads the top-level module. The exact process is described in <xref linkend=
+ "bbv2.reference.init"/>.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ The top-level module loads user-defined configuration files,
+ <filename>user-config.jam</filename> and
+ <filename>site-config.jam</filename>, which define available toolsets.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ The Jamfile in the current directory is read. That in turn might
+ cause reading of further Jamfiles. As a result, a tree of projects
+ is created, with targets inside projects.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Finally, using the build request specified on the command line,
+ Boost.Build decides which targets should be built and how. That
+ information is passed back to Boost.Jam, which takes care of
+ actually running the scheduled build action commands.
+ </para>
+ </listitem>
+ </orderedlist>
+ </para>
+
+ <para>
+ So, to be able to successfully use Boost.Build, you need to know only four
+ things:
+ <itemizedlist>
+ <listitem>
+ <para>
+ <link linkend="bbv2.overview.configuration">How to configure
+ Boost.Build</link>
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ <link linkend="bbv2.overview.targets">How to declare targets in
+ Jamfiles</link>
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ <link linkend="bbv2.overview.build_process">How the build process
+ works</link>
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Some basics of the Boost.Jam language. See <xref linkend=
+ "bbv2.overview.jam_language"/>.
+ </para>
+ </listitem>
+ </itemizedlist>
+ </para>
+
+ <section id="bbv2.overview.concepts">
+ <title>Concepts</title>
+
+ <para>Boost.Build has a few unique concepts that are introduced in this section. The best
+ way to explain the concepts is by comparison with more classical build tools.</para>
+
+ <para>
+ When using any flavour of make, you directly specify <firstterm>targets</firstterm>
+ and the commands that are used to create them from other targets. The example below
+ creates <filename>a.o</filename> from <filename>a.c</filename> using a hardcoded
+ compiler invocation command.
+<programlisting>
+a.o: a.c
+ g++ -o a.o -g a.c
+</programlisting>
+ This is a rather low-level description mechanism and it's hard to adjust commands, options,
+ and sets of created targets depending on the compiler and operating system used.
+ </para>
+
+ <para>
+ To improve portability, most modern build systems provide a set of higher-level
+ functions that can be used in build description files. Consider this example:
+<programlisting>
+add_program ("a", "a.c")
+</programlisting>
+ This is a function call that creates the targets necessary to build an executable file
+ from the source file <filename>a.c</filename>. Depending on configured properties,
+ different command lines may be used. However, while <code>add_program</code> is
+ higher-level, it is still a rather thin layer. All targets are created immediately
+ when the build description is parsed, which makes it impossible to perform
+ multi-variant builds. Often, a change in any build property requires a complete
+ reconfiguration of the build tree.
+ </para>
+
+ <para>
+ In order to support true multivariant builds, Boost.Build introduces the concept of a
+ <indexterm> <primary>metatarget</primary> <secondary>definition</secondary></indexterm>
+ <indexterm> <primary>main target</primary> <see>metatarget</see> </indexterm>
+ <firstterm>metatarget</firstterm>&mdash;an object that is created when the build description
+ is parsed and can be called later with specific build properties to generate
+ actual targets.
+ </para>
+
+ <para>
+ Consider an example:
+<programlisting>
+exe a : a.cpp ;
+</programlisting>
+ When this declaration is parsed, Boost.Build creates a metatarget, but does not
+ yet decide what files must be created, or what commands must be used. After
+ all build files are parsed, Boost.Build considers the properties requested on the
+ command line. Suppose you have invoked Boost.Build with:
+<screen>
+b2 toolset=gcc toolset=msvc
+</screen>
+ In that case, the metatarget will be called twice, once with <code>toolset=gcc</code>
+ and once with <code>toolset=msvc</code>. Both invocations will produce concrete
+ targets that will have different extensions and use different command lines.
+ </para>
+
+ <para>
+ Another key concept is
+ <indexterm><primary>property</primary><secondary>definition</secondary></indexterm>
+ <firstterm>build property</firstterm>. A build property is a variable
+ that affects the build process. It can be specified on the command line, and is
+ passed when calling a metatarget. While all build tools have a similar mechanism,
+ Boost.Build differs by requiring that all build properties be declared in advance,
+ and by providing a large set of properties with portable semantics.
+ </para>
+
+ <para>
+ The final concept is <indexterm><primary>property</primary><secondary>propagation</secondary></indexterm>
+ <firstterm>property propagation</firstterm>. Boost.Build does not require that every
+ metatarget is called with the same properties. Instead, the
+ "top-level" metatargets are called with the properties specified on the command line.
+ Each metatarget can elect to augment or override some properties (in particular,
+ using the requirements mechanism, see <xref linkend="bbv2.overview.targets.requirements"/>).
+ Then, the dependency metatargets are called with the modified properties and produce
+ concrete targets that are then used in the build process. Of course, dependency metatargets
+ may in turn modify build properties and have dependencies of their own.
+ </para>
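+
+ <para>
+ For instance (a minimal sketch, with purely illustrative target names), a
+ propagated property requested for a top-level target is passed on to its
+ dependencies:
+<programlisting>
+lib util : util.cpp ;
+exe app : app.cpp util : &lt;optimization&gt;space ;
+</programlisting>
+ When <code>app</code> is built, its <code>&lt;optimization&gt;space</code>
+ requirement is propagated to <code>util</code>, so the library is compiled
+ with the same optimization setting.
+ </para>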
+
+ <para>For a more in-depth treatment of the requirements and concepts, you may refer
+ to <ulink url="http://syrcose.ispras.ru/2009/files/04_paper.pdf">SYRCoSE 2009 Boost.Build article</ulink>.
+ </para>
+
+ </section>
+
+ <section id="bbv2.overview.jam_language">
+ <title>Boost.Jam Language</title>
+
+ <para>
+ This section will describe the basics of the Boost.Jam language&#x2014;just
+ enough for writing Jamfiles. For more information, please see the
+ <link linkend="bbv2.jam">Boost.Jam</link> documentation.
+ </para>
+
+ <para>
+ <link linkend="bbv2.jam">Boost.Jam</link> has an interpreted, procedural
+ language. On the lowest level, a <link linkend="bbv2.jam">Boost.Jam
+ </link> program consists of variables and <indexterm><primary>rule
+ </primary></indexterm> <firstterm>rules</firstterm> (the Jam term for
+ functions). They are grouped into modules&#x2014;there is one global
+ module and a number of named modules. Besides that, a <link linkend=
+ "bbv2.jam">Boost.Jam</link> program contains classes and class
+ instances.
+ </para>
+
+ <para>
+ Syntactically, a <link linkend="bbv2.jam">Boost.Jam</link> program
+ consists of two kinds of elements&#x2014;keywords (which have a special
+ meaning to <link linkend="bbv2.jam">Boost.Jam</link>) and literals.
+ Consider this code:
+<programlisting>
+a = b ;
+</programlisting>
+ which assigns the value <literal>b</literal> to the variable <literal>a
+ </literal>. Here, <literal>=</literal> and <literal>;</literal> are
+ keywords, while <literal>a</literal> and <literal>b</literal> are
+ literals.
+ <warning>
+ <para>
+ All syntax elements, even keywords, must be separated by spaces. For
+ example, omitting the space character before <literal>;</literal>
+ will lead to a syntax error.
+ </para>
+ </warning>
+ If you want to use a literal value that is the same as some keyword, the
+ value can be quoted:
+<programlisting>
+a = "=" ;
+</programlisting>
+ </para>
+
+ <para>
+ All variables in <link linkend="bbv2.jam">Boost.Jam</link> have the same
+ type&#x2014;list of strings. To define a variable one assigns a value to
+ it, like in the previous example. An undefined variable is the same as a
+ variable with an empty value. Variables can be accessed using the
+ <code>$(<replaceable>variable</replaceable>)</code> syntax. For example:
+<programlisting>
+a = $(b) $(c) ;
+</programlisting>
+ </para>
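+
+ <para>
+ Since every value is a list, an assignment can combine several lists into
+ one. A small sketch:
+<programlisting>
+b = one two ;
+c = three ;
+a = $(b) $(c) ; # a is now: one two three
+</programlisting>
+ </para>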
+
+ <para>
+ Rules are defined by specifying the rule name, the parameter names, and
+ the allowed value list size for each parameter.
+<programlisting>
+rule <replaceable>example</replaceable>
+ (
+ <replaceable>parameter1</replaceable> :
+ <replaceable>parameter2 ?</replaceable> :
+ <replaceable>parameter3 +</replaceable> :
+ <replaceable>parameter4 *</replaceable>
+ )
+ {
+ # rule body
+ }
+ </programlisting>
+ When this rule is called, the list passed as the first argument must
+ have exactly one value. The list passed as the second argument can
+ either have one value or be empty. The two remaining arguments can be
+ arbitrarily long, but the third argument may not be empty.
+ </para>
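+
+ <para>
+ A call that satisfies these constraints might look like this (a sketch using
+ the <code>example</code> rule declared above):
+<programlisting>
+example first : : one two three ;
+</programlisting>
+ Here the first argument has exactly one value, the second is empty, the
+ third has three values, and the fourth is omitted entirely.
+ </para>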
+
+ <para>
+ The overview of <link linkend="bbv2.jam">Boost.Jam</link> language
+ statements is given below:
+<programlisting>
+helper 1 : 2 : 3 ;
+x = [ helper 1 : 2 : 3 ] ;
+</programlisting>
+ This code calls the named rule with the specified arguments. When the
+ result of the call must be used inside some expression, you need to add
+ brackets around the call, as shown on the second line.
+<programlisting>
+if cond { statements } [ else { statements } ]
+</programlisting>
+ This is a regular if-statement. The condition is composed of:
+ <itemizedlist>
+ <listitem>
+ <para>
+ Literals (true if at least one string is not empty)
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Comparisons: <code>a <replaceable>operator</replaceable> b</code>
+ where <replaceable>operator</replaceable> is one of
+ <code>=</code>, <code>!=</code>, <code>&lt;</code>,
+ <code>&gt;</code>, <code>&lt;=</code> or <code>&gt;=</code>. The
+ comparison is done pairwise between each string in the left and
+ the right arguments.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Logical operations: <code>! a</code>, <code>a &amp;&amp; b</code>,
+ <code>a || b</code>
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Grouping: <code>( cond )</code>
+ </para>
+ </listitem>
+ </itemizedlist>
+<programlisting>
+for var in list { statements }
+</programlisting>
+ Executes statements for each element in list, setting the variable
+ <varname>var</varname> to the element value.
+<programlisting>
+while cond { statements }
+</programlisting>
+ Repeatedly execute statements while cond remains true upon entry.
+<programlisting>
+return values ;
+</programlisting>
+ This statement should be used only inside a rule and assigns
+ <code>values</code> to the return value of the rule.
+ <warning>
+ <para>
+ The <code>return</code> statement does not exit the rule. For
+ example:
+<programlisting>
+rule test ( )
+{
+ if 1 = 1
+ {
+ return "reasonable" ;
+ }
+ return "strange" ;
+}
+</programlisting>
+ will return <literal>strange</literal>, not
+ <literal>reasonable</literal>.
+ </para>
+ </warning>
+<programlisting>
+import <replaceable>module</replaceable> ;
+import <replaceable>module</replaceable> : <replaceable>rule</replaceable> ;
+</programlisting>
+ The first form imports the specified module. All rules from that
+ module are made available using the qualified name: <code><replaceable>
+ module</replaceable>.<replaceable>rule</replaceable></code>. The second
+ form imports the specified rules only, and they can be called using
+ unqualified names.
+ </para>
+
+ <para id="bbv2.overview.jam_language.actions">
+ Sometimes, you need to specify the actual command lines to be used
+ when creating targets. In the jam language, you use named actions to do
+ this. For example:
+<programlisting>
+actions create-file-from-another
+{
+ create-file-from-another $(&lt;) $(&gt;)
+}
+</programlisting>
+ This specifies a named action called <literal>
+ create-file-from-another</literal>. The text inside braces is the
+ command to invoke. The <literal>$(&lt;)</literal> variable will be
+ expanded to a list of generated files, and the <literal>$(&gt;)
+ </literal> variable will be expanded to a list of source files.
+ </para>
+
+ <para>
+ To adjust the command line flexibly, you can define a rule with the same
+ name as the action and taking three parameters&mdash;targets, sources and
+ properties. For example:
+<programlisting>
+rule create-file-from-another ( targets * : sources * : properties * )
+{
+ if &lt;variant&gt;debug in $(properties)
+ {
+ OPTIONS on $(targets) = --debug ;
+ }
+}
+actions create-file-from-another
+{
+ create-file-from-another $(OPTIONS) $(&lt;) $(&gt;)
+}
+</programlisting>
+ In this example, the rule checks if a certain build property is specified.
+ If so, it sets the variable <varname>OPTIONS</varname> that is then used
+ inside the action. Note that the variables set "on a target" will be
+ visible only inside actions building that target, not globally. Were
+ they set globally, using a variable named <varname>OPTIONS</varname> in
+ two unrelated actions would be impossible.
+ </para>
+
+ <para>
+ More details can be found in the Jam reference, <xref
+ linkend="jam.language.rules"/>.
+ </para>
+ </section>
+
+ <section id="bbv2.overview.configuration">
+ <title>Configuration</title>
+
+ <para>
+ On startup, Boost.Build searches and reads two configuration files:
+ <filename>site-config.jam</filename> and <filename>user-config.jam</filename>.
+ The first one is usually installed and maintained by a system administrator, and
+ the second is for the user to modify. You can edit the one in the top-level
+ directory of your Boost.Build installation or create a copy in your home
+ directory and edit the copy. The following table explains where both files
+ are searched.
+ </para>
+
+ <table id="bbv2.reference.init.config">
+ <title>Search paths for configuration files</title>
+
+ <tgroup cols="3">
+ <thead>
+
+ <row>
+ <entry></entry>
+
+ <entry>site-config.jam</entry>
+
+ <entry>user-config.jam</entry>
+ </row>
+
+ </thead>
+ <tbody>
+
+ <row>
+ <entry>Linux</entry>
+
+ <entry>
+ <simpara><code>/etc</code></simpara>
+ <simpara><code>$HOME</code></simpara>
+ <simpara><code>$BOOST_BUILD_PATH</code></simpara>
+ </entry>
+
+ <entry>
+ <simpara><code>$HOME</code></simpara>
+ <simpara><code>$BOOST_BUILD_PATH</code></simpara>
+ </entry>
+ </row>
+
+ <row>
+ <entry>Windows</entry>
+
+ <entry>
+ <simpara><code>%SystemRoot%</code></simpara>
+ <simpara><code>%HOMEDRIVE%%HOMEPATH%</code></simpara>
+ <simpara><code>%HOME%</code></simpara>
+ <simpara><code>%BOOST_BUILD_PATH%</code></simpara>
+ </entry>
+
+ <entry>
+ <simpara><code>%HOMEDRIVE%%HOMEPATH%</code></simpara>
+ <simpara><code>%HOME%</code></simpara>
+ <simpara><code>%BOOST_BUILD_PATH%</code></simpara>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+
+ <tip>
+ <para>
+ You can use the <command>--debug-configuration</command> option to
+ find which configuration files are actually loaded.
+ </para>
+ </tip>
+
+ <para>
+ Usually, <filename>user-config.jam</filename> just defines the available compilers
+ and other tools (see <xref linkend="bbv2.recipies.site-config"/> for more advanced
+ usage). A tool is configured using the following syntax:
+ </para>
+
+<programlisting>
+using <replaceable>tool-name</replaceable> : ... ;
+</programlisting>
+<para>
+ The <code language="jam">using</code> rule is given the name of tool, and
+ will make that tool available to Boost.Build. For example,
+<programlisting>
+using gcc ;
+</programlisting> will make the <ulink url="http://gcc.gnu.org">GCC</ulink> compiler available.
+ </para>
+
+ <para>
+ All the supported tools are documented in <xref linkend="bbv2.reference.tools"/>,
+ including the specific options they take. Some general notes that apply to most
+ C++ compilers are below.
+ </para>
+
+ <para>
+ For all the C++ compiler toolsets that Boost.Build supports
+ out-of-the-box, the list of parameters to
+ <code language="jam">using</code> is the same: <parameter
+ class="function">toolset-name</parameter>, <parameter
+ class="function">version</parameter>, <parameter
+ class="function">invocation-command</parameter>, and <parameter
+ class="function">options</parameter>.
+ </para>
+
+ <para>If you have a single compiler, and the compiler executable
+ <itemizedlist>
+ <listitem><para>has its &#x201C;usual name&#x201D; and is in the
+ <envar>PATH</envar>, or</para></listitem>
+ <listitem><para>was installed in a standard &#x201C;installation
+ directory&#x201D;, or</para></listitem>
+ <listitem><para>can be found using a global system like the Windows
+ registry.</para></listitem>
+ </itemizedlist>
+ then it can be configured simply with:</para>
+<programlisting>
+using <replaceable>tool-name</replaceable> ;
+</programlisting>
+ <!-- TODO: mention auto-configuration? -->
+
+ <para>If the compiler is installed in a custom directory, you should provide the
+ command that invokes the compiler, for example:</para>
+<programlisting>
+using gcc : : g++-3.2 ;
+using msvc : : "Z:/Programs/Microsoft Visual Studio/vc98/bin/cl" ;
+</programlisting>
+ <para>
+ Some Boost.Build toolsets will use that path to take additional actions
+ required before invoking the compiler, such as calling vendor-supplied
+ scripts to set up its required environment variables. When the compiler
+ executables for C and C++ are different, the path to the C++ compiler
+ executable must be specified. The command can
+ be any command allowed by the operating system. For example:
+<programlisting>
+using msvc : : echo Compiling &#x26;&#x26; foo/bar/baz/cl ;
+</programlisting>
+ will work.
+ </para>
+
+ <para>
+ To configure several versions of a toolset, simply invoke the
+ <code language="jam">using</code> rule multiple times:
+<programlisting>
+using gcc : 3.3 ;
+using gcc : 3.4 : g++-3.4 ;
+using gcc : 3.2 : g++-3.2 ;
+</programlisting>
+ Note that in the first call to <code language="jam">using</code>, the
+ compiler found in the <envar>PATH</envar> will be used, and there is no
+ need to explicitly specify the command.
+ </para>
+
+<!-- TODO: This is not actually relevant for gcc now, and we need to rethink this
+ <para>As shown above, both the <parameter
+ class="function">version</parameter> and <parameter
+ class="function">invocation-command</parameter> parameters are
+ optional, but there's an important restriction: if you configure
+ the same toolset more than once, you must pass the <parameter
+ class="function">version</parameter>
+ parameter every time. For example, the following is not allowed:
+<programlisting>
+using gcc ;
+using gcc : 3.4 : g++-3.4 ;
+</programlisting>
+ because the first <functionname>using</functionname> call does
+ not specify a <parameter class="function">version</parameter>.
+ </para> -->
+
+ <para>
+ Many toolsets have an <parameter class="function">options</parameter>
+ parameter to fine-tune the configuration. All of
+ Boost.Build's standard compiler toolsets accept four options
+ <varname>cflags</varname>, <varname>cxxflags</varname>,
+ <varname>compileflags</varname> and <varname>linkflags</varname> as <parameter
+ class="function">options</parameter> specifying flags that will be
+ always passed to the corresponding tools. Values of the
+ <varname>cflags</varname> feature are passed directly to the C
+ compiler, values of the <varname>cxxflags</varname> feature are
+ passed directly to the C++ compiler, and values of the
+ <varname>compileflags</varname> feature are passed to both. For
+ example, to configure a <command>gcc</command> toolset so that it
+ always generates 64-bit code you could write:
+<programlisting>
+ using gcc : 3.4 : : &lt;compileflags&gt;-m64 &lt;linkflags&gt;-m64 ;
+</programlisting>
+ </para>
+
+ <warning>
+ <para>
+ Although the syntax used to specify toolset options is very similar
+ to the syntax used to specify requirements in Jamfiles, the toolset options
+ are not the same as features. Don't try to specify a feature value
+ in toolset initialization.
+ </para>
+ </warning>
+
+ </section>
+
+ <section id="bbv2.overview.invocation">
+ <title>Invocation</title>
+
+ <para>To invoke Boost.Build, type <command>b2</command> on the command line. Three kinds
+ of command-line tokens are accepted, in any order:</para>
+ <variablelist>
+ <varlistentry>
+ <term>options</term>
+
+ <listitem><para>Options start with either one or two dashes. The standard options
+ are listed below, and each project may add additional options.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>properties</term>
+
+ <listitem><para>Properties specify details of what you want to build (e.g. debug
+ or release variant). Syntactically, all command line tokens with an equal sign in them
+ are considered to specify properties. In the simplest form, a property looks like
+ <command><replaceable>feature</replaceable>=<replaceable>value</replaceable></command>
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term>target</term>
+
+ <listitem><para>All tokens that are neither options nor properties specify
+ what targets to build. The available targets entirely depend on the project
+ you are building.</para></listitem>
+ </varlistentry>
+ </variablelist>
+
+ <section id="bbv2.overview.invocation.examples">
+ <title>Examples</title>
+
+ <para>To build all targets defined in the Jamfile in the current directory with the default properties, run:
+<screen>
+b2
+</screen>
+ </para>
+
+ <para>To build specific targets, specify them on the command line:
+<screen>
+b2 lib1 subproject//lib2
+</screen>
+ </para>
+
+ <para>To request a certain value for some property, add <literal>
+ <replaceable>property</replaceable>=<replaceable>value</replaceable></literal> to the command line:
+<screen>
+b2 toolset=gcc variant=debug optimization=space
+</screen>
+ </para>
+ </section>
+
+ <section id="bbv2.overview.invocation.options">
+ <title>Options</title>
+
+ <para>Boost.Build recognizes the following command line options.</para>
+
+ <variablelist>
+
+ <varlistentry id="bbv2.reference.init.options.help">
+ <term><option>--help</option></term>
+ <listitem>
+ <para>Invokes the online help system. This prints general
+ information on how to use the help system with additional
+ --help* options.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--clean</option></term>
+ <listitem>
+ <para>Cleans all targets in the current directory and
+ in any subprojects. Note that unlike the <literal>clean</literal>
+ target in make, you can use <literal>--clean</literal>
+ together with target names to clean specific targets.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--clean-all</option></term>
+ <listitem>
+ <para>Cleans all targets,
+ no matter where they are defined. In particular, it will clean targets
+ in parent Jamfiles, and targets defined under other project roots.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--build-dir</option></term>
+ <listitem>
+ <para>Changes the build directories for all project roots being built. When
+ this option is specified, all Jamroot files must declare a project name.
+ The build directory for the project root will be computed by concatenating
+ the value of the <option>--build-dir</option> option, the project name
+ specified in Jamroot, and the build dir specified in Jamroot
+ (or <literal>bin</literal>, if none is specified).
+ </para>
+
+ <para>The option is primarily useful when building from read-only
+ media, when you can't modify Jamroot.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--abbreviate-paths</option></term>
+ <listitem>
+ <para>Compresses target paths by abbreviating each component.
+ This option is useful to keep paths from becoming longer than
+ the filesystem supports. See also <xref linkend="bbv2.reference.buildprocess.targetpath"/>.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--hash</option></term>
+ <listitem>
+ <para>Compresses target paths using an MD5 hash. This option is
+ useful to keep paths from becoming longer than the filesystem
+ supports. This option produces shorter paths than --abbreviate-paths
+ does, but at the cost of making them less understandable.
+ See also <xref linkend="bbv2.reference.buildprocess.targetpath"/>.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--version</option></term>
+ <listitem>
+ <para>Prints information on the Boost.Build and Boost.Jam
+ versions.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-a</option></term>
+ <listitem>
+ <para>Causes all files to be rebuilt.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-n</option></term>
+ <listitem>
+ <para>Do not execute the commands, only print them.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-q</option></term>
+ <listitem>
+ <para>Stop at the first error, as opposed to continuing to build targets
+ that don't depend on the failed ones.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-j <replaceable>N</replaceable></option></term>
+ <listitem>
+ <para>Run up to <replaceable>N</replaceable> commands in parallel.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--debug-configuration</option></term>
+ <listitem>
+ <para>Produces debug information about the loading of Boost.Build
+ and toolset files.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--debug-building</option></term>
+ <listitem>
+ <para>Prints what targets are being built and with what properties.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>--debug-generators</option></term>
+ <listitem>
+ <para>Produces debug output from the generator search process.
+ Useful for debugging custom generators.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d0</option></term>
+ <listitem>
+ <para>Suppress all informational messages.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d <replaceable>N</replaceable></option></term>
+ <listitem>
+ <para>Enable cumulative debugging levels from 1 to <replaceable>N</replaceable>. Values are:
+ <orderedlist>
+ <listitem>Show the actions taken for building targets, as they are executed (the default).</listitem>
+ <listitem>Show "quiet" actions and display all action text, as they are executed.</listitem>
+ <listitem>Show dependency analysis, and target/source timestamps/paths.</listitem>
+ <listitem>Show arguments and timing of shell invocations.</listitem>
+ <listitem>Show rule invocations and variable expansions.</listitem>
+ <listitem>Show directory/header file/archive scans, and attempts at binding to targets.</listitem>
+ <listitem>Show variable settings.</listitem>
+ <listitem>Show variable fetches, variable expansions, and evaluation of <code>if</code> expressions.</listitem>
+ <listitem>Show variable manipulation, scanner tokens, and memory usage.</listitem>
+ <listitem>Show profile information for rules, both timing and memory.</listitem>
+ <listitem>Show parsing progress of Jamfiles.</listitem>
+ <listitem>Show graph of target dependencies.</listitem>
+ <listitem>Show change target status (fate).</listitem>
+ </orderedlist>
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-d +<replaceable>N</replaceable></option></term>
+ <listitem>
+ <para>Enable debugging level <replaceable>N</replaceable>.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-o <replaceable>file</replaceable></option></term>
+ <listitem>
+ <para>Write the updating actions to the specified file instead of running them.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><option>-s <replaceable>var</replaceable>=<replaceable>value</replaceable></option></term>
+ <listitem>
+ <para>Set the variable <replaceable>var</replaceable> to
+ <replaceable>value</replaceable> in the global scope of the jam
+ language interpreter, overriding variables imported from the
+ environment.
+ </para>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+ </section>
+
+ <section id="bbv2.overview.invocation.properties">
+ <title>Properties</title>
+
+ <para>In the simplest case, the build is performed with a single set of properties
+ that you specify on the command line with elements in the form
+ <command><replaceable>feature</replaceable>=<replaceable>value</replaceable></command>.
+ The complete list of features can be found in <xref linkend="bbv2.overview.builtins.features"/>.
+ The most common features are summarized below.</para>
+
+ <table>
+ <tgroup cols="3">
+ <thead>
+
+ <row>
+ <entry>Feature</entry>
+
+ <entry>Allowed values</entry>
+
+ <entry>Notes</entry>
+ </row>
+
+ </thead>
+ <tbody>
+
+ <row>
+ <entry>variant</entry>
+
+ <entry>debug,release</entry>
+
+ <entry></entry>
+ </row>
+
+ <row>
+ <entry>link</entry>
+
+ <entry>shared,static</entry>
+
+ <entry>Determines if Boost.Build creates shared or static libraries</entry>
+ </row>
+
+ <row>
+ <entry>threading</entry>
+
+ <entry>single,multi</entry>
+
+ <entry>Causes the produced binaries to be thread-safe. This requires proper support in the source code itself.</entry>
+ </row>
+
+ <row>
+ <entry>address-model</entry>
+
+ <entry>32,64</entry>
+
+ <entry>Explicitly request either 32-bit or 64-bit code generation. This typically
+ requires that your compiler is appropriately configured. Please refer to
+ <xref linkend="bbv2.reference.tools.compilers"/> and your compiler documentation
+ in case of problems.</entry>
+ </row>
+
+ <row>
+ <entry>toolset</entry>
+
+ <entry>(Depends on configuration)</entry>
+
+ <entry>The C++ compiler to use. See <xref linkend="bbv2.reference.tools.compilers"/> for a detailed list.</entry>
+ </row>
+
+ <row>
+ <entry>include</entry>
+
+ <entry>(Arbitrary string)</entry>
+
+ <entry>Additional include paths for C and C++ compilers.</entry>
+ </row>
+
+ <row>
+ <entry>define</entry>
+
+ <entry>(Arbitrary string)</entry>
+
+ <entry>Additional macro definitions for C and C++ compilers. The string should be either
+ <code>SYMBOL</code> or <code>SYMBOL=VALUE</code></entry>
+ </row>
+
+ <row>
+ <entry>cxxflags</entry>
+
+ <entry>(Arbitrary string)</entry>
+
+ <entry>Custom options to pass to the C++ compiler.</entry>
+ </row>
+
+ <row>
+ <entry>cflags</entry>
+
+ <entry>(Arbitrary string)</entry>
+
+ <entry>Custom options to pass to the C compiler.</entry>
+ </row>
+
+ <row>
+ <entry>linkflags</entry>
+
+ <entry>(Arbitrary string)</entry>
+
+ <entry>Custom options to pass to the C++ linker.</entry>
+ </row>
+
+ <row>
+ <entry>runtime-link</entry>
+
+ <entry>shared,static</entry>
+
+ <entry>Determines whether the shared or static version of the C and C++ runtimes should be used.</entry>
+ </row>
+
+ </tbody>
+ </tgroup>
+ </table>
+
+ <para>If you have more than one version of a given C++ toolset (e.g. configured in
+ <filename>user-config.jam</filename>, or autodetected, as happens with msvc), you can
+ request the specific version by passing
+ <code><replaceable>toolset</replaceable>-<replaceable>version</replaceable></code> as
+ the value of the <code>toolset</code> feature, for example <code>toolset=msvc-8.0</code>.
+ </para>
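+
+ <para>For example, with a configuration like the one shown earlier, a specific
+ GCC version could be requested with (the version number is purely illustrative):
+<screen>
+b2 toolset=gcc-3.4
+</screen>
+ </para>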
+
+
+ <para>
+ If a feature has a fixed set of values, it can be specified more than
+ once on the command line. <!-- define 'base' and link to it -->
+ In that case, everything will be built several times&#x2014;
+ once for each specified value of the feature. For example, if you use
+ </para>
+<screen>
+b2 link=static link=shared threading=single threading=multi
+</screen>
+ <para>
+ then a total of four builds will be performed. For convenience,
+ instead of specifying all requested values of a feature in separate command line elements,
+ you can separate the values with commas, for example:
+ </para>
+<screen>
+b2 link=static,shared threading=single,multi
+</screen>
+ <para>
+ The comma has this special meaning only if the feature has a fixed set of values, so
+ </para>
+<screen>
+b2 include=static,shared
+</screen>
+ <para>is not treated specially.</para>
+
+ </section>
+
+ <section id="bbv2.overview.invocation.targets">
+ <title>Targets</title>
+
+ <para>All command line elements that are neither options nor properties are the names of the
+ targets to build. See <xref linkend="bbv2.reference.ids"/>. If no target is specified,
+ the project in the current directory is built.</para>
+ </section>
+
+ </section>
+
+ <section id="bbv2.overview.targets">
+ <title>Declaring Targets</title>
+
+ <para id="bbv2.overview.targets.main">
+ A <firstterm>main target</firstterm> is a user-defined named
+ entity that can be built, for example an executable file.
+ Declaring a main target is usually done using one of the main
+ target rules described in <xref linkend=
+ "bbv2.reference.rules"/>. The user can also declare
+ custom main target rules as shown in <xref
+ linkend="bbv2.extending.rules"/>.
+ </para>
+
+ <indexterm><primary>main target</primary><secondary>declaration
+ syntax</secondary></indexterm>
+ <para>Most main target rules in Boost.Build have the same common
+ signature:</para>
+
+ <!-- I think we maybe ought to be talking about a common
+ _signature_ here, having already explained Boost.Jam function
+ signatures at the beginning of this chapter. Then we could show
+ ( main-target-name : sources * : requirements * : default-build * : usage-requirements * )
+ instead. More precise.
+
+ Also, I suggest replacing "default-build" by "default-properties" everywhere.
+ -->
+
+<indexterm><primary>common signature</primary></indexterm>
+<anchor id="bbv2.main-target-rule-syntax"/>
+<programlisting>
+rule <replaceable>rule-name</replaceable> (
+ main-target-name :
+ sources + :
+ requirements * :
+ default-build * :
+ usage-requirements * )
+</programlisting>
+
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <parameter>main-target-name</parameter> is the name used
+ to request the target on command line and to use it from
+ other main targets. A main target name may contain
+ alphanumeric characters, dashes
+ (&#x2018;<code>-</code>&#x2019;), and underscores
+ (&#x2018;<code>_</code>&#x2019;).
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ <parameter>sources</parameter> is the list of source files and other main
+ targets that must be combined.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ <parameter>requirements</parameter> is the list of properties that must always
+ be present when this main target is built.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ <parameter>default-build</parameter> is the list of properties that will be used
+ unless some other value of the same feature is already
+ specified, e.g. on the command line or by propagation from a dependent target.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ <parameter>usage-requirements</parameter> is the list of properties that will be
+ propagated to all main targets that use this one, i.e. to all its
+ dependents.
+ </simpara>
+ </listitem>
+ </itemizedlist>
+
+ <para>
+ Some main target rules have a different list of parameters as explicitly
+ stated in their documentation.
+ </para>
+
+ <para>The actual requirements for a target are obtained by refining
+ the requirements of the project where the target is declared with the
+ explicitly specified requirements. The same is true for
+ usage-requirements. More details can be found in
+ <xref linkend="bbv2.reference.variants.proprefine"/>
+ </para>
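+
+ <para>
+ As a brief sketch (the names are illustrative), if a project declares a
+ requirement and a target adds its own, the target is built with both:
+<programlisting>
+project : requirements &lt;define&gt;COMMON ;
+exe app : app.cpp : &lt;define&gt;APP_ONLY ;
+</programlisting>
+ Here <code>app</code> is compiled with both <code>COMMON</code> and
+ <code>APP_ONLY</code> defined, because free features such as
+ <code>&lt;define&gt;</code> are accumulated rather than overridden during
+ refinement.
+ </para>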
+
+ <section>
+ <title>Name</title>
+
+ <!-- perphaps we should use 'name-target-name' to closer
+ bind this description to the rule's signature. Here, and for
+ other parameters. -->
+ <para>The name of a main target has two purposes. First, it's used to refer to this target from
+ other targets and from the command line. Second, it's used to compute the names of the generated files.
+ Typically, filenames are obtained from the main target name by appending system-dependent suffixes and
+ prefixes.
+ </para>
+
+ <para>The name of a main target can contain alphanumeric characters,
+ dashes, underscores and dots. The entire
+ name is significant when resolving references from other targets. For determining filenames, only the
+ part before the first dot is taken. For example:</para>
+<programlisting>
+obj test.release : test.cpp : &lt;variant&gt;release ;
+obj test.debug : test.cpp : &lt;variant&gt;debug ;
+</programlisting>
+ <para>will generate two files named <filename>test.obj</filename> (in two different directories), not
+ two files named <filename>test.release.obj</filename> and <filename>test.debug.obj</filename>.
+ </para>
+
+ </section>
+
+ <section>
+ <title>Sources</title>
+
+ <para>The list of sources specifies what should be processed to
+ get the resulting targets. Most of the time, it's just a list of
+ files. Sometimes, you'll want to automatically construct the
+ list of source files rather than having to spell it out
+ manually, in which case you can use the
+ <link linkend="bbv2.reference.rules.glob">glob</link> rule.
+ Here are two examples:</para>
+<programlisting>
+exe a : a.cpp ; # a.cpp is the only source file
+exe b : [ glob *.cpp ] ; # all .cpp files in this directory are sources
+</programlisting>
+ <para>
+ Unless you specify a file with an absolute path, the name is
+ considered relative to the source directory&#x200A;&#x2014;&#x200A;which is typically
+ the directory where the Jamfile is located, but can be changed as
+ described in <xref linkend=
+ "bbv2.overview.projects.attributes.projectrule"/>.
+ </para>
+
+ <para>
+ <!-- use "project-id" here? -->
+ The list of sources can also refer to other main targets. Targets in
+ the same project can be referred to by name, while targets in other
+ projects must be qualified with a directory or a symbolic project
+ name. The directory/project name is separated from the target name by
+ a double forward slash. There is no special syntax to distinguish the
+ directory name from the project name&#x2014;the part before the double
+ slash is first looked up as a project name, and then as a directory name.
+ For example:
+ </para>
+<programlisting>
+lib helper : helper.cpp ;
+exe a : a.cpp helper ;
+# Since all project ids start with a slash, ".." is a directory name.
+exe b : b.cpp ..//utils ;
+exe c : c.cpp /boost/program_options//program_options ;
+</programlisting>
+ <para>
+ The first exe uses the library defined in the same project. The second
+ one uses some target (most likely a library) defined by a Jamfile one
+ level higher. Finally, the third target uses a <ulink url=
+ "http://boost.org">C++ Boost</ulink> library, referring to it using
+ its absolute symbolic name. More information about target references
+ can be found in <xref linkend="bbv2.tutorial.libs"/> and <xref
+ linkend="bbv2.reference.ids"/>.
+ </para>
+ </section>
+
+ <section id="bbv2.overview.targets.requirements">
+ <title>Requirements</title>
+ <indexterm><primary>requirements</primary></indexterm>
+ <para>Requirements are the properties that should always be present when
+ building a target. Typically, they are includes and defines:
+<programlisting>
+exe hello : hello.cpp : &lt;include&gt;/opt/boost &lt;define&gt;MY_DEBUG ;
+</programlisting>
+ There are a number of other features, listed in
+ <xref linkend="bbv2.overview.builtins.features"/>. For example if
+ a library can only be built statically, or a file can't be compiled
+ with optimization due to a compiler bug, one can use
+<programlisting>
+lib util : util.cpp : &lt;link&gt;static ;
+obj main : main.cpp : &lt;optimization&gt;off ;
+</programlisting>
+ </para>
+
+ <para id="bbv2.overview.targets.requirements.conditional">
+ <indexterm><primary>requirements</primary><secondary>conditional</secondary></indexterm>
+ Sometimes, particular relationships need to be maintained
+ among a target's build properties. This can be achieved with
+ <firstterm>conditional
+ requirements</firstterm>. For example, you might want to set
+ specific <code>#defines</code> when a library is built as shared,
+ or when a target's <code>release</code> variant is built.
+<programlisting>
+lib network : network.cpp
+ : <emphasis role="bold">&lt;link&gt;shared:&lt;define&gt;NETWORK_LIB_SHARED</emphasis>
+ &lt;variant&gt;release:&lt;define&gt;EXTRA_FAST
+ ;
+</programlisting>
+
+ In the example above, whenever <filename>network</filename> is
+ built with <code>&lt;link&gt;shared</code>,
+ <code>&lt;define&gt;NETWORK_LIB_SHARED</code> will be in its
+ properties, too.
+ </para>
+
+ <para>You can use several properties in the condition, for example:
+<programlisting>
+lib network : network.cpp
+ : &lt;toolset&gt;gcc,&lt;optimization&gt;speed:&lt;define&gt;USE_INLINE_ASSEMBLER
+ ;
+</programlisting>
+ </para>
+
+ <para id="bbv2.overview.targets.requirements.indirect">
+ <indexterm><primary>requirements</primary><secondary>indirect</secondary></indexterm>
+ A more powerful variant of conditional requirements
+ is <firstterm>indirect conditional requirements</firstterm>.
+ You can provide a rule that will be called with the current build properties and can compute additional properties
+ to be added. For example:
+<programlisting>
+lib network : network.cpp
+ : &lt;conditional&gt;@my-rule
+ ;
+rule my-rule ( properties * )
+{
+ local result ;
+ if &lt;toolset&gt;gcc &lt;optimization&gt;speed in $(properties)
+ {
+ result += &lt;define&gt;USE_INLINE_ASSEMBLER ;
+ }
+ return $(result) ;
+}
+</programlisting>
+ This example is equivalent to the previous one, but for complex cases, indirect conditional
+ requirements can be easier to write and understand.
+ </para>
+
+ <para>Requirements explicitly specified for a target are usually
+ combined with the requirements specified for the containing project. You
+ can cause a target to completely ignore a specific project requirement
+ by adding a minus sign before the property, for example:
+<programlisting>
+exe main : main.cpp : <emphasis role="bold">-&lt;define&gt;UNNECESSARY_DEFINE</emphasis> ;
+</programlisting>
+ This syntax is the only way to ignore free properties, such as defines,
+ from a parent. It can also be useful for ordinary properties. Consider
+ this example:
+<programlisting>
+project test : requirements &lt;threading&gt;multi ;
+exe test1 : test1.cpp ;
+exe test2 : test2.cpp : &lt;threading&gt;single ;
+exe test3 : test3.cpp : -&lt;threading&gt;multi ;
+</programlisting>
+ Here, <code>test1</code> inherits the project requirements and will always
+ be built in multi-threaded mode. The <code>test2</code> target
+ <emphasis>overrides</emphasis> the project's requirements and will
+ always be built in single-threaded mode. In contrast, the
+ <code>test3</code> target <emphasis>removes</emphasis> a property
+ from the project requirements and will be built either in single-threaded or
+ multi-threaded mode depending on which variant is requested by the
+ user.</para>
+
+ <para>Note that the removal of requirements is completely textual:
+ you need to specify exactly the same property to remove it.</para>
+
+ </section>
+
+ <section>
+ <title>Default Build</title>
+
+ <para>The <varname>default-build</varname> parameter
+ is a set of properties to be used if the build request does
+ not otherwise specify a value for features in the set. For example:
+<programlisting>
+exe hello : hello.cpp : : &lt;threading&gt;multi ;
+</programlisting>
+ would build a multi-threaded target unless the user
+ explicitly requests a single-threaded version. The difference between
+ the requirements and the default-build is that the requirements cannot be
+ overridden in any way.
+ </para>
+ </section>
+
+ <section>
+ <title>Additional Information</title>
+
+ <para>
+ The ways a target is built can be so different that
+ describing them using conditional requirements would be
+ hard. For example, imagine that a library actually uses
+ different source files depending on the toolset used to build
+ it. We can express this situation using <firstterm>target
+ alternatives</firstterm>:
+<programlisting>
+lib demangler : dummy_demangler.cpp ; # alternative 1
+lib demangler : demangler_gcc.cpp : &lt;toolset&gt;gcc ; # alternative 2
+lib demangler : demangler_msvc.cpp : &lt;toolset&gt;msvc ; # alternative 3
+</programlisting>
+ In the example above, when built with <literal>gcc</literal>
+ or <literal>msvc</literal>, <filename>demangler</filename>
+ will use a source file specific to the toolset. Otherwise, it
+ will use a generic source file,
+ <filename>dummy_demangler.cpp</filename>.
+ </para>
+
+ <para>It is possible to declare a target inline, i.e. the "sources"
+ parameter may include calls to other main rules. For example:</para>
+
+<programlisting>
+exe hello : hello.cpp
+ [ obj helpers : helpers.cpp : &lt;optimization&gt;off ] ;</programlisting>
+
+ <para>
+ Will cause "helpers.cpp" to be always compiled without
+ optimization. When referring to an inline main target, its declared
+ name must be prefixed by its parent target's name and two dots. In
+ the example above, to build only helpers, one should run
+ <code>b2 hello..helpers</code>.
+ </para>
+
+ <para>When no target is requested on the command line, all targets in the
+ current project will be built. If a target should be built only by
+ explicit request, this can be expressed by the
+ <link linkend="bbv2.reference.rules.explicit">explicit</link> rule:
+ <programlisting>
+explicit install_programs ;</programlisting>
+ </para>
+
+ </section>
+ </section>
+
+ <section id="bbv2.overview.projects">
+ <title>Projects</title>
+
+ <para>As mentioned before, targets are grouped into projects,
+ and each Jamfile is a separate project. Projects are useful
+ because they allow us to group related targets together, define
+ properties common to all those targets, and assign a symbolic
+ name to the project that can be used in referring to its
+ targets.
+ </para>
+
+ <para>Projects are named using the
+ <code language="jam">project</code> rule, which has the
+ following syntax:
+<programlisting>
+project <replaceable>id</replaceable> : <replaceable>attributes</replaceable> ;
+</programlisting>
+ Here, <replaceable>attributes</replaceable> is a sequence of
+ rule arguments, each of which begins with an attribute-name
+ and is followed by any number of build properties. The list
+ of attribute names along with its handling is also shown in
+ the table below. For example, it is possible to write:
+<programlisting>
+project tennis
+ : requirements &lt;threading&gt;multi
+ : default-build release
+ ;
+</programlisting>
+ </para>
+
+ <para>The possible attributes are listed below.</para>
+
+ <para><emphasis>Project id</emphasis> is a short way to denote a project, as
+ opposed to the Jamfile's pathname. It is a hierarchical path,
+ unrelated to the filesystem, such as "boost/thread". <link linkend=
+ "bbv2.reference.ids">Target references</link> make use of project ids to
+ specify a target.</para>
+ <!--
+ This is actually spelled "project-id," isn't it? You
+ have to fix all of these and use a code font. Also below
+ in the table.
+ -->
+
+ <para><emphasis>Source location</emphasis> specifies the directory where sources
+ for the project are located.</para>
+
+ <para><emphasis>Project requirements</emphasis> are requirements that apply to
+ all the targets in the project as well as all subprojects.</para>
+
+ <para><emphasis>Default build</emphasis> is the build request that should be
+ used when no build request is specified explicitly.</para>
+ <!--
+ This contradicts your earlier description of default
+ build and I believe it is incorrect. Specifying a build
+ request does not neccessarily render default build
+ ineffective, because it may cover different features.
+ This description is repeated too many times in the
+ documentation; you almost *had* to get it wrong once.
+ -->
+
+ <para id="bbv2.overview.projects.attributes.projectrule">
+ The default values for those attributes are
+ given in the table below.
+
+ <table>
+ <title/>
+ <tgroup cols="4">
+ <thead>
+ <row>
+ <entry>Attribute</entry>
+
+ <entry>Name</entry>
+
+ <entry>Default value</entry>
+
+ <entry>Handling by the <code language="jam">project</code>
+ rule</entry>
+
+ </row>
+ </thead>
+
+ <tbody>
+
+ <row>
+ <entry>Project id</entry>
+
+ <entry>none</entry>
+
+ <entry>none</entry>
+
+ <entry>Assigned from the first parameter of the 'project' rule.
+              It is assumed to denote an absolute project id.</entry>
+ </row>
+
+ <row>
+ <entry>Source location</entry>
+
+ <entry><literal>source-location</literal></entry>
+
+              <entry>The location of the Jamfile for the project</entry>
+
+ <entry>Sets to the passed value</entry>
+ </row>
+
+ <row>
+ <entry>Requirements</entry>
+
+ <entry><literal>requirements</literal></entry>
+
+ <entry>The parent's requirements</entry>
+
+ <entry>The parent's requirements are refined with the passed
+              requirements and the result is used as the project
+ requirements.</entry>
+ </row>
+
+ <row>
+ <entry>Default build</entry>
+
+ <entry><literal>default-build</literal></entry>
+
+ <entry>none</entry>
+
+ <entry>Sets to the passed value</entry>
+ </row>
+
+ <row>
+ <entry>Build directory</entry>
+
+ <entry><literal>build-dir</literal></entry>
+
+ <entry>Empty if the parent has no build directory set.
+ Otherwise, the parent's build directory with the
+ relative path from parent to the current project
+ appended to it.
+ </entry>
+
+ <entry>Sets to the passed value, interpreted as relative to the
+ project's location.</entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ </para>
+
+ <para>Besides defining projects and main targets, Jamfiles
+ often invoke various utility rules. For the full list of rules
+ that can be directly used in Jamfile see
+ <xref linkend="bbv2.reference.rules"/>.
+ </para>
+
+ <para>Each subproject inherits attributes, constants and rules
+ from its parent project, which is defined by the nearest
+ Jamfile in an ancestor directory above
+ the subproject. The top-level project is declared in a file
+ called <filename>Jamroot</filename> rather than
+ <filename>Jamfile</filename>. When loading a project,
+ Boost.Build looks for either <filename>Jamroot</filename> or
+       <filename>Jamfile</filename>. They are handled identically, except
+ that if the file is called <filename>Jamroot</filename>, the
+ search for a parent project is not performed.
+ </para>
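+
+      <para>For illustration only, a minimal two-level layout (with
+      hypothetical file and directory names) might look like this:</para>
+
+<programlisting>
+# Jamroot, at the root of the project tree
+project example : requirements &lt;warnings&gt;all ;
+build-project src ;
+
+# src/Jamfile, a subproject; it inherits the requirements declared above
+exe app : app.cpp ;
+</programlisting>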
+
+ <para>Even when building in a subproject directory, parent
+ project files are always loaded before those of their
+ subprojects, so that every definition made in a parent project
+ is always available to its children. The loading order of any
+ other projects is unspecified. Even if one project refers to
+      other via the <code>use-project</code> rule or a target reference,
+ no specific order should be assumed.
+ </para>
+
+ <note>
+ <para>Giving the root project the special name
+ &#x201C;<filename>Jamroot</filename>&#x201D; ensures that
+ Boost.Build won't misinterpret a directory above it as the
+ project root just because the directory contains a Jamfile.
+ <!-- The logic of the previous reasoning didn't hang together -->
+ </para>
+ </note>
+
+ <!-- All this redundancy with the tutorial is bad. The tutorial
+ should just be made into the introductory sections of this
+ document, which should be called the "User Guide." It's
+ perfectly appropriate to start a user guide with that kind
+ of material. -->
+ </section>
+
+ <section id="bbv2.overview.build_process">
+ <title>The Build Process</title>
+
+ <para>When you've described your targets, you want Boost.Build to run the
+ right tools and create the needed targets.
+ <!-- That sentence is awkward and doesn't add much. -->
+ This section will describe
+ two things: how you specify what to build, and how the main targets are
+ actually constructed.
+ </para>
+
+ <para>The most important thing to note is that in Boost.Build, unlike
+ other build tools, the targets you declare do not correspond to specific
+      files. What you declare in a Jamfile is more like a
+      &#x201C;metatarget.&#x201D;
+      <!-- Do we need a new word? We already have "main target". If
+           you're going to introduce "metatarget" you should at least
+           tie it together with the main target concept. It's too
+           strange to have been saying "main target" all along and now
+           suddenly start saying "what you declare in a jamfile" -->
+ Depending on the properties you specify on the command line,
+ each metatarget will produce a set of real targets corresponding
+ to the requested properties. It is quite possible that the same
+ metatarget is built several times with different properties,
+ producing different files.
+ </para>
+ <tip>
+ <para>
+ This means that for Boost.Build, you cannot directly obtain a build
+ variant from a Jamfile. There could be several variants requested by the
+ user, and each target can be built with different properties.
+ </para>
+ </tip>
+
+ <section id="bbv2.overview.build_request">
+ <title>Build Request</title>
+
+ <para>
+ The command line specifies which targets to build and with which
+ properties. For example:
+<programlisting>
+b2 app1 lib1//lib1 toolset=gcc variant=debug optimization=full
+</programlisting>
+ would build two targets, "app1" and "lib1//lib1" with the specified
+      properties. You can refer to any targets using a
+      <link linkend="bbv2.reference.ids">target id</link> and specify arbitrary
+ properties. Some of the properties are very common, and for them the name
+ of the property can be omitted. For example, the above can be written as:
+<programlisting>
+b2 app1 lib1//lib1 gcc debug optimization=full
+</programlisting>
+ The complete syntax, which has some additional shortcuts, is
+ described in <xref linkend="bbv2.overview.invocation"/>.
+ </para>
+ </section>
+
+ <section><title>Building a main target</title>
+
+ <para>When you request, directly or indirectly, a build of a main target
+    with specific requirements, the following steps are performed. Some brief
+ explanation is provided, and more details are given in <xref
+ linkend="bbv2.reference.buildprocess"/>.
+ <orderedlist>
+
+ <listitem><para>Applying default build. If the default-build
+ property of a target specifies a value of a feature that is not
+ present in the build request, that value is added.</para>
+ <!--
+ Added to what? Don't say “the build request!†The
+ request is what was requested; if its meaning changes
+ the reader will be confused.
+ -->
+ </listitem>
+
+ <listitem><para>Selecting the main target alternative to use. For
+            each alternative, we look at how many properties are present both
+            in the alternative's requirements and in the build request. The
+            alternative with the largest number of matching properties is
+            selected.
+ </para></listitem>
+
+ <listitem><para>Determining "common" properties.
+ <!-- It would be nice to have a better name for this. But
+ even more importantly, unless you say something about
+ the reason for choosing whatever term you use, the
+ reader is going to wonder what it means. -->
+ The build request
+ is <link linkend="bbv2.reference.variants.proprefine">refined</link>
+ with target's requirements.
+ <!-- It's good that you have the links here and below,
+ but I'm concerned that it doesn't communicate well
+ in print and there's not enough information for the
+ print reader. Maybe we need separate XSL for PDF
+ printing that generates a readable footnote. -->
+ The conditional properties in
+ requirements are handled as well. Finally, default values of
+ features are added.
+ </para></listitem>
+
+ <listitem><para>Building targets referred by the sources list and
+ dependency properties. The list of sources and the properties
+            can refer to other targets using <link
+ linkend="bbv2.reference.ids">target references</link>. For each
+ reference, we take all <link
+ linkend="bbv2.reference.features.attributes.propagated">propagated</link>
+ properties, refine them by explicit properties specified in the
+ target reference, and pass the resulting properties as build
+ request to the other target.
+ </para></listitem>
+
+ <listitem><para>Adding the usage requirements produced when building
+ dependencies to the "common" properties. When dependencies are
+ built in the previous step, they return
+ <!-- don't assume reader has a mental model for BB internals! -->
+ both the set of created
+ "real" targets, and usage requirements. The usage requirements
+ are added to the common properties and the resulting property
+ set will be used for building the current target.
+ </para></listitem>
+
+ <listitem><para>Building the target using generators. To convert the
+ sources to the desired type, Boost.Build uses "generators" ---
+ objects that correspond to tools like compilers and linkers. Each
+ generator declares what type of targets it can produce and what
+ type of sources it requires. Using this information, Boost.Build
+ determines which generators must be run to produce a specific
+ target from specific sources. When generators are run, they return
+ the "real" targets.
+ </para></listitem>
+
+ <listitem><para>Computing the usage requirements to be returned. The
+ conditional properties in usage requirements are expanded
+ <!-- what does "expanded" mean? -->
+ and the result is returned.</para></listitem>
+ </orderedlist>
+ </para>
+ </section>
+
+ <section><title>Building a Project</title>
+
+ <para>Often, a user builds a complete project, not just one main
+ target. In fact, invoking <command>b2</command> without
+ arguments
+ <!-- do you know the difference between parameters and
+ arguments? I only learned this year -->
+ builds the project defined in the current
+ directory.</para>
+
+ <para>When a project is built, the build request is passed without
+ modification to all main targets in that project.
+ <!-- What does it mean to pass a build request to a target?
+ -->
+        It is possible to
+ prevent implicit building of a target in a project with the
+ <code>explicit</code> rule:
+<programlisting>
+explicit hello_test ;
+</programlisting>
+ would cause the <code>hello_test</code> target to be built only if
+ explicitly requested by the user or by some other target.
+ </para>
+
+ <para>The Jamfile for a project can include a number of
+ <code>build-project</code> rule calls that specify additional projects to
+ be built.
+ </para>
+
+ </section>
+
+ </section>
+
+ </chapter>
+
+<!--
+ Local Variables:
+ mode: nxml
+ sgml-indent-data: t
+ sgml-parent-document: ("userman.xml" "chapter")
+ sgml-set-face: t
+ End:
+-->
diff --git a/tools/build/doc/src/path.xml b/tools/build/doc/src/path.xml
new file mode 100644
index 0000000000..f11906db51
--- /dev/null
+++ b/tools/build/doc/src/path.xml
@@ -0,0 +1,249 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE section PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<section id="bbv2.reference.modules.path">
+
+ <title>path</title>
+ <indexterm>
+ <primary>path</primary>
+ <secondary>module</secondary>
+ </indexterm>
+
+ <para>
+ Performs various path manipulations. Paths are always in a 'normalized'
+ representation. In it, a path may be either:
+
+ <itemizedlist>
+ <listitem><para><code>'.'</code>, or</para></listitem>
+ <listitem>
+ <para>
+ <code>['/'] [ ( '..' '/' )* (token '/')* token ]</code>
+ </para>
+ </listitem>
+ </itemizedlist>
+
+    In plain English, a path can be rooted, <code>'..'</code>
+    elements are allowed only at the beginning, and it never
+    ends in a slash, except for the path consisting of a slash only.
+ </para>
+
+ <orderedlist>
+
+ <listitem id="bbv2.reference.modules.path.make">
+ <indexterm zone="bbv2.reference.modules.path.make">
+ <primary>make</primary>
+ <secondary>path</secondary>
+ </indexterm>
+ <code language="jam">rule make ( native )</code>
+ <para>Converts the native path into normalized form.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.native">
+ <indexterm zone="bbv2.reference.modules.path.native">
+ <primary>native</primary>
+ </indexterm>
+ <code language="jam">rule native ( path )</code>
+ <para>Builds the native representation of the path.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.is-rooted">
+ <indexterm zone="bbv2.reference.modules.path.is-rooted">
+ <primary>is-rooted</primary>
+ </indexterm>
+ <code language="jam">rule is-rooted ( path )</code>
+ <para>Tests if a path is rooted.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.has-parent">
+ <indexterm zone="bbv2.reference.modules.path.has-parent">
+ <primary>has-parent</primary>
+ </indexterm>
+ <code language="jam">rule has-parent ( path )</code>
+ <para>Tests if a path has a parent.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.basename">
+ <indexterm zone="bbv2.reference.modules.path.basename">
+ <primary>basename</primary>
+ </indexterm>
+ <code language="jam">rule basename ( path )</code>
+ <para>Returns the path without any directory components.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.parent">
+ <indexterm zone="bbv2.reference.modules.path.parent">
+ <primary>parent</primary>
+ </indexterm>
+ <code language="jam">rule parent ( path )</code>
+ <para>Returns the parent directory of the path. If no parent exists, an error is issued.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.reverse">
+ <indexterm zone="bbv2.reference.modules.path.reverse">
+ <primary>reverse</primary>
+ </indexterm>
+ <code language="jam">rule reverse ( path )</code>
+ <para>
+ Returns <code language="jam">path2</code> such that
+ <code language="jam">[ join path path2 ] = "."</code>.
+      The path may not contain a <code language="jam">".."</code>
+ element or be rooted.
+ </para>
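+    <para>For example (an illustrative sketch, following the definition
+      above):</para>
+<programlisting language="jam">
+import path ;
+ECHO [ path.reverse a/b/c ] ; # prints ../../.., since a/b/c/../../.. is "."
+</programlisting>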
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.join">
+ <indexterm zone="bbv2.reference.modules.path.join">
+ <primary>join</primary>
+ </indexterm>
+ <code language="jam">rule join ( elements + )</code>
+ <para>
+ Concatenates the passed path elements. Generates an error if any
+ element other than the first one is rooted. Skips any empty or
+ undefined path elements.
+ </para>
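+    <para>For example (an illustrative sketch with hypothetical path
+      elements):</para>
+<programlisting language="jam">
+import path ;
+ECHO [ path.join src util io.cpp ] ; # prints src/util/io.cpp
+</programlisting>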
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.root">
+ <indexterm zone="bbv2.reference.modules.path.root">
+ <primary>root</primary>
+ </indexterm>
+ <code language="jam">rule root ( path root )</code>
+ <para>
+ If <code language="jam">path</code> is relative, it is rooted at
+ <code language="jam">root</code>. Otherwise, it is unchanged.
+ </para>
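+    <para>For example (an illustrative sketch with hypothetical paths):</para>
+<programlisting language="jam">
+import path ;
+ECHO [ path.root lib/util /opt/project ] ;    # prints /opt/project/lib/util
+ECHO [ path.root /usr/include /opt/project ] ; # already rooted: /usr/include
+</programlisting>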
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.pwd">
+ <indexterm zone="bbv2.reference.modules.path.pwd">
+ <primary>pwd</primary>
+ </indexterm>
+ <code language="jam">rule pwd ( )</code>
+ <para>Returns the current working directory.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.glob">
+ <indexterm zone="bbv2.reference.modules.path.glob">
+ <primary>glob</primary>
+ </indexterm>
+ <code language="jam">rule glob ( dirs * : patterns + : exclude-patterns * )</code>
+ <para>
+ Returns the list of files matching the given pattern in the specified
+ directory. Both directories and patterns are supplied as portable paths. Each
+ pattern should be a non-absolute path, and can't contain "." or ".." elements.
+ Each slash separated element of a pattern can contain the following special
+ characters:
+ <itemizedlist>
+ <listitem>
+ <para>'?' matches any character</para>
+ </listitem>
+ <listitem>
+ <para>'*' matches an arbitrary number of characters</para>
+ </listitem>
+ </itemizedlist>
+ A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches the pattern p1/p2/p3 if and
+ only if e1 matches p1, e2 matches p2 and so on.
+
+ For example:
+<programlisting language="jam">
+[ glob . : *.cpp ]
+[ glob . : */build/Jamfile ]
+</programlisting>
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.glob-tree">
+ <indexterm zone="bbv2.reference.modules.path.glob-tree">
+ <primary>glob-tree</primary>
+ </indexterm>
+ <code language="jam">rule glob-tree ( roots * : patterns + : exclude-patterns * )</code>
+ <para>
+ Recursive version of <link linkend="bbv2.reference.modules.path.glob">glob</link>.
+ Builds the glob of files while also searching in
+ the subdirectories of the given roots. An optional set of exclusion patterns
+ will filter out the matching entries from the result. The exclusions also
+ apply to the subdirectory scanning, such that directories that match the
+ exclusion patterns will not be searched.
+ </para>
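+    <para>For example, the following sketch (with an assumed
+      <filename>src</filename> directory) collects all C++ sources under
+      <filename>src</filename>, skipping any <filename>.svn</filename>
+      directories:</para>
+<programlisting language="jam">
+import path ;
+local sources = [ path.glob-tree src : *.cpp : .svn ] ;
+</programlisting>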
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.exists">
+ <indexterm zone="bbv2.reference.modules.path.exists">
+ <primary>exists</primary>
+ </indexterm>
+ <code language="jam">rule exists ( file )</code>
+ <para>Returns true if the specified file exists.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.all-parents">
+ <indexterm zone="bbv2.reference.modules.path.all-parents">
+ <primary>all-parents</primary>
+ </indexterm>
+ <code language="jam">rule all-parents ( path : upper_limit ? : cwd ? )</code>
+ <para>
+      Finds the absolute name of the path and returns the list of all its parents,
+ starting with the immediate one. Parents are returned as relative names. If
+ <code language="jam">upper_limit</code> is specified, directories above it
+ will be pruned.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.glob-in-parents">
+ <indexterm zone="bbv2.reference.modules.path.glob-in-parents">
+ <primary>glob-in-parents</primary>
+ </indexterm>
+ <code language="jam">rule glob-in-parents ( dir : patterns + : upper-limit ? )</code>
+ <para>
+      Searches for <code language="jam">patterns</code> in the parent directories
+      of <code language="jam">dir</code>, up to and including
+      <code language="jam">upper-limit</code>, if it is specified, or
+      up to the filesystem root otherwise.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.relative">
+ <indexterm zone="bbv2.reference.modules.path.relative">
+ <primary>relative</primary>
+ </indexterm>
+ <code language="jam">rule relative ( child parent : no-error ? )</code>
+ <para>
+ Assuming <code language="jam">child</code> is a subdirectory of
+ <code language="jam">parent</code>, return the relative path from
+ <code language="jam">parent</code> to <code language="jam">child</code>.
+ </para>
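+    <para>For example (an illustrative sketch with hypothetical paths):</para>
+<programlisting language="jam">
+import path ;
+ECHO [ path.relative src/lib/util src ] ; # prints lib/util
+</programlisting>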
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.relative-to">
+ <indexterm zone="bbv2.reference.modules.path.relative-to">
+ <primary>relative-to</primary>
+ </indexterm>
+ <code language="jam">rule relative-to ( path1 path2 )</code>
+    <para>Returns the minimal path to path2 that is relative to path1.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.programs-path">
+ <indexterm zone="bbv2.reference.modules.path.programs-path">
+ <primary>programs-path</primary>
+ </indexterm>
+ <code language="jam">rule programs-path ( )</code>
+ <para>
+ Returns the list of paths which are used by the operating system for
+ looking up programs.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.path.mkdirs">
+ <indexterm zone="bbv2.reference.modules.path.mkdirs">
+ <primary>mkdirs</primary>
+ </indexterm>
+ <code language="jam">rule makedirs ( path )</code>
+ <para>
+ Creates a directory and all parent directories that do not
+ already exist.
+ </para>
+ </listitem>
+
+ </orderedlist>
+
+</section>
diff --git a/tools/build/v2/doc/src/project-target.xml b/tools/build/doc/src/project-target.xml
index 1e4e39a520..1e4e39a520 100644
--- a/tools/build/v2/doc/src/project-target.xml
+++ b/tools/build/doc/src/project-target.xml
diff --git a/tools/build/v2/doc/src/property-set.xml b/tools/build/doc/src/property-set.xml
index a5b5b8c7f6..a5b5b8c7f6 100644
--- a/tools/build/v2/doc/src/property-set.xml
+++ b/tools/build/doc/src/property-set.xml
diff --git a/tools/build/doc/src/recipes.xml b/tools/build/doc/src/recipes.xml
new file mode 100644
index 0000000000..ec65f28e58
--- /dev/null
+++ b/tools/build/doc/src/recipes.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+  <!-- The file is empty. It's not clear if it will be needed in
+       the future or whether the FAQ completely supersedes it. -->
+
+ <appendix id="bbv2.recipies">
+ <title>Boost.Build System V2 recipes</title>
+
+ </appendix>
diff --git a/tools/build/doc/src/reference.xml b/tools/build/doc/src/reference.xml
new file mode 100644
index 0000000000..b343912687
--- /dev/null
+++ b/tools/build/doc/src/reference.xml
@@ -0,0 +1,2737 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd"
+[
+<!ENTITY toolset_ops "<optional><replaceable>version</replaceable></optional> : <optional><replaceable>c++-compile-command</replaceable></optional> : <optional><replaceable>compiler options</replaceable></optional>">
+<!ENTITY option_list_intro "<para>The following options can be provided, using <literal>&lt;<replaceable>option-name</replaceable>&gt;<replaceable>option-value</replaceable></literal> syntax:</para>">
+<!ENTITY using_repeation "<para>This statement may be repeated several times, if you want to configure several versions of the compiler.</para>">
+]>
+
+<chapter id="bbv2.reference"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Reference</title>
+
+ <section id="bbv2.reference.general">
+ <title>General information</title>
+
+ <section id="bbv2.reference.init">
+ <title>Initialization</title>
+
+ <para>
+ Immediately upon starting, the Boost.Build engine (<command>b2</command>)
+ loads the Jam code that implements the build system. To do this, it searches for a file
+ called <filename>boost-build.jam</filename>, first in the invocation directory, then
+ in its parent and so forth up to the filesystem root, and finally
+ in the directories specified by the environment variable
+    <envar>BOOST_BUILD_PATH</envar>. When found, the file is interpreted, and should
+ specify the build system location by calling the boost-build
+ rule:</para>
+
+<programlisting>
+rule boost-build ( location ? )
+</programlisting>
+
+ <para>
+ If location is a relative path, it is treated as relative to
+ the directory of <filename>boost-build.jam</filename>. The directory specified by
+    that location and the directories in <envar>BOOST_BUILD_PATH</envar> are then searched for
+ a file called <filename>bootstrap.jam</filename>, which is expected to
+ bootstrap the build system. This arrangement allows the build
+ system to work without any command-line or environment variable
+ settings. For example, if the build system files were located in a
+ directory "build-system/" at your project root, you might place a
+ <filename>boost-build.jam</filename> at the project root containing:
+
+<programlisting>
+boost-build build-system ;
+</programlisting>
+
+ In this case, running <command>b2</command> anywhere in the project tree will
+ automatically find the build system.</para>
+
+ <para>The default <filename>bootstrap.jam</filename>, after loading some standard
+    definitions, loads two configuration files, <filename>site-config.jam</filename> and <filename>user-config.jam</filename>.</para>
+
+ </section>
+
+ </section>
+
+ <section id="bbv2.reference.rules">
+ <title>Builtin rules</title>
+
+ <para>This section contains the list of all rules that
+    can be used in a Jamfile&#x2014;both rules that define new
+ targets and auxiliary rules.</para>
+
+ <variablelist>
+ <varlistentry>
+ <term><literal>exe</literal></term>
+
+ <listitem><para>Creates an executable file. See
+ <xref linkend="bbv2.tasks.programs"/>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>lib</literal></term>
+
+      <listitem><para>Creates a library file. See
+ <xref linkend="bbv2.tasks.libraries"/>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>install</literal></term>
+
+ <listitem><para>Installs built targets and other files. See
+ <xref linkend="bbv2.tasks.installing"/>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>alias</literal></term>
+
+ <listitem><para>Creates an alias for other targets. See
+ <xref linkend="bbv2.tasks.alias"/>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>unit-test</literal></term>
+
+ <listitem><para>Creates an executable that will be automatically run. See
+ <xref linkend="bbv2.builtins.testing"/>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>compile</literal></term>
+ <term><literal>compile-fail</literal></term>
+ <term><literal>link</literal></term>
+ <term><literal>link-fail</literal></term>
+ <term><literal>run</literal></term>
+ <term><literal>run-fail</literal></term>
+
+ <listitem><para>Specialized rules for testing. See
+ <xref linkend="bbv2.builtins.testing"/>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry id="bbv2.reference.check-target-builds">
+ <indexterm><primary>check-target-builds</primary></indexterm>
+ <term><literal>check-target-builds</literal></term>
+
+      <listitem><para>The <literal>check-target-builds</literal> rule allows you
+      to conditionally use different properties depending on whether some
+      metatarget builds or not. This is similar to the functionality of the
+      configure script in autotools projects. The function signature is:
+ </para>
+ <programlisting>
+rule check-target-builds ( target message ? : true-properties * : false-properties * )
+ </programlisting>
+
+ <para>This function can only be used when passing requirements or usage
+ requirements to a metatarget rule. For example, to make an application link
+      to a library if it is available, one can use the following:</para>
+ <programlisting>
+exe app : app.cpp : [ check-target-builds has_foo "System has foo" : &lt;library&gt;foo : &lt;define&gt;FOO_MISSING=1 ] ;
+ </programlisting>
+
+      <para>For another example, the alias rule can be used to consolidate configuration
+ choices and make them available to other metatargets, like so:</para>
+ <programlisting>
+alias foobar : : : : [ check-target-builds has_foo "System has foo" : &lt;library&gt;foo : &lt;library&gt;bar ] ;
+ </programlisting>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>obj</literal></term>
+
+ <listitem><para>Creates an object file. Useful when a single source
+ file must be compiled with special properties.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>preprocessed</literal></term>
+ <indexterm><primary>preprocessed</primary></indexterm>
+
+      <listitem><para>Creates a preprocessed source file. The arguments follow the
+ <link linkend="bbv2.main-target-rule-syntax">common syntax</link>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry id="bbv2.reference.rules.glob">
+ <term><literal>glob</literal></term>
+
+      <listitem><para>The <code>glob</code> rule takes a list of shell patterns
+      and returns the list of files in the project's source directory that
+      match any of the patterns. For example:
+ <programlisting>
+lib tools : [ glob *.cpp ] ;
+ </programlisting>
+      It is also possible to pass a second argument&#x2014;the list of
+      exclude patterns. The result will then include the list of
+      files matching any of the include patterns and not matching any
+      of the exclude patterns. For example:
+ <programlisting>
+lib tools : [ glob *.cpp : file_to_exclude.cpp bad*.cpp ] ;
+ </programlisting>
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry id="bbv2.reference.glob-tree">
+ <indexterm><primary>glob-tree</primary></indexterm>
+ <term><literal>glob-tree</literal></term>
+
+      <listitem><para>The <code>glob-tree</code> rule is similar to
+      <code>glob</code>, except that it operates recursively from
+ the directory of the containing Jamfile. For example:
+ <programlisting>
+ECHO [ glob-tree *.cpp : .svn ] ;
+ </programlisting>
+ will print the names of all C++ files in your project. The
+ <literal>.svn</literal> exclude pattern prevents the
+ <code>glob-tree</code> rule from entering administrative
+ directories of the Subversion version control system.
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>project</literal></term>
+
+ <listitem><para>Declares project id and attributes, including
+ project requirements. See <xref linkend="bbv2.overview.projects"/>.
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>use-project</literal></term>
+
+ <listitem><para>Assigns a symbolic project ID to a project at
+ a given path. This rule must be better documented!
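+      For example, the following sketch (with a hypothetical id and path)
+      lets other Jamfiles refer to the project in the
+      <filename>util/foo</filename> directory by the symbolic id
+      <code>/my-util</code>:
+<programlisting>
+use-project /my-util : util/foo ;
+</programlisting>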
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry id="bbv2.reference.rules.explicit">
+ <term><literal>explicit</literal></term>
+
+ <listitem><para>The <literal>explicit</literal> rule takes a single
+ parameter&#x2014;a list of target names. The named targets will
+ be marked explicit, and will be built only if they are explicitly
+ requested on the command line, or if their dependents are built.
+ Compare this to ordinary targets, that are built implicitly when
+ their containing project is built.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>always</literal></term>
+ <indexterm><primary>always building a metatarget</primary></indexterm>
+
+      <listitem><para>The <literal>always</literal> function takes a single
+      parameter&#x2014;a list of metatarget names. The top-level targets produced
+      by the named metatargets will always be considered out of date. Consider this example:
+ </para>
+<programlisting>
+exe hello : hello.cpp ;
+exe bye : bye.cpp ;
+always hello ;
+</programlisting>
+ <para>If a build of <filename>hello</filename> is requested, then the binary will
+ always be relinked. The object files will not be recompiled, though. Note that if
+ a build of <filename>hello</filename> is not requested, for example you specify just
+ <filename>bye</filename> on the command line, <filename>hello</filename> will not
+ be relinked.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>constant</literal></term>
+
+      <listitem><para>Sets a project-wide constant. Takes two
+      parameters: a variable name and a value, and makes the specified
+      variable name accessible in this Jamfile and any child Jamfiles.
+ For example:
+ <programlisting>
+constant VERSION : 1.34.0 ;
+ </programlisting>
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>path-constant</literal></term>
+
+ <listitem><para>Same as <literal>constant</literal> except that
+      the value is treated as a path relative to the Jamfile location. For example,
+ if <command>b2</command> is invoked in the current directory,
+ and Jamfile in <filename>helper</filename> subdirectory has:
+ <programlisting>
+path-constant DATA : data/a.txt ;
+ </programlisting>
+ then the variable <varname>DATA</varname> will be set to
+ <literal>helper/data/a.txt</literal>, and if <command>b2</command>
+ is invoked from the <filename>helper</filename> directory, then
+ the variable <varname>DATA</varname> will be set to
+ <literal>data/a.txt</literal>.
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>build-project</literal></term>
+
+      <listitem><para>Causes another project to be built. This rule
+ takes a single parameter&#x2014;a directory name relative to
+ the containing Jamfile. When the containing Jamfile is built,
+ the project located at that directory will be built as well.
+ At the moment, the parameter to this rule should be a directory
+ name. Project ID or general target references are not allowed.
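+      For example (with a hypothetical subdirectory name):
+<programlisting>
+build-project tools ;
+</programlisting>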
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>test-suite</literal></term>
+
+ <listitem><para>This rule is deprecated and equivalent to
+ <code>alias</code>.</para></listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.overview.builtins.features">
+ <title>Builtin features</title>
+
+    <para>This section documents the features that are built into
+ Boost.Build. For features with a fixed set of values, that set is
+ provided, with the default value listed first.</para>
+
+ <indexterm><primary>features</primary><secondary>builtin</secondary></indexterm>
+
+ <variablelist>
+ <varlistentry><term><literal>variant</literal></term>
+ <indexterm><primary>variant</primary></indexterm>
+
+ <listitem>
+ <para>
+ A feature combining several low-level features, making it easy to
+ request common build configurations.
+ </para>
+
+ <para>
+ <emphasis role="bold">Allowed values:</emphasis>
+ <literal>debug</literal>, <literal>release</literal>,
+ <literal>profile</literal>.
+ </para>
+
+ <para>
+ The value <literal>debug</literal> expands to
+ </para>
+
+<programlisting>
+&lt;optimization&gt;off &lt;debug-symbols&gt;on &lt;inlining&gt;off &lt;runtime-debugging&gt;on
+</programlisting>
+
+ <para>
+ The value <literal>release</literal> expands to
+ </para>
+
+<programlisting>
+&lt;optimization&gt;speed &lt;debug-symbols&gt;off &lt;inlining&gt;full &lt;runtime-debugging&gt;off
+</programlisting>
+
+ <para>
+ The value <literal>profile</literal> expands to the same as
+ <literal>release</literal>, plus:
+ </para>
+
+<programlisting>
+&lt;profiling&gt;on &lt;debug-symbols&gt;on
+</programlisting>
+
+ <para>
+ Users can define their own build variants using the
+ <code>variant</code> rule from the <code>common</code> module.
+ </para>
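+
+          <para>
+            For example, a custom variant could be declared as in the
+            following sketch (the name and the exact property set are
+            hypothetical):
+          </para>
+
+<programlisting>
+variant crazy : &lt;optimization&gt;speed &lt;debug-symbols&gt;off
+                &lt;inlining&gt;off &lt;runtime-debugging&gt;off ;
+</programlisting>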
+
+ <para>
+ <emphasis role="bold">Note:</emphasis> Runtime debugging is on in
+ debug builds to suit the expectations of people used to various
+ IDEs.
+ <!-- Define "runtime debugging". Why will those people expect it to
+ be on in debug builds? -->
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry id="bbv2.overview.builtins.features.link">
+ <term><literal>link</literal></term>
+ <indexterm><primary>link</primary></indexterm>
+
+ <listitem>
+
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>shared</literal>,
+ <literal>static</literal></para>
+
+ <simpara>
+          A feature controlling how libraries are built.
+ </simpara>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry id="bbv2.overview.builtins.features.runtime-link">
+ <indexterm><primary>runtime linking</primary></indexterm>
+ <term><literal>runtime-link</literal></term>
+
+ <listitem>
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>shared</literal>,
+ <literal>static</literal></para>
+
+ <simpara>
+          Controls whether a static or shared C/C++ runtime should be used. There
+          are some restrictions on how this feature can be used; for example,
+          on some compilers an application using a static runtime should
+          not use shared libraries at all, and on some compilers,
+ mixing static and shared runtime requires extreme care. Check
+ your compiler documentation for more details.
+ </simpara>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>threading</literal></term>
+ <indexterm><primary>threading</primary></indexterm>
+
+ <listitem>
+
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>single</literal>,
+ <literal>multi</literal></para>
+
+ <simpara>
+          Controls whether the project should be built in multi-threaded mode. This feature does not
+          necessarily change code generation in the compiler, but it causes the compiler to link
+ to additional or different runtime libraries, and define additional preprocessor
+ symbols (for example, <code>_MT</code> on Windows and <code>_REENTRANT</code> on Linux).
+ How those symbols affect the compiled code depends on the code itself.
+ </simpara>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>source</literal></term>
+ <indexterm><primary>source</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ The <code>&lt;source&gt;X</code> feature has the same effect on
+ building a target as putting X in the list of sources. It is useful
+ when you want to add the same source to all targets in the project
+ (you can put &lt;source&gt; in requirements) or to conditionally
+ include a source (using conditional requirements, see <xref linkend=
+ "bbv2.tutorial.conditions"/>). See also the <code>&lt;library&gt;
+ </code> feature.
+ </simpara>
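+        <para>For example, to compile an extra source (a hypothetical
+          <filename>version.cpp</filename>) into every target declared in the
+          Jamfile:</para>
+<programlisting>
+project : requirements &lt;source&gt;version.cpp ;
+</programlisting>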
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>library</literal></term>
+ <indexterm><primary>library</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ This feature is almost equivalent to the <code>&lt;source&gt;</code>
+ feature, except that it takes effect only for linking. When you want
+          to link all targets in a Jamfile to a certain library, the
+ <code>&lt;library&gt;</code> feature is preferred over
+ <code>&lt;source&gt;X</code>&mdash;the latter will add the library to
+ all targets, even those that have nothing to do with libraries.
+ </simpara>
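+        <para>For example, to link every target in a Jamfile to a
+          hypothetical library target <code>z</code> declared in the same
+          Jamfile:</para>
+<programlisting>
+project : requirements &lt;library&gt;z ;
+</programlisting>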
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><anchor id="bbv2.builtin.features.dependency"/>
+ <literal>dependency</literal></term>
+ <indexterm><primary>dependency</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ Introduces a dependency on the target named by the value of this
+ feature (so it will be brought up-to-date whenever the target being
+ declared is). The dependency is not used in any other way.
+
+ <!--
+ ====================================================================
+ An example and a motivation is needed here. Below is some commented
+ out content that used to be here but did not make any sense and
+ seems to have been left unfinished in some previous revision. Should
+ be fixed and this whole feature should be retested and fixed as
+ needed.
+ ====================================================================
+ For example, in application with plugins, the plugins are not used
+ when linking the application, application might have a dependency on
+ its plugins, even though
+
+ and
+ adds its usage requirements to the build properties
+ of the target being declared.
+
+ The primary use case is when you want
+ the usage requirements (such as <code>#include</code> paths) of some
+ library to be applied, but do not want to link to it.
+
+ It is hard to picture why anyone would want to do that. Please flesh
+ out this motivation.
+ ====================================================================
+ -->
+ </simpara>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><anchor id="bbv2.builtin.features.implicit-dependency"/>
+ <literal>implicit-dependency</literal></term>
+ <indexterm><primary>implicit-dependency</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ Indicates that the target named by the value of this feature
+ may produce files that are included by the sources of the
+ target being declared. See <xref linkend="bbv2.reference.generated_headers"/>
+ for more information.
+ </simpara>
+ </listitem>
+ </varlistentry>
+
+
+ <varlistentry><term><anchor id="bbv2.builtin.features.use"/>
+ <literal>use</literal></term>
+ <indexterm><primary>use</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ Introduces a dependency on the target named by the value of this
+ feature (so it will be brought up-to-date whenever the target being
+ declared is), and adds its usage requirements to the build
+ properties
+ <!-- Do you really mean "to the requirements?" -->
+ of the target being declared. The dependency is not used in any
+ other way. The primary use case is when you want the usage
+ requirements (such as <code>#include</code> paths) of some library
+ to be applied, but do not want to link to it.
+ <!-- It is hard to picture why anyone would want to do that. Please
+ flesh out this motivation. -->
+ </simpara>
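+        <para>For example, assuming a hypothetical <code>alias</code> target
+          named <code>headers</code> that carries <code>&lt;include&gt;</code>
+          usage requirements, the following picks up those include paths
+          without linking to anything:</para>
+<programlisting>
+exe app : app.cpp : &lt;use&gt;headers ;
+</programlisting>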
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><anchor id="bbv2.reference.features.dll-path"/>
+ <literal>dll-path</literal></term>
+ <indexterm><primary>dll-path</primary></indexterm>
+
+ <listitem>
+ <simpara>
+          Specifies an additional directory where the system should
+          look for shared libraries when the executable or shared
+          library is run. This feature only affects Unix
+          compilers. Please see <xref linkend="bbv2.faq.dll-path"/>
+ in <xref linkend="bbv2.faq"/> for details.
+ </simpara>
+ </listitem></varlistentry>
+
+ <varlistentry>
+ <term><literal>hardcode-dll-paths</literal></term>
+ <indexterm><primary>hardcode-dll-paths</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ Controls automatic generation of dll-path properties.
+ </simpara>
+
+ <para><emphasis role="bold">Allowed values:</emphasis>
+ <literal>true</literal>, <literal>false</literal>. This property is
+ specific to Unix systems. If an executable is built with
+ <code>&lt;hardcode-dll-paths&gt;true</code>, the generated binary
+            will contain the list of all the paths to the used shared libraries.
+            As a result, the executable can be run without changing system
+            paths to shared libraries or installing the libraries to system
+            paths. This behaviour is very
+            convenient during development. Please see the <link linkend=
+ "bbv2.faq.dll-path">FAQ entry</link> for details. Note that on Mac
+ OSX, the paths are unconditionally hardcoded by the linker, and it
+ is not possible to disable that behaviour.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>cflags</literal></term>
+ <term><literal>cxxflags</literal></term>
+ <term><literal>linkflags</literal></term>
+
+ <listitem>
+ <simpara>
+ The value of those features is passed without modification to the
+ corresponding tools. For <code>cflags</code> that is both the C and
+ C++ compilers, for <code>cxxflags</code> that is the C++ compiler
+ and for <code>linkflags</code> that is the linker. The features are
+ handy when you are trying to do something special that cannot be
+ achieved by a higher-level feature in Boost.Build.
+ </simpara>
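+        <para>For example (the flags shown are illustrative and specific to
+          gcc-style toolchains):</para>
+<programlisting>
+exe app : app.cpp : &lt;cxxflags&gt;-fno-rtti &lt;linkflags&gt;-Wl,--no-undefined ;
+</programlisting>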
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>include</literal></term>
+ <indexterm><primary>include</primary></indexterm>
+
+ <listitem>
+ <simpara>
+ Specifies an additional include path that is to be passed to C and
+ C++ compilers.
+ </simpara>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>define</literal></term>
+ <indexterm><primary>define</primary></indexterm>
+
+ <listitem>
+ <simpara>
+          Specifies a preprocessor symbol that should be defined on the command
+          line. You may either specify just the symbol, which will be defined
+          without any value, or both the symbol and the value, separated by
+          an equals sign.
+ </simpara>
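+        <para>For example (with hypothetical symbol names):</para>
+<programlisting>
+exe app : app.cpp : &lt;define&gt;ENABLE_LOGGING &lt;define&gt;APP_VERSION=12 ;
+</programlisting>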
+ </listitem>
+ </varlistentry>
+
+
+ <varlistentry><term><literal>warnings</literal></term>
+ <listitem>
+ <simpara>
+ The <code>&lt;warnings&gt;</code> feature controls the warning level
+ of compilers. It has the following values:
+ <itemizedlist>
+ <listitem><para><code>off</code> - disables all warnings.</para></listitem>
+ <listitem><para><code>on</code> - enables default warning level for the tool.</para></listitem>
+ <listitem><para><code>all</code> - enables all warnings.</para></listitem>
+ </itemizedlist>
+          The default value is <code>all</code>.
+ </simpara>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>warnings-as-errors</literal></term>
+ <listitem>
+ <simpara>
+          The <code>&lt;warnings-as-errors&gt;</code> feature makes it possible to
+ treat warnings as errors and abort compilation on a warning. The
+ value <code>on</code> enables this behaviour. The default value is
+ <code>off</code>.
+ </simpara>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>build</literal></term>
+
+ <listitem>
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>no</literal></para>
+
+ <para>
+ The <code>build</code> feature is used to conditionally disable
+ build of a target. If <code>&lt;build&gt;no</code> is in properties
+ when building a target, build of that target is skipped. Combined
+ with conditional requirements this allows you to skip building some
+ target in configurations where the build is known to fail.
+ </para>
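+
+        <para>
+          For example, a sketch (the target name and the condition are
+          hypothetical) that skips a target when building with msvc:
+        </para>
+<programlisting>
+exe advanced : advanced.cpp : &lt;toolset&gt;msvc:&lt;build&gt;no ;
+</programlisting>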
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><anchor id="bbv2.builtin.features.tag"/><literal>tag</literal></term>
+
+ <listitem><para>The <literal>tag</literal> feature is used to customize
+ the name of the generated files. The value should have the form:
+<programlisting>@<replaceable>rulename</replaceable></programlisting> where
+    <replaceable>rulename</replaceable> should be the name of a rule with the
+ following signature:
+<programlisting>rule tag ( name : type ? : property-set )</programlisting>
+ The rule will be called for each target with the default name computed
+ by Boost.Build, the type of the target, and property set. The rule can
+ either return a string that must be used as the name of the target, or
+ an empty string, in which case the default name will be used.
+ </para>
+
+      <para>The most typical use of the <literal>tag</literal> feature is to
+      encode build properties or the library version in library target names. You
+      should take care to return a non-empty string from the tag rule only for
+      the types you care about &#x2014; otherwise, you might end up modifying
+      the names of object files, generated header files and other targets for which
+      changing names does not make sense.</para>
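+
+      <para>For example, here is a minimal sketch (with a hypothetical free
+      rule named <code>my-tag</code> defined in the same Jamfile) that appends
+      the build variant to the names of generated libraries:</para>
+<programlisting>
+rule my-tag ( name : type ? : property-set )
+{
+    # Only rename libraries; other target types keep their default names.
+    if $(type) in STATIC_LIB SHARED_LIB
+    {
+        local variant = [ $(property-set).get &lt;variant&gt; ] ;
+        return $(name)-$(variant) ;
+    }
+}
+
+lib network : network.cpp : &lt;tag&gt;@my-tag ;
+</programlisting>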
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>debug-symbols</literal></term>
+
+ <listitem>
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>on</literal>, <literal>off</literal>.</para>
+
+ <para>The <literal>debug-symbols</literal> feature specifies if
+ produced object files, executables and libraries should include
+ debug information.
+ Typically, the value of this feature is implicitly set by the
+ <literal>variant</literal> feature, but it can be explicitly
+        specified by the user. The most common usage is to build a
+        release variant with debugging information.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>runtime-debugging</literal></term>
+
+ <listitem>
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>on</literal>, <literal>off</literal>.</para>
+
+ <para>The <literal>runtime-debugging</literal> feature specifies if
+ produced object files, executables and libraries should include
+ behaviour useful only for debugging, such as asserts.
+ Typically, the value of this feature is implicitly set by the
+ <literal>variant</literal> feature, but it can be explicitly
+        specified by the user. The most common usage is to build a
+        release variant with debugging output.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>target-os</literal></term>
+ <listitem>
+
+ <anchor id="bbv2.reference.features.target-os"/>
+
+ <para>
+ The operating system for which the code is to be generated. The
+        compiler you use should be a compiler for that operating
+        system. This option causes Boost.Build to use naming conventions
+        suitable for that operating system, and to adjust the build process
+        accordingly. For example, with gcc, it controls whether import
+        libraries are produced for shared libraries or not.
+ </para>
+
+ <para>The complete list of possible values for this feature is:
+ aix, bsd, cygwin, darwin, freebsd, hpux, iphone, linux, netbsd,
+ openbsd, osf, qnx, qnxnto, sgi, solaris, unix, unixware, windows.
+ </para>
+
+ <para>See <xref linkend="bbv2.tasks.crosscompile"/> for details of
+        cross-compilation.</para>
+
+ </listitem>
+ </varlistentry>
+
+
+ <varlistentry><term><literal>architecture</literal></term>
+ <listitem>
+
+        <para>The <literal>architecture</literal> feature specifies
+        the general processor family to generate code for.</para>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>instruction-set</literal></term>
+ <indexterm><primary>instruction-set</primary></indexterm>
+ <listitem>
+ <para>
+ <emphasis role="bold">Allowed values:</emphasis> depend on the used
+ toolset.
+ </para>
+
+        <para>The <literal>instruction-set</literal> feature specifies for which
+ specific instruction set the code should be generated. The
+ code in general might not run on processors with older/different
+ instruction sets.</para>
+
+ <para>While Boost.Build allows a large set of possible values
+        for this feature, whether a given value works depends on which
+ compiler you use. Please see
+ <xref linkend="bbv2.reference.tools.compilers"/> for details.
+ </para>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>address-model</literal></term>
+ <indexterm><primary>64-bit compilation</primary></indexterm>
+ <listitem>
+ <para><emphasis role="bold">Allowed values:</emphasis> <literal>32</literal>, <literal>64</literal>.</para>
+
+        <para>The <literal>address-model</literal> feature specifies whether 32-bit or
+        64-bit code should be generated by the compiler. Whether this feature
+        works depends on the used compiler, its version, how the compiler is
+        configured, and the values of the <literal>architecture</literal> and
+        <literal>instruction-set</literal>
+ features. Please see <xref linkend="bbv2.reference.tools.compilers"/>
+ for details.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>c++-template-depth</literal></term>
+ <listitem>
+ <para>
+ <emphasis role="bold">Allowed values:</emphasis> Any positive
+ integer.
+ </para>
+
+ <para>
+ This feature allows configuring a C++ compiler with the maximal
+ template instantiation depth parameter. Specific toolsets may or may
+ not provide support for this feature depending on whether their
+ compilers provide a corresponding command-line option.
+ </para>
+
+ <para>
+ <emphasis role="bold">Note:</emphasis> Due to some internal details
+ in the current Boost.Build implementation it is not possible to have
+          features whose valid values are all positive integers. As a
+          workaround, a large set of allowed values has been defined for this
+          feature and, if a different one is needed, the user can easily add it by
+          calling the feature.extend rule.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>embed-manifest</literal></term>
+ <listitem>
+
+ <indexterm><primary>manifest file</primary><secondary>embedding</secondary></indexterm>
+ <indexterm><primary>embed-manifest</primary></indexterm>
+
+ <para>
+ <emphasis role="bold">Allowed values:</emphasis> on, off.
+ </para>
+
+ <para>This feature is specific to the msvc toolset (see
+ <xref linkend="bbv2.reference.tools.compiler.msvc"/>),
+ and controls whether the manifest files should be embedded inside
+ executables and shared libraries, or placed alongside them. This
+ feature corresponds to the IDE option found in the project settings dialog,
+ under <menuchoice><guimenu>Configuration Properties</guimenu>
+ <guisubmenu>Manifest Tool</guisubmenu>
+ <guisubmenu>Input and Output</guisubmenu>
+ <guimenuitem>Embed manifest</guimenuitem> </menuchoice>.
+ </para>
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry><term><literal>embed-manifest-file</literal></term>
+ <listitem>
+
+ <indexterm><primary>manifest file</primary><secondary>embedding</secondary></indexterm>
+ <indexterm><primary>embed-manifest-file</primary></indexterm>
+
+ <para>This feature is specific to the msvc toolset (see
+ <xref linkend="bbv2.reference.tools.compiler.msvc"/>),
+ and controls which manifest files should be embedded inside
+ executables and shared libraries. This
+ feature corresponds to the IDE option found in the project settings dialog,
+ under <menuchoice><guimenu>Configuration Properties</guimenu>
+ <guisubmenu>Manifest Tool</guisubmenu>
+ <guisubmenu>Input and Output</guisubmenu>
+ <guimenuitem>Additional Manifest Files</guimenuitem> </menuchoice>.
+ </para>
+
+ </listitem>
+ </varlistentry>
+
+
+ </variablelist>
+ </section>
+
+ <section id="bbv2.reference.tools">
+ <title>Builtin tools</title>
+
+ <para>Boost.Build comes with support for a large number of C++ compilers,
+ and other tools. This section documents how to use those tools.</para>
+
+ <para>Before using any tool, you must declare your intention, and possibly
+ specify additional information about the tool's configuration. This is
+ done by calling the <code>using</code> rule, typically in your
+ <filename>user-config.jam</filename>, for example:</para>
+<programlisting>
+using gcc ;
+</programlisting>
+  <para>Additional parameters can be passed just as for other rules, for example:</para>
+<programlisting>
+using gcc : 4.0 : g++-4.0 ;
+</programlisting>
+
+
+
+ <para>The options that can be passed to each tool are documented in the
+ subsequent sections.</para>
+
+ <section id="bbv2.reference.tools.compilers">
+
+ <title>C++ Compilers</title>
+
+ <para>This section lists all Boost.Build modules that support C++
+ compilers and documents how each one can be initialized. The name
+      of a compiler's support module is also the value of
+      the <code>toolset</code> feature that can be used to explicitly
+      request that compiler.</para>
+
+ <section id="bbv2.reference.tools.compiler.gcc">
+
+ <title>GNU C++</title>
+
+ <para>The <code>gcc</code> module supports the
+ <ulink url="http://gcc.gnu.org">GNU C++ compiler</ulink>
+      on Linux, on a number of Unix-like systems including SunOS, and on Windows
+      (either <ulink url="http://www.cygwin.com">Cygwin</ulink> or
+      <ulink url="http://www.mingw.org">MinGW</ulink>). On Mac OS X, it is recommended
+      to use the system gcc; see <xref linkend="bbv2.reference.tools.compiler.darwin"/>.
+ </para>
+
+ <para>The <code>gcc</code> module is initialized using the following
+ syntax:</para>
+ <programlisting>
+using gcc : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <!-- FIXME: mention everywhere what is the semantic
+ of version is -->
+
+ <para>
+ If the version is not explicitly specified, it will be
+ automatically detected by running the compiler with the <code>-v</code>
+ option. If the command is not specified, the <command>g++</command>
+      binary will be searched for in <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('root_option')/*)"
+ parse="xml"/>
+
+ <varlistentry>
+ <term><literal>rc</literal></term>
+
+ <listitem>
+ <para>Specifies the resource compiler command
+ that will be used with the version of gcc that is being
+ configured. This setting makes sense only for Windows and only
+ if you plan to use resource files. By
+ default <command>windres</command> will be used.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>rc-type</literal></term>
+
+ <listitem>
+ <para>Specifies the type of resource compiler. The value can
+ be either <code>windres</code> for msvc resource compiler,
+ or <code>rc</code> for borland's resource compiler.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ <indexterm><primary>64-bit compilation</primary>
+ <secondary>gcc</secondary></indexterm>
+
+ In order to compile 64-bit applications, you have to specify
+ <code>address-model=64</code>, and the <code>instruction-set</code>
+    feature should refer to a 64-bit processor. Currently, those
+ include <literal>nocona</literal>, <literal>opteron</literal>,
+ <literal>athlon64</literal> and <literal>athlon-fx</literal>.
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.darwin">
+
+ <title>Apple Darwin gcc</title>
+
+ <para>The <code>darwin</code> module supports the version of gcc that is
+ modified and provided by Apple. The configuration is essentially identical
+ to that of the gcc module.
+ </para>
+
+ <para>
+ <indexterm><primary>fat binaries</primary></indexterm>
+      The darwin toolset can generate so-called "fat"
+      binaries&#x2014;binaries that support more than one
+      architecture or address model. To build a binary that can run both
+ on Intel and PowerPC processors, specify
+ <code>architecture=combined</code>. To build a binary that can run
+ both in 32-bit and 64-bit modes, specify
+ <code>address-model=32_64</code>. If you specify both of those
+ properties, a "4-way" fat binary will be generated.
+ </para>
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.msvc">
+
+ <title>Microsoft Visual C++</title>
+
+ <para>The <code>msvc</code> module supports the
+ <ulink url="http://msdn.microsoft.com/visualc/">Microsoft Visual
+ C++</ulink> command-line tools on Microsoft Windows. The supported
+ products and versions of command line tools are listed below:</para>
+ <itemizedlist>
+ <listitem><para>Visual Studio 2010&#x2014;10.0</para></listitem>
+ <listitem><para>Visual Studio 2008&#x2014;9.0</para></listitem>
+ <listitem><para>Visual Studio 2005&#x2014;8.0</para></listitem>
+ <listitem><para>Visual Studio .NET 2003&#x2014;7.1</para></listitem>
+ <listitem><para>Visual Studio .NET&#x2014;7.0</para></listitem>
+ <listitem><para>Visual Studio 6.0, Service Pack 5&#x2014;6.5</para></listitem>
+ </itemizedlist>
+
+ <para>The <code>msvc</code> module is initialized using the following
+ syntax:</para>
+ <programlisting>
+using msvc : &toolset_ops; ;
+ </programlisting>
+ &using_repeation;
+ <para>If the version is not explicitly specified, the most recent
+ version found in the registry will be used instead. If the special
+ value <code>all</code> is passed as the version, all versions found in
+ the registry will be configured. If a version is specified, but the
+ command is not, the compiler binary will be searched for in standard
+ installation paths for that version, followed by <envar>PATH</envar>.
+ </para>
+
+ <para>The compiler command should be specified using forward slashes,
+ and quoted.</para>
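+ <para>For example, a hypothetical configuration (the installation
+ path is illustrative only) might look like this:</para>
+ <programlisting>
+using msvc : 10.0 : "C:/Program Files/Microsoft Visual Studio 10.0/VC/bin/cl.exe" ;</programlisting>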
+
+ &option_list_intro;
+ <variablelist>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+
+ <varlistentry>
+ <term><literal>assembler</literal></term>
+
+ <listitem><para>The command that compiles assembler sources. If
+ not specified, <command>ml</command> will be used. The command
+ will be invoked after the setup script has been executed and has
+ adjusted the <envar>PATH</envar> variable.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>compiler</literal></term>
+
+ <listitem><para>The command that compiles C and C++ sources. If
+ not specified, <command>cl</command> will be used. The command
+ will be invoked after the setup script has been executed and has
+ adjusted the <envar>PATH</envar> variable.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>compiler-filter</literal></term>
+
+ <listitem><para>The command through which to pipe the output of
+ running the compiler, for example to pass the output to STLfilt.
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>idl-compiler</literal></term>
+
+ <listitem><para>The command that compiles Microsoft COM interface
+ definition files. If not specified, <command>midl</command> will
+ be used. The command will be invoked after the setup script has
+ been executed and has adjusted the <envar>PATH</envar> variable.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>linker</literal></term>
+
+ <listitem><para>The command that links executables and dynamic
+ libraries. If not specified, <command>link</command> will be used.
+ The command will be invoked after the setup script has been executed
+ and has adjusted the <envar>PATH</envar> variable.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>mc-compiler</literal></term>
+
+ <listitem><para>The command that compiles Microsoft message
+ catalog files. If not specified, <command>mc</command> will be
+ used. The command will be invoked after the setup script has
+ been executed and has adjusted the <envar>PATH</envar> variable.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>resource-compiler</literal></term>
+
+ <listitem><para>The command that compiles resource files. If not
+ specified, <command>rc</command> will be used. The command will be
+ invoked after the setup script has been executed and has adjusted
+ the <envar>PATH</envar> variable.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>setup</literal></term>
+
+ <listitem><para>The filename of the global environment setup
+ script to run before invoking any of the tools defined in this
+ toolset. It is not used when a target-platform-specific
+ script has been explicitly specified for the current target
+ platform. The setup script used will be passed the target platform
+ identifier (x86, x86_amd64, x86_ia64, amd64 or ia64) as a
+ parameter. If not specified, a default script is chosen based on
+ the compiler binary used, e.g. <command>vcvars32.bat</command> or
+ <command>vsvars32.bat</command>.</para></listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>setup-amd64</literal></term>
+ <term><literal>setup-i386</literal></term>
+ <term><literal>setup-ia64</literal></term>
+
+ <listitem><para>The filename of the target platform specific
+ environment setup script to run before invoking any of the tools
+ defined in this toolset. If not specified, the global environment
+ setup script is used.</para></listitem>
+ </varlistentry>
+ </variablelist>
+
+ <section id="v2.reference.tools.compiler.msvc.64">
+ <title>64-bit support</title>
+
+ <indexterm><primary>64-bit compilation</primary>
+ <secondary>Microsoft Visual Studio</secondary></indexterm>
+
+ <para>Starting with version 8.0, Microsoft Visual Studio can
+ generate binaries for 64-bit processors: both 64-bit flavours of x86
+ (codenamed AMD64/EM64T) and Itanium (codenamed IA64). In addition,
+ compilers that themselves run in 64-bit mode, for better
+ performance, are provided. The complete list of compiler
+ configurations is as follows (we abbreviate AMD64/EM64T to just
+ AMD64):</para>
+
+ <itemizedlist>
+ <listitem><para>32-bit x86 host, 32-bit x86 target</para>
+ </listitem>
+ <listitem><para>32-bit x86 host, 64-bit AMD64 target</para>
+ </listitem>
+ <listitem><para>32-bit x86 host, 64-bit IA64 target</para>
+ </listitem>
+ <listitem><para>64-bit AMD64 host, 64-bit AMD64 target</para>
+ </listitem>
+ <listitem><para>64-bit IA64 host, 64-bit IA64 target</para>
+ </listitem>
+ </itemizedlist>
+ <para>
+ The 32-bit host compilers can always be used, even on 64-bit
+ Windows. In contrast, the 64-bit host compilers require both a
+ 64-bit host processor and 64-bit Windows, but can be faster. By
+ default, only the 32-bit host, 32-bit target compiler is installed;
+ additional compilers need to be installed explicitly.
+ </para>
+
+ <para>To use 64-bit compilation you should:</para>
+ <orderedlist>
+ <listitem><para>Configure your compiler as usual. If you provide a
+ path to the compiler explicitly, provide the path to the 32-bit
+ compiler. If you try to specify the path to any of the 64-bit
+ compilers, configuration will not work.</para></listitem>
+
+ <listitem><para>When compiling, use <code>address-model=64</code>
+ to generate AMD64 code (see the example after this list).</para></listitem>
+
+ <listitem><para>To generate IA64 code, use
+ <code>architecture=ia64</code>.</para></listitem>
+ </orderedlist>
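+ <para>For example, a hypothetical command line requesting an AMD64
+ build might be:</para>
+ <programlisting>
+b2 toolset=msvc address-model=64</programlisting>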
+
+ <para>The (AMD64 host, AMD64 target) compiler will be used
+ automatically when you are generating AMD64 code and are running
+ 64-bit Windows on AMD64. The (IA64 host, IA64 target) compiler will
+ never be used, since nobody has an IA64 machine to test on.</para>
+
+ <para>It is believed that AMD64 and EM64T targets are essentially
+ compatible. The compiler options <code>/favor:AMD64</code> and
+ <code>/favor:EM64T</code>, which are accepted only by AMD64
+ targeting compilers, cause the generated code to be tuned to a
+ specific flavor of 64-bit x86. Boost.Build will make use of those
+ options depending on the value of the <code>instruction-set</code>
+ feature.</para>
+ </section>
+
+ <section id="v2.reference.tools.compiler.msvc.winrt">
+ <title>Windows Runtime support</title>
+
+ <indexterm><primary>Windows Runtime support</primary>
+ <secondary>Microsoft Visual Studio</secondary></indexterm>
+
+ <para>
+ Starting with version 11.0, Microsoft Visual Studio can
+ produce binaries for Windows Store and Phone in addition to
+ traditional Win32 desktop. To specify which Windows API set
+ to target, use the <literal>windows-api</literal> feature.
+ Available options are <literal>desktop</literal>,
+ <literal>store</literal>, or <literal>phone</literal>. If not
+ specified, <literal>desktop</literal> will be used.
+ </para>
+
+ <para>
+ When using <literal>store</literal> or <literal>phone</literal>
+ the specified toolset determines what Windows version is
+ targeted. The following options are available:
+ </para>
+
+ <itemizedlist>
+ <listitem><para>Windows 8.0: toolset=msvc-11.0 windows-api=store</para>
+ </listitem>
+ <listitem><para>Windows 8.1: toolset=msvc-12.0 windows-api=store</para>
+ </listitem>
+ <listitem><para>Windows Phone 8.0: toolset=msvc-11.0 windows-api=phone</para>
+ </listitem>
+ <listitem><para>Windows Phone 8.1: toolset=msvc-12.0 windows-api=phone</para>
+ </listitem>
+ </itemizedlist>
+
+ <para>
+ For example, use the following to build for Windows Store 8.1
+ with the ARM architecture:
+ </para>
+ <programlisting>
+.\b2 toolset=msvc-12.0 windows-api=store architecture=arm</programlisting>
+
+ <para>
+ Note that when targeting Windows Phone 8.1, version 12.0 did not
+ include the vcvars phone setup scripts. They can be downloaded
+ separately from
+ <ulink url="http://blogs.msdn.com/b/vcblog/archive/2014/07/18/using-boost-libraries-in-windows-store-and-phone-applications.aspx">here</ulink>.
+ </para>
+
+ </section>
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.intel">
+
+ <title>Intel C++</title>
+
+ <para>The <code>intel-linux</code> and <code>intel-win</code> modules
+ support the Intel C++ command-line compiler&#x2014;the <ulink url=
+ "http://www.intel.com/software/products/compilers/clin/index.htm">Linux</ulink>
+ and <ulink url=
+ "http://www.intel.com/cd/software/products/asmo-na/eng/compilers/284527.htm">
+ Windows</ulink> versions respectively.</para>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using intel-linux : &toolset_ops; ;</programlisting>
+ <para>or</para>
+ <programlisting>
+using intel-win : &toolset_ops; ;</programlisting>
+ <para>respectively.</para>
+
+ &using_repeation;
+
+ <para>
+ If the compiler command is not specified, Boost.Build will
+ look in <envar>PATH</envar> for an executable <command>icpc</command>
+ (on Linux), or <command>icc.exe</command> (on Windows).
+ </para>
+
+ &option_list_intro;
+ <variablelist>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+
+ </variablelist>
+
+ <para>The Linux version supports the following additional options:</para>
+ <variablelist>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('root_option')/*)"
+ parse="xml"/>
+
+ </variablelist>
+
+ <!-- the compatibility option appears to be messed up -->
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.acc">
+
+ <title>HP aC++ compiler</title>
+
+ <para>The <code>acc</code> module supports the
+<ulink url="http://h21007.www2.hp.com/dspp/tech/tech_TechSoftwareDetailPage_IDX/1,1703,1740,00.html">HP aC++ compiler</ulink>
+ for the HP-UX operating system.</para>
+
+ <para>The module is initialized using the following
+ syntax:</para>
+ <programlisting>
+using acc : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+
+ <para>
+ If the command is not specified, the <command>aCC</command>
+ binary will be searched for in <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.borland">
+
+ <title>Borland C++ Compiler</title>
+
+ <para>The <code>borland</code> module supports the command-line
+ C++ compiler included in the
+ <ulink url="http://www.borland.com/us/products/cbuilder/index.html">C++ Builder 2006</ulink>
+ product and earlier versions of it, running on Microsoft Windows.</para>
+
+ <para>The supported products are listed below. The version reported
+ by the command-line tools is also listed for reference:</para>
+ <itemizedlist>
+ <listitem><para>C++ Builder 2006&#x2014;5.8.2</para></listitem>
+ <listitem><para>CBuilderX&#x2014;5.6.5, 5.6.4 (depending on release)</para></listitem>
+ <listitem><para>CBuilder6&#x2014;5.6.4</para></listitem>
+ <listitem><para>Free command line tools&#x2014;5.5.1</para></listitem>
+ </itemizedlist>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using borland : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <para>If the command is not specified, Boost.Build will search for
+ a binary named <command>bcc32</command> in <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.como">
+
+ <title>Comeau C/C++ Compiler</title>
+
+ <para>The <code>como-linux</code> and the <code>como-win</code>
+ modules support the
+ <ulink url="http://www.comeaucomputing.com/">Comeau C/C++ Compiler</ulink>
+ on Linux and Windows respectively.</para>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using como-linux : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <para>If the command is not specified, Boost.Build will search for
+ a binary named <command>como</command> in
+ <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+ </variablelist>
+
+ <para>Before using the Windows version of the compiler, you need to
+ set up the necessary environment variables as described in the
+ compiler's documentation. In particular, the
+ <envar>COMO_XXX_INCLUDE</envar> variable should be set, where
+ <envar>XXX</envar> corresponds to the backend C compiler used.</para>
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.cw">
+
+ <title>Code Warrior</title>
+
+ <para>The <code>cw</code> module supports the CodeWarrior compiler,
+ originally produced by Metrowerks and presently developed by
+ Freescale. Boost.Build supports only the versions of the compiler that
+ target x86 processors. All such versions were released by Metrowerks
+ before the acquisition and are no longer sold. The last version known
+ to work is 9.4.</para>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using cw : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <para>If the command is not specified, Boost.Build will search for a
+ binary named <command>mwcc</command> in default installation paths and
+ in <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+
+ <xi:include href="fragments.xml" xpointer="xpointer(id('root_option')/*)"
+ parse="xml"/>
+
+ <varlistentry>
+ <term><literal>setup</literal></term>
+
+ <listitem><para>The command that sets up environment variables
+ prior to invoking the compiler. If not specified,
+ <command>cwenv.bat</command> alongside the compiler binary
+ will be used.</para>
+ </listitem>
+ </varlistentry>
+
+
+ <varlistentry>
+ <term><literal>compiler</literal></term>
+
+ <listitem><para>The command that compiles C and C++ sources.
+ If not specified, <command>mwcc</command> will be used. The
+ command will be invoked after the setup script has been
+ executed and has adjusted the <envar>PATH</envar> variable.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>linker</literal></term>
+
+ <listitem><para>The command that links executables and dynamic
+ libraries.
+ If not specified, <command>mwld</command> will be used. The
+ command will be invoked after the setup script has been
+ executed and has adjusted the <envar>PATH</envar> variable.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.dmc">
+
+ <title>Digital Mars C/C++ Compiler</title>
+
+ <para>The <code>dmc</code> module supports the
+ <ulink url="http://www.digitalmars.com/">Digital Mars C++ compiler.</ulink>
+ </para>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using dmc : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <para>If the command is not specified, Boost.Build will search for
+ a binary named <command>dmc</command> in
+ <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.hp_cxx">
+
+ <title>HP C++ Compiler for Tru64 Unix</title>
+
+ <para>The <code>hp_cxx</code> module supports the
+ <ulink url="http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN">
+ HP C++ Compiler</ulink> for Tru64 Unix.</para>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using hp_cxx : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <para>If the command is not specified, Boost.Build will search for
+ a binary named <command>hp_cxx</command> in <envar>PATH</envar>.</para>
+
+ &option_list_intro;
+ <variablelist>
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.sun">
+
+ <title>Sun Studio</title>
+
+ <para>The <code>sun</code> module supports the
+ <ulink url="http://developers.sun.com/sunstudio/index.jsp">
+ Sun Studio</ulink> C++ compilers for the Solaris OS.</para>
+
+ <para>The module is initialized using the following syntax:</para>
+ <programlisting>
+using sun : &toolset_ops; ;</programlisting>
+
+ &using_repeation;
+
+ <para>If the command is not specified, Boost.Build will search for
+ a binary named <command>CC</command>
+ in <filename>/opt/SUNWspro/bin</filename> and in
+ <envar>PATH</envar>.</para>
+
+ <para>When using this compiler on complex C++ code, such as the
+ <ulink url="http://boost.org">Boost C++ library</ulink>, it is
+ recommended to specify the following options when initializing the
+ <code>sun</code> module:
+ <screen>
+-library=stlport4 -features=tmplife -features=tmplrefstatic
+ </screen> See the <ulink url="http://blogs.sun.com/sga/entry/command_line_options">
+ Sun C++ Frontend Tales</ulink> for details.</para>
+
+ &option_list_intro;
+ <variablelist>
+ <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
+ parse="xml"/>
+ </variablelist>
+
+ <indexterm><primary>64-bit compilation</primary>
+ <secondary>Sun Studio</secondary></indexterm>
+ Starting with Sun Studio 12, you can create 64-bit applications
+ by using the <code>address-model=64</code> property.
+
+ </section>
+
+ <section id="bbv2.reference.tools.compiler.vacpp">
+
+ <title>IBM Visual Age</title>
+ <para>The <code>vacpp</code> module supports the
+ <ulink url="http://www.ibm.com/software/ad/vacpp">IBM Visual
+ Age</ulink> C++ Compiler, for the AIX operating system. Versions
+ 7.1 and 8.0 are known to work.</para>
+
+ <para>The module is initialized using the following
+ syntax:</para>
+ <programlisting>
+using vacpp ;</programlisting>
+
+ <para>The module does not accept any initialization options. The
+ compiler should be installed in the <filename>/usr/vacpp/bin</filename>
+ directory.</para>
+
+ <para>Later versions of Visual Age are known as XL C/C++. They
+ were not tested with the <code>vacpp</code> module.</para>
+
+ </section>
+
+ </section>
+
+ <section>
+ <title>Third-party libraries</title>
+
+ <para>Boost.Build provides special support for some
+ third-party C++ libraries, documented below.</para>
+
+ <section id="bbv2.reference.tools.libraries.stlport">
+ <title>STLport library</title>
+ <indexterm><primary>STLport</primary></indexterm>
+
+ <para>The <ulink url="http://stlport.org">STLport</ulink> library
+ is an alternative implementation of the C++ standard library.
+ Boost.Build supports using that library on the Windows platform.
+ Support on Linux is hampered by the different naming of the libraries
+ in each STLport version and is not official.</para>
+
+ <para>Before using STLport, you need to configure it in
+ <filename>user-config.jam</filename> using the following syntax:
+ </para>
+ <programlisting>
+using stlport : <optional><replaceable>version</replaceable></optional> : <replaceable>header-path</replaceable> : <optional><replaceable>library-path</replaceable></optional> ;
+</programlisting>
+ <para>
+ Where <replaceable>version</replaceable> is the version of
+ STLport, for example <literal>5.1.4</literal>,
+ <replaceable>header-path</replaceable> is the location where the
+ STLport headers can be found, and <replaceable>library-path</replaceable>
+ is the location where the STLport libraries can be found.
+ The version should always be provided, and the library path should
+ be provided if you are using STLport's implementation of
+ iostreams. Note that STLport 5.* always uses its own iostream
+ implementation, so the library path is required for those versions.
+ </para>
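+ <para>For example, a hypothetical configuration (the paths are
+ illustrative only) might look like this:</para>
+ <programlisting>
+using stlport : 5.1.4 : C:/STLport-5.1.4/stlport : C:/STLport-5.1.4/lib ;</programlisting>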
+
+ <para>When STLport is configured, you can build with STLport by
+ requesting <literal>stdlib=stlport</literal> on the command line.
+ </para>
+
+ </section>
+
+ <section id="bbv2.reference.tools.libraries.zlib">
+ <title>zlib</title>
+ <indexterm><primary>zlib</primary></indexterm>
+
+ <para>Provides support for the
+ <ulink url="http://www.zlib.net">zlib</ulink> library. zlib
+ can be configured either to use precompiled binaries or to
+ build the library from source.</para>
+
+ <para>zlib can be initialized using the following syntax</para>
+ <programlisting>
+using zlib : <optional><replaceable>version</replaceable></optional> : <optional><replaceable>options</replaceable></optional> : <optional><replaceable>condition</replaceable></optional> : <optional><replaceable>is-default</replaceable></optional> ;
+ </programlisting>
+ <para>Options for using a prebuilt library:</para>
+ <variablelist>
+ <varlistentry>
+ <term><literal>search</literal></term>
+ <listitem>
+ <para>The directory containing the zlib binaries.</para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term><literal>name</literal></term>
+ <listitem>
+ <para>Overrides the default library name.</para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term><literal>include</literal></term>
+ <listitem>
+ <para>The directory containing the zlib headers.</para>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+ <para>If none of these options is specified, then the environment
+ variables ZLIB_LIBRARY_PATH, ZLIB_NAME, and ZLIB_INCLUDE will be
+ used instead.</para>
+ <para>Options for building zlib from source:</para>
+ <variablelist>
+ <varlistentry>
+ <term><literal>source</literal></term>
+ <listitem>
+ <para>The zlib source directory. Defaults to the
+ environment variable ZLIB_SOURCE.</para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term><literal>tag</literal></term>
+ <listitem>
+ <para>Sets the <link linkend="bbv2.builtin.features.tag">tag</link>
+ property to adjust the file name of the library. Ignored
+ when using precompiled binaries.</para>
+ </listitem>
+ </varlistentry>
+ <varlistentry>
+ <term><literal>build-name</literal></term>
+ <listitem>
+ <para>The base name to use for the compiled library.
+ Ignored when using precompiled binaries.</para>
+ </listitem>
+ </varlistentry>
+ </variablelist>
+ <para>Examples:</para>
+ <programlisting>
+# Find zlib in the default system location
+using zlib ;
+# Build zlib from source
+using zlib : 1.2.7 : &lt;source&gt;/home/steven/zlib-1.2.7 ;
+# Find zlib in /usr/local
+using zlib : 1.2.7 : &lt;include&gt;/usr/local/include &lt;search&gt;/usr/local/lib ;
+# Build zlib from source for msvc and find
+# prebuilt binaries for gcc.
+using zlib : 1.2.7 : &lt;source&gt;C:/Devel/src/zlib-1.2.7 : &lt;toolset&gt;msvc ;
+using zlib : 1.2.7 : : &lt;toolset&gt;gcc ;
+</programlisting>
+ </section>
+
+ </section>
+
+ <section>
+ <title>Documentation tools</title>
+
+ <para>Boost.Build support for the Boost documentation tools is
+ documented below.
+ </para>
+
+ <section id="bbv2.reference.tools.doc.xsltproc">
+ <title>xsltproc</title>
+ <indexterm><primary>xsltproc</primary></indexterm>
+
+ <para>To use xsltproc, you first need to configure it using the following syntax:</para>
+ <programlisting>
+using xsltproc : <optional><replaceable>xsltproc</replaceable></optional> ;
+</programlisting>
+ <para>
+ Where <replaceable>xsltproc</replaceable> is the xsltproc executable.
+ If <replaceable>xsltproc</replaceable> is not specified, and the
+ variable XSLTPROC is set, the value of XSLTPROC will be used.
+ Otherwise, xsltproc will be searched for in PATH.
+ </para>
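+ <para>For example, a configuration using a hypothetical installation
+ path might look like this:</para>
+ <programlisting>
+using xsltproc : /usr/local/bin/xsltproc ;</programlisting>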
+
+
+ &option_list_intro;
+ <variablelist>
+
+ <varlistentry>
+ <indexterm><primary>xsl:param</primary></indexterm>
+ <term><literal>xsl:param</literal></term>
+ <listitem>
+ <para>Values should have the form
+ <replaceable>name</replaceable>=<replaceable>value</replaceable></para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <indexterm><primary>xsl:path</primary></indexterm>
+ <term><literal>xsl:path</literal></term>
+ <listitem>
+ <para>Sets an additional search path for xi:include elements.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <indexterm><primary>catalog</primary></indexterm>
+ <term><literal>catalog</literal></term>
+ <listitem>
+ <para>A catalog file used to rewrite remote URLs to a local copy.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ <para>The xsltproc module provides the following rules. Note that
+ these operate on jam targets and are intended to be used by another
+ toolset, such as boostbook, rather than directly by users.
+ </para>
+ <variablelist>
+
+ <varlistentry>
+ <indexterm><primary>xslt</primary></indexterm>
+ <term><literal>xslt</literal></term>
+ <listitem>
+ <programlisting>
+rule xslt ( target : source stylesheet : properties * )
+</programlisting>
+ <para>Runs xsltproc to create a single output file.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <indexterm><primary>xslt-dir</primary></indexterm>
+ <term><literal>xslt-dir</literal></term>
+ <listitem>
+ <programlisting>
+rule xslt-dir ( target : source stylesheet : properties * : dirname )
+</programlisting>
+ <para>Runs xsltproc to create multiple outputs in a directory.
+ <literal>dirname</literal> is unused, but exists for
+ historical reasons. The output directory is determined from the
+ target.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ </section>
+
+ <section id="bbv2.reference.tools.doc.boostbook">
+ <title>boostbook</title>
+ <indexterm><primary>boostbook</primary><secondary>module</secondary></indexterm>
+
+ <para>To use boostbook, you first need to configure it using the following syntax:</para>
+ <programlisting>
+using boostbook : <optional><replaceable>docbook-xsl-dir</replaceable></optional> : <optional><replaceable>docbook-dtd-dir</replaceable></optional> : <optional><replaceable>boostbook-dir</replaceable></optional> ;
+</programlisting>
+ <para>
+ <replaceable>docbook-xsl-dir</replaceable> is the DocBook XSL stylesheet
+ directory. If not provided, we use DOCBOOK_XSL_DIR from the environment
+ (if available) or look in standard locations. Otherwise, we let the
+ XML processor load the stylesheets remotely.
+ </para>
+
+ <para>
+ <replaceable>docbook-dtd-dir</replaceable> is the DocBook DTD directory.
+ If not provided, we use DOCBOOK_DTD_DIR from the environment (if
+ available) or look in standard locations. Otherwise, we let the XML
+ processor load the DTD remotely.
+ </para>
+
+ <para>
+ <replaceable>boostbook-dir</replaceable> is the BoostBook directory
+ with the DTD and XSL subdirs.
+ </para>
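+ <para>For example, a hypothetical configuration pointing at locally
+ installed DocBook XSL stylesheets and DTD (the paths are illustrative
+ only) might look like this:</para>
+ <programlisting>
+using boostbook : /usr/share/xml/docbook/stylesheet/docbook-xsl
+               : /usr/share/xml/docbook/schema/dtd/4.2 ;</programlisting>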
+
+ <para>The boostbook module depends on xsltproc. For pdf or ps output,
+ it also depends on fop.
+ </para>
+
+ &option_list_intro;
+ <variablelist>
+
+ <varlistentry>
+ <indexterm><primary>format</primary></indexterm>
+ <indexterm><primary>html</primary></indexterm>
+ <indexterm><primary>xhtml</primary></indexterm>
+ <indexterm><primary>htmlhelp</primary></indexterm>
+ <indexterm><primary>onehtml</primary></indexterm>
+ <indexterm><primary>man</primary></indexterm>
+ <indexterm><primary>pdf</primary></indexterm>
+ <indexterm><primary>ps</primary></indexterm>
+ <indexterm><primary>docbook</primary></indexterm>
+ <indexterm><primary>fo</primary></indexterm>
+ <indexterm><primary>tests</primary></indexterm>
+ <term><literal>format</literal></term>
+ <listitem>
+ <para>
+ <emphasis role="bold">Allowed values:</emphasis>
+ <literal>html</literal>, <literal>xhtml</literal>,
+ <literal>htmlhelp</literal>, <literal>onehtml</literal>,
+ <literal>man</literal>, <literal>pdf</literal>,
+ <literal>ps</literal>, <literal>docbook</literal>,
+ <literal>fo</literal>, <literal>tests</literal>.
+ </para>
+
+
+ <para>The <literal>format</literal> feature determines the type
+ of output produced by the boostbook rule.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ <para>The boostbook module defines a rule for creating a target
+ following the common syntax.</para>
+
+ <variablelist>
+
+ <varlistentry>
+ <indexterm><primary>boostbook</primary><secondary>rule</secondary></indexterm>
+ <term><literal>boostbook</literal></term>
+ <listitem>
+ <programlisting>
+rule boostbook ( target-name : sources * : requirements * : default-build * )
+</programlisting>
+ <para>Creates a boostbook target.</para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
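+ <para>As an illustration (the target and file names are hypothetical),
+ a Jamfile might declare:</para>
+ <programlisting>
+boostbook mydoc : mydoc.xml : &lt;format&gt;html ;</programlisting>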
+
+ </section>
+
+ <section id="bbv2.reference.tools.doc.doxygen">
+ <title>doxygen</title>
+ <indexterm><primary>doxygen</primary></indexterm>
+
+ <para>To use doxygen, you first need to configure it using the following syntax:</para>
+ <programlisting>
+using doxygen : <optional><replaceable>name</replaceable></optional> ;
+</programlisting>
+ <para>
+ <replaceable>name</replaceable> is the doxygen command.
+ If it is not specified, it will be found in the PATH.
+ </para>
+
+ <para>The doxygen module depends on the boostbook module when
+ generating BoostBook XML.
+ </para>
+
+ &option_list_intro;
+ <variablelist>
+
+ <varlistentry>
+ <indexterm><primary>doxygen:param</primary></indexterm>
+ <term><literal>doxygen:param</literal></term>
+ <listitem>
+ <para>All the values of <literal>doxygen:param</literal>
+ are added to the doxyfile.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <indexterm><primary>prefix</primary></indexterm>
+ <term><literal>prefix</literal></term>
+ <listitem>
+ <para>Specifies the common prefix of all headers
+ when generating BoostBook XML. Everything before
+ this will be stripped off.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <indexterm><primary>reftitle</primary></indexterm>
+ <term><literal>reftitle</literal></term>
+ <listitem>
+ <para>Specifies the title of the library-reference section,
+ when generating BoostBook XML.</para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <indexterm><primary>doxygen:xml-imagedir</primary></indexterm>
+ <term><literal>doxygen:xml-imagedir</literal></term>
+ <listitem>
+ <para>When generating BoostBook XML, specifies the
+ directory in which to place the images generated
+ from LaTeX formulae.</para>
+ <warning><para>The path is interpreted relative to the
+ current working directory, not relative to the Jamfile.
+ This is necessary to match the behavior of BoostBook.
+ </para></warning>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+
+ <para>The doxygen module defines a rule for creating a target
+ following the common syntax.</para>
+
+ <variablelist>
+
+ <varlistentry>
+ <indexterm><primary>doxygen</primary><secondary>rule</secondary></indexterm>
+ <term><literal>doxygen</literal></term>
+ <listitem>
+ <programlisting>
+rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
+</programlisting>
+ <para>Creates a doxygen target. If the target name
+ ends with .html, then this will generate an html
+ directory. Otherwise it will generate BoostBook XML.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
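+ <para>As an illustration (the header paths and parameter values are
+ hypothetical), a Jamfile might declare a BoostBook XML target like
+ this:</para>
+ <programlisting>
+doxygen autodoc
+    :
+        [ glob include/mylib/*.hpp ]
+    :
+        &lt;doxygen:param&gt;EXTRACT_ALL=YES
+        &lt;reftitle&gt;"My Library Reference"
+    ;</programlisting>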
+
+ </section>
+
+ <section id="bbv2.reference.tools.doc.quickbook">
+ <title>quickbook</title>
+ <indexterm><primary>quickbook</primary></indexterm>
+
+ <para>The quickbook module provides a generator to convert from
+ Quickbook to BoostBook XML.</para>
+
+ <para>To use quickbook, you first need to configure it using the following syntax:</para>
+ <programlisting>
+using quickbook : <optional><replaceable>command</replaceable></optional> ;
+</programlisting>
+ <para>
+ <replaceable>command</replaceable> is the quickbook executable.
+ If it is not specified, Boost.Build will compile it from source.
+ If it is unable to find the source it will search for a quickbook
+ executable in PATH.
+ </para>
+
+ </section>
+
+ <section id="bbv2.reference.tools.doc.fop">
+ <title>fop</title>
+ <indexterm><primary>fop</primary></indexterm>
+
+ <para>The fop module provides generators to convert from
+ XSL formatting objects to Postscript and PDF.</para>
+
+ <para>To use fop, you first need to configure it using the following syntax:</para>
+ <programlisting>
+using fop : <optional><replaceable>fop-command</replaceable></optional> : <optional><replaceable>java-home</replaceable></optional> : <optional><replaceable>java</replaceable></optional> ;
+</programlisting>
+ <para>
+ <replaceable>fop-command</replaceable> is the command to run fop.
+ If it is not specified, Boost.Build will search for it in PATH and
+ FOP_HOME.
+ </para>
+ <para>
+ Either <replaceable>java-home</replaceable> or
+ <replaceable>java</replaceable>
+ can be used to specify where to find java.
+ </para>
+
+ </section>
+
+ </section>
+
+ </section>
+
+ <section id="bbv2.reference.modules">
+ <title>Builtin modules</title>
+
+ <para>
+ This section describes the modules that are provided
+ by Boost.Build. The import rule allows rules from
+ one module to be used in another module or Jamfile.
+ </para>
+
+ <section id="bbv2.reference.modules.modules">
+ <title>modules</title>
+ <indexterm><primary>modules</primary></indexterm>
+
+ <para>
+ The <code>modules</code> module defines basic functionality
+ for handling modules.
+ </para>
+
+ <para>
+ A module defines a number of rules that can be used in other
+ modules. Modules can contain code at the top level to initialize
+ the module. This code is executed the first time the
+ module is loaded.
+ <note>
+ <para>
+ A Jamfile is a special kind of module which is managed by
+ the build system. Although they cannot be loaded directly
+ by users, the other features of modules are still useful
+ for Jamfiles.
+ </para>
+ </note>
+ </para>
+
+ <para>
+ Each module has its own namespaces for variables and rules. If two
+ modules A and B both use a variable named X, each one gets its own
+ copy of X. They won't interfere with each other in any way.
+ Similarly, importing rules into one module has no effect on any other
+ module.
+ </para>
+
+ <para>
+ Every module has two special variables.
+ <code>$(__file__)</code> contains the name of the file that
+ the module was loaded from and <code>$(__name__)</code>
+ contains the name of the module.
+ <note><para><code>$(__file__)</code> does not contain
+ the full path to the file. If you need this, use
+ <code>modules.binding</code>.</para></note>
+ </para>
+
+ <orderedlist>
+
+ <listitem id="bbv2.reference.modules.modules.binding">
+ <indexterm zone="bbv2.reference.modules.modules.binding"><primary>binding</primary></indexterm>
+ <code language="jam">rule binding ( module-name )</code>
+ <para>Returns the filesystem binding of the given module.</para>
+ <para>For example, a module can get its own location with:
+ <programlisting language="jam">me = [ modules.binding $(__name__) ] ;</programlisting>
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.modules.poke">
+ <indexterm zone="bbv2.reference.modules.modules.poke"><primary>poke</primary></indexterm>
+ <code language="jam">rule poke ( module-name ? : variables + : value * )</code>
+ <para>Sets the module-local value of a variable.</para>
+ <para>For example, to set a variable in the global module:
+ <programlisting language="jam">modules.poke : ZLIB_INCLUDE : /usr/local/include ;</programlisting>
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.modules.peek">
+ <indexterm zone="bbv2.reference.modules.modules.peek"><primary>peek</primary></indexterm>
+ <code language="jam">rule peek ( module-name ? : variables + )</code>
+ <para>Returns the module-local value of a variable.</para>
+ <para>
+ For example, to read a variable from the global module:
+ <programlisting language="jam">local ZLIB_INCLUDE = [ modules.peek : ZLIB_INCLUDE ] ;</programlisting>
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.modules.call-in">
+ <indexterm zone="bbv2.reference.modules.modules.call-in"><primary>call-in</primary></indexterm>
+ <code language="jam">rule call-in ( module-name ? : rule-name args * : * ) </code>
+ <para>Call the given rule locally in the given module. Use
+ this for rules accepting rule names as arguments, so that
+ the passed rule may be invoked in the context of the rule's
+ caller (for example, if the rule accesses module globals or
+ is a local rule).
+ <note><para>rules called this way may accept at most
+ 8 parameters.</para></note></para>
+ <para>Example:
+<programlisting language="jam">
+rule filter ( f : values * )
+{
+ local m = [ CALLER_MODULE ] ;
+ local result ;
+ for v in $(values)
+ {
+ if [ modules.call-in $(m) : $(f) $(v) ]
+ {
+ result += $(v) ;
+ }
+ }
+ return result ;
+}
+</programlisting>
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.modules.load">
+ <indexterm zone="bbv2.reference.modules.modules.load"><primary>load</primary></indexterm>
+ <code language="jam">rule load ( module-name : filename ? : search * )</code>
+ <para>Load the indicated module if it is not already loaded.</para>
+ <variablelist>
+ <varlistentry>
+ <term><literal>module-name</literal></term>
+ <listitem><para>Name of module to load.</para></listitem>
+ </varlistentry>
+ </variablelist>
+ <variablelist>
+ <varlistentry>
+ <term><literal>filename</literal></term>
+ <listitem><para>(partial) path to file; Defaults to <code>$(module-name).jam</code></para></listitem>
+ </varlistentry>
+ </variablelist>
+ <variablelist>
+ <varlistentry>
+ <term><literal>search</literal></term>
+ <listitem><para>Directories in which to search for filename.
+ Defaults to <code>$(BOOST_BUILD_PATH)</code>.</para></listitem>
+ </varlistentry>
+ </variablelist>
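+ <para>For example, a hypothetical call loading a module from an
+ explicit directory might look like this:
+ <programlisting language="jam">modules.load my-rules : my-rules.jam : /home/user/jam ;</programlisting>
+ </para>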
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.modules.import">
+ <indexterm zone="bbv2.reference.modules.modules.import"><primary>import</primary></indexterm>
+ <code language="jam">rule import ( module-names + : rules-opt * : rename-opt * )</code>
+ <para>Load the indicated module and import rule names into the
+ current module. Any members of <code>rules-opt</code> will be
+ available without qualification in the caller's module. Any
+ members of <code>rename-opt</code> will be taken as the names
+ of the rules in the caller's module, in place of the names they
+ have in the imported module. If <code>rules-opt = '*'</code>,
+ all rules from the indicated module are imported into the
+ caller's module. If <code>rename-opt</code> is supplied, it must have the
+ same number of elements as <code>rules-opt</code>.</para>
+ <note><para>The <literal>import</literal> rule is available
+ without qualification in all modules.</para></note>
+ <para>Examples:
+<programlisting language="jam">
+import path ;
+import path : * ;
+import path : join ;
+import path : native make : native-path make-path ;
+</programlisting>
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.modules.clone-rules">
+ <indexterm zone="bbv2.reference.modules.modules.clone-rules"><primary>clone-rules</primary></indexterm>
+ <code language="jam">rule clone-rules ( source-module target-module )</code>
+ <para>Define exported copies in <code>$(target-module)</code>
+ of all rules exported from <code>$(source-module)</code>. Also
+ make them available in the global module with qualification,
+ so that it is just as though the rules were defined originally
+ in <code>$(target-module)</code>.</para>
+ </listitem>
+
+ </orderedlist>
+
+ </section>
+
+ <xi:include href="path.xml"/>
+ <xi:include href="regex.xml"/>
+ <xi:include href="sequence.xml"/>
+ <xi:include href="type.xml"/>
+
+ </section>
+
+ <section id="bbv2.reference.class">
+ <title>Builtin classes</title>
+ <xi:include href="abstract-target.xml"/>
+ <xi:include href="project-target.xml"/>
+ <xi:include href="main-target.xml"/>
+ <xi:include href="basic-target.xml"/>
+ <xi:include href="typed-target.xml"/>
+ <xi:include href="property-set.xml"/>
+ </section>
+
+ <section id="bbv2.reference.buildprocess">
+ <title>Build process</title>
+
+ <para>The general overview of the build process was given in the
+ <link linkend="bbv2.overview.build_process">user documentation</link>.
+ This section provides additional details, and some specific rules.
+ </para>
+
+ <para>To recap, building a target with specific properties includes the
+ following steps:
+ <orderedlist>
+
+ <listitem><para>applying default build,</para></listitem>
+
+ <listitem><para>selecting the main target alternative to use,
+ </para></listitem>
+
+ <listitem><para>determining "common" properties,</para></listitem>
+
+ <listitem><para>building targets referred to by the sources list and
+ dependency properties,</para></listitem>
+
+ <listitem><para>adding the usage requirements produced when building
+ dependencies to the "common" properties,</para></listitem>
+
+ <listitem><para>building the target using generators,</para></listitem>
+
+ <listitem><para>computing the usage requirements to be returned.</para></listitem>
+
+ </orderedlist>
+ </para>
+
+ <section id="bbv2.reference.buildprocess.alternatives">
+ <title>Alternative selection</title>
+
+ <para>When there are several alternatives, one of them must be
+ selected. The process is as follows (an example is given after the list):</para>
+
+ <orderedlist>
+ <listitem>
+ <simpara>
+ For each alternative, its <emphasis>condition</emphasis> is defined as
+ the set of base properties in its requirements. [Note: it might be
+ better to specify the condition explicitly, as in conditional
+ requirements].
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ An alternative is viable only if all properties in its condition
+ are present in the build request.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ If there is one viable alternative, it is chosen. Otherwise,
+ an attempt is made to find one best alternative. An alternative
+ a is better than another alternative b if and only if the set of
+ properties in b's condition is a strict subset of the set of
+ properties in a's condition. If there is one viable alternative
+ that is better than all others, it is selected. Otherwise, an
+ error is reported.
+ </simpara>
+ </listitem>
+ </orderedlist>
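+ <para>For example, an illustrative sketch with two alternatives of the
+ same main target (the names are hypothetical):</para>
+ <programlisting>
+lib helpers : helpers_win.cpp : &lt;os&gt;NT ;      # alternative 1
+lib helpers : helpers_posix.cpp : &lt;os&gt;LINUX ; # alternative 2</programlisting>
+ <para>A build request that includes <code>&lt;os&gt;NT</code> makes only
+ the first alternative viable, so it is selected.</para>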
+
+ </section>
+
+ <section id="bbv2.reference.buildprocess.common">
+ <title>Determining common properties</title>
+
+ <para>The term "common" properties is somewhat artificial. It denotes
+ the intermediate property set from which both the build request for
+ dependencies and the properties for building the target are derived.
+ </para>
+
+ <para>Since default build and alternatives are already handled, we have
+ only two inputs: build requests and requirements. Here are the rules
+ about common properties.
+ </para>
+
+ <orderedlist>
+ <listitem><para>A non-free feature can have only one
+ value.</para></listitem>
+
+ <listitem><para>A non-conditional property in the requirements is
+ always present in the common properties.</para></listitem>
+
+ <listitem><para>A property in the build request is present in the
+ common properties, unless (2) says otherwise.</para></listitem>
+
+ <listitem><para>If either the build request or the requirements
+ (non-conditional or conditional) include an expandable property
+ (either a composite property, or a property with a specified
+ subfeature value), the behaviour is equivalent to explicitly adding
+ all expanded properties to the build request or the
+ requirements.</para></listitem>
+
+ <listitem><para>If the requirements include a conditional property,
+ and the condition of this property is true in the context of the
+ common properties, then the conditional property is in the common
+ properties as well.</para></listitem>
+
+ <listitem><para>If no value for a feature is given by the other
+ rules here, the feature has its default value in the common
+ properties.</para></listitem>
+ </orderedlist>
+
+ <para>Those rules are declarative; they do not specify how to compute
+ the common properties. However, they provide enough information for
+ the user. The important point is the handling of conditional
+ requirements. The condition can be satisfied either by a property in
+ the build request, by non-conditional requirements, or even by another
+ conditional property. For example, the following works as
+ expected:
+<programlisting>
+exe a : a.cpp
+ : &lt;toolset&gt;gcc:&lt;variant&gt;release
+ &lt;variant&gt;release:&lt;define&gt;FOO ;
+</programlisting>
+ </para>
+
+ </section>
+
+ <section id="bbv2.reference.buildprocess.targetpath">
+ <title>Target Paths</title>
+ <indexterm><primary>path</primary><secondary>for targets</secondary></indexterm>
+
+ <para>Several factors determine the location of a concrete
+ file target. All files in a project are built under
+ the directory bin unless this is overridden by the build-dir project
+ attribute. Under bin is a path that depends on the properties
+ used to build each target. This path is uniquely determined by
+ all non-free, non-incidental properties. For example,
+ given a property set containing:
+ <code>&lt;toolset&gt;gcc &lt;toolset-gcc:version&gt;4.6.1 &lt;variant&gt;debug
+ &lt;warnings&gt;all &lt;define&gt;_DEBUG &lt;include&gt;/usr/local/include
+ &lt;link&gt;static</code>,
+ the path will be gcc-4.6.1/debug/link-static. &lt;warnings&gt; is an
+ incidental feature and &lt;define&gt; and &lt;include&gt; are
+ free features, so they do not affect the path.</para>
+
+ <para>Sometimes the paths produced by Boost.Build can become excessively
+ long. There are a couple of command line options that can help with this.
+ --abbreviate-paths reduces each element to no more than five characters.
+ For example, link-static becomes lnk-sttc. The --hash option reduces the
+ path to a single directory using an MD5 hash.</para>
+
+ <para>There are two features that affect the build
+ directory. The &lt;location&gt; feature completely
+ overrides the default build directory. For example,
+ <programlisting>exe a : a.cpp : &lt;location&gt;. ;</programlisting>
+ builds all the files produced by <code>a</code>
+ in the directory of the Jamfile. This is generally
+ discouraged, as it precludes variant builds.</para>
+
+ <para>The &lt;location-prefix&gt; feature adds a
+ prefix to the path, under the project's build
+ directory. For example,
+ <programlisting>exe a : a.cpp : &lt;location-prefix&gt;subdir ;</programlisting>
+ will create the files for <code>a</code> in bin/subdir/gcc-4.6.1/debug.</para>
+
+ </section>
+
+ </section>
+
+
+
+ <section id="bbv2.reference.definitions">
+
+ <title>Definitions</title>
+
+ <section id="bbv2.reference.features">
+ <title>Features and properties</title>
+
+ <para>A <emphasis>feature</emphasis> is a normalized (toolset-independent)
+ aspect of a build configuration, such as whether inlining is
+ enabled. Feature names may not contain the '<literal>&gt;</literal>'
+ character.</para>
+
+ <!--
+ And what about dash?
+ -->
+
+ <para>Each feature in a build configuration has one or more
+ associated <emphasis>value</emphasis>s. Feature values for non-free features
+ may not contain the '<literal>&lt;</literal>', '<literal>:</literal>', or
+ '<literal>=</literal>' characters. Feature values for free features may not
+ contain the '<literal>&lt;</literal>' character.</para>
+
+ <para>A <emphasis>property</emphasis> is a (feature,value) pair, expressed as
+ &lt;feature&gt;value.</para>
+
+ <para>A <emphasis>subfeature</emphasis> is a feature that only exists in the
+ presence of its parent feature, and whose identity can be derived
+ (in the context of its parent) from its value. A subfeature's
+ parent can never be another subfeature. Thus, features and their
+ subfeatures form a two-level hierarchy.</para>
+
+ <para>A <emphasis>value-string</emphasis> for a feature <emphasis role="bold">F</emphasis> is a string of
+ the form
+ <literal>value-subvalue1-subvalue2</literal>...<literal>-subvalueN</literal>, where
+ <literal>value</literal> is a legal value for <emphasis role="bold">F</emphasis> and
+ <literal>subvalue1</literal>...<literal>subvalueN</literal> are legal values of some
+ of <emphasis role="bold">F</emphasis>'s subfeatures. For example, the properties
+ <literal>&lt;toolset&gt;gcc &lt;toolset-version&gt;3.0.1</literal> can be
+ expressed more concisely using a value-string, as
+ <literal>&lt;toolset&gt;gcc-3.0.1</literal>.</para>
+
+ <para>A <emphasis>property set</emphasis> is a set of properties (i.e. a
+ collection without duplicates), for instance:
+ <literal>&lt;toolset&gt;gcc &lt;runtime-link&gt;static</literal>.</para>
+
+ <para>A <emphasis>property path</emphasis> is a property set whose elements have
+ been joined into a single string separated by slashes. A property
+ path representation of the previous example would be
+ <literal>&lt;toolset&gt;gcc/&lt;runtime-link&gt;static</literal>.</para>
+
+ <para>A <emphasis>build specification</emphasis> is a property set that fully
+ describes the set of features used to build a target.</para>
+
+ <section id="bbv2.reference.features.validity">
+ <title>Property Validity</title>
+
+ <para>
+ For <link linkend=
+ "bbv2.reference.features.attributes.free">free</link>
+ features, all values are valid. For all other features,
+ the valid values are explicitly specified, and the build
+ system will report an error for the use of an invalid
+ feature-value. Subproperty validity may be restricted so
+ that certain values are valid only in the presence of
+ certain other subproperties. For example, it is possible
+ to specify that the <code>&lt;gcc-target&gt;mingw</code>
+ property is only valid in the presence of
+ <code>&lt;gcc-version&gt;2.95.2</code>.
+ </para>
+
+ </section>
+ <section id="bbv2.reference.features.attributes">
+ <title>Feature Attributes</title>
+
+ <para>Each feature has a collection of zero or more of the following
+ attributes. Feature attributes are low-level descriptions of how the
+ build system should interpret a feature's values when they appear in
+ a build request. We also refer to the attributes of properties, so
+ that an <emphasis>incidental</emphasis> property, for example, is
+ one whose feature has the <emphasis>incidental</emphasis>
+ attribute.</para>
+
+ <itemizedlist>
+ <listitem>
+ <para><emphasis>incidental</emphasis></para>
+
+ <para>Incidental features are assumed not to affect build
+ products at all. As a consequence, the build system may use
+ the same file for targets whose build specification differs
+ only in incidental features. A feature that controls a
+ compiler's warning level is one example of a likely
+ incidental feature.</para>
+
+ <para>Non-incidental features are assumed to affect build
+ products, so the files for targets whose build specification
+ differs in non-incidental features are placed in different
+ directories as described in <xref linkend="bbv2.reference.buildprocess.targetpath"/>.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ <anchor id="bbv2.reference.features.attributes.propagated"/>
+ <emphasis>propagated</emphasis>
+ </para>
+
+ <para>Features of this kind are
+ propagated to dependencies. That is, if a <link linkend=
+ "bbv2.overview.targets.main">main target</link> is built using a
+ propagated
+ property, the build system attempts to use the same property
+ when building any of its dependencies as part of that main
+ target. For instance, when an optimized executable is
+ requested, one usually wants it to be linked with optimized
+ libraries. Thus, the <literal>&lt;optimization&gt;</literal> feature is
+ propagated.</para>
+ </listitem>
+
+ <listitem>
+ <para>
+ <anchor id="bbv2.reference.features.attributes.free"/>
+ <emphasis>free</emphasis>
+ </para>
+
+ <para>Most features have a finite set of allowed values, and can
+ only take on a single value from that set in a given build
+ specification. Free features, on the other hand, can have
+ several values at a time and each value can be an arbitrary
+ string. For example, it is possible to have several
+ preprocessor symbols defined simultaneously:</para>
+
+<programlisting>
+&lt;define&gt;NDEBUG=1 &lt;define&gt;HAS_CONFIG_H=1
+</programlisting>
+
+ </listitem>
+
+ <listitem>
+ <para><emphasis>optional</emphasis></para>
+
+ <para>An optional feature is a feature that is not required to
+ appear in a build specification. Every non-optional non-free
+ feature has a default value that is used when a value for
+ the feature is not otherwise specified, either in a target's
+ requirements or in the user's build request. [A feature's
+ default value is given by the first value listed in the
+ feature's declaration. -- move this elsewhere - dwa]</para>
+ </listitem>
+
+ <listitem>
+ <para><emphasis>symmetric</emphasis></para>
+
+ <para>Normally a feature only generates a subvariant directory
+ when its value differs from its default value,
+ leading to an asymmetric subvariant directory structure for
+ certain values of the feature. A symmetric feature
+ always generates a corresponding
+ subvariant directory.</para>
+ </listitem>
+
+ <listitem>
+ <para><emphasis>path</emphasis></para>
+
+ <para>The value of a path feature specifies a path. The path is
+ treated as relative to the directory of the Jamfile where the path
+ feature is used and is translated appropriately by the build
+ system when the build is invoked from a different
+ directory.</para>
+ </listitem>
+
+ <listitem>
+ <para><emphasis>implicit</emphasis></para>
+
+ <para>Values of implicit features alone identify the feature.
+ For example, a user is not required to write
+ "&lt;toolset&gt;gcc", but can simply write "gcc". Implicit
+ feature names also don't appear in variant paths, although
+ the values do. Thus: bin/gcc/... as opposed to
+ bin/toolset-gcc/.... There should typically be only a few
+ such features, to avoid possible name clashes.</para>
+ </listitem>
+
+ <listitem>
+ <para><emphasis>composite</emphasis></para>
+
+ <para>Composite features actually correspond to groups of
+ properties. For example, a build variant is a composite
+ feature. When generating targets from a set of build
+ properties, composite features are recursively expanded and
+ <emphasis>added</emphasis> to the build property set, so rules can find
+ them if necessary. Non-composite non-free features override
+ components of composite features in a build property set.</para>
+ </listitem>
+
+ <listitem>
+ <para><emphasis>dependency</emphasis></para>
+
+ <para>The value of a dependency feature is a target reference.
+ When used for building a main target, the value of a
+ dependency feature is treated as an additional dependency.</para>
+
+ <para>For example, dependency features allow one to state that
+ library A depends on library B. As a result, whenever an
+ application links to A, it will also link to B.
+ Specifying B as a dependency of A is different from adding B to
+ the sources of A. <!-- Need to clarify this. --></para>
+ </listitem>
+ </itemizedlist>
+
+ <para>Features that are neither free nor incidental are called
+ <emphasis>base</emphasis> features.</para>
+
+
+ </section>
+ <section id="bbv2.reference.features.declaration">
+ <title>Feature Declaration</title>
+
+ <para>The low-level feature declaration interface is the
+ <literal>feature</literal> rule from the
+ <literal>feature</literal> module:
+
+<programlisting>
+rule feature ( name : allowed-values * : attributes * )
+</programlisting>
+
+ A feature's allowed-values may be extended with the
+ <code>feature.extend</code> rule.
+ </para>
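+ <para>For example, a minimal sketch declaring a hypothetical feature
+ and then extending its allowed values might look like this:</para>
+<programlisting language="jam">
+import feature ;
+feature.feature my-toggle : off on : propagated ;  # hypothetical feature
+feature.extend my-toggle : verbose ;               # add another allowed value</programlisting>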
+
+ </section>
+ </section>
+
+ <section id="bbv2.reference.variants.proprefine">
+ <title>Property refinement</title>
+
+ <para>When a target with certain properties is requested, and that
+ target requires some set of properties, it is necessary to find the
+ set of properties to use for the build. This process is called
+ <emphasis>property refinement</emphasis> and is performed by the
+ following rules (an example is given after the list):</para>
+
+ <orderedlist>
+
+ <listitem>
+ <simpara>
+ Each property in the required set is added to the original
+ property set
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+ If the original property set includes a property with a different
+ value of a non-free feature, that property is removed.
+ </simpara>
+ </listitem>
+ </orderedlist>
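+ <para>For example, an illustrative sketch: if the original property
+ set is <code>&lt;toolset&gt;gcc &lt;variant&gt;debug</code> and the
+ required set is <code>&lt;variant&gt;release &lt;define&gt;FOO</code>,
+ the refined set is
+ <code>&lt;toolset&gt;gcc &lt;variant&gt;release &lt;define&gt;FOO</code>:
+ the required <code>&lt;variant&gt;release</code> replaces the
+ conflicting non-free <code>&lt;variant&gt;debug</code>, while the free
+ <code>&lt;define&gt;FOO</code> is simply added.</para>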
+ </section>
+
+ <section id="bbv2.reference.variants.propcond">
+ <title>Conditional properties</title>
+
+ <para>Sometimes it is desirable to apply certain requirements only for
+ a specific combination of other properties. For example, one of the
+ compilers that you use issues a pointless warning that you want to
+ suppress by passing a command-line option to it. You would not
+ want to pass that option to other compilers. Conditional
+ properties allow you to do just that. Their syntax is:</para>
+
+ <programlisting>
+ property ( "," property ) * ":" property
+ </programlisting>
+
+ <para>
+ For example, the problem above would be solved by:
+
+<programlisting>
+exe hello : hello.cpp : &lt;toolset&gt;yfc:&lt;cxxflags&gt;-disable-pointless-warning ;
+</programlisting>
+ </para>
+
+ <para>The syntax also allows several properties in the condition, for
+ example:
+<programlisting>
+exe hello : hello.cpp : &lt;os&gt;NT,&lt;toolset&gt;gcc:&lt;link&gt;static ;
+</programlisting>
+ </para>
+
+ </section>
+
+ <section id="bbv2.reference.ids">
+ <title>Target identifiers and references</title>
+
+    <para>A <emphasis>target identifier</emphasis> is used to denote a
+    target. The syntax is:</para>
+
+<programlisting>
+target-id -&gt; (project-id | target-name | file-name )
+ | (project-id | directory-name) "//" target-name
+project-id -&gt; path
+target-name -&gt; path
+file-name -&gt; path
+directory-name -&gt; path
+</programlisting>
+
+ <para>
+    This grammar allows some elements to be recognized as any of the following:
+
+ <itemizedlist>
+ <listitem>
+ <simpara>
+          a project id (at this point, all project ids start with a slash).
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+          the name of a target declared in the current Jamfile (note that
+          target names may include slashes).
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+          a regular file, denoted by an absolute name or a name relative to
+          the project's sources location.
+ </simpara>
+ </listitem>
+ </itemizedlist>
+
+    To determine the real meaning, a check is made whether a project with
+    the specified id exists, and then whether a main target of that
+    name exists. For example, valid target ids might be:
+
+<screen>
+a -- target in current project
+lib/b.cpp -- regular file
+/boost/thread -- project "/boost/thread"
+/home/ghost/build/lr_library//parser -- target in specific project
+</screen>
+
+ </para>
+
+    <para><emphasis role="bold">Rationale:</emphasis> A target is separated from its project by a
+    special separator (not just a slash), because:</para>
+
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ It emphasises that projects and targets are different things.
+ </simpara>
+ </listitem>
+
+ <listitem>
+ <simpara>
+          It allows main target names to contain slashes.
+
+ <!-- The motivation for which is:
+
+ So, to summarize:
+
+ 1. The project that extract tarfile may extract all possible kinds
+ of targets, and it's reasonable to use them directly from other
+ project.
+
+       2. The rule for unpacking tar is implemented in terms of
+ "patch-file", for maintainability, and therefore, must use main
+ target name that contains slashes?
+
+ 3. Using sub-Jamfile in "foo" to declare extracted file "foo/b" is
+ not an option, because you should not change existing tree
+
+ That makes good rationale for why main target must contain names.
+ -->
+ </simpara>
+ </listitem>
+ </itemizedlist>
+
+ <para id="bbv2.reference.targets.references">
+      A <emphasis>target reference</emphasis> is used to
+ specify a source target, and may additionally specify desired
+ properties for that target. It has this syntax:</para>
+
+<programlisting>
+target-reference -&gt; target-id [ "/" requested-properties ]
+requested-properties -&gt; property-path
+</programlisting>
+
+ <para>
+ For example,
+
+ <programlisting>
+ exe compiler : compiler.cpp libs/cmdline/&lt;optimization&gt;space ;
+ </programlisting>
+
+      would cause the version of the <literal>cmdline</literal> library,
+      optimized for space, to be linked in even if the
+      <literal>compiler</literal> executable is built with optimization for
+ speed.
+ </para>
+ </section>
+
+ </section>
+
+</chapter>
+
+<!--
+ Local Variables:
+ mode: nxml
+ sgml-indent-data: t
+ sgml-parent-document: ("userman.xml" "chapter")
+ sgml-set-face: t
+ End:
+-->
diff --git a/tools/build/doc/src/regex.xml b/tools/build/doc/src/regex.xml
new file mode 100644
index 0000000000..234c6eae4c
--- /dev/null
+++ b/tools/build/doc/src/regex.xml
@@ -0,0 +1,170 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE section PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<section id="bbv2.reference.modules.regex">
+
+ <title>regex</title>
+ <indexterm>
+ <primary>regex</primary>
+ <secondary>module</secondary>
+ </indexterm>
+
+ <para>
+    Contains rules for string processing using regular expressions. The
+    regular expression syntax used by these rules includes the following elements:
+ </para>
+
+ <itemizedlist>
+
+ <listitem><para>
+ <code language="jam">"x*"</code> matches the pattern
+ <code language="jam">"x"</code> zero or more times.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">"x+"</code> matches <code language="jam">"x"</code>
+ one or more times.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">"x?"</code> matches <code language="jam">"x"</code>
+ zero or one time.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">"[abcd]"</code> matches any of the characters,
+ <code language="jam">"a"</code>, <code language="jam">"b"</code>,
+ <code language="jam">"c"</code>, and <code language="jam">"d"</code>.
+ A character range such as <code language="jam">"[a-z]"</code> matches
+ any character between <code language="jam">"a"</code> and
+ <code language="jam">"z"</code>. <code language="jam">"[^abc]"</code>
+ matches any character which is not <code language="jam">"a"</code>,
+ <code language="jam">"b"</code>, or <code language="jam">"c"</code>.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">"x|y"</code> matches either pattern
+      <code language="jam">"x"</code> or pattern <code language="jam">"y"</code>.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">(x)</code> matches <code language="jam">"x"</code>
+ and captures it.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">"^"</code> matches the beginning of the string.
+ </para></listitem>
+
+ <listitem><para>
+ <code language="jam">"$"</code> matches the end of the string.
+ </para></listitem>
+
+ <listitem><para>
+      <code language="jam">"\&lt;"</code> matches the beginning of a word.
+ </para></listitem>
+
+ <listitem><para>
+      <code language="jam">"\&gt;"</code> matches the end of a word.
+ </para></listitem>
+
+ </itemizedlist>
+
+ <orderedlist>
+
+ <listitem id="bbv2.reference.modules.regex.split">
+ <indexterm zone="bbv2.reference.modules.regex.split">
+ <primary>split</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule split ( string separator )</code>
+ <para>Returns a list of the following substrings:
+ <orderedlist>
+ <listitem><para>from beginning till the first occurrence of
+ <code language="jam">separator</code> or till the end,
+ </para></listitem>
+ <listitem><para>between each occurrence of
+ <code language="jam">separator</code> and the next occurrence,
+ </para></listitem>
+ <listitem><para>from the last occurrence of
+ <code language="jam">separator</code> till the end.
+ </para></listitem>
+ </orderedlist>
+ If no separator is present, the result will contain only one element.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.regex.split-list">
+ <indexterm zone="bbv2.reference.modules.regex.split-list">
+ <primary>split-list</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule split-list ( list * : separator )</code>
+ <para>Returns the concatenated results of applying
+ <link linkend="bbv2.reference.modules.regex.split">regex.split</link>
+ to every element of the list using the separator pattern.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.regex.match">
+ <indexterm zone="bbv2.reference.modules.regex.match">
+ <primary>match</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule match ( pattern : string : indices * )</code>
+ <para>Match <code language="jam">string</code> against
+ <code language="jam">pattern</code>, and return the elements
+ indicated by <code language="jam">indices</code>.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.regex.transform">
+ <indexterm zone="bbv2.reference.modules.regex.transform">
+ <primary>transform</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule transform ( list * : pattern : indices * )</code>
+ <para>Matches all elements of <code language="jam">list</code> against
+ the <code language="jam">pattern</code> and returns a list of elements
+ indicated by <code language="jam">indices</code> of all successful
+ matches. If <code language="jam">indices</code> is omitted returns a list
+ of first parenthesized groups of all successful matches.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.regex.escape">
+ <indexterm zone="bbv2.reference.modules.regex.escape">
+ <primary>escape</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule escape ( string : symbols : escape-symbol )</code>
+ <para>Escapes all of the characters in <code language="jam">symbols</code>
+ using the escape symbol <code language="jam">escape-symbol</code> for
+ the given string, and returns the escaped string.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.regex.replace">
+ <indexterm zone="bbv2.reference.modules.regex.replace">
+ <primary>replace</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule replace ( string match replacement )</code>
+ <para>Replaces occurrences of a match string in a given string and
+      returns the new string. The match string can be a regular expression.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.regex.replace-list">
+ <indexterm zone="bbv2.reference.modules.regex.replace-list">
+ <primary>replace-list</primary>
+ <secondary>regex</secondary>
+ </indexterm>
+ <code language="jam">rule replace-list ( list * : match : replacement )</code>
+ <para>Replaces occurrences of a match string in a given list of strings
+      and returns a list of new strings. The match string can be a regular
+      expression.
+ </para>
+ </listitem>
+
+ </orderedlist>
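+
+  <para>
+    As a brief sketch of how these rules might be used from a Jamfile or
+    module (the strings and patterns below are only illustrative):
+<programlisting>
+import regex ;
+
+# Split a path-like string on "/" : yields "a" "b" "c".
+local parts = [ regex.split "a/b/c" "/" ] ;
+
+# Return the two parenthesized groups : yields "1" "57".
+local version = [ regex.match "([0-9]+)-([0-9]+)" : "1-57" : 1 2 ] ;
+
+# Replace the extension : yields "hello.o".
+local object = [ regex.replace "hello.cpp" ".cpp" ".o" ] ;
+</programlisting>
+  </para>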
+
+ <para>See also: <link linkend="jam.language.rules.builtins.utility._match__">MATCH</link></para>
+
+</section>
diff --git a/tools/build/doc/src/sequence.xml b/tools/build/doc/src/sequence.xml
new file mode 100644
index 0000000000..54b2fbdba4
--- /dev/null
+++ b/tools/build/doc/src/sequence.xml
@@ -0,0 +1,135 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE section PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<section id="bbv2.reference.modules.sequence">
+
+ <title>sequence</title>
+ <indexterm>
+ <primary>sequence</primary>
+ <secondary>module</secondary>
+ </indexterm>
+
+ <para>
+ Various useful list functions. Note that algorithms in this module
+ execute largely in the caller's module namespace, so that local
+ rules can be used as function objects. Also note that most predicates
+    can be multi-element lists. In that case, all elements but the first
+    are prepended to the arguments passed to the rule named by the first
+    element.
+ </para>
+
+ <orderedlist>
+
+ <listitem id="bbv2.reference.modules.sequence.filter">
+ <indexterm zone="bbv2.reference.modules.sequence.filter">
+ <primary>filter</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule filter ( predicate + : sequence * )</code>
+ <para>Return the elements <code language="jam">e</code> of
+ <code language="jam">$(sequence)</code> for which
+ <code language="jam">[ $(predicate) e ]</code> has a non-null value.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.transform">
+ <indexterm zone="bbv2.reference.modules.sequence.transform">
+ <primary>transform</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule transform ( function + : sequence * )</code>
+ <para>Return a new sequence consisting of
+ <code language="jam">[ $(function) $(e) ]</code> for each element
+ <code language="jam">e</code> of <code language="jam">$(sequence)</code>.
+ </para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.reverse">
+ <indexterm zone="bbv2.reference.modules.sequence.reverse">
+ <primary>reverse</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule reverse ( s * )</code>
+ <para>Returns the elements of <code language="jam">s</code> in
+ reverse order.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.insertion-sort">
+ <indexterm zone="bbv2.reference.modules.sequence.insertion-sort">
+ <primary>insertion-sort</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule insertion-sort ( s * : ordered * )</code>
+ <para>Insertion-sort <code language="jam">s</code> using the
+ BinaryPredicate <code language="jam">ordered</code>.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.merge">
+ <indexterm zone="bbv2.reference.modules.sequence.merge">
+ <primary>merge</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule merge ( s1 * : s2 * : ordered * )</code>
+ <para>Merge two ordered sequences using the BinaryPredicate
+ <code language="jam">ordered</code>.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.join">
+ <indexterm zone="bbv2.reference.modules.sequence.join">
+ <primary>join</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule join ( s * : joint ? )</code>
+ <para>Join the elements of <code language="jam">s</code> into one
+ long string. If <code language="jam">joint</code> is supplied, it
+ is used as a separator.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.length">
+ <indexterm zone="bbv2.reference.modules.sequence.length">
+ <primary>length</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule length ( s * )</code>
+ <para>Find the length of any sequence.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.unique">
+ <indexterm zone="bbv2.reference.modules.sequence.unique">
+ <primary>unique</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule unique ( list * : stable ? )</code>
+ <para>Removes duplicates from <code language="jam">list</code>.
+ If <code language="jam">stable</code> is passed, then the order
+ of the elements will be unchanged.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.max-element">
+ <indexterm zone="bbv2.reference.modules.sequence.max-element">
+ <primary>max-element</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule max-element ( elements + : ordered ? )</code>
+ <para>Returns the maximum number in <code language="jam">elements</code>.
+ Uses <code language="jam">ordered</code> for comparisons or
+ <link linkend="bbv2.reference.modules.numbers.less">numbers.less</link>
+ if none is provided.</para>
+ </listitem>
+
+ <listitem id="bbv2.reference.modules.sequence.select-highest-ranked">
+ <indexterm zone="bbv2.reference.modules.sequence.select-highest-ranked">
+ <primary>select-highest-ranked</primary>
+ <secondary>sequence</secondary>
+ </indexterm>
+ <code language="jam">rule select-highest-ranked ( elements * : ranks * )</code>
+ <para>Returns all of <code language="jam">elements</code> for which
+ the corresponding element in the parallel list
+      <code language="jam">ranks</code> is equal to the maximum value in
+      <code language="jam">ranks</code>.</para>
+ </listitem>
+
+ </orderedlist>
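+
+  <para>
+    A short usage sketch (the lists and the local predicate below are only
+    illustrative):
+<programlisting>
+import sequence ;
+import numbers ;
+
+# A local rule can serve as the predicate, because the algorithms run
+# in the caller's module.
+local rule big ( x )
+{
+    if [ numbers.less 10 $(x) ] { return true ; }
+}
+
+local large = [ sequence.filter big : 5 12 8 42 ] ;               # 12 42
+
+# A multi-element predicate : "numbers.less 10" is invoked as
+# [ numbers.less 10 e ] for each element e.
+local large2 = [ sequence.filter numbers.less 10 : 5 12 8 42 ] ;  # 12 42
+
+local joined = [ sequence.join a b c : "-" ] ;                    # "a-b-c"
+</programlisting>
+  </para>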
+
+</section>
diff --git a/tools/build/doc/src/standalone.xml b/tools/build/doc/src/standalone.xml
new file mode 100644
index 0000000000..a48d2a0f8f
--- /dev/null
+++ b/tools/build/doc/src/standalone.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE book PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<book xmlns:xi="http://www.w3.org/2001/XInclude"
+ id="bbv2" last-revision="$Date$">
+ <bookinfo>
+ <copyright>
+ <year>2006</year>
+ <year>2007</year>
+ <year>2008</year>
+ <year>2009</year>
+ <year>2014</year>
+ <holder>Vladimir Prus</holder>
+ </copyright>
+
+ <legalnotice>
+ <para>Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file <filename>LICENSE_1_0.txt</filename> or copy at
+ <ulink
+ url="http://www.boost.org/LICENSE_1_0.txt">http://www.boost.org/LICENSE_1_0.txt</ulink>)
+ </para>
+ </legalnotice>
+ </bookinfo>
+
+ <title>Boost.Build V2 User Manual</title>
+
+ <!-- Chapters -->
+ <xi:include href="howto.xml"/>
+ <xi:include href="install.xml"/>
+ <xi:include href="tutorial.xml"/>
+ <xi:include href="overview.xml"/>
+ <xi:include href="tasks.xml"/>
+ <xi:include href="reference.xml"/>
+ <xi:include href="extending.xml"/>
+ <xi:include href="faq.xml"/>
+
+  <!-- Appendices -->
+<!-- <xi:include href="architecture.xml"/> -->
+ <appendix id="bbv2.jam">
+ <title>Boost.Jam Documentation</title>
+ <xi:include href="jam_docs.xml" parse="xml"
+ xpointer="xpointer(id('jam.building')|id('jam.building')/following-sibling::*)"/>
+ </appendix>
+
+ <index/>
+
+</book>
diff --git a/tools/build/doc/src/tasks.xml b/tools/build/doc/src/tasks.xml
new file mode 100644
index 0000000000..8ff82f0b81
--- /dev/null
+++ b/tools/build/doc/src/tasks.xml
@@ -0,0 +1,842 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<!-- Copyright 2006 Vladimir Prus -->
+<!-- Distributed under the Boost Software License, Version 1.0. -->
+<!-- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -->
+
+<chapter id="bbv2.tasks">
+ <title>Common tasks</title>
+
+ <para>
+    This section describes the main target types that Boost.Build supports
+ out-of-the-box. Unless otherwise noted, all mentioned main target rules have
+ the common signature, described in <xref linkend="bbv2.overview.targets"/>.
+ </para>
+
+ <section id="bbv2.tasks.programs">
+ <title>Programs</title>
+
+ <indexterm><primary>exe</primary></indexterm>
+ <para>
+ Programs are created using the <code>exe</code> rule, which follows the
+ <link linkend="bbv2.main-target-rule-syntax">common syntax</link>. For
+ example:
+<programlisting>
+exe hello : hello.cpp some_library.lib /some_project//library
+ : &lt;threading&gt;multi
+ ;
+</programlisting>
+ This will create an executable file from the sources&mdash;in this case, one
+ C++ file, one library file present in the same directory, and another
+ library that is created by Boost.Build. Generally, sources can include C
+ and C++ files, object files and libraries. Boost.Build will automatically
+ try to convert targets of other types.
+ </para>
+
+ <tip>
+ <para>
+ On Windows, if an application uses shared libraries, and both the
+ application and the libraries are built using Boost.Build, it is not
+ possible to immediately run the application, because the <literal>PATH
+      </literal> environment variable must include the path to the
+      libraries. This means you have to either add the paths manually, or have
+ the build place the application and the libraries into the same
+ directory. See <xref linkend="bbv2.tasks.installing"/>.
+ </para>
+ <!-- We should be emphasizing the use of the built-in testing rules
+ rather than continually discussing these quirks of running programs
+ with shared libraries. -->
+ </tip>
+ </section>
+
+ <section id="bbv2.tasks.libraries">
+ <title>Libraries</title>
+
+ <indexterm>
+ <primary>library</primary>
+ <secondary>target</secondary>
+ </indexterm>
+
+ <para>
+ Library targets are created using the <code>lib</code> rule, which
+ follows the <link linkend="bbv2.main-target-rule-syntax">common syntax
+ </link>. For example:
+<programlisting>
+lib helpers : helpers.cpp ;
+</programlisting>
+ This will define a library target named <code>helpers</code> built from
+ the <code>helpers.cpp</code> source file.
+ It can be either a static library or a shared library,
+ depending on the value of the <link linkend="bbv2.overview.builtins.features.link">&lt;link&gt;</link> feature.
+ </para>
+ <para>
+ Library targets can represent:
+ <itemizedlist>
+ <listitem>
+ <para>
+ Libraries that should be built from source,
+ as in the example above.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Prebuilt libraries which already exist on the system.
+ Such libraries can be searched for by the tools using them (typically
+ with the linker's <option>-l</option> option) or their paths can be
+ known in advance by the build system.
+ </para>
+ </listitem>
+ </itemizedlist>
+ </para>
+
+ <para>
+ The syntax for prebuilt libraries is given below:
+<programlisting>
+lib z : : &lt;name&gt;z &lt;search&gt;/home/ghost ;
+lib compress : : &lt;file&gt;/opt/libs/compress.a ;
+</programlisting>
+ The <code>name</code> property specifies the name of the library
+ without the standard prefixes and suffixes. For example, depending
+ on the system, <code>z</code> could refer to a file called
+ z.so, libz.a, or z.lib, etc. The <code>search</code> feature
+ specifies paths in which to search for the library in addition
+ to the default compiler paths. <code>search</code> can be specified
+ several times or it can be omitted, in which case only the default
+ compiler paths will be searched. The <code>file</code> property
+ specifies the file location.
+ </para>
+
+ <para>
+ The difference between using the <code>file</code> feature and
+ using a combination of the <code>name</code> and <code>search</code>
+ features is that <code>file</code> is more precise.
+
+ <warning>
+ <para>
+ The value of the <code>search</code> feature is just added to the
+ linker search path. When linking to multiple libraries,
+ the paths specified by <code>search</code> are combined without
+ regard to which <code>lib</code> target each path came from.
+ Thus, given
+<programlisting>
+lib a : : &lt;name&gt;a &lt;search&gt;/pool/release ;
+lib b : : &lt;name&gt;b &lt;search&gt;/pool/debug ;
+</programlisting>
+ If /pool/release/a.so, /pool/release/b.so, /pool/debug/a.so,
+  and /pool/debug/b.so all exist, the linker will probably
+ take both <code>a</code> and <code>b</code> from the same
+ directory, instead of finding <code>a</code> in /pool/release
+ and <code>b</code> in /pool/debug. If you need to distinguish
+ between multiple libraries with the same name, it's safer
+ to use <code>file</code>.
+ </para>
+ </warning>
+ </para>
+
+ <para>
+ For convenience, the following syntax is allowed:
+<programlisting>
+lib z ;
+lib gui db aux ;
+</programlisting>
+ which has exactly the same effect as:
+<programlisting>
+lib z : : &lt;name&gt;z ;
+lib gui : : &lt;name&gt;gui ;
+lib db : : &lt;name&gt;db ;
+lib aux : : &lt;name&gt;aux ;
+</programlisting>
+ </para>
+
+ <para>
+      When a library references another library, you should put that other
+      library in its list of sources. This will do the right thing in all cases.
+      <!--Add a link to the notes below. --> For portability, you should specify
+      library dependencies even for searched and prebuilt libraries;
+      otherwise, static linking on Unix will not work. For example:
+<programlisting>
+lib z ;
+lib png : z : &lt;name&gt;png ;
+</programlisting>
+ </para>
+
+ <note>
+ <para>
+ When a library has a shared library as a source, or a static
+      library has another static library as a source, then any target
+      linking to the first library will automatically link to its source
+ library as well.
+ </para>
+ <para>
+ On the other hand, when a shared library has a static library as
+ a source then the first library will be built so that it completely
+ includes the second one.
+ </para>
+ <para>
+ If you do not want a shared library to include all the libraries specified
+ in its sources (especially statically linked ones), you would need to
+ use the following:
+<programlisting>
+lib b : a.cpp ;
+lib a : a.cpp : &lt;use&gt;b : : &lt;library&gt;b ;
+</programlisting>
+ This specifies that library <code>a</code> uses library <code>b</code>,
+ and causes all executables that link to <code>a</code> to link to
+ <code>b</code> also. In this case, even for shared linking, the
+ <code>a</code> library will not refer to <code>b</code>.
+ </para>
+ </note>
+
+ <para>
+ <!-- FIXME: After adding a full subsection on usage requirements, link to it -->
+ <link linkend="bbv2.overview.targets">Usage requirements</link> are often
+ very useful for defining library targets. For example, imagine that
+      you want to build a <code>helpers</code> library and its interface is
+ described in its <code>helpers.hpp</code> header file located in the same
+ directory as the <code>helpers.cpp</code> source file. Then you could add
+ the following to the Jamfile located in that same directory:
+<programlisting>
+lib helpers : helpers.cpp : : : &lt;include&gt;. ;
+</programlisting>
+ which would automatically add the directory where the target has been
+ defined (and where the library's header file is located) to the compiler's
+ include path for all targets using the <code>helpers</code> library. This
+ feature greatly simplifies Jamfiles.
+ </para>
+ </section>
+
+ <section id="bbv2.tasks.alias">
+ <title>Alias</title>
+
+ <para>
+ The <code language="jam">alias</code> rule gives an alternative name to a
+      group of targets. For example, the following code gives the name
+      <filename>core</filename> to a group of three other targets:
+<programlisting>
+alias core : im reader writer ;
+</programlisting>
+ Using <filename>core</filename> on the command line, or in the source list
+ of any other target is the same as explicitly using <filename>im
+ </filename>, <filename>reader</filename>, and <filename>writer</filename>.
+ </para>
+
+ <para>
+ Another use of the <code>alias</code> rule is to change build properties.
+      For example, if you want to link statically to the Boost Threads
+ library, you can write the following:
+<programlisting>
+alias threads : /boost/thread//boost_thread : &lt;link&gt;static ;
+</programlisting>
+ and use only the <code>threads</code> alias in your Jamfiles.
+ </para>
+
+ <para>
+ You can also specify usage requirements for the <code>alias</code> target.
+ If you write the following:
+<programlisting>
+alias header_only_library : : : : &lt;include&gt;/usr/include/header_only_library ;
+</programlisting>
+ then using <code>header_only_library</code> in sources will only add an
+ include path. Also note that when an alias has sources, their usage
+ requirements are propagated as well. For example:
+<programlisting>
+lib library1 : library1.cpp : : : &lt;include&gt;/library/include1 ;
+lib library2 : library2.cpp : : : &lt;include&gt;/library/include2 ;
+alias static_libraries : library1 library2 : &lt;link&gt;static ;
+exe main : main.cpp static_libraries ;
+</programlisting>
+ will compile <filename>main.cpp</filename> with additional includes
+ required for using the specified static libraries.
+ </para>
+ </section>
+
+ <section id="bbv2.tasks.installing">
+ <title>Installing</title>
+
+ <para>
+      This section describes various ways to install built targets and arbitrary
+ files.
+ </para>
+
+ <bridgehead>Basic install</bridgehead>
+
+ <para>
+ For installing a built target you should use the <code>install</code>
+ rule, which follows the <link linkend="bbv2.main-target-rule-syntax">
+ common syntax</link>. For example:
+<programlisting>
+install dist : hello helpers ;
+</programlisting>
+ will cause the targets <code>hello</code> and <code>helpers</code> to be
+      copied to the <filename>dist</filename> directory, relative to the
+ Jamfile's directory. The directory can be changed using the
+ <code>location</code> property:
+<programlisting>
+install dist : hello helpers : &lt;location&gt;/usr/bin ;
+</programlisting>
+ While you can achieve the same effect by changing the target name to
+ <filename>/usr/bin</filename>, using the <code>location</code> property is
+ better as it allows you to use a mnemonic target name.
+ </para>
+
+ <para>
+ The <code>location</code> property is especially handy when the location
+ is not fixed, but depends on the build variant or environment variables:
+<programlisting>
+install dist : hello helpers :
+ &lt;variant&gt;release:&lt;location&gt;dist/release
+ &lt;variant&gt;debug:&lt;location&gt;dist/debug ;
+install dist2 : hello helpers : &lt;location&gt;$(DIST) ;
+</programlisting>
+ See also <link linkend="bbv2.reference.variants.propcond">conditional
+ properties</link> and <link linkend="bbv2.faq.envar">environment
+      variables</link>.
+ </para>
+
+ <bridgehead>Installing with all dependencies</bridgehead>
+
+ <para>
+      Specifying the names of all libraries to install can be tedious. The
+      <code>install</code> rule allows you to specify only the top-level executable
+      targets to install, and automatically installs all dependencies:
+<programlisting>
+install dist : hello
+ : &lt;install-dependencies&gt;on &lt;install-type&gt;EXE
+ &lt;install-type&gt;LIB
+ ;
+</programlisting>
+ will find all targets that <code>hello</code> depends on, and install all
+ of those which are either executables or libraries. More specifically, for
+ each target, other targets that were specified as sources or as dependency
+ properties, will be recursively found. One exception is that targets
+      properties will be recursively found. One exception is that targets
+ <code>use</code></link> feature are not considered, as that feature is
+ typically used to refer to header-only libraries. If the set of target
+      types is specified, only targets of those types will be installed;
+      otherwise, all found targets will be installed.
+ </para>
+
+ <bridgehead>Preserving Directory Hierarchy</bridgehead>
+
+ <indexterm><primary>install-source-root</primary></indexterm>
+
+ <para>
+ By default, the <code>install</code> rule will strip paths from its
+ sources. So, if sources include <filename>a/b/c.hpp</filename>, the
+ <filename>a/b</filename> part will be ignored. To make the
+ <code>install</code> rule preserve the directory hierarchy you need to
+ use the <literal>&lt;install-source-root&gt;</literal> feature to specify
+ the root of the hierarchy you are installing. Relative paths from that
+ root will be preserved. For example, if you write:
+<programlisting>
+install headers
+ : a/b/c.h
+ : &lt;location&gt;/tmp &lt;install-source-root&gt;a
+ ;
+</programlisting>
+      a file named <filename>/tmp/b/c.h</filename> will be created.
+ </para>
+
+ <para>
+ The <link linkend="bbv2.reference.glob-tree">glob-tree</link> rule can be
+ used to find all files below a given directory, making it easy to install
+ an entire directory tree.
+ </para>
+
+ <bridgehead>Installing into Several Directories</bridgehead>
+
+ <para>
+ The <link linkend="bbv2.tasks.alias"><code>alias</code></link> rule can be
+ used when targets need to be installed into several directories:
+<programlisting>
+alias install : install-bin install-lib ;
+install install-bin : applications : /usr/bin ;
+install install-lib : helper : /usr/lib ;
+</programlisting>
+ </para>
+
+ <para>
+ Because the <code>install</code> rule just copies targets, most free
+ features <footnote><para>see the definition of "free" in <xref
+ linkend="bbv2.reference.features.attributes"/>.</para></footnote> have no
+ effect when used in requirements of the <code>install</code> rule. The
+ only two that matter are <link linkend="bbv2.builtin.features.dependency">
+ <varname>dependency</varname></link> and, on Unix, <link
+ linkend="bbv2.reference.features.dll-path"><varname>dll-path</varname>
+ </link>.
+ </para>
+
+ <note>
+ <para>
+ (Unix specific) On Unix, executables built using Boost.Build typically
+ contain the list of paths to all used shared libraries. For installing,
+ this is not desired, so Boost.Build relinks the executable with an empty
+ list of paths. You can also specify additional paths for installed
+ executables using the <varname>dll-path</varname> feature.
+ </para>
+ </note>
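+
+    <para>
+      For example (the paths are only illustrative), additional run-time
+      library paths for an installed executable could be requested like this:
+<programlisting>
+install dist : hello : &lt;location&gt;/usr/bin &lt;dll-path&gt;/usr/lib/hello ;
+</programlisting>
+    </para>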
+ </section>
+
+ <section id="bbv2.builtins.testing">
+ <title>Testing</title>
+
+ <para>
+ Boost.Build has convenient support for running unit tests. The simplest
+ way is the <code>unit-test</code> rule, which follows the <link
+ linkend="bbv2.main-target-rule-syntax">common syntax</link>. For example:
+<programlisting>
+unit-test helpers_test : helpers_test.cpp helpers ;
+</programlisting>
+ </para>
+
+ <para>
+ The <code language="jam">unit-test</code> rule behaves like the
+ <link linkend="bbv2.tasks.programs">exe</link> rule, but after the executable is created
+ it is also run. If the executable returns an error code, the build system
+ will also return an error and will try running the executable on the next
+    invocation until it runs successfully. This behaviour ensures that you
+    cannot miss a unit test failure.
+ </para>
+
+
+ <para>
+      There are a few specialized testing rules, listed below:
+<programlisting>
+rule compile ( sources : requirements * : target-name ? )
+rule compile-fail ( sources : requirements * : target-name ? )
+rule link ( sources + : requirements * : target-name ? )
+rule link-fail ( sources + : requirements * : target-name ? )
+</programlisting>
+ They are given a list of sources and requirements. If the target name is
+ not provided, the name of the first source file is used instead. The
+ <literal>compile*</literal> tests try to compile the passed source. The
+ <literal>link*</literal> rules try to compile and link an application from
+ all the passed sources. The <literal>compile</literal> and <literal>link
+ </literal> rules expect that compilation/linking succeeds. The <literal>
+ compile-fail</literal> and <literal>link-fail</literal> rules expect that
+ the compilation/linking fails.
+ </para>
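+
+    <para>
+      For example (the file names are only illustrative; depending on your
+      setup, the Jamfile may also need to import the <code>testing</code>
+      module for these rules to be available):
+<programlisting>
+compile accepts_cxx11.cpp : &lt;cxxflags&gt;-std=c++11 ;
+compile-fail must_not_compile.cpp ;
+link whole_app.cpp helpers.cpp ;
+link-fail undefined_symbol.cpp ;
+</programlisting>
+    </para>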
+
+ <para>
+ There are two specialized rules for running applications, which are more
+ powerful than the <code>unit-test</code> rule. The <code>run</code> rule
+ has the following signature:
+<programlisting>
+rule run ( sources + : args * : input-files * : requirements * : target-name ?
+ : default-build * )
+</programlisting>
+      The rule builds an application from the provided sources and runs it, passing
+ <varname>args</varname> and <varname>input-files</varname> as command-line
+ arguments. The <varname>args</varname> parameter is passed verbatim and
+ the values of the <varname>input-files</varname> parameter are treated as
+      paths relative to the containing Jamfile, and are adjusted if <command>b2</command>
+ is invoked from a different directory. The
+ <code>run-fail</code> rule is identical to the <code>run</code> rule,
+ except that it expects that the run fails.
+ </para>
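+
+    <para>
+      A sketch of a <code>run</code> target, with illustrative file names,
+      arguments and requirements, might look like:
+<programlisting>
+run parser_test.cpp helpers
+    : --verbose              # args, passed verbatim
+    : data/cases.txt         # input-files, relative to the Jamfile
+    : &lt;define&gt;TESTING
+    : parser_test_run
+    ;
+</programlisting>
+    </para>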
+
+ <para>
+ All rules described in this section, if executed successfully, create a
+ special manifest file to indicate that the test passed. For the
+      <code>unit-test</code> rule the file is named <filename><replaceable>
+ target-name</replaceable>.passed</filename> and for the other rules it is
+ called <filename><replaceable>target-name</replaceable>.test</filename>.
+ The <code>run*</code> rules also capture all output from the program, and
+ store it in a file named <filename><replaceable>
+ target-name</replaceable>.output</filename>.
+ </para>
+
+ <para>
+ <indexterm><primary>preserve-test-targets</primary></indexterm>
+ If the <literal>preserve-test-targets</literal> feature has the value
+ <literal>off</literal>, then <code>run</code> and the <code>run-fail</code>
+ rules will remove the executable after running it. This somewhat decreases
+ disk space requirements for continuous testing environments. The default
+ value of <literal>preserve-test-targets</literal> feature is <literal>on</literal>.
+ </para>
+
+ <para>
+ It is possible to print the list of all test targets (except for
+ <code>unit-test</code>) declared in your project, by passing the <literal>
+ --dump-tests</literal> command-line option. The output will consist of
+ lines of the form:
+<screen>
+boost-test(<replaceable>test-type</replaceable>) <replaceable>path</replaceable> : <replaceable>sources</replaceable>
+</screen>
+ </para>
+
+ <para>
+      It is possible to process the list of tests, the Boost.Build output,
+      and the presence/absence of the <filename>*.test</filename>
+      files created when a test passes into a human-readable status table of
+      tests. Such processing utilities are not included in Boost.Build.
+ </para>
+
+  <para>The following features adjust the behaviour of the testing metatargets.</para>
+
+ <variablelist>
+ <varlistentry>
+ <term><literal>testing.arg</literal></term>
+ <indexterm><primary>testing.arg</primary></indexterm>
+
+ <listitem>
+ <para>
+          Defines an argument to be passed to the target when it is executed,
+ before the list of input files.
+ </para>
+
+ <para>
+<programlisting>
+unit-test helpers_test
+ : helpers_test.cpp helpers
+ : <emphasis role="bold">&lt;testing.arg&gt;"--foo bar"</emphasis>
+ ;
+</programlisting>
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>testing.input-file</literal></term>
+ <indexterm><primary>testing.input-file</primary></indexterm>
+
+ <listitem>
+ <para>
+ Specifies a file to be passed to the executable on the command line
+ after the arguments. All files must be specified in alphabetical
+            order due to constraints in the current implementation.
+ </para>
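+
+          <para>
+            For example (the file names are only illustrative):
+<programlisting>
+unit-test helpers_test
+    : helpers_test.cpp helpers
+    : <emphasis role="bold">&lt;testing.input-file&gt;alpha.txt &lt;testing.input-file&gt;beta.txt</emphasis>
+    ;
+</programlisting>
+          </para>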
+
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>testing.launcher</literal></term>
+ <indexterm><primary>testing.launcher</primary></indexterm>
+
+ <listitem>
+ <para>
+ By default, the executable is run directly. Sometimes, it is
+ desirable to run the executable using some helper command. You
+            should use this property to specify the name of the helper
+ command. For example, if you write:
+<programlisting>
+unit-test helpers_test
+ : helpers_test.cpp helpers
+ : <emphasis role="bold">&lt;testing.launcher&gt;valgrind</emphasis>
+ ;
+</programlisting>
+ The command used to run the executable will be:
+<screen>
+<emphasis role="bold">valgrind</emphasis> bin/$toolset/debug/helpers_test
+</screen>
+ </para>
+ </listitem>
+ </varlistentry>
+
+ <varlistentry>
+ <term><literal>test-info</literal></term>
+ <indexterm><primary>test-info</primary></indexterm>
+
+ <listitem>
+ <para>
+ A description of the test. This is displayed as part of the
+ <literal>--dump-tests</literal> command-line option.
+ </para>
+ </listitem>
+ </varlistentry>
+
+ </variablelist>
+ </section>
+
+ <section id="bbv2.builtins.raw">
+ <title>Custom commands</title>
+
+ <para>
+ For most main target rules, Boost.Build automatically figures out
+ the commands to run. When you want to use new
+ file types or support new tools, one approach is to extend Boost.Build to
+ support them smoothly, as documented in <xref linkend="bbv2.extender"/>.
+ However, if the new tool is only used in a single place, it
+ might be easier just to specify the commands to run explicitly.
+ </para>
+
+ <para>
+ <!-- This paragraph requires links to where the terms 'virtual target' &
+ 'target' are defined. -->
+ Three main target rules can be used for that. The <code language="jam">make
+ </code> rule allows you to construct a single file from any number
+    of source files, by running a command you specify. The <code language="jam">
+ notfile</code> rule allows you to run an arbitrary command,
+    without creating any files. And finally, the <code language="jam">generate
+ </code> rule allows you to describe a transformation using
+ Boost.Build's virtual targets. This is higher-level than the file names that
+ the <code language="jam">make</code> rule operates with and allows you to
+ create more than one target, create differently named targets depending on
+ properties or use more than one tool.
+ </para>
+
+ <para>
+ The <code language="jam">make</code> rule is used when you want to create
+ one file from a number of sources using some specific command. The
+    <code language="jam">notfile</code> rule is used to unconditionally run a
+ command.
+ </para>
+
+ <!-- We need to specify somewhere that the user can get rules like make,
+ notfile & generate defined in his Jamfiles by importing an appropriate
+ Boost.Build module. Also, each of those rules should get a separate
+ documentation page explicitly listing which module needs to be imported for
+ them to become accessible. -->
+
+ <para>
+ Suppose you want to create the file <filename>file.out</filename> from
+ the file <filename>file.in</filename> by running the command <command>
+ in2out</command>. Here is how you would do this in Boost.Build:
+<programlisting>
+make file.out : file.in : @in2out ;
+actions in2out
+{
+ in2out $(&lt;) $(&gt;)
+}
+</programlisting>
+ If you run <command>b2</command> and <filename>file.out</filename> does
+ not exist, Boost.Build will run the <command>in2out</command> command to
+ create that file. For more details on specifying actions, see <xref
+ linkend="bbv2.overview.jam_language.actions"/>.
+ </para>
+
+ <para>
+ It could be that you just want to run some command unconditionally, and
+ that command does not create any specific files. For that you can use the
+ <code language="jam">notfile</code> rule. For example:
+<programlisting>
+notfile echo_something : @echo ;
+actions echo
+{
+ echo "something"
+}
+</programlisting>
+ The only difference from the <code language="jam">make</code> rule is
+      that the name of the target is not considered the name of a file, so
+ Boost.Build will unconditionally run the action.
+ </para>
+
+ <para>
+ <!-- This paragraph requires links to where terms like 'virtual target',
+ 'target', 'project-target' & 'property-set' are defined. -->
+ The <code language="jam">generate</code> rule is used when you want to
+ express transformations using Boost.Build's virtual targets, as opposed to
+ just filenames. The <code language="jam">generate</code> rule has the
+ standard main target rule signature, but you are required to specify the
+ <literal>generating-rule</literal> property. The value of the property
+ should be in the form <literal>
+      @<replaceable>rule-name</replaceable></literal>; the named rule should
+ have the following signature:
+<programlisting>
+rule generating-rule ( project name : property-set : sources * )
+</programlisting>
+ and will be called with an instance of the <code>project-target</code>
+ class, the name of the main target, an instance of the
+ <code>property-set</code> class containing build properties, and the list
+ of instances of the <code>virtual-target</code> class corresponding to
+ sources. The rule must return a list of <code>virtual-target</code>
+ instances. The interface of the <code>virtual-target</code> class can be
+ learned by looking at the <filename>build/virtual-target.jam</filename>
+ file. The <filename>generate</filename> example contained in the
+ Boost.Build distribution illustrates how the <literal>generate</literal>
+ rule can be used.
+ </para>
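+
+    <para>
+      As a minimal sketch only: the do-nothing generating rule below simply
+      returns its source virtual targets unchanged, while a real rule would
+      construct new ones. Depending on your setup, an appropriate module may
+      need to be imported for <code language="jam">generate</code> to be
+      available in the Jamfile.
+<programlisting>
+rule pass-through ( project name : property-set : sources * )
+{
+    # The result must be a list of virtual-target instances; here we
+    # simply hand back the sources.
+    return $(sources) ;
+}
+
+generate demo : input.txt : &lt;generating-rule&gt;@pass-through ;
+</programlisting>
+    </para>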
+ </section>
+
+ <section id="bbv2.reference.precompiled_headers">
+ <title>Precompiled Headers</title>
+
+ <para>
+      Precompiled headers are a mechanism to speed up compilation by creating a
+      partially processed version of some header files, and then using that
+      version during compilations rather than repeatedly parsing the original
+      headers. Boost.Build supports precompiled headers with the gcc and msvc
+ toolsets.
+ </para>
+
+ <para>
+      To use precompiled headers, follow these steps:
+ </para>
+
+ <orderedlist>
+ <listitem>
+ <para>
+ Create a header that includes headers used by your project that you
+ want precompiled. It is better to include only headers that are
+ sufficiently stable &#x2014; like headers from the compiler and
+ external libraries. Please wrap the header in <code>#ifdef
+ BOOST_BUILD_PCH_ENABLED</code>, so that the potentially expensive
+ inclusion of headers is not done when PCH is not enabled. Include the
+ new header at the top of your source files.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ Declare a new Boost.Build target for the precompiled header and add
+ that precompiled header to the sources of the target whose compilation
+ you want to speed up:
+<programlisting>
+cpp-pch pch : pch.hpp ;
+exe main : main.cpp pch ;
+</programlisting>
+ You can use the <code language="jam">c-pch</code> rule if you want to
+ use the precompiled header in C programs.
+ </para></listitem>
+ </orderedlist>
+
+ <para>
+      The <filename>pch</filename> example in the Boost.Build distribution can
+      be used as a reference.
+ </para>
+
+ <para>
+ Please note the following:
+ </para>
+
+ <itemizedlist>
+ <listitem>
+ <para>
+ The inclusion of the precompiled header must be the first thing in a
+ source file, before any code or preprocessor directives.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ The build properties used to compile the source files and the
+ precompiled header must be the same. Consider using project
+ requirements to assure this.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ Precompiled headers must be used purely as a way to improve
+          compilation time, not to reduce the number of <code>#include</code>
+ statements. If a source file needs to include some header, explicitly
+ include it in the source file, even if the same header is included
+ from the precompiled header. This makes sure that your project will
+ build even if precompiled headers are not supported.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ On the gcc compiler, the name of the header being precompiled must be
+ equal to the name of the <code>cpp-pch</code> target. This is a gcc
+ requirement.
+ </para>
+ </listitem>
+
+ <listitem>
+ <para>
+ Prior to version 4.2, the gcc compiler did not allow anonymous
+ namespaces in precompiled headers, which limits their utility. See the
+ <ulink url="http://gcc.gnu.org/bugzilla/show_bug.cgi?id=29085"> bug
+ report</ulink> for details.
+ </para>
+ </listitem>
+ </itemizedlist>
+ </section>
+
+ <section id="bbv2.reference.generated_headers">
+ <title>Generated headers</title>
+
+ <para>
+      Usually, Boost.Build handles implicit dependencies completely
+ automatically. For example, for C++ files, all <literal>#include</literal>
+ statements are found and handled. The only aspect where user help might be
+ needed is implicit dependency on generated files.
+ </para>
+
+ <para>
+ By default, Boost.Build handles such dependencies within one main target.
+ For example, assume that main target "app" has two sources, "app.cpp" and
+ "parser.y". The latter source is converted into "parser.c" and "parser.h".
+ Then, if "app.cpp" includes "parser.h", Boost.Build will detect this
+ dependency. Moreover, since "parser.h" will be generated into a build
+      directory, the path to that directory will automatically be added to the
+      include path.
+ </para>
+
+ <para>
+ Making this mechanism work across main target boundaries is possible, but
+ imposes certain overhead. For that reason, if there is implicit dependency
+ on files from other main targets, the <literal>&lt;implicit-dependency&gt;
+ </literal> feature must be used, for example:
+<programlisting>
+lib parser : parser.y ;
+exe app : app.cpp : &lt;implicit-dependency&gt;parser ;
+</programlisting>
+ The above example tells the build system that when scanning all sources of
+ "app" for implicit-dependencies, it should consider targets from "parser"
+ as potential dependencies.
+ </para>
+ </section>
+
+ <section id="bbv2.tasks.crosscompile">
+ <title>Cross-compilation</title>
+
+ <indexterm><primary>cross compilation</primary></indexterm>
+
+ <para>Boost.Build supports cross compilation with the gcc and msvc
+ toolsets.</para>
+
+ <para>
+ When using gcc, you first need to specify your cross compiler
+ in <filename>user-config.jam</filename> (see <xref linkend="bbv2.overview.configuration"/>),
+ for example:</para>
+<programlisting>
+using gcc : arm : arm-none-linux-gnueabi-g++ ;
+</programlisting>
+ <para>
+    After that, if the host and target operating systems are the same, for example
+    Linux, you can just request that this compiler version be used:
+ </para>
+<screen>
+b2 toolset=gcc-arm
+</screen>
+
+ <para>
+    If you want to target a different operating system from the host, you need
+ to additionally specify the value for the <code>target-os</code> feature, for
+ example:
+ </para>
+<screen>
+# On windows box
+b2 toolset=gcc-arm <emphasis role="bold">target-os=linux</emphasis>
+# On Linux box
+b2 toolset=gcc-mingw <emphasis role="bold">target-os=windows</emphasis>
+</screen>
+ <para>
+    For the complete list of allowed operating system names, please see the
+    documentation for the <link linkend="bbv2.reference.features.target-os">target-os feature</link>.
+ </para>
+
+ <para>
+    When using the msvc compiler, it's only possible to cross-compile to a 64-bit system
+ on a 32-bit host. Please see <xref linkend="v2.reference.tools.compiler.msvc.64"/> for
+ details.
+ </para>
+
+ </section>
+
+</chapter>
+
+<!--
+ Local Variables:
+ mode: nxml
+ sgml-indent-data: t
+ sgml-parent-document: ("userman.xml" "chapter")
+ sgml-set-face: t
+ End:
+-->
diff --git a/tools/build/doc/src/tutorial.xml b/tools/build/doc/src/tutorial.xml
new file mode 100644
index 0000000000..29a0143930
--- /dev/null
+++ b/tools/build/doc/src/tutorial.xml
@@ -0,0 +1,682 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<?psgml nofill screen programlisting literallayout?>
+
+<chapter id="bbv2.tutorial">
+ <title>Tutorial</title>
+
+<!-- You can't launch into this stuff without describing how to configure -->
+<!-- Boost.Build... unless of course you think it's likely to work with -->
+<!-- no configuration. But even if you do you have to tell people how to -->
+<!-- configure their installation in case it doesn't work. -->
+<!--
+ VP: need also mention the examples which correspond to specific
+ sections.
+-->
+
+ <para>
+    This section will guide you through the most basic features of Boost.Build
+ V2. We will start with the &#x201C;Hello, world&#x201D; example, learn how
+ to use libraries, and finish with testing and installing features.
+ </para>
+
+ <section id="bbv2.tutorial.hello">
+ <title>Hello, world</title>
+
+ <para>
+ The simplest project that Boost.Build can construct is stored in
+    the <filename>example/hello/</filename> directory. The project is described by
+ a file called <filename>Jamroot</filename> that contains:
+
+<programlisting language="jam">
+exe hello : hello.cpp ;
+</programlisting>
+
+ Even with this simple setup, you can do some interesting things. First of
+ all, just invoking <command>b2</command> will build the <filename>hello
+ </filename> executable by compiling and linking <filename>hello.cpp
+ </filename>. By default, the debug variant is built. Now, to build the release
+ variant of <filename>hello</filename>, invoke
+
+<screen>
+b2 release
+</screen>
+
+ Note that the debug and release variants are created in different directories,
+ so you can switch between variants or even build multiple variants at
+ once, without any unnecessary recompilation. Let us extend the example by
+ adding another line to our project's <filename>Jamroot</filename>:
+
+<programlisting language="jam">
+exe hello2 : hello.cpp ;
+</programlisting>
+
+ Now let us build both the debug and release variants of our project again:
+
+<screen>
+b2 debug release
+</screen>
+
+ Note that two variants of <filename>hello2</filename> are linked. Since we
+ have already built both variants of <filename>hello</filename>, hello.cpp
+ will not be recompiled; instead the existing object files will just be
+ linked into the corresponding variants of <filename>hello2</filename>. Now
+ let us remove all the built products:
+
+<screen>
+b2 --clean debug release
+</screen>
+
+ It is also possible to build or clean specific targets. The following two
+ commands, respectively, build or clean only the debug version of
+ <filename>hello2</filename>.
+
+<screen>
+b2 hello2
+b2 --clean hello2
+</screen>
+ </para>
+ </section>
+
+ <section id="bbv2.tutorial.properties">
+ <title>Properties</title>
+
+ <para>
+ To represent aspects of target configuration such as
+ debug and release variants, or single- and multi-threaded
+ builds portably, Boost.Build uses <firstterm>features</firstterm> with
+ associated <firstterm>values</firstterm>. For
+ example, the <code>debug-symbols</code> feature can have a value of <code>on</code> or
+ <code>off</code>. A <firstterm>property</firstterm> is just a (feature,
+ value) pair. When a user initiates a build, Boost.Build
+ automatically translates the requested properties into appropriate
+ command-line flags for invoking toolset components like compilers
+ and linkers.
+ </para>
+
+ <para>
+ There are many built-in features that can be combined to
+ produce arbitrary build configurations. The following command
+ builds the project's <code>release</code> variant with inlining
+ disabled and debug symbols enabled:
+<screen>
+b2 release inlining=off debug-symbols=on
+</screen>
+ </para>
+
+ <para>
+ Properties on the command-line are specified with the syntax:
+
+<screen>
+<replaceable>feature-name</replaceable>=<replaceable>feature-value</replaceable>
+</screen>
+ </para>
+
+ <para>
+ The <option>release</option> and <option>debug</option> that we have seen
+ in <command>b2</command> invocations are just a shorthand way to specify
+ values of the <varname>variant</varname> feature. For example, the
+ command above could also have been written this way:
+
+ <screen>
+b2 variant=release inlining=off debug-symbols=on
+ </screen>
+ </para>
+
+ <para>
+ <varname>variant</varname> is so commonly-used that it has been given
+ special status as an <firstterm>implicit</firstterm> feature&#x2014;
+ Boost.Build will deduce its identity just from the name of one of its
+ values.
+ </para>
+
+ <para>
+ A complete description of features can be found in <xref linkend="bbv2.reference.features"/>.
+ </para>
+
+ <section id="bbv2.tutorial.properties.requirements">
+ <title>Build Requests and Target Requirements</title>
+
+ <para>
+ The set of properties specified on the command line constitutes
+ a <firstterm>build request</firstterm>&#x2014;a description of
+ the desired properties for building the requested targets (or,
+ if no targets were explicitly requested, the project in the
+ current directory). The <emphasis>actual</emphasis>
+ properties used for building targets are typically a
+ combination of the build request and properties derived from
+ the project's <filename>Jamroot</filename> (and its other
+ Jamfiles, as described in <xref
+ linkend="bbv2.tutorial.hierarchy"/>). For example, the
+ locations of <code>#include</code>d header files are normally
+ not specified on the command-line, but described in
+ Jamfiles as <firstterm>target
+ requirements</firstterm> and automatically combined with the
+ build request for those targets. Multithread-enabled
+ compilation is another example of a typical target
+ requirement. The Jamfile fragment below
+ illustrates how these requirements might be specified.
+ </para>
+
+<programlisting language="jam">
+exe hello
+ : hello.cpp
+ : &lt;include&gt;boost &lt;threading&gt;multi
+ ;
+</programlisting>
+
+ <para>
+ When <filename>hello</filename> is built, the two requirements specified
+ above will always be present. If the build request given on the
+      <command>b2</command> command-line explicitly contradicts a target's
+ requirements, the target requirements usually override (or, in the case
+      of &#x201C;free&#x201D; features like
+ <varname>&lt;include&gt;</varname>,
+ <footnote>
+ <para>
+ See <xref linkend="bbv2.reference.features.attributes"/>
+ </para>
+ </footnote>
+ augments) the build request.
+ </para>
+
+ <tip>
+ <para>
+ The value of the <varname>&lt;include&gt;</varname> feature is
+ relative to the location of <filename>Jamroot</filename> where it is
+ used.
+ </para>
+ </tip>
+ </section>
+
+ <section id="bbv2.tutorial.properties.project_attributes">
+ <title>Project Attributes</title>
+
+ <para>
+ If we want the same requirements for our other target,
+ <filename>hello2</filename>, we could simply duplicate them. However,
+ as projects grow, that approach leads to a great deal of repeated
+ boilerplate in Jamfiles.
+
+ Fortunately, there's a better way. Each project can specify a set of
+ <firstterm>attributes</firstterm>, including requirements:
+
+<programlisting language="jam">
+project
+ : requirements &lt;include&gt;/home/ghost/Work/boost &lt;threading&gt;multi
+ ;
+
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;</programlisting>
+
+ The effect would be as if we specified the same requirement for both
+ <filename>hello</filename> and <filename>hello2</filename>.
+ </para>
+ </section>
+ </section>
+
+ <section id="bbv2.tutorial.hierarchy">
+ <title>Project Hierarchies</title>
+
+ <para>
+ So far we have only considered examples with one project, with
+ one user-written Boost.Jam file, <filename>Jamroot</filename>. A typical
+ large codebase would be composed of many projects organized into a tree.
+ The top of the tree is called the <firstterm>project root</firstterm>.
+ Every subproject is defined by a file called <filename>Jamfile</filename>
+ in a descendant directory of the project root. The parent project of a
+ subproject is defined by the nearest <filename>Jamfile</filename> or
+ <filename>Jamroot</filename> file in an ancestor directory. For example,
+ in the following directory layout:
+
+<screen>
+top/
+ |
+ +-- Jamroot
+ |
+ +-- app/
+ | |
+ | +-- Jamfile
+ | `-- app.cpp
+ |
+ `-- util/
+ |
+ +-- foo/
+ . |
+ . +-- Jamfile
+ . `-- bar.cpp
+</screen>
+
+ the project root is <filename>top/</filename>. The projects in
+ <filename>top/app/</filename> and <filename>top/util/foo/</filename> are
+ immediate children of the root project.
+
+ <note>
+ <para>
+ When we refer to a &#x201C;Jamfile,&#x201D; set in normal
+ type, we mean a file called either
+ <filename>Jamfile</filename> or
+ <filename>Jamroot</filename>. When we need to be more
+ specific, the filename will be set as
+ &#x201C;<filename>Jamfile</filename>&#x201D; or
+ &#x201C;<filename>Jamroot</filename>.&#x201D;
+ </para>
+ </note>
+ </para>
+
+ <para>
+ Projects inherit all attributes (such as requirements)
+ from their parents. Inherited requirements are combined with
+ any requirements specified by the subproject.
+ For example, if <filename>top/Jamroot</filename> has
+
+<programlisting language="jam">
+&lt;include&gt;/home/ghost/local
+</programlisting>
+
+ in its requirements, then all of its subprojects will have it
+ in their requirements, too. Of course, any project can add
+ include paths to those specified by its parents. <footnote>
+        <para>Many
+        features will be overridden,
+        rather than added to, in subprojects. See <xref
+        linkend="bbv2.reference.features.attributes"/> for more
+        information.</para>
+ </footnote>
+ More details can be found in
+ <xref linkend= "bbv2.overview.projects"/>.
+ </para>
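+
+    <para>
+      As a sketch (the extra directory name is only an example), a subproject
+      Jamfile can add its own include path on top of the inherited one:
+    </para>
+
+<programlisting language="jam">
+# top/app/Jamfile: inherits &lt;include&gt;/home/ghost/local from top/Jamroot
+# and adds another include directory of its own.
+exe app : app.cpp : &lt;include&gt;extra-headers ;
+</programlisting>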
+
+ <para>
+ Invoking <command>b2</command> without explicitly specifying
+ any targets on the command line builds the project rooted in the
+ current directory. Building a project does not automatically
+ cause its subprojects to be built unless the parent project's
+ Jamfile explicitly requests it. In our example,
+ <filename>top/Jamroot</filename> might contain:
+
+<programlisting language="jam">
+build-project app ;
+</programlisting>
+
+ which would cause the project in <filename>top/app/</filename>
+ to be built whenever the project in <filename>top/</filename> is
+ built. However, targets in <filename>top/util/foo/</filename>
+ will be built only if they are needed by targets in
+ <filename>top/</filename> or <filename>top/app/</filename>.
+ </para>
+ </section>
+
+ <section id="bbv2.tutorial.libs">
+ <title>Dependent Targets</title>
+
+ <para>
+ When building a target <filename>X</filename> that depends on first
+ building another target <filename>Y</filename> (such as a
+      library that must be linked with <filename>X</filename>),
+ <filename>Y</filename> is called a
+ <firstterm>dependency</firstterm> of <filename>X</filename> and
+ <filename>X</filename> is termed a
+ <firstterm>dependent</firstterm> of <filename>Y</filename>.
+ </para>
+
+    <para>To get a feel for target dependencies, let's continue the
+ above example and see how <filename>top/app/Jamfile</filename> can
+ use libraries from <filename>top/util/foo</filename>. If
+ <filename>top/util/foo/Jamfile</filename> contains
+
+<programlisting language="jam">
+lib bar : bar.cpp ;
+</programlisting>
+
+ then to use this library in <filename>top/app/Jamfile</filename>, we can
+ write:
+
+<programlisting language="jam">
+exe app : app.cpp ../util/foo//bar ;
+</programlisting>
+
+ While <code>app.cpp</code> refers to a regular source file,
+ <code>../util/foo//bar</code> is a reference to another target:
+ a library <filename>bar</filename> declared in the Jamfile at
+ <filename>../util/foo</filename>.
+ </para>
+
+ <tip>
+      <para>Some other build systems have special syntax for listing dependent
+      libraries, for example a <varname>LIBS</varname> variable. In Boost.Build,
+      you simply add the library to the list of sources.
+ </para>
+ </tip>
+
+ <para>Suppose we build <filename>app</filename> with:
+ <screen>
+b2 app optimization=full define=USE_ASM
+ </screen>
+ Which properties will be used to build <code>foo</code>? The answer is
+ that some features are
+ <firstterm>propagated</firstterm>&#x2014;Boost.Build attempts to use
+ dependencies with the same value of propagated features. The
+ <varname>&lt;optimization&gt;</varname> feature is propagated, so both
+ <filename>app</filename> and <filename>foo</filename> will be compiled
+ with full optimization. But <varname>&lt;define&gt;</varname> is not
+ propagated: its value will be added as-is to the compiler flags for
+      <filename>app.cpp</filename>, but won't affect <filename>foo</filename>.
+ </para>
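+
+    <para>
+      If the define should reach <filename>foo</filename> as well, one option
+      (shown only as a sketch) is to state it in the library's own
+      requirements, since a non-propagated feature is not passed down from
+      dependents:
+    </para>
+
+<programlisting language="jam">
+# util/foo/Jamfile (hypothetical variation): the library always gets the
+# define, regardless of how its dependents are built.
+lib bar : bar.cpp : &lt;define&gt;USE_ASM ;
+</programlisting>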
+
+
+ <para>
+ Let's improve this project further. The library probably has some headers
+ that must be used when compiling <filename>app.cpp</filename>. We could
+ manually add the necessary <code>#include</code> paths to
+ <filename>app</filename>'s requirements as values of the
+      <varname>&lt;include&gt;</varname> feature, but then this work will be
+ repeated for all programs that use <filename>foo</filename>. A better
+ solution is to modify <filename>util/foo/Jamfile</filename> in this way:
+
+ <programlisting language="jam">
+project
+ : usage-requirements &lt;include&gt;.
+ ;
+
+lib foo : foo.cpp ;</programlisting>
+
+ Usage requirements are applied not to the target being declared but to its
+      dependents. In this case, <literal>&lt;include&gt;.</literal> will be
+ applied to all targets that directly depend on <filename>foo</filename>.
+ </para>
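+
+    <para>
+      With this usage requirement in place, the dependent from the earlier
+      example needs no <varname>&lt;include&gt;</varname> of its own&#x2014;the
+      path travels with the library reference:
+    </para>
+
+<programlisting language="jam">
+# top/app/Jamfile: &lt;include&gt;. from util/foo's usage requirements is
+# applied here automatically, because app depends on the library.
+exe app : app.cpp ../util/foo//bar ;
+</programlisting>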
+
+ <para>
+ Another improvement is using symbolic identifiers to refer to the library,
+      as opposed to its <filename>Jamfile</filename> location. In a large project, a
+      library can be used by many targets, and if they all use the <filename>Jamfile
+      </filename> location, a change in directory organization entails a lot of
+      work. The solution is to use project ids&#x2014;symbolic names not tied to
+ directory layout. First, we need to assign a project id by adding this
+ code to <filename>Jamroot</filename>:
+ </para>
+
+ <programlisting language="jam">
+use-project /library-example/foo : util/foo ;</programlisting>
+
+ <para>
+ Second, we modify <filename>app/Jamfile</filename> to use the project id:
+ <programlisting>
+exe app : app.cpp /library-example/foo//bar ;</programlisting>
+
+ The <filename>/library-example/foo//bar</filename> syntax is used to refer
+ to the target <filename>bar</filename> in the project with id <filename>
+ /library-example/foo</filename>. We've achieved our goal&#x2014;if the
+ library is moved to a different directory, only <filename>Jamroot
+ </filename> must be modified. Note that project ids are global&#x2014;two
+ Jamfiles are not allowed to assign the same project id to different
+ directories.
+ </para>
+
+ <tip>
+ <para>If you want all applications in some project to link to a certain
+      library, you can avoid having to specify it directly in the sources of
+ every target by using the <varname>&lt;library&gt;</varname> property.
+ For example, if <filename>/boost/filesystem//fs</filename> should be
+ linked to all applications in your project, you can add
+ <code>&lt;library&gt;/boost/filesystem//fs</code> to the project's
+ requirements, like this:
+ </para>
+
+ <programlisting language="jam">
+project
+ : requirements &lt;library&gt;/boost/filesystem//fs
+ ;</programlisting>
+ </tip>
+ </section>
+
+ <section id="bbv2.tutorial.linkage">
+    <title>Static and shared libraries</title>
+
+ <para>
+ Libraries can be either <emphasis>static</emphasis>, which means they are
+ included in executable files that use them, or <emphasis>shared</emphasis>
+ (a.k.a. <emphasis>dynamic</emphasis>), which are only referred to from
+ executables, and must be available at run time. Boost.Build can create and
+ use both kinds.
+ </para>
+
+ <para>
+ The kind of library produced from a <code>lib</code> target is determined
+      by the value of the <varname>link</varname> feature. The default value is
+      <literal>shared</literal>; to build a static library, the value should
+ be <literal>static</literal>. You can request a static build either on the
+ command line:
+ <programlisting>b2 link=static</programlisting>
+ or in the library's requirements:
+ <programlisting language="jam">lib l : l.cpp : &lt;link&gt;static ;</programlisting>
+ </para>
+
+ <para>
+ We can also use the <varname>&lt;link&gt;</varname> property to express
+ linking requirements on a per-target basis. For example, if a particular
+ executable can be correctly built only with the static version of a
+ library, we can qualify the executable's <link
+ linkend="bbv2.reference.targets.references">target reference</link> to the
+ library as follows:
+
+<!-- There has been no earlier indication that target references can contain
+ properties. You can't assume that the reader will recognize that strange
+ incantation as a target reference, or that she'll know what it means. You
+ also can't assume that hyperlinks will help the reader, because she may be
+ working from a printout, as I was.
+ VP: to be addressed when this section is moved. See comment below.
+-->
+
+ <programlisting language="jam">
+exe important : main.cpp helpers/&lt;link&gt;static ;</programlisting>
+
+ No matter what arguments are specified on the <command>b2</command>
+ command line, <filename>important</filename> will only be linked with the
+ static version of <filename>helpers</filename>.
+ </para>
+
+ <para>
+ Specifying properties in target references is especially useful if you use
+ a library defined in some other project (one you can't change) but you
+ still want static (or dynamic) linking to that library in all cases. If
+ that library is used by many targets, you <emphasis>could</emphasis> use
+ target references everywhere:
+
+ <programlisting language="jam">
+exe e1 : e1.cpp /other_project//bar/&lt;link&gt;static ;
+exe e10 : e10.cpp /other_project//bar/&lt;link&gt;static ;</programlisting>
+
+ but that's far from being convenient. A better approach is to introduce a
+ level of indirection. Create a local <type>alias</type> target that refers
+ to the static (or dynamic) version of <filename>foo</filename>:
+
+ <programlisting>
+alias foo : /other_project//bar/&lt;link&gt;static ;
+exe e1 : e1.cpp foo ;
+exe e10 : e10.cpp foo ;</programlisting>
+
+ The <link linkend="bbv2.tasks.alias">alias</link> rule is specifically
+ used to rename a reference to a target and possibly change the
+ properties.
+
+ <!-- You should introduce the alias rule in an earlier section, before
+ describing how it applies to this specific use-case, and the
+ foregoing sentence should go there.
+ VP: we've agreed that this section should be moved further in the
+ docs, since it's more like advanced reading. When I move it, I'll
+ make sure 'alias' is already mentioned.
+ -->
+ </para>
+
+ <tip>
+ <para>
+ When one library uses another, you put the second library in the source
+ list of the first. For example:
+ <programlisting language="jam">
+lib utils : utils.cpp /boost/filesystem//fs ;
+lib core : core.cpp utils ;
+exe app : app.cpp core ;</programlisting>
+      This works no matter what kind of linking is used. When <filename>core
+      </filename> is built as a shared library, <filename>utils</filename> is
+      linked directly into it. Static libraries can't link to other
+ libraries, so when <filename>core</filename> is built as a static
+ library, its dependency on <filename>utils</filename> is passed along to
+ <filename>core</filename>'s dependents, causing <filename>app</filename>
+ to be linked with both <filename>core</filename> and <filename>utils
+ </filename>.
+ </para>
+ </tip>
+
+ <note>
+ <para>
+        (Note for non-UNIX systems.) Typically, shared libraries must be
+ installed to a directory in the dynamic linker's search path. Otherwise,
+ applications that use shared libraries can't be started. On Windows, the
+ dynamic linker's search path is given by the <envar>PATH</envar>
+ environment variable. This restriction is lifted when you use
+ Boost.Build testing facilities&#x2014;the <envar>PATH</envar> variable
+ will be automatically adjusted before running the executable.
+ <!-- Need ref here to 'testing facilities' -->
+ </para>
+ </note>
+ </section>
+
+ <section id="bbv2.tutorial.conditions">
+ <title>Conditions and alternatives</title>
+
+ <para>
+ Sometimes, particular relationships need to be maintained among a target's
+      build properties. For example, you might want to set a specific <code>
+      #define</code> symbol when a library is built as shared, or when a target's
+ <code>release</code> variant is built. This can be achieved using
+ <firstterm>conditional requirements</firstterm>.
+
+ <programlisting language="jam">
+lib network : network.cpp
+ : <emphasis role="bold">&lt;link&gt;shared:&lt;define&gt;NETWORK_LIB_SHARED</emphasis>
+ &lt;variant&gt;release:&lt;define&gt;EXTRA_FAST
+ ;</programlisting>
+
+ In the example above, whenever <filename>network</filename> is built with
+ <code language="jam">&lt;link&gt;shared</code>, <code language="jam">&lt;define&gt;NETWORK_LIB_SHARED
+ </code> will be in its properties, too. Also, whenever its release variant
+ is built, <code>&lt;define&gt;EXTRA_FAST</code> will appear in its
+ properties.
+ </para>
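+
+    <para>
+      To see the conditional requirement take effect, the shared variant can
+      be requested explicitly; the command below is just an illustration:
+    </para>
+
+<screen>
+b2 network link=shared
+</screen>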
+
+ <para>
+ Sometimes the ways a target is built are so different that describing them
+ using conditional requirements would be hard. For example, imagine that a
+ library actually uses different source files depending on the toolset used
+ to build it. We can express this situation using <firstterm>target
+ alternatives</firstterm>:
+ <programlisting language="jam">
+lib demangler : dummy_demangler.cpp ; # alternative 1
+lib demangler : demangler_gcc.cpp : &lt;toolset&gt;gcc ; # alternative 2
+lib demangler : demangler_msvc.cpp : &lt;toolset&gt;msvc ; # alternative 3</programlisting>
+ When building <filename>demangler</filename>, Boost.Build will compare
+ requirements for each alternative with build properties to find the best
+ match. For example, when building with <code language="jam">&lt;toolset&gt;gcc</code>
+      alternative 2 will be selected, and when building with
+ <code language="jam">&lt;toolset&gt;msvc</code> alternative 3 will be selected. In all
+ other cases, the most generic alternative 1 will be built.
+ </para>
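+
+    <para>
+      As a quick illustration of the selection described above, naming the
+      toolset on the command line picks the matching alternative (alternative
+      2 in this case):
+    </para>
+
+<screen>
+b2 demangler toolset=gcc
+</screen>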
+ </section>
+
+ <section id="bbv2.tutorial.prebuilt">
+ <title>Prebuilt targets</title>
+
+ <para>
+ To link to libraries whose build instructions aren't given in a Jamfile,
+ you need to create <code>lib</code> targets with an appropriate
+ <varname>file</varname> property. Target alternatives can be used to
+ associate multiple library files with a single conceptual target. For
+ example:
+ <programlisting language="jam">
+# util/lib2/Jamfile
+lib lib2
+ :
+ : &lt;file&gt;lib2_release.a &lt;variant&gt;release
+ ;
+
+lib lib2
+ :
+ : &lt;file&gt;lib2_debug.a &lt;variant&gt;debug
+ ;</programlisting>
+
+ This example defines two alternatives for <filename>lib2</filename>, and
+ for each one names a prebuilt file. Naturally, there are no sources.
+ Instead, the <varname>&lt;file&gt;</varname> feature is used to specify
+ the file name.
+ </para>
+
+ <para>
+ Once a prebuilt target has been declared, it can be used just like any
+ other target:
+
+ <programlisting language="jam">
+exe app : app.cpp ../util/lib2//lib2 ;</programlisting>
+
+ As with any target, the alternative selected depends on the properties
+      propagated from <filename>lib2</filename>'s dependents. If we build the
+      release and debug versions of <filename>app</filename>, it will be linked
+      with <filename>lib2_release.a</filename> and <filename>lib2_debug.a
+ </filename>, respectively.
+ </para>
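+
+    <para>
+      For instance (shown purely to illustrate how the alternatives are
+      selected), requesting the debug variant links against the debug file:
+    </para>
+
+<screen>
+b2 app variant=debug
+</screen>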
+
+ <para>
+ System libraries&#x2014;those that are automatically found by the toolset
+ by searching through some set of predetermined paths&#x2014;should be
+ declared almost like regular ones:
+
+ <programlisting language="jam">
+lib pythonlib : : &lt;name&gt;python22 ;</programlisting>
+
+ We again don't specify any sources, but give a <varname>name</varname>
+ that should be passed to the compiler. If the gcc toolset were used to
+ link an executable target to <filename>pythonlib</filename>,
+ <option>-lpython22</option> would appear in the command line (other
+ compilers may use different options).
+ </para>
+
+ <para>
+ We can also specify where the toolset should look for the library:
+
+ <programlisting language="jam">
+lib pythonlib : : &lt;name&gt;python22 &lt;search&gt;/opt/lib ;</programlisting>
+
+ And, of course, target alternatives can be used in the usual way:
+
+ <programlisting language="jam">
+lib pythonlib : : &lt;name&gt;python22 &lt;variant&gt;release ;
+lib pythonlib : : &lt;name&gt;python22_d &lt;variant&gt;debug ;</programlisting>
+ </para>
+
+ <para>
+ A more advanced use of prebuilt targets is described in <xref linkend=
+ "bbv2.recipies.site-config"/>.
+ </para>
+ </section>
+</chapter>
+
+<!--
+ Local Variables:
+ mode: nxml
+ sgml-indent-data:t
+ sgml-parent-document:("userman.xml" "chapter")
+ sgml-set-face: t
+ sgml-omittag:nil
+ sgml-shorttag:nil
+ sgml-namecase-general:t
+ sgml-general-insert-case:lower
+ sgml-minimize-attributes:nil
+ sgml-always-quote-attributes:t
+ sgml-indent-step:2
+ sgml-exposed-tags:nil
+ sgml-local-catalogs:nil
+ sgml-local-ecat-files:nil
+ End:
+-->
diff --git a/tools/build/v2/doc/src/type.xml b/tools/build/doc/src/type.xml
index 6ca7cc7ae1..6ca7cc7ae1 100644
--- a/tools/build/v2/doc/src/type.xml
+++ b/tools/build/doc/src/type.xml
diff --git a/tools/build/v2/doc/src/typed-target.xml b/tools/build/doc/src/typed-target.xml
index 2a7d7c98c7..2a7d7c98c7 100644
--- a/tools/build/v2/doc/src/typed-target.xml
+++ b/tools/build/doc/src/typed-target.xml
diff --git a/tools/build/doc/src/userman.xml b/tools/build/doc/src/userman.xml
new file mode 100644
index 0000000000..72bcce72d9
--- /dev/null
+++ b/tools/build/doc/src/userman.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE part PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
+ "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+
+<part xmlns:xi="http://www.w3.org/2001/XInclude"
+ id="bbv2" last-revision="$Date$">
+ <partinfo>
+ <copyright>
+ <year>2006</year>
+ <year>2007</year>
+ <year>2008</year>
+ <year>2009</year>
+ <year>2014</year>
+ <holder>Vladimir Prus</holder>
+ </copyright>
+
+ <legalnotice>
+ <para>Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file <filename>LICENSE_1_0.txt</filename> or copy at
+ <ulink
+ url="http://www.boost.org/LICENSE_1_0.txt">http://www.boost.org/LICENSE_1_0.txt</ulink>)
+ </para>
+ </legalnotice>
+ </partinfo>
+
+ <title>Boost.Build V2 User Manual</title>
+
+ <!-- Chapters -->
+ <xi:include href="howto.xml"/>
+ <xi:include href="install.xml"/>
+ <xi:include href="tutorial.xml"/>
+ <xi:include href="overview.xml"/>
+ <xi:include href="tasks.xml"/>
+ <xi:include href="reference.xml"/>
+ <xi:include href="extending.xml"/>
+ <xi:include href="faq.xml"/>
+
+  <!-- Appendices -->
+<!-- <xi:include href="architecture.xml"/> -->
+
+</part>
diff --git a/tools/build/v2/doc/tools.html b/tools/build/doc/tools.html
index 8b4929d290..8b4929d290 100644
--- a/tools/build/v2/doc/tools.html
+++ b/tools/build/doc/tools.html
diff --git a/tools/build/example/boost-build.jam b/tools/build/example/boost-build.jam
new file mode 100644
index 0000000000..02abe407f7
--- /dev/null
+++ b/tools/build/example/boost-build.jam
@@ -0,0 +1,6 @@
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+boost-build ../src/kernel ;
diff --git a/tools/build/v2/example/built_tool/Jamroot.jam b/tools/build/example/built_tool/Jamroot.jam
index c458650e8c..c458650e8c 100644
--- a/tools/build/v2/example/built_tool/Jamroot.jam
+++ b/tools/build/example/built_tool/Jamroot.jam
diff --git a/tools/build/v2/example/built_tool/core/Jamfile.jam b/tools/build/example/built_tool/core/Jamfile.jam
index 2d96f7182b..2d96f7182b 100644
--- a/tools/build/v2/example/built_tool/core/Jamfile.jam
+++ b/tools/build/example/built_tool/core/Jamfile.jam
diff --git a/tools/build/v2/example/built_tool/core/a.td b/tools/build/example/built_tool/core/a.td
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/example/built_tool/core/a.td
+++ b/tools/build/example/built_tool/core/a.td
diff --git a/tools/build/v2/example/built_tool/core/core.cpp b/tools/build/example/built_tool/core/core.cpp
index 31a133726c..31a133726c 100644
--- a/tools/build/v2/example/built_tool/core/core.cpp
+++ b/tools/build/example/built_tool/core/core.cpp
diff --git a/tools/build/v2/example/built_tool/readme.txt b/tools/build/example/built_tool/readme.txt
index bbb9f9b3a7..bbb9f9b3a7 100644
--- a/tools/build/v2/example/built_tool/readme.txt
+++ b/tools/build/example/built_tool/readme.txt
diff --git a/tools/build/v2/example/built_tool/tblgen/Jamfile.jam b/tools/build/example/built_tool/tblgen/Jamfile.jam
index af4906278f..af4906278f 100644
--- a/tools/build/v2/example/built_tool/tblgen/Jamfile.jam
+++ b/tools/build/example/built_tool/tblgen/Jamfile.jam
diff --git a/tools/build/v2/example/built_tool/tblgen/tblgen.cpp b/tools/build/example/built_tool/tblgen/tblgen.cpp
index fbd0581334..fbd0581334 100644
--- a/tools/build/v2/example/built_tool/tblgen/tblgen.cpp
+++ b/tools/build/example/built_tool/tblgen/tblgen.cpp
diff --git a/tools/build/v2/example/customization/class.verbatim b/tools/build/example/customization/class.verbatim
index 5c0d7b803c..5c0d7b803c 100644
--- a/tools/build/v2/example/customization/class.verbatim
+++ b/tools/build/example/customization/class.verbatim
diff --git a/tools/build/v2/example/customization/codegen.cpp b/tools/build/example/customization/codegen.cpp
index 6cdb45e4de..6cdb45e4de 100644
--- a/tools/build/v2/example/customization/codegen.cpp
+++ b/tools/build/example/customization/codegen.cpp
diff --git a/tools/build/v2/example/customization/inline_file.py b/tools/build/example/customization/inline_file.py
index a48c5fc9d9..a48c5fc9d9 100755..100644
--- a/tools/build/v2/example/customization/inline_file.py
+++ b/tools/build/example/customization/inline_file.py
diff --git a/tools/build/v2/example/customization/jamroot.jam b/tools/build/example/customization/jamroot.jam
index 5e986d91c2..5e986d91c2 100644
--- a/tools/build/v2/example/customization/jamroot.jam
+++ b/tools/build/example/customization/jamroot.jam
diff --git a/tools/build/v2/example/customization/readme.txt b/tools/build/example/customization/readme.txt
index 7ee04f1a24..7ee04f1a24 100644
--- a/tools/build/v2/example/customization/readme.txt
+++ b/tools/build/example/customization/readme.txt
diff --git a/tools/build/v2/example/customization/t1.verbatim b/tools/build/example/customization/t1.verbatim
index 144540f29b..144540f29b 100644
--- a/tools/build/v2/example/customization/t1.verbatim
+++ b/tools/build/example/customization/t1.verbatim
diff --git a/tools/build/v2/example/customization/t2.verbatim b/tools/build/example/customization/t2.verbatim
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/example/customization/t2.verbatim
+++ b/tools/build/example/customization/t2.verbatim
diff --git a/tools/build/v2/example/customization/usage.verbatim b/tools/build/example/customization/usage.verbatim
index 0fc4b4a377..0fc4b4a377 100644
--- a/tools/build/v2/example/customization/usage.verbatim
+++ b/tools/build/example/customization/usage.verbatim
diff --git a/tools/build/v2/example/customization/verbatim.jam b/tools/build/example/customization/verbatim.jam
index 931fdce336..931fdce336 100644
--- a/tools/build/v2/example/customization/verbatim.jam
+++ b/tools/build/example/customization/verbatim.jam
diff --git a/tools/build/v2/example/customization/verbatim.py b/tools/build/example/customization/verbatim.py
index be285976c1..be285976c1 100644
--- a/tools/build/v2/example/customization/verbatim.py
+++ b/tools/build/example/customization/verbatim.py
diff --git a/tools/build/v2/example/generate/REAME.txt b/tools/build/example/generate/REAME.txt
index fc2b207723..fc2b207723 100644
--- a/tools/build/v2/example/generate/REAME.txt
+++ b/tools/build/example/generate/REAME.txt
diff --git a/tools/build/v2/example/generate/a.cpp b/tools/build/example/generate/a.cpp
index 364975671d..364975671d 100644
--- a/tools/build/v2/example/generate/a.cpp
+++ b/tools/build/example/generate/a.cpp
diff --git a/tools/build/v2/example/generate/gen.jam b/tools/build/example/generate/gen.jam
index 73232aab5e..73232aab5e 100644
--- a/tools/build/v2/example/generate/gen.jam
+++ b/tools/build/example/generate/gen.jam
diff --git a/tools/build/v2/example/generate/gen.py b/tools/build/example/generate/gen.py
index 09ee15b43a..09ee15b43a 100644
--- a/tools/build/v2/example/generate/gen.py
+++ b/tools/build/example/generate/gen.py
diff --git a/tools/build/v2/example/generate/jamroot.jam b/tools/build/example/generate/jamroot.jam
index c48f2207bf..c48f2207bf 100644
--- a/tools/build/v2/example/generate/jamroot.jam
+++ b/tools/build/example/generate/jamroot.jam
diff --git a/tools/build/v2/example/generator/README.txt b/tools/build/example/generator/README.txt
index f26a856a58..f26a856a58 100644
--- a/tools/build/v2/example/generator/README.txt
+++ b/tools/build/example/generator/README.txt
diff --git a/tools/build/v2/example/generator/foo.gci b/tools/build/example/generator/foo.gci
index 2ccc45c6c5..2ccc45c6c5 100644
--- a/tools/build/v2/example/generator/foo.gci
+++ b/tools/build/example/generator/foo.gci
diff --git a/tools/build/v2/example/generator/jamroot.jam b/tools/build/example/generator/jamroot.jam
index 9703134dba..9703134dba 100644
--- a/tools/build/v2/example/generator/jamroot.jam
+++ b/tools/build/example/generator/jamroot.jam
diff --git a/tools/build/v2/example/generator/soap.jam b/tools/build/example/generator/soap.jam
index d28bfdecc5..d28bfdecc5 100644
--- a/tools/build/v2/example/generator/soap.jam
+++ b/tools/build/example/generator/soap.jam
diff --git a/tools/build/v2/example/gettext/jamfile.jam b/tools/build/example/gettext/jamfile.jam
index d5096df305..d5096df305 100644
--- a/tools/build/v2/example/gettext/jamfile.jam
+++ b/tools/build/example/gettext/jamfile.jam
diff --git a/tools/build/v2/example/gettext/jamroot.jam b/tools/build/example/gettext/jamroot.jam
index 862f8930c7..862f8930c7 100644
--- a/tools/build/v2/example/gettext/jamroot.jam
+++ b/tools/build/example/gettext/jamroot.jam
diff --git a/tools/build/v2/example/gettext/main.cpp b/tools/build/example/gettext/main.cpp
index 6888e1abae..6888e1abae 100644
--- a/tools/build/v2/example/gettext/main.cpp
+++ b/tools/build/example/gettext/main.cpp
diff --git a/tools/build/v2/example/gettext/readme.txt b/tools/build/example/gettext/readme.txt
index 9c8fee6fd9..9c8fee6fd9 100644
--- a/tools/build/v2/example/gettext/readme.txt
+++ b/tools/build/example/gettext/readme.txt
diff --git a/tools/build/v2/example/gettext/russian.po b/tools/build/example/gettext/russian.po
index daa7121c3d..daa7121c3d 100644
--- a/tools/build/v2/example/gettext/russian.po
+++ b/tools/build/example/gettext/russian.po
diff --git a/tools/build/v2/example/hello/hello.cpp b/tools/build/example/hello/hello.cpp
index 6808022897..6808022897 100644
--- a/tools/build/v2/example/hello/hello.cpp
+++ b/tools/build/example/hello/hello.cpp
diff --git a/tools/build/v2/example/hello/jamroot.jam b/tools/build/example/hello/jamroot.jam
index 672ec02e90..672ec02e90 100644
--- a/tools/build/v2/example/hello/jamroot.jam
+++ b/tools/build/example/hello/jamroot.jam
diff --git a/tools/build/v2/example/hello/readme.txt b/tools/build/example/hello/readme.txt
index f416be675a..f416be675a 100644
--- a/tools/build/v2/example/hello/readme.txt
+++ b/tools/build/example/hello/readme.txt
diff --git a/tools/build/v2/example/libraries/app/app.cpp b/tools/build/example/libraries/app/app.cpp
index f62c1c35db..f62c1c35db 100644
--- a/tools/build/v2/example/libraries/app/app.cpp
+++ b/tools/build/example/libraries/app/app.cpp
diff --git a/tools/build/v2/example/libraries/app/jamfile.jam b/tools/build/example/libraries/app/jamfile.jam
index ed2054e130..ed2054e130 100644
--- a/tools/build/v2/example/libraries/app/jamfile.jam
+++ b/tools/build/example/libraries/app/jamfile.jam
diff --git a/tools/build/v2/example/libraries/jamroot.jam b/tools/build/example/libraries/jamroot.jam
index 5e0dc48148..5e0dc48148 100644
--- a/tools/build/v2/example/libraries/jamroot.jam
+++ b/tools/build/example/libraries/jamroot.jam
diff --git a/tools/build/v2/example/libraries/util/foo/bar.cpp b/tools/build/example/libraries/util/foo/bar.cpp
index e6339ee9bb..e6339ee9bb 100644
--- a/tools/build/v2/example/libraries/util/foo/bar.cpp
+++ b/tools/build/example/libraries/util/foo/bar.cpp
diff --git a/tools/build/v2/example/libraries/util/foo/include/lib1.h b/tools/build/example/libraries/util/foo/include/lib1.h
index 50f5e19d20..50f5e19d20 100644
--- a/tools/build/v2/example/libraries/util/foo/include/lib1.h
+++ b/tools/build/example/libraries/util/foo/include/lib1.h
diff --git a/tools/build/v2/example/libraries/util/foo/jamfile.jam b/tools/build/example/libraries/util/foo/jamfile.jam
index 7b6359ea4e..7b6359ea4e 100644
--- a/tools/build/v2/example/libraries/util/foo/jamfile.jam
+++ b/tools/build/example/libraries/util/foo/jamfile.jam
diff --git a/tools/build/v2/example/make/foo.py b/tools/build/example/make/foo.py
index e4c0b824a0..e4c0b824a0 100644
--- a/tools/build/v2/example/make/foo.py
+++ b/tools/build/example/make/foo.py
diff --git a/tools/build/v2/example/make/jamroot.jam b/tools/build/example/make/jamroot.jam
index 7bb98e3537..7bb98e3537 100644
--- a/tools/build/v2/example/make/jamroot.jam
+++ b/tools/build/example/make/jamroot.jam
diff --git a/tools/build/v2/example/make/main_cpp.pro b/tools/build/example/make/main_cpp.pro
index 237c8ce181..237c8ce181 100644
--- a/tools/build/v2/example/make/main_cpp.pro
+++ b/tools/build/example/make/main_cpp.pro
diff --git a/tools/build/v2/example/make/readme.txt b/tools/build/example/make/readme.txt
index 333c55a716..333c55a716 100644
--- a/tools/build/v2/example/make/readme.txt
+++ b/tools/build/example/make/readme.txt
diff --git a/tools/build/v2/example/pch/include/pch.hpp b/tools/build/example/pch/include/pch.hpp
index 8f05cc43d9..8f05cc43d9 100644
--- a/tools/build/v2/example/pch/include/pch.hpp
+++ b/tools/build/example/pch/include/pch.hpp
diff --git a/tools/build/v2/example/pch/jamroot.jam b/tools/build/example/pch/jamroot.jam
index 115164aaef..115164aaef 100644
--- a/tools/build/v2/example/pch/jamroot.jam
+++ b/tools/build/example/pch/jamroot.jam
diff --git a/tools/build/v2/example/pch/source/hello_world.cpp b/tools/build/example/pch/source/hello_world.cpp
index f618056a0b..f618056a0b 100644
--- a/tools/build/v2/example/pch/source/hello_world.cpp
+++ b/tools/build/example/pch/source/hello_world.cpp
diff --git a/tools/build/v2/example/python_modules/jamroot.jam b/tools/build/example/python_modules/jamroot.jam
index c53e75d58b..c53e75d58b 100644
--- a/tools/build/v2/example/python_modules/jamroot.jam
+++ b/tools/build/example/python_modules/jamroot.jam
diff --git a/tools/build/v2/example/python_modules/python_helpers.jam b/tools/build/example/python_modules/python_helpers.jam
index 5a79aeebf2..5a79aeebf2 100644
--- a/tools/build/v2/example/python_modules/python_helpers.jam
+++ b/tools/build/example/python_modules/python_helpers.jam
diff --git a/tools/build/v2/example/python_modules/python_helpers.py b/tools/build/example/python_modules/python_helpers.py
index 8148f57c35..8148f57c35 100644
--- a/tools/build/v2/example/python_modules/python_helpers.py
+++ b/tools/build/example/python_modules/python_helpers.py
diff --git a/tools/build/v2/example/python_modules/readme.txt b/tools/build/example/python_modules/readme.txt
index 0fe6ee55e6..0fe6ee55e6 100644
--- a/tools/build/v2/example/python_modules/readme.txt
+++ b/tools/build/example/python_modules/readme.txt
diff --git a/tools/build/v2/example/qt/README.txt b/tools/build/example/qt/README.txt
index d187c31c36..d187c31c36 100644
--- a/tools/build/v2/example/qt/README.txt
+++ b/tools/build/example/qt/README.txt
diff --git a/tools/build/v2/example/qt/qt3/hello/canvas.cpp b/tools/build/example/qt/qt3/hello/canvas.cpp
index c6d23c9d47..c6d23c9d47 100644
--- a/tools/build/v2/example/qt/qt3/hello/canvas.cpp
+++ b/tools/build/example/qt/qt3/hello/canvas.cpp
diff --git a/tools/build/v2/example/qt/qt3/hello/canvas.h b/tools/build/example/qt/qt3/hello/canvas.h
index f9f9502679..f9f9502679 100644
--- a/tools/build/v2/example/qt/qt3/hello/canvas.h
+++ b/tools/build/example/qt/qt3/hello/canvas.h
diff --git a/tools/build/v2/example/qt/qt3/hello/jamroot.jam b/tools/build/example/qt/qt3/hello/jamroot.jam
index 03be582e50..03be582e50 100644
--- a/tools/build/v2/example/qt/qt3/hello/jamroot.jam
+++ b/tools/build/example/qt/qt3/hello/jamroot.jam
diff --git a/tools/build/v2/example/qt/qt3/hello/main.cpp b/tools/build/example/qt/qt3/hello/main.cpp
index 8f1ffc2fba..8f1ffc2fba 100644
--- a/tools/build/v2/example/qt/qt3/hello/main.cpp
+++ b/tools/build/example/qt/qt3/hello/main.cpp
diff --git a/tools/build/v2/example/qt/qt3/moccable-cpp/jamroot.jam b/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam
index 85778da208..85778da208 100644
--- a/tools/build/v2/example/qt/qt3/moccable-cpp/jamroot.jam
+++ b/tools/build/example/qt/qt3/moccable-cpp/jamroot.jam
diff --git a/tools/build/v2/example/qt/qt3/moccable-cpp/main.cpp b/tools/build/example/qt/qt3/moccable-cpp/main.cpp
index ed36f74698..ed36f74698 100644
--- a/tools/build/v2/example/qt/qt3/moccable-cpp/main.cpp
+++ b/tools/build/example/qt/qt3/moccable-cpp/main.cpp
diff --git a/tools/build/v2/example/qt/qt3/uic/hello_world_widget.ui b/tools/build/example/qt/qt3/uic/hello_world_widget.ui
index 26cc734871..26cc734871 100644
--- a/tools/build/v2/example/qt/qt3/uic/hello_world_widget.ui
+++ b/tools/build/example/qt/qt3/uic/hello_world_widget.ui
diff --git a/tools/build/v2/example/qt/qt3/uic/jamroot.jam b/tools/build/example/qt/qt3/uic/jamroot.jam
index d0b8062940..d0b8062940 100644
--- a/tools/build/v2/example/qt/qt3/uic/jamroot.jam
+++ b/tools/build/example/qt/qt3/uic/jamroot.jam
diff --git a/tools/build/v2/example/qt/qt3/uic/main.cpp b/tools/build/example/qt/qt3/uic/main.cpp
index f2a08b5faa..f2a08b5faa 100644
--- a/tools/build/v2/example/qt/qt3/uic/main.cpp
+++ b/tools/build/example/qt/qt3/uic/main.cpp
diff --git a/tools/build/v2/example/qt/qt4/hello/arrow.cpp b/tools/build/example/qt/qt4/hello/arrow.cpp
index e821b16909..e821b16909 100644
--- a/tools/build/v2/example/qt/qt4/hello/arrow.cpp
+++ b/tools/build/example/qt/qt4/hello/arrow.cpp
diff --git a/tools/build/v2/example/qt/qt4/hello/arrow.h b/tools/build/example/qt/qt4/hello/arrow.h
index d7743864f3..d7743864f3 100644
--- a/tools/build/v2/example/qt/qt4/hello/arrow.h
+++ b/tools/build/example/qt/qt4/hello/arrow.h
diff --git a/tools/build/v2/example/qt/qt4/hello/jamroot.jam b/tools/build/example/qt/qt4/hello/jamroot.jam
index 83952f17b2..83952f17b2 100644
--- a/tools/build/v2/example/qt/qt4/hello/jamroot.jam
+++ b/tools/build/example/qt/qt4/hello/jamroot.jam
diff --git a/tools/build/v2/example/qt/qt4/hello/main.cpp b/tools/build/example/qt/qt4/hello/main.cpp
index df27444bdd..df27444bdd 100644
--- a/tools/build/v2/example/qt/qt4/hello/main.cpp
+++ b/tools/build/example/qt/qt4/hello/main.cpp
diff --git a/tools/build/v2/example/qt/qt4/moccable-cpp/jamroot.jam b/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam
index d07b9c7d3b..d07b9c7d3b 100644
--- a/tools/build/v2/example/qt/qt4/moccable-cpp/jamroot.jam
+++ b/tools/build/example/qt/qt4/moccable-cpp/jamroot.jam
diff --git a/tools/build/v2/example/qt/qt4/moccable-cpp/main.cpp b/tools/build/example/qt/qt4/moccable-cpp/main.cpp
index ffc96cc3e6..ffc96cc3e6 100644
--- a/tools/build/v2/example/qt/qt4/moccable-cpp/main.cpp
+++ b/tools/build/example/qt/qt4/moccable-cpp/main.cpp
diff --git a/tools/build/v2/example/qt/qt4/uic/hello_world_widget.ui b/tools/build/example/qt/qt4/uic/hello_world_widget.ui
index 67060b336e..67060b336e 100644
--- a/tools/build/v2/example/qt/qt4/uic/hello_world_widget.ui
+++ b/tools/build/example/qt/qt4/uic/hello_world_widget.ui
diff --git a/tools/build/v2/example/qt/qt4/uic/jamroot.jam b/tools/build/example/qt/qt4/uic/jamroot.jam
index 40675a72eb..40675a72eb 100644
--- a/tools/build/v2/example/qt/qt4/uic/jamroot.jam
+++ b/tools/build/example/qt/qt4/uic/jamroot.jam
diff --git a/tools/build/v2/example/qt/qt4/uic/main.cpp b/tools/build/example/qt/qt4/uic/main.cpp
index fc72fd5e6b..fc72fd5e6b 100644
--- a/tools/build/v2/example/qt/qt4/uic/main.cpp
+++ b/tools/build/example/qt/qt4/uic/main.cpp
diff --git a/tools/build/v2/site-config.jam b/tools/build/example/site-config.jam
index ad22d67447..ad22d67447 100644
--- a/tools/build/v2/site-config.jam
+++ b/tools/build/example/site-config.jam
diff --git a/tools/build/example/testing/compile-fail.cpp b/tools/build/example/testing/compile-fail.cpp
new file mode 100644
index 0000000000..cd3e094094
--- /dev/null
+++ b/tools/build/example/testing/compile-fail.cpp
@@ -0,0 +1,16 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+
+int main()
+{
+ std::cout << "Bye!\n";
+ return 1
+}
diff --git a/tools/build/example/testing/fail.cpp b/tools/build/example/testing/fail.cpp
new file mode 100644
index 0000000000..f1efa1ee2a
--- /dev/null
+++ b/tools/build/example/testing/fail.cpp
@@ -0,0 +1,16 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+
+int main()
+{
+ std::cout << "Bye!\n";
+ return 1;
+}
diff --git a/tools/build/example/testing/jamroot.jam b/tools/build/example/testing/jamroot.jam
new file mode 100644
index 0000000000..047aff39c2
--- /dev/null
+++ b/tools/build/example/testing/jamroot.jam
@@ -0,0 +1,10 @@
+# Copyright 2014 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+using testing ;
+
+run success.cpp : : ;
+run-fail fail.cpp : : ;
+compile success.cpp : : success-compile ;
+compile-fail compile-fail.cpp ;
diff --git a/tools/build/example/testing/success.cpp b/tools/build/example/testing/success.cpp
new file mode 100644
index 0000000000..e2fa7a4a9c
--- /dev/null
+++ b/tools/build/example/testing/success.cpp
@@ -0,0 +1,16 @@
+// Copyright (c) 2014 Rene Rivera
+//
+// Distributed under the Boost Software License, Version 1.0. (See
+// accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+//
+// http://www.boost.org
+//
+
+#include <iostream>
+
+int main()
+{
+ std::cout << "Hi!\n";
+ return 0;
+}
diff --git a/tools/build/v2/user-config.jam b/tools/build/example/user-config.jam
index fbbf13fd0c..fbbf13fd0c 100644
--- a/tools/build/v2/user-config.jam
+++ b/tools/build/example/user-config.jam
diff --git a/tools/build/v2/example/variant/a.cpp b/tools/build/example/variant/a.cpp
index 42b69f3358..42b69f3358 100644
--- a/tools/build/v2/example/variant/a.cpp
+++ b/tools/build/example/variant/a.cpp
diff --git a/tools/build/v2/example/variant/jamfile.jam b/tools/build/example/variant/jamfile.jam
index 9f8c580be0..9f8c580be0 100644
--- a/tools/build/v2/example/variant/jamfile.jam
+++ b/tools/build/example/variant/jamfile.jam
diff --git a/tools/build/v2/example/variant/jamroot.jam b/tools/build/example/variant/jamroot.jam
index e19476ccc0..e19476ccc0 100644
--- a/tools/build/v2/example/variant/jamroot.jam
+++ b/tools/build/example/variant/jamroot.jam
diff --git a/tools/build/v2/example/variant/libs/jamfile.jam b/tools/build/example/variant/libs/jamfile.jam
index 4366b76246..4366b76246 100644
--- a/tools/build/v2/example/variant/libs/jamfile.jam
+++ b/tools/build/example/variant/libs/jamfile.jam
diff --git a/tools/build/v2/example/variant/libs/l.cpp b/tools/build/example/variant/libs/l.cpp
index 26cb4b1e63..26cb4b1e63 100644
--- a/tools/build/v2/example/variant/libs/l.cpp
+++ b/tools/build/example/variant/libs/l.cpp
diff --git a/tools/build/v2/example/variant/readme.txt b/tools/build/example/variant/readme.txt
index 5ab1b938a9..5ab1b938a9 100644
--- a/tools/build/v2/example/variant/readme.txt
+++ b/tools/build/example/variant/readme.txt
diff --git a/tools/build/index.html b/tools/build/index.html
index 12f22e4132..6769bcaec5 100644
--- a/tools/build/index.html
+++ b/tools/build/index.html
@@ -1,21 +1,165 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<!--
- Copyright 2003 Rene Rivera.
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
- -->
-
-<html xmlns="http://www.w3.org/1999/xhtml">
-<head>
- <meta http-equiv="refresh" content="0; URL=v2/index.html" />
-
- <title></title>
-</head>
-
-<body>
- Automatic redirection failed, please go to <a href=
- "v2/index.html">v2/index.html</a>.
-</body>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+
+<!-- Copyright 2004 Aleksey Gurtovoy -->
+<!-- Copyright 2004, 2005, 2006 Vladimir Prus -->
+<!-- Distributed under the Boost Software License, Version 1.0. -->
+<!-- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -->
+
+<html>
+ <head>
+ <meta name="generator" content=
+ "HTML Tidy for Linux/x86 (vers 1st April 2002), see www.w3.org">
+ <meta name="generator" content="Microsoft FrontPage 5.0">
+ <meta http-equiv="Content-Type" content=
+ "text/html; charset=windows-1252">
+ <!-- tidy options: &dash;&dash;tidy-mark false -i -wrap 78 !-->
+<style type="text/css">
+div.sidebar {
+ margin-left: 1em ;
+ border: medium outset ;
+ padding: 0em 1em ;
+ background-color: #adbed2;
+ border-color: #000000;
+  border-width: 1px;
+  width: 40% ;
+  float: right ;
+  clear: right ;
+}
+
+div.sidebar p.rubric {
+ font-family: sans-serif ;
+ font-size: medium }
+</style>
+
+ <title>Boost.Build V2</title>
+ </head>
+
+ <body bgcolor="#FFFFFF" text="#000000">
+
+ <p align="center"><img src="website/boost_build.png" width="396" height="60" alt="Boost.Build V2"></img>
+
+ <div class="contents sidebar topic" id="index">
+ <p>
+ <b>Quick access</b>
+ <ul>
+ <li>Download: <a href=
+ "http://prdownloads.sourceforge.net/boost/boost-build-2.0-m12.zip">[zip]
+ </a>, <a href=
+ "http://prdownloads.sourceforge.net/boost/boost-build-2.0-m12.tar.bz2">[tar.bz2]
+ </a>
+ <li>Nightly build: <a href="http://boost.org/boost-build2/boost-build.zip">[zip]</a>,
+ <a href="http://boost.org/boost-build2/boost-build.tar.bz2">[tar.bz2]</a>
+ <li><a href="../../../doc/html/bbv2.html">Documentation</a>
+ (<a href="doc/userman.pdf">PDF</a>)
+ <li><a
+ href="http://www.crystalclearsoftware.com/cgi-bin/boost_wiki/wiki.pl?Boost.Build_V2">Wiki
+        (User-contributed documentation)</a>
+ <li>Feedback: <a
+ href="http://lists.boost.org/mailman/listinfo.cgi/boost-build">[mailing list]</a>,
+ <a
+ href="news://news.gmane.org/gmane.comp.lib.boost.build">[newsgroup]</a>
+ <ul>
+ <li>Before posting, <a href="http://lists.boost.org/mailman/listinfo.cgi/boost-build">subscribe</a>
+ <!--
+ <li><form method="get" action="http://search.gmane.org/">
+ <input type="text" name="query">
+ <input type="hidden" name="group" value="gmane.comp.lib.boost.build">
+ <input type="submit" value="Search">
+ </form> -->
+ </ul>
+ <li><a href="https://trac.lvk.cs.msu.su/boost.build">Bug tracker</a>
+<!-- <li>Rate Boost.Build: <a href="http://freshmeat.net/rate/38012/">Freshmeat</a> -->
+ </ul>
+ </p>
+ </div>
+
+<!-- <h1>Boost.Build V2</h1> -->
+
+
+ <h2>Overview</h2>
+
+ <p>Boost.Build is an easy way to build C++ projects, everywhere. You
+    name your executables and libraries and list their sources. Boost.Build
+    takes care of compiling your sources with the right options, creating
+ static and shared libraries, making executables, and other chores --
+ whether you're using gcc, msvc, or a dozen more supported C++
+ compilers -- on Windows, OSX, Linux and commercial UNIX systems.
+
+ <p>Some of the most important features:
+ <ul>
+      <li><b>Simple and high-level build description</b>. In most
+      cases, a target name and a list of sources is all you need.</li>
+
+      <li><b>Portability</b>. The most important build properties have symbolic
+      names that work everywhere. Why memorize the compiler flags necessary
+      for a multi-threaded 64-bit shared library if Boost.Build can do it for you?</li>
+
+ <li><b>Variant builds</b>. When you build the same project
+ twice with different properties, all produced files are placed
+      in different directories, so you can build with two versions of
+      gcc, or both debug and release variants, in one invocation.</li>
+
+ <li><b>Global dependencies</b>. No matter what directory you build
+ in, Boost.Build will always check all dependencies in your entire
+      project, preventing inconsistent binaries. And it's easy to
+      use one Boost.Build project in another, again with full dependency
+      tracking.</li>
+
+ <li><b>Usage requirements</b>. A target can specify properties,
+ like include paths and preprocessor defines, that are necessary to use
+ it. Those properties will be automatically applied whenever the target
+ is used.</li>
+
+ <li><b>Standalone</b>. Boost.Build's only dependency is a C compiler,
+      so it's easy to set up. You can even include all of Boost.Build in your
+ project. Boost.Build does not depend on C++ Boost in any way.</li>
+ </ul>
+
+ <h2>Status and future</h2>
+
+ <p>Boost.Build is ready to use today, and new features are being actively
+ developed.
+
+    <p>The current version is 2.0 Milestone 12, which added support for
+    precompiled headers on gcc and added three new C++ compilers
+ (<a href="http://svn.boost.org/svn/boost/trunk/tools/build/v2/changes.txt">full changelog</a>).
+
+    <p>Milestone 13 is planned as a bugfix release. Milestone 14 will
+ focus on improving user documentation. Milestone 15 will see most
+ of Boost.Build reimplemented in Python, to make extending
+ Boost.Build even easier for end users (see <a href="https://trac.lvk.cs.msu.su/boost.build/wiki/PythonPort">PythonPort</a>).
+ The specific issues planned for each release can be found on the
+ <a href="https://trac.lvk.cs.msu.su/boost.build/roadmap">roadmap</a>.
+
+
+
+ <h2>Feedback and contributing</h2>
+
+ <p>Should you have any questions or comments, we'd be glad to hear them.
+ Post everything to the <a href="http://lists.boost.org/mailman/listinfo.cgi/boost-build">mailing list</a>.</p>
+
+ <p>Bugs and feature requests can be entered at our
+ <a href="https://trac.lvk.cs.msu.su/boost.build">bug tracker</a>.
+
+ <p>If you'd like to help with development, just pick a bug
+ in the tracker that you'd like to fix, or feel free to implement
+ any feature you like. There's a separate
+ <a href="hacking.txt">guidelines document</a> for working on code.</p>
+ <hr>
+
+ <p>&copy; Copyright David Abrahams and Vladimir Prus 2002-2007.
+ Permission to copy, use, modify, sell and distribute this document is
+ granted provided this copyright notice appears in all copies. This
+ document is provided "as is" without express or implied warranty, and
+ with no claim as to its suitability for any purpose.</p>
+
+ <p>Revised
+ <!--webbot bot="Timestamp" s-type="EDITED" s-format="%d %B, %Y" startspan
+ -->Oct 4, 2007
+ <!--webbot bot="Timestamp" endspan i-checksum="13972"
+ -->
+ </p>
+
+ </body>
</html>
+
diff --git a/tools/build/notes/README.txt b/tools/build/notes/README.txt
new file mode 100644
index 0000000000..96ef0c3aaf
--- /dev/null
+++ b/tools/build/notes/README.txt
@@ -0,0 +1,8 @@
+Copyright 2005 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+This directory contains various development notes. Some of them
+may eventually find their way into the documentation; others are purely
+implementation comments.
diff --git a/tools/build/v2/notes/build_dir_option.txt b/tools/build/notes/build_dir_option.txt
index 6dbecaed68..6dbecaed68 100644
--- a/tools/build/v2/notes/build_dir_option.txt
+++ b/tools/build/notes/build_dir_option.txt
diff --git a/tools/build/v2/changes.txt b/tools/build/notes/changes.txt
index 7ac8ffa2ff..7ac8ffa2ff 100644
--- a/tools/build/v2/changes.txt
+++ b/tools/build/notes/changes.txt
diff --git a/tools/build/notes/hacking.txt b/tools/build/notes/hacking.txt
new file mode 100644
index 0000000000..3c059173bd
--- /dev/null
+++ b/tools/build/notes/hacking.txt
@@ -0,0 +1,138 @@
+Copyright 2003, 2006 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+ ----------------------------------
+ Boost.Build contributor guidelines
+ ----------------------------------
+
+Boost.Build is an open-source project. This means that we welcome and appreciate
+all contributions --- be it ideas, bug reports, or patches. This document
+contains guidelines which help to ensure that development goes smoothly and
+changes are made quickly.
+
+The guidelines are not mandatory, and you can decide for yourself which ones to
+follow. But note that the 10 minutes you save by not writing a comment, for
+example, might lead to a significantly longer delay for everyone.
+
+Before contributing, make sure you are subscribed to our mailing list
+
+ boost-build@lists.boost.org
+
+Additional resources include
+
+ - The issue tracker
+ http://trac.lvk.cs.msu.su/boost.build/
+
+ - mailing list:
+ boost-build@lists.boost.org
+ http://lists.boost.org/boost-build/
+
+
+BUGS and PATCHES
+
+Both bugs and patches can be sent to our mailing list.
+
+When reporting a bug, please try to provide the following information.
+
+ - What you did. A minimal reproducible testcase is very much appreciated.
+   A shell script with some annotations is much better than a verbose description
+   of the problem. A regression test is best (see test/test_system.html).
+ - What you got.
+ - What you expected.
+ - What version of Boost.Build and Boost.Jam you used. If possible,
+ please try to test with the CVS HEAD state.
+
+When submitting a patch, please:
+
+ - make a single patch for a single logical change
+ - follow the policies and coding conventions below,
+ - send patches in unified diff format,
+ (using either "cvs diff -u" or "diff -u")
+ - provide a log message together with the patch
+ - put the patch and the log message as attachment to your email.
+
+The purpose of a log message is to communicate what was changed, and *why*.
+Without a good log message, you might spend a lot of time later, wondering where
+a strange piece of code came from and why it was necessary.
+
+A good log message mentions each changed file and each rule/method, saying
+what happened to it, and why. Consider the following log message:
+
+ Better direct request handling.
+
+ * new/build-request.jam
+ (directly-requested-properties-adjuster): Redo.
+
+ * new/targets.jam
+ (main-target.generate-really): Adjust properties here.
+
+ * new/virtual-target.jam
+ (register-actual-name): New rule.
+    (virtual-target.actualize-no-scanner): Call the above, to detect bugs
+    where two virtual targets correspond to one Jam target name.
+
+The log messages for the last two files are good. They tell what was changed.
+The change to the first file is clearly undercommented.
+
+It's OK to use terse log messages for uninteresting changes, like ones induced
+by interface changes elsewhere.
+
+
+POLICIES.
+
+1. Testing.
+
+All serious changes must be tested. New rules must be tested by the module where
+they are declared. Test system (test/test_system.html) should be used to verify
+user-observable behaviour.
+
+2. Documentation.
+
+It turns out that it's hard to have too many comments, but it's easy to have too
+few. Please prepend each rule with a comment saying what the rule does and
+what the arguments mean. Stop for a minute and consider whether the comment makes
+sense for anybody else, and completely describes what the rule does. Generic phrases
+like "adjusts properties" are really not enough.
+
+When applicable, make changes to the user documentation as well.
+
+
+CODING CONVENTIONS.
+
+ 1. All names of rules and variables are lowercase with "-" to separate
+ words.
+
+ rule call-me-ishmael ( ) ...
+
+ 2. Names with dots in them are "intended globals". Ordinary globals use a
+ dot prefix:
+
+ .foobar
+ $(.foobar)
+
+ 3. Pseudofunctions or associations are <parameter>.<property>:
+
+ $(argument).name = hello ;
+ $($(argument).name)
+
+ 4. Class attribute names are prefixed with "self.":
+
+ self.x
+ $(self.x)
+
+ 5. Builtin rules are called via their ALL_UPPERCASE_NAMES:
+
+ DEPENDS $(target) : $(sources) ;
+
+ 6. Opening and closing braces go on separate lines:
+
+ if $(a)
+ {
+ #
+ }
+ else
+ {
+ #
+ }
diff --git a/tools/build/v2/notes/relative_source_paths.txt b/tools/build/notes/relative_source_paths.txt
index 2f05578932..2f05578932 100644
--- a/tools/build/v2/notes/relative_source_paths.txt
+++ b/tools/build/notes/relative_source_paths.txt
diff --git a/tools/build/v2/release_procedure.txt b/tools/build/notes/release_procedure.txt
index 007921a614..007921a614 100644
--- a/tools/build/v2/release_procedure.txt
+++ b/tools/build/notes/release_procedure.txt
diff --git a/tools/build/v2/nightly.sh b/tools/build/scripts/nightly.sh
index 0d9f310c05..0d9f310c05 100755
--- a/tools/build/v2/nightly.sh
+++ b/tools/build/scripts/nightly.sh
diff --git a/tools/build/scripts/roll.sh b/tools/build/scripts/roll.sh
new file mode 100755
index 0000000000..3702d31991
--- /dev/null
+++ b/tools/build/scripts/roll.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+# Copyright 2004 Aleksey Gurtovoy
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+set -e
+
+# Capture the version
+revision=`svnversion .`
+echo "SVN Revision $revision" >> timestamp.txt
+date >> timestamp.txt
+
+# Remove unnecessary top-level files
+find . -maxdepth 1 -type f | egrep -v "boost-build.jam|timestamp.txt|roll.sh|bootstrap.jam|build-system.jam|boost_build.png|index.html|hacking.txt|site-config.jam|user-config.jam|bootstrap.sh|bootstrap.bat|Jamroot.jam" | xargs rm -f
+
+# Build the documentation
+touch doc/jamroot.jam
+export BOOST_BUILD_PATH=`pwd`
+./bootstrap.sh
+cd doc
+../bjam --v2
+../bjam --v2 pdf
+cp `find bin -name "*.pdf"` ../..
+mv ../../standalone.pdf ../../userman.pdf
+cp ../../userman.pdf .
+rm -rf bin
+cd ..
+rm bjam
+
+# Get the boost logo.
+wget http://boost.sf.net/boost-build2/boost.png
+
+# Adjust the links, so they work with the standalone package
+perl -pi -e 's%../../../boost.png%boost.png%' index.html
+perl -pi -e 's%../../../doc/html/bbv2.html%doc/html/index.html%' index.html
+perl -pi -e 's%../../../doc/html/bbv2.installation.html%doc/html/bbv2.installation.html%' index.html
+
+# Make packages
+find . -name ".svn" | xargs rm -rf
+rm roll.sh
+chmod a+x engine/build.bat
+cd .. && zip -r boost-build.zip boost-build && tar --bzip2 -cf boost-build.tar.bz2 boost-build
+# Copy packages to a location where they are grabbed for beta.boost.org
+cp userman.pdf boost-build.zip boost-build.tar.bz2 ~/public_html/boost_build_nightly
+cd boost-build
+
+chmod -R u+w *
+# Upload docs to sourceforge
+x=`cat <<EOF
+<script src="http://www.google-analytics.com/urchin.js" type="text/javascript">
+</script>
+<script type="text/javascript">
+_uacct = "UA-2917240-2";
+urchinTracker();
+</script>
+EOF`
+echo $x
+perl -pi -e "s|</body>|$x</body>|" index.html `find doc -name '*.html'`
+scp -r doc example boost_build.png *.html hacking.txt vladimir_prus,boost@web.sourceforge.net:/home/groups/b/bo/boost/htdocs/boost-build2
+scp ../userman.pdf vladimir_prus,boost@web.sourceforge.net:/home/groups/b/bo/boost/htdocs/boost-build2/doc
diff --git a/tools/build/v2/to_merge.sh b/tools/build/scripts/to_merge.sh
index c514f2101e..c514f2101e 100755
--- a/tools/build/v2/to_merge.sh
+++ b/tools/build/scripts/to_merge.sh
diff --git a/tools/build/v2/bootstrap.jam b/tools/build/src/bootstrap.jam
index af3e8bf506..af3e8bf506 100644
--- a/tools/build/v2/bootstrap.jam
+++ b/tools/build/src/bootstrap.jam
diff --git a/tools/build/src/build-system.jam b/tools/build/src/build-system.jam
new file mode 100644
index 0000000000..247326a96f
--- /dev/null
+++ b/tools/build/src/build-system.jam
@@ -0,0 +1,981 @@
+# Copyright 2003, 2005, 2007 Dave Abrahams
+# Copyright 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This file is part of Boost Build version 2. You can think of it as forming the
+# main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
+
+import build-request ;
+import builtin ;
+import "class" : new ;
+import configure ;
+import config-cache ;
+import feature ;
+import generators ;
+import make ;
+import modules ;
+import os ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import utility ;
+import version ;
+import virtual-target ;
+
+
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Shortcut used in this module for accessing used command-line parameters.
+.argv = [ modules.peek : ARGV ] ;
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
+
+# Virtual targets obtained when building main targets referenced on the command
+# line. When running 'bjam --clean main_target' we want to clean only files
+# belonging to that main target so we need to record which targets are produced
+# for it.
+.results-of-main-targets = ;
+
+# Was an XML dump requested?
+.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+.default-toolset = ;
+.default-toolset-version = ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+rule command-line-free-features ( )
+{
+ return $(.command-line-free-features) ;
+}
+
+
+# Returns the location of the build system. The primary use case is building
+# Boost, where it is sometimes necessary to get the location of other components
+# (e.g. BoostBook files) and it is convenient to use locations relative to the
+# Boost Build path.
+#
+rule location ( )
+{
+ local r = [ modules.binding build-system ] ;
+ return $(r:P) ;
+}
+
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+rule set-default-toolset ( toolset : version ? )
+{
+ .default-toolset = $(toolset) ;
+ .default-toolset-version = $(version) ;
+}
+
+rule set-pre-build-hook ( function )
+{
+ .pre-build-hook = $(function) ;
+}
+
+rule set-post-build-hook ( function )
+{
+ .post-build-hook = $(function) ;
+}
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+local rule actual-clean-targets ( )
+{
+ # The cleaning is tricky. Say, if the user says 'bjam --clean foo' where 'foo'
+ # is a directory, then we want to clean targets which are in 'foo' as well
+ # as those in any children Jamfiles under foo but not in any unrelated
+ # Jamfiles. To achieve this we first mark all projects explicitly detected
+ # as targets for this build system run as needing to be cleaned.
+ for local t in $(targets)
+ {
+ if [ class.is-a $(t) : project-target ]
+ {
+ local project = [ $(t).project-module ] ;
+ .should-clean-project.$(project) = true ;
+ }
+ }
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ local targets-to-clean ;
+ for local t in $(.results-of-main-targets)
+ {
+ # Do not include roots or sources.
+ targets-to-clean += [ virtual-target.traverse $(t) ] ;
+ }
+ targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
+
+ local to-clean ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ # Remove only derived targets and only those asked to be cleaned,
+ # whether directly or by belonging to one of the removed projects.
+ local p = [ $(t).project ] ;
+ if [ $(t).action ] && ( $(t) in $(targets-to-clean) ||
+ [ should-clean-project [ $(p).project-module ] ] )
+ {
+ to-clean += $(t) ;
+ }
+ }
+
+ local to-clean-actual ;
+ for local t in $(to-clean)
+ {
+ to-clean-actual += [ $(t).actualize ] ;
+ }
+ return $(to-clean-actual) ;
+}
+
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can not reuse that code without a
+# project-targets instance.
+#
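+# For example (hypothetical id), "libs/regex/build//boost_regex" is split at the
+# "//" into a project reference ("libs/regex/build") and a main target name
+# ("boost_regex"); an id without "//" is treated as a project reference only.
+#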
+local rule find-target ( target-id )
+{
+ local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
+
+ local pm ;
+ if $(split)
+ {
+ pm = [ project.find $(split[1]) : "." ] ;
+ }
+ else
+ {
+ pm = [ project.find $(target-id) : "." ] ;
+ }
+
+ local result ;
+ if $(pm)
+ {
+ result = [ project.target $(pm) ] ;
+ }
+
+ if $(split)
+ {
+ result = [ $(result).find $(split[2]) ] ;
+ }
+
+ return $(result) ;
+}
+
+
+# Initializes a new configuration module.
+#
+local rule initialize-config-module ( module-name : location ? )
+{
+ project.initialize $(module-name) : $(location) ;
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+local rule load-config ( module-name : filename : path + : must-find ? )
+{
+ if $(.debug-config)
+ {
+ local path-string = $(path) ;
+ if $(path-string) = "" { path-string = . ; }
+ ECHO notice: Searching '$(path-string)' for $(module-name)
+ configuration file '$(filename)'. ;
+ }
+ local where = [ GLOB $(path) : $(filename) ] ;
+ if $(where)
+ {
+ where = [ NORMALIZE_PATH $(where[1]) ] ;
+ if $(.debug-config)
+ {
+ local where-string = $(where:D) ;
+ if $(where-string) = "" { where-string = . ; }
+ where-string = '$(where-string)' ;
+ ECHO notice: Loading $(module-name) configuration file '$(filename)'
+ from $(where-string:J=" "). ;
+ }
+
+ # Set source location so that path-constant in config files with
+ # relative paths work. This is of most importance for
+ # project-config.jam, but may be used in other config files as well.
+ local attributes = [ project.attributes $(module-name) ] ;
+ $(attributes).set source-location : $(where:D) : exact ;
+ modules.load $(module-name) : $(filename) : $(path) ;
+ project.load-used-projects $(module-name) ;
+ }
+ else if $(must-find) || $(.debug-config)
+ {
+ local path-string = $(path) ;
+ if $(path-string) = "" { path-string = . ; }
+ path-string = '$(path-string)' ;
+ path-string = $(path-string:J=" ") ;
+ if $(must-find)
+ {
+ import errors ;
+ errors.user-error Configuration file '$(filename)' not found "in"
+ $(path-string). ;
+ }
+ ECHO notice: Configuration file '$(filename)' not found "in"
+ $(path-string). ;
+ }
+ return $(where) ;
+}
+
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if specified.
+# If this configuration file is loaded, regular site and user configuration
+# files will not be. If a relative path is specified, the file is searched for
+# in the current folder.
+#
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
+# path, in that order. Not loaded in case the test-config configuration file is
+# loaded or the --ignore-site-config command-line option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly the file is looked for from the current working
+# directory and if the default one is used then it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded or an empty file name
+# is explicitly specified. If the file name has been given explicitly then the
+# file must exist.
+#
+# -- project-config --
+# Always named project-config.jam. Looked up in the current working folder and
+# then upwards through its parents up to the root folder.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
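+# As an illustrative sketch (not something the code below relies on), a minimal
+# user-config.jam usually just declares the available toolsets, e.g.:
+#
+#   using gcc ;
+#   using msvc : 12.0 ;
+#
+# and an alternative file can be selected with the --user-config=<path> option
+# described above.
+#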
+local rule load-configuration-files
+{
+ # Flag indicating that site configuration should not be loaded.
+ local ignore-site-config =
+ [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
+
+ initialize-config-module test-config ;
+ local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
+ local uq = [ MATCH \"(.*)\" : $(test-config) ] ;
+ if $(uq)
+ {
+ test-config = $(uq) ;
+ }
+ if $(test-config)
+ {
+ local where = [ load-config test-config : $(test-config:BS) :
+ $(test-config:D) ] ;
+ if $(where)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice: Regular site and user configuration files will" ;
+ ECHO "notice: be ignored due to the test configuration being"
+ "loaded." ;
+ }
+ }
+ else
+ {
+ test-config = ;
+ }
+ }
+
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local site-path = /etc $(user-path) ;
+ if [ os.name ] in NT CYGWIN
+ {
+ site-path = [ modules.peek : SystemRoot ] $(user-path) ;
+ }
+
+ if $(.debug-config) && ! $(test-config) && $(ignore-site-config)
+ {
+ ECHO "notice: Site configuration files will be ignored due to the" ;
+ ECHO "notice: --ignore-site-config command-line option." ;
+ }
+
+ initialize-config-module site-config ;
+ if ! $(test-config) && ! $(ignore-site-config)
+ {
+ load-config site-config : site-config.jam : $(site-path) ;
+ }
+
+ initialize-config-module user-config ;
+ if ! $(test-config)
+ {
+ local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
+ user-config = $(user-config[-1]) ;
+ user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ user-config = [ utility.unquote $(user-config) ] ;
+ local explicitly-requested = $(user-config) ;
+ user-config ?= user-config.jam ;
+
+ if $(user-config)
+ {
+ if $(explicitly-requested)
+ {
+ # Treat explicitly entered user paths as native OS path
+ # references and, if non-absolute, root them at the current
+ # working directory.
+ user-config = [ path.make $(user-config) ] ;
+ user-config = [ path.root $(user-config) [ path.pwd ] ] ;
+ user-config = [ path.native $(user-config) ] ;
+
+ if $(.debug-config)
+ {
+ ECHO notice: Loading explicitly specified user configuration
+ file: ;
+ ECHO " $(user-config)" ;
+ }
+
+ load-config user-config : $(user-config:BS) : $(user-config:D)
+ : must-exist ;
+ }
+ else
+ {
+ load-config user-config : $(user-config) : $(user-path) ;
+ }
+ }
+ else if $(.debug-config)
+ {
+ ECHO notice: User configuration file loading explicitly disabled. ;
+ }
+ }
+
+ # We look for project-config.jam from "." upward. I am not sure this is a 100%
+ # right decision; we might as well check for it only alongside the Jamroot
+ # file. However:
+ # - We need to load project-config.jam before Jamroot
+ # - We probably need to load project-config.jam even if there is no Jamroot
+ # - e.g. to implement automake-style out-of-tree builds.
+ local file = [ path.glob "." : project-config.jam ] ;
+ if ! $(file)
+ {
+ file = [ path.glob-in-parents "." : project-config.jam ] ;
+ }
+ if $(file)
+ {
+ initialize-config-module project-config : $(file:D) ;
+ load-config project-config : project-config.jam : $(file:D) ;
+ }
+
+ project.end-load ;
+}
+
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
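+# For example (illustrative command lines only), both of the following request
+# two toolsets and auto-configure any of them not configured yet:
+#
+#   bjam --toolset=gcc-4.8,msvc ...
+#   bjam toolset=gcc-4.8,msvc ...
+#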
+local rule process-explicit-toolset-requests
+{
+ local extra-properties ;
+
+ local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
+ local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
+
+ for local t in $(option-toolsets) $(feature-toolsets)
+ {
+ # Parse toolset-version/properties.
+ local toolset = [ MATCH ([^/]+)/?.* : $(t) ] ;
+ local properties = [ feature.expand-subfeatures <toolset>$(toolset) : true ] ;
+ local toolset-property = [ property.select <toolset> : $(properties) ] ;
+ local known ;
+ if $(toolset-property:G=) in [ feature.values <toolset> ]
+ {
+ known = true ;
+ }
+
+ # If the toolset is not known, configure it now.
+
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if ! $(known)
+ {
+ if $(.debug-config)
+ {
+ ECHO "notice: [cmdline-cfg] toolset $(toolset) not"
+ "previously configured; attempting to auto-configure now" ;
+ }
+ local t,v = [ MATCH ([^-]+)-?(.+)? : $(toolset) ] ;
+ toolset.using $(t,v[1]) : $(t,v[2]) ;
+ }
+
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if ! $(t) in $(.argv) $(feature-toolsets)
+ {
+ if $(.debug-config)
+ {
+ ECHO notice: [cmdline-cfg] adding toolset=$(t) to the build
+ request. ;
+ }
+ extra-properties += toolset=$(t) ;
+ }
+ }
+
+ return $(extra-properties) ;
+}
+
+
+# Returns whether the given project (identified by its project module) should be
+# cleaned because it or any of its parent projects have already been marked as
+# needing to be cleaned in this build. As an optimization, explicitly marks all
+# encountered projects as needing to be cleaned in case they have not already
+# been marked so.
+#
+local rule should-clean-project ( project )
+{
+ if ! $(.should-clean-project.$(project))-is-defined
+ {
+ local r = "" ;
+ if ! [ project.is-jamroot-module $(project) ]
+ {
+ local parent = [ project.attribute $(project) parent-module ] ;
+ if $(parent)
+ {
+ r = [ should-clean-project $(parent) ] ;
+ }
+ }
+ .should-clean-project.$(project) = $(r) ;
+ }
+
+ return $(.should-clean-project.$(project)) ;
+}
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+{
+ if --version in $(.argv)
+ {
+ version.print ;
+ EXIT ;
+ }
+
+ version.verify-engine-version ;
+
+ load-configuration-files ;
+
+ # Load explicitly specified toolset modules.
+ local extra-properties = [ process-explicit-toolset-requests ] ;
+
+ # Load the actual project build script modules. We always load the project
+ # in the current folder so that 'use-project' directives have a chance of being
+ # seen. Otherwise, we would not be able to refer to subprojects using target
+ # ids.
+ local current-project ;
+ {
+ local current-module = [ project.find "." : "." ] ;
+ if $(current-module)
+ {
+ current-project = [ project.target $(current-module) ] ;
+ }
+ }
+
+ # Load the default toolset module if no other has already been specified.
+ if ! [ feature.values <toolset> ]
+ {
+ local default-toolset = $(.default-toolset) ;
+ local default-toolset-version = ;
+ if $(default-toolset)
+ {
+ default-toolset-version = $(.default-toolset-version) ;
+ }
+ else
+ {
+ default-toolset = gcc ;
+ if [ os.name ] = NT
+ {
+ default-toolset = msvc ;
+ }
+ else if [ os.name ] = MACOSX
+ {
+ default-toolset = darwin ;
+ }
+ }
+
+ ECHO "warning: No toolsets are configured." ;
+ ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
+ ECHO "warning: If the default is wrong, your build may not work correctly." ;
+ ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ;
+ ECHO "warning: For more configuration options, please consult" ;
+ ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+
+ toolset.using $(default-toolset) : $(default-toolset-version) ;
+ }
+
+
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ # FIXME: This is not entirely true. Additional project files may be loaded
+ # only later via the project.find() rule when dereferencing encountered
+ # target ids containing explicit project references. See what to do about
+ # those as such 'lazy loading' may cause problems that are then extremely
+ # difficult to debug.
+ local build-request = [ build-request.from-command-line $(.argv)
+ $(extra-properties) ] ;
+ local target-ids = [ $(build-request).get-at 1 ] ;
+ local properties = [ $(build-request).get-at 2 ] ;
+
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
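+ # For instance (illustrative request), "toolset=gcc,msvc variant=debug"
+ # expands into one property set containing <toolset>gcc and one containing
+ # <toolset>msvc, both with <variant>debug, and the requested targets are
+ # built once per set.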
+ if $(properties)
+ {
+ expanded = [ build-request.expand-no-defaults $(properties) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
+ }
+ else
+ {
+ expanded = [ property-set.empty ] ;
+ }
+
+
+ # Check that we actually found something to build.
+ if ! $(current-project) && ! $(target-ids)
+ {
+ import errors ;
+ errors.user-error no Jamfile "in" current directory found, and no target
+ references specified. ;
+ }
+
+
+ # Flags indicating that this build system run has been started in order to
+ # clean existing targets instead of creating new ones. Note that these are not the
+ # final flag values as they may get changed later on due to some special
+ # targets being specified on the command line.
+ local clean ; if "--clean" in $(.argv) { clean = true ; }
+ local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
+
+
+ # List of explicitly requested files to build. Any target reference read
+ # from the command line that is not recognized as one of the targets
+ # defined in the loaded Jamfiles will be interpreted as an explicitly
+ # requested file to build. If any such files are explicitly requested then
+ # only those files and the targets they depend on will be built and they
+ # will be searched for among targets that would have been built had there
+ # been no explicitly requested files.
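+ # For example (hypothetical file name), "bjam hello.o" would build just
+ # hello.o, provided one of the otherwise requested targets would have
+ # produced it.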
+ local explicitly-requested-files ;
+
+
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ local targets ;
+ local virtual-targets ;
+ local actual-targets ;
+
+
+ # Process each target specified on the command-line and convert it into
+ # internal Boost Build target objects. Detect special clean target. If no
+ # main Boost Build targets were explicitly requested, use the current project
+ # as the target.
+ for local id in $(target-ids)
+ {
+ if $(id) = clean
+ {
+ clean = true ;
+ }
+ else
+ {
+ local t ;
+ if $(current-project)
+ {
+ t = [ $(current-project).find $(id) : no-error ] ;
+ }
+ else
+ {
+ t = [ find-target $(id) ] ;
+ }
+
+ if ! $(t)
+ {
+ ECHO "notice: could not find main target" $(id) ;
+ ECHO "notice: assuming it is a name of file to create." ;
+ explicitly-requested-files += $(id) ;
+ }
+ else
+ {
+ targets += $(t) ;
+ }
+ }
+ }
+ if ! $(targets)
+ {
+ targets += [ project.target [ project.module-name "." ] ] ;
+ }
+
+ if [ option.get dump-generators : : true ]
+ {
+ generators.dump ;
+ }
+
+ # We wish to put config.log in the build directory corresponding to Jamroot,
+ # so that the location does not differ depending on the directory we run the
+ # build from. The amount of indirection necessary here is scary.
+ local first-project = [ $(targets[0]).project ] ;
+ local first-project-root-location = [ $(first-project).get project-root ] ;
+ local first-project-root-module = [ project.load
+ $(first-project-root-location) ] ;
+ local first-project-root = [ project.target $(first-project-root-module) ] ;
+ local first-build-build-dir = [ $(first-project-root).build-dir ] ;
+ configure.set-log-file $(first-build-build-dir)/config.log ;
+ config-cache.load $(first-build-build-dir)/project-cache.jam ;
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
+ for local p in $(expanded)
+ {
+ .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
+ for local t in $(targets)
+ {
+ local g = [ $(t).generate $(p) ] ;
+ if ! [ class.is-a $(t) : project-target ]
+ {
+ .results-of-main-targets += $(g[2-]) ;
+ }
+ virtual-targets += $(g[2-]) ;
+ }
+ }
+
+
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in $(virtual-targets)
+ {
+ actual-targets += [ $(t).actualize ] ;
+ }
+
+ config-cache.save ;
+
+
+ # If XML data output has been requested prepare additional rules and targets
+ # so we can hook into Jam to collect build data while it is building and have
+ # it trigger the final XML report generation after all the planned targets
+ # have been built.
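+ # (An XML dump is requested with the --out-xml=<file> command-line option;
+ # see the .out-xml variable near the top of this module.)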
+ if $(.out-xml)
+ {
+ # Get a qualified virtual target name.
+ rule full-target-name ( target )
+ {
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+ local project-path = [ $(project).get location ] ;
+ return $(project-path)//$(name) ;
+ }
+
+ # Generate an XML file containing build statistics for each constituent.
+ #
+ rule out-xml ( xml-file : constituents * )
+ {
+ # Prepare valid XML header and footer with some basic info.
+ local nl = "
+" ;
+ local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+ local timestamp = [ modules.peek : JAMDATE ] ;
+ local cwd = [ PWD ] ;
+ local command = $(.argv) ;
+ local bb-version = [ version.boost-build ] ;
+ .header on $(xml-file) =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+ "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+ "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+ "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+ "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+ "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+ ;
+ .footer on $(xml-file) =
+ "$(nl)</build>" ;
+
+ # Generate the target dependency graph.
+ .contents on $(xml-file) +=
+ "$(nl) <targets>" ;
+ for local t in [ virtual-target.all-targets ]
+ {
+ local action = [ $(t).action ] ;
+ if $(action)
+ # If a target has no action, it has no dependencies.
+ {
+ local name = [ full-target-name $(t) ] ;
+ local sources = [ $(action).sources ] ;
+ local dependencies ;
+ for local s in $(sources)
+ {
+ dependencies += [ full-target-name $(s) ] ;
+ }
+
+ local path = [ $(t).path ] ;
+ local jam-target = [ $(t).actual-name ] ;
+
+ .contents on $(xml-file) +=
+ "$(nl) <target>"
+ "$(nl) <name><![CDATA[$(name)]]></name>"
+ "$(nl) <dependencies>"
+ "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+ "$(nl) </dependencies>"
+ "$(nl) <path><![CDATA[$(path)]]></path>"
+ "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+ "$(nl) </target>"
+ ;
+ }
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </targets>" ;
+
+ # Build $(xml-file) after $(constituents). Do so even if a
+ # constituent action fails and regenerate the xml on every bjam run.
+ INCLUDES $(xml-file) : $(constituents) ;
+ ALWAYS $(xml-file) ;
+ __ACTION_RULE__ on $(xml-file) =
+ build-system.out-xml.generate-action ;
+ out-xml.generate $(xml-file) ;
+ }
+
+ # The actual build actions are here; if we did this work in the actions
+ # clause we would have to form a valid command line containing the
+ # result of @(...) below (the name of the XML file).
+ #
+ rule out-xml.generate-action ( args * : xml-file
+ : command status start end user system : output ? )
+ {
+ local contents =
+ [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+ local f = @($(xml-file):E=$(contents)) ;
+ }
+
+ # Nothing to do here; the *real* actions happen in
+ # out-xml.generate-action.
+ actions quietly out-xml.generate { }
+
+ # Define the out-xml file target, which depends on all the targets so
+ # that it runs the collection after the targets have run.
+ out-xml $(.out-xml) : $(actual-targets) ;
+
+ # Set up a global __ACTION_RULE__ that records all the available
+ # statistics about each actual target in a variable "on" the --out-xml
+ # target.
+ #
+ rule out-xml.collect ( xml-file : target : command status start end user
+ system : output ? )
+ {
+ local nl = "
+" ;
+ # Open the action with some basic info.
+ .contents on $(xml-file) +=
+ "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+ # If we have an action object we can print out more detailed info.
+ local action = [ on $(target) return $(.action) ] ;
+ if $(action)
+ {
+ local action-name = [ $(action).action-name ] ;
+ local action-sources = [ $(action).sources ] ;
+ local action-props = [ $(action).properties ] ;
+
+ # The qualified name of the action with which we created the target.
+ .contents on $(xml-file) +=
+ "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+ # The sources that made up the target.
+ .contents on $(xml-file) +=
+ "$(nl) <sources>" ;
+ for local source in $(action-sources)
+ {
+ local source-actual = [ $(source).actual-name ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </sources>" ;
+
+ # The properties that define the conditions under which the
+ # target was built.
+ .contents on $(xml-file) +=
+ "$(nl) <properties>" ;
+ for local prop in [ $(action-props).raw ]
+ {
+ local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+ .contents on $(xml-file) +=
+ "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+ }
+ .contents on $(xml-file) +=
+ "$(nl) </properties>" ;
+ }
+
+ local locate = [ on $(target) return $(LOCATE) ] ;
+ locate ?= "" ;
+ .contents on $(xml-file) +=
+ "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+ "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+ "$(nl) <command><![CDATA[$(command)]]></command>"
+ "$(nl) <output><![CDATA[$(output)]]></output>" ;
+ .contents on $(xml-file) +=
+ "$(nl) </action>" ;
+ }
+
+ # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+ # the global module.
+ module
+ {
+ __ACTION_RULE__ = build-system.out-xml.collect
+ [ modules.peek build-system : .out-xml ] ;
+ }
+
+ IMPORT
+ build-system :
+ out-xml.collect
+ out-xml.generate-action
+ : :
+ build-system.out-xml.collect
+ build-system.out-xml.generate-action
+ ;
+ }
+
+ local j = [ option.get jobs ] ;
+ if $(j)
+ {
+ modules.poke : PARALLELISM : $(j) ;
+ }
+
+ local k = [ option.get keep-going : true : true ] ;
+ if $(k) in "on" "yes" "true"
+ {
+ modules.poke : KEEP_GOING : 1 ;
+ }
+ else if $(k) in "off" "no" "false"
+ {
+ modules.poke : KEEP_GOING : 0 ;
+ }
+ else
+ {
+ EXIT "error: Invalid value for the --keep-going option" ;
+ }
+
+ # The 'all' pseudo target is not strictly needed except in the case when we
+ # use it below, but people often assume they always have this target
+ # available and do not declare it themselves before use, which may cause
+ # build failures with an error message about not being able to build the
+ # 'all' target.
+ NOTFILE all ;
+
+ # And now that all the actual raw Jam targets and all the dependencies
+ # between them have been prepared all that is left is to tell Jam to update
+ # those targets.
+ if $(explicitly-requested-files)
+ {
+ # Note that this case cannot be joined with the regular one when only
+ # exact Boost Build targets are requested, as here we do not build those
+ # requested targets but only use them to construct the dependency tree
+ # needed to build the explicitly requested files.
+ UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ;
+ }
+ else if $(cleanall)
+ {
+ UPDATE clean-all ;
+ }
+ else if $(clean)
+ {
+ common.Clean clean : [ actual-clean-targets ] ;
+ UPDATE clean ;
+ }
+ else
+ {
+ configure.print-configure-checks-summary ;
+
+ if $(.pre-build-hook)
+ {
+ $(.pre-build-hook) ;
+ }
+
+ DEPENDS all : $(actual-targets) ;
+ if UPDATE_NOW in [ RULENAMES ]
+ {
+ local ok = [ UPDATE_NOW all $(.out-xml) ] ;
+ if $(.post-build-hook)
+ {
+ $(.post-build-hook) $(ok) ;
+ }
+ # Prevent automatic update of the 'all' target, now that we have
+ # explicitly updated what we wanted.
+ UPDATE ;
+ }
+ else
+ {
+ UPDATE all $(.out-xml) ;
+ }
+ }
+}
diff --git a/tools/build/v2/build/__init__.py b/tools/build/src/build/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/build/__init__.py
+++ b/tools/build/src/build/__init__.py
diff --git a/tools/build/src/build/ac.jam b/tools/build/src/build/ac.jam
new file mode 100644
index 0000000000..71bc16c374
--- /dev/null
+++ b/tools/build/src/build/ac.jam
@@ -0,0 +1,303 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property-set ;
+import path ;
+import modules ;
+import "class" ;
+import errors ;
+import configure ;
+import project ;
+import virtual-target ;
+import generators ;
+import property ;
+import print ;
+
+project.initialize $(__name__) ;
+.project = [ project.current ] ;
+project ac ;
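+
+# This module implements lightweight, autoconf-style configuration checks: it
+# test-compiles a generated source file that includes a candidate header
+# (generate-include) and test-links a trivial program (generate-main) against
+# candidate library names, caching the results through config-cache.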
+
+rule generate-include ( target : sources * : properties * )
+{
+ local header = [ property.select <include> : $(properties) ] ;
+ print.output $(target) ;
+ print.text "#include <$(header:G=)>" : true ;
+}
+
+rule generate-main ( target : sources * : properties * )
+{
+ print.output $(target) ;
+ print.text "int main() {}" : true ;
+}
+
+rule find-include-path ( properties : header : provided-path ? )
+{
+ if $(provided-path) && [ path.exists [ path.root $(header) $(provided-path) ] ]
+ {
+ return $(provided-path) ;
+ }
+ else
+ {
+ local a = [ class.new action : ac.generate-include : [ property-set.create <include>$(header) ] ] ;
+ local cpp = [ class.new file-target $(header).cpp exact : CPP : $(.project) : $(a) ] ;
+ cpp = [ virtual-target.register $(cpp) ] ;
+ local result = [ generators.construct $(.project) $(header) : OBJ : $(properties) : $(cpp) : true ] ;
+ local jam-targets ;
+ for t in $(result[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+ if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
+ : ignore-minus-n : ignore-minus-q ]
+ {
+ return %default ;
+ }
+ }
+}
+
+rule construct-library ( name : property-set : provided-path ? )
+{
+ property-set = [ $(property-set).refine [ property-set.create $(link-opt) ] ] ;
+ local lib-props = [ $(property-set).add-raw <name>$(name) <search>$(provided-path) ] ;
+ return [ generators.construct $(.project) lib-$(name)
+ : SEARCHED_LIB : $(lib-props) : : true ] ;
+}
+
+
+rule find-library ( properties : names + : provided-path ? )
+{
+ local result ;
+ if ! $(.main.cpp)
+ {
+ local a = [ class.new action : ac.generate-main :
+ [ property-set.empty ] ] ;
+ .main.cpp = [ virtual-target.register
+ [ class.new file-target main.cpp exact
+ : CPP : $(.project) : $(a) ] ] ;
+ }
+ if [ $(properties).get <link> ] = shared
+ {
+ link-opts = <link>shared <link>static ;
+ }
+ else
+ {
+ link-opts = <link>static <link>shared ;
+ }
+ while $(link-opts)
+ {
+ local names-iter = $(names) ;
+ properties = [ $(properties).refine [ property-set.create $(link-opts[1]) ] ] ;
+ while $(names-iter)
+ {
+ local name = $(names-iter[1]) ;
+ local lib = [ construct-library $(name) : $(properties) : $(provided-path) ] ;
+ local test = [ generators.construct $(.project) $(name) : EXE
+ : [ $(properties).add $(lib[1]) ] : $(.main.cpp) $(lib[2-])
+ : true ] ;
+ local jam-targets ;
+ for t in $(test[2-])
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+ if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
+ : ignore-minus-n : ignore-minus-q ]
+ {
+ result = $(name) $(link-opts[1]) ;
+ names-iter = ; link-opts = ; # break
+ }
+ names-iter = $(names-iter[2-]) ;
+ }
+ link-opts = $(link-opts[2-]) ;
+ }
+ return $(result) ;
+}
+
+class ac-library : basic-target
+{
+ import errors ;
+ import indirect ;
+ import virtual-target ;
+ import ac ;
+ import configure ;
+ import config-cache ;
+
+ rule __init__ ( name : project : requirements * : include-path ? : library-path ? : library-name ? )
+ {
+ basic-target.__init__ $(name) : $(project) : : $(requirements) ;
+
+ reconfigure $(include-path) : $(library-path) : $(library-name) ;
+ }
+
+ rule set-header ( header )
+ {
+ self.header = $(header) ;
+ }
+
+ rule set-default-names ( names + )
+ {
+ self.default-names = $(names) ;
+ }
+
+ rule reconfigure ( include-path ? : library-path ? : library-name ? )
+ {
+ if $(include-path) || $(library-path) || $(library-name)
+ {
+ check-not-configured ;
+
+ self.include-path = $(include-path) ;
+ self.library-path = $(library-path) ;
+ self.library-name = $(library-name) ;
+ }
+ }
+
+ rule set-target ( target )
+ {
+ check-not-configured ;
+ self.target = $(target) ;
+ }
+
+ rule check-not-configured ( )
+ {
+ if $(self.include-path) || $(self.library-path) || $(self.library-name) || $(self.target)
+ {
+ errors.user-error [ name ] "is already configured" ;
+ }
+ }
+
+ rule construct ( name : sources * : property-set )
+ {
+ if $(self.target)
+ {
+ return [ $(self.target).generate $(property-set) ] ;
+ }
+ else
+ {
+ local use-environment ;
+ if ! $(self.library-name) && ! $(self.include-path) && ! $(self.library-path)
+ {
+ use-environment = true ;
+ }
+ local libnames = $(self.library-name) ;
+ if ! $(libnames) && $(use-environment)
+ {
+ libnames = [ modules.peek : $(name:U)_NAME ] ;
+ # Backward compatibility only.
+ libnames ?= [ modules.peek : $(name:U)_BINARY ] ;
+ }
+ libnames ?= $(self.default-names) ;
+
+ local include-path = $(self.include-path) ;
+ if ! $(include-path) && $(use-environment)
+ {
+ include-path = [ modules.peek : $(name:U)_INCLUDE ] ;
+ }
+
+ local library-path = $(self.library-path) ;
+ if ! $(library-path) && $(use-environment)
+ {
+ library-path = [ modules.peek : $(name:U)_LIBRARY_PATH ] ;
+ # Backwards compatibility only
+ library-path ?= [ modules.peek : $(name:U)_LIBPATH ] ;
+ }
+
+ local toolset = [ $(property-set).get <toolset> ] ;
+ local toolset-version-property = "<toolset-$(toolset):version>" ;
+ local relevant = [ property.select <target-os> <toolset>
+ $(toolset-version-property) <link> <address-model> <architecture> :
+ [ $(property-set).raw ] ] ;
+
+ local key = ac-library-$(name)-$(relevant:J=-) ;
+ local lookup = [ config-cache.get $(key) ] ;
+
+ if $(lookup)
+ {
+ if $(lookup) = missing
+ {
+ configure.log-library-search-result $(name) : "no (cached)" ;
+ return [ property-set.empty ] ;
+ }
+ else
+ {
+ local includes = $(lookup[1]) ;
+ if $(includes) = %default
+ {
+ includes = ;
+ }
+ local library = [ ac.construct-library $(lookup[2]) :
+ [ $(property-set).refine [ property-set.create $(lookup[3]) ] ] : $(library-path) ] ;
+ configure.log-library-search-result $(name) : "yes (cached)" ;
+ return [ $(library[1]).add-raw <include>$(includes) ] $(library[2-]) ;
+ }
+ }
+ else
+ {
+ local includes = [ ac.find-include-path $(property-set) : $(self.header) : $(include-path) ] ;
+ local library = [ ac.find-library $(property-set) : $(libnames) : $(library-path) ] ;
+ if $(includes) && $(library)
+ {
+ config-cache.set $(key) : $(includes) $(library) ;
+ if $(includes) = %default
+ {
+ includes = ;
+ }
+ library = [ ac.construct-library $(library[1]) :
+ [ $(property-set).refine [ property-set.create $(library[2]) ] ] : $(library-path) ] ;
+ configure.log-library-search-result $(name) : "yes" ;
+ return [ $(library[1]).add-raw <include>$(includes) ] $(library[2-]) ;
+ }
+ else
+ {
+ config-cache.set $(key) : missing ;
+ configure.log-library-search-result $(name) : "no" ;
+ return [ property-set.empty ] ;
+ }
+ }
+ }
+ }
+}
+
+class check-library-worker
+{
+ import property-set ;
+ import targets ;
+ import property ;
+
+ rule __init__ ( target : true-properties * : false-properties * )
+ {
+ self.target = $(target) ;
+ self.true-properties = $(true-properties) ;
+ self.false-properties = $(false-properties) ;
+ }
+
+ rule check ( properties * )
+ {
+ local choosen ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+ local ps = [ property-set.create $(properties) ] ;
+ ps = [ $(ps).propagated ] ;
+ local generated =
+ [ targets.generate-from-reference $(self.target) : $(p) : $(ps) ] ;
+ if $(generated[2])
+ {
+ choosen = $(self.true-properties) ;
+ }
+ else
+ {
+ choosen = $(self.false-properties) ;
+ }
+ return [ property.evaluate-conditionals-in-context $(choosen) :
+ $(properties) ] ;
+ }
+}
+
+rule check-library ( target : true-properties * : false-properties * )
+{
+ local instance = [ class.new check-library-worker $(target) :
+ $(true-properties) : $(false-properties) ] ;
+ return <conditional>@$(instance).check ;
+}
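+
+# Usage sketch (hypothetical target and feature names, not defined in this
+# module): add a define to a target's requirements only when the referenced
+# library target can actually be built in the current configuration:
+#
+#   exe app : app.cpp : [ check-library /zlib//zlib : <define>HAVE_ZLIB ] ;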
diff --git a/tools/build/src/build/alias.jam b/tools/build/src/build/alias.jam
new file mode 100644
index 0000000000..9ac8cb8950
--- /dev/null
+++ b/tools/build/src/build/alias.jam
@@ -0,0 +1,74 @@
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'alias' rule and the associated target class.
+#
+# Alias is just a main target which returns its source targets without any
+# processing. For example:
+#
+# alias bin : hello test_hello ;
+# alias lib : helpers xml_parser ;
+#
+# Another important use of 'alias' is to conveniently group source files:
+#
+# alias platform-src : win.cpp : <os>NT ;
+# alias platform-src : linux.cpp : <os>LINUX ;
+# exe main : main.cpp platform-src ;
+#
+# Lastly, it is possible to create a local alias for some target, with different
+# properties:
+#
+# alias big_lib : : @/external_project/big_lib/<link>static ;
+#
+
+import "class" : new ;
+import project ;
+import property-set ;
+import targets ;
+
+
+class alias-target-class : basic-target
+{
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ return [ property-set.empty ] $(source-targets) ;
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
+ return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
+ }
+}
+
+
+# Declares the 'alias' target. It will process its source virtual-targets by
+# returning them unaltered as its own constructed virtual-targets.
+#
+rule alias ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new alias-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project)
+ ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ]
+ ] ;
+}
+
+
+IMPORT $(__name__) : alias : : alias ;
diff --git a/tools/build/v2/build/alias.py b/tools/build/src/build/alias.py
index 575e53609d..575e53609d 100755
--- a/tools/build/v2/build/alias.py
+++ b/tools/build/src/build/alias.py
diff --git a/tools/build/src/build/build-request.jam b/tools/build/src/build/build-request.jam
new file mode 100644
index 0000000000..2a1bbb467c
--- /dev/null
+++ b/tools/build/src/build/build-request.jam
@@ -0,0 +1,322 @@
+# Copyright 2002 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import sequence ;
+import set ;
+import regex ;
+import feature ;
+import property ;
+import container ;
+import string ;
+
+
+# Transform property-set by applying f to each component property.
+#
+local rule apply-to-property-set ( f property-set )
+{
+ local properties = [ feature.split $(property-set) ] ;
+ return [ string.join [ $(f) $(properties) ] : / ] ;
+}
+
+
+# Expand the given build request by combining all property-sets which do not
+# specify conflicting non-free features. Expects all the project files to
+# already be loaded.
+#
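+# For example, given features declared as in the __test__ rule below, the
+# request "gcc-3.0.1/stlport msvc/stlport msvc debug" expands to:
+#
+#   <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+#   <toolset>msvc/<stdlib>stlport/<variant>debug
+#   <toolset>msvc/<variant>debug
+#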
+rule expand-no-defaults ( property-sets * )
+{
+ # First make all features and subfeatures explicit.
+ local expanded-property-sets = [ sequence.transform apply-to-property-set
+ feature.expand-subfeatures : $(property-sets) ] ;
+
+ # Now combine all of the expanded property-sets
+ local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
+
+ return $(product) ;
+}
+
+
+# Implementation of x-product, below. Expects all the project files to already
+# be loaded.
+#
+local rule x-product-aux ( property-sets + )
+{
+ local result ;
+ local p = [ feature.split $(property-sets[1]) ] ;
+ local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
+ local seen ;
+ # No conflict with things used at a higher level?
+ if ! [ set.intersection $(f) : $(x-product-used) ]
+ {
+ local x-product-seen ;
+ {
+ # Do not mix in any conflicting features.
+ local x-product-used = $(x-product-used) $(f) ;
+
+ if $(property-sets[2])
+ {
+ local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
+ result = $(property-sets[1])/$(rest) ;
+ }
+
+ result ?= $(property-sets[1]) ;
+ }
+
+ # If we did not encounter a conflicting feature lower down, do not
+ # recurse again.
+ if ! [ set.intersection $(f) : $(x-product-seen) ]
+ {
+ property-sets = ;
+ }
+
+ seen = $(x-product-seen) ;
+ }
+
+ if $(property-sets[2])
+ {
+ result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
+ }
+
+ # Note that we have seen these features so that higher levels will recurse
+ # again without them set.
+ x-product-seen += $(f) $(seen) ;
+ return $(result) ;
+}
+
+
+# Return the cross-product of all elements of property-sets, less any that would
+# contain conflicting values for single-valued features. Expects all the project
+# files to already be loaded.
+#
+local rule x-product ( property-sets * )
+{
+ if $(property-sets).non-empty
+ {
+ # Prepare some "scoped globals" that can be used by the implementation
+ # function, x-product-aux.
+ local x-product-seen x-product-used ;
+ return [ x-product-aux $(property-sets) : $(feature-space) ] ;
+ }
+ # Otherwise return empty.
+}
+
+
+# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
+# an implicit value. Expects all the project files to already be loaded.
+#
+local rule looks-like-implicit-value ( v )
+{
+ if [ feature.is-implicit-value $(v) ]
+ {
+ return true ;
+ }
+ else
+ {
+ local split = [ regex.split $(v) - ] ;
+ if [ feature.is-implicit-value $(split[1]) ]
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Takes the command line tokens (such as taken from the ARGV rule) and
+# constructs a build request from them. Returns a vector of two vectors (where
+# "vector" means container.jam's "vector"). First is the set of targets
+# specified in the command line, and second is the set of requested build
+# properties. Expects all the project files to already be loaded.
+#
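+# For example (see the __test__ rule below), the command line
+# "bjam debug gcc/runtime-link=dynamic,static" yields no explicit targets and
+# the properties: debug gcc/<runtime-link>dynamic gcc/<runtime-link>static.
+#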
+rule from-command-line ( command-line * )
+{
+ local targets ;
+ local properties ;
+
+ command-line = $(command-line[2-]) ;
+ local skip-next = ;
+ for local e in $(command-line)
+ {
+ if $(skip-next)
+ {
+ skip-next = ;
+ }
+ else if ! [ MATCH ^(-) : $(e) ]
+ {
+ # Build request spec either has "=" in it or completely consists of
+ # implicit feature values.
+ local fs = feature-space ;
+ if [ MATCH "(.*=.*)" : $(e) ]
+ || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
+ {
+ properties += [ convert-command-line-element $(e) :
+ $(feature-space) ] ;
+ }
+ else if $(e)
+ {
+ targets += $(e) ;
+ }
+ }
+ else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
+ {
+ skip-next = true ;
+ }
+ }
+ return [ new vector
+ [ new vector $(targets) ]
+ [ new vector $(properties) ] ] ;
+}
+
+
+# Converts one element of command line build request specification into internal
+# form. Expects all the project files to already be loaded.
+#
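+# For example, the element "gcc,borland/runtime-link=static" becomes the two
+# property paths gcc/<runtime-link>static and borland/<runtime-link>static (see
+# the __test__ rule below).
+#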
+local rule convert-command-line-element ( e )
+{
+ local result ;
+ local parts = [ regex.split $(e) "/" ] ;
+ while $(parts)
+ {
+ local p = $(parts[1]) ;
+ local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
+ local lresult ;
+ local feature ;
+ local values ;
+ if $(m)
+ {
+ feature = $(m[1]) ;
+ values = [ regex.split $(m[2]) "," ] ;
+ lresult = <$(feature)>$(values) ;
+ }
+ else
+ {
+ lresult = [ regex.split $(p) "," ] ;
+ }
+
+ if $(feature) && free in [ feature.attributes <$(feature)> ]
+ {
+ # If we have a free feature, then the value is everything
+ # until the end of the command line token. Slashes in
+ # the following string are not taken to mean separation
+ # of properties. Commas are also not interpreted specially.
+ values = $(values:J=,) ;
+ values = $(values) $(parts[2-]) ;
+ values = $(values:J=/) ;
+ lresult = <$(feature)>$(values) ;
+ parts = ;
+ }
+
+ if ! [ MATCH (.*-.*) : $(p) ]
+ {
+ # property.validate cannot handle subfeatures, so we avoid the check
+ # here.
+ for local p in $(lresult)
+ {
+ property.validate $(p) : $(feature-space) ;
+ }
+ }
+
+ if ! $(result)
+ {
+ result = $(lresult) ;
+ }
+ else
+ {
+ result = $(result)/$(lresult) ;
+ }
+
+ parts = $(parts[2-]) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import feature ;
+
+ feature.prepare-test build-request-test-temp ;
+
+ import build-request ;
+ import build-request : expand-no-defaults : build-request.expand-no-defaults ;
+ import errors : try catch ;
+ import feature : feature subfeature ;
+
+ feature toolset : gcc msvc borland : implicit ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+ 3.0 3.0.1 3.0.2 : optional ;
+
+ feature variant : debug release : implicit composite ;
+ feature inlining : on off ;
+ feature "include" : : free ;
+
+ feature stdlib : native stlport : implicit ;
+
+ feature runtime-link : dynamic static : symmetric ;
+
+ # Empty build requests should expand to empty.
+ assert.result
+ : build-request.expand-no-defaults ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
+ <toolset>msvc/<variant>debug
+ <variant>debug/<toolset>msvc/<stdlib>stlport
+ : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
+
+ assert.result
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
+ <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
+ : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
+
+ assert.result
+ <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
+ <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
+ : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
+
+ local r ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+
+ try ;
+ {
+ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+ }
+ catch \"static\" is not an implicit feature value ;
+
+ r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : target ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+ gcc/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+ borland/<runtime-link>static ;
+
+ r = [ build-request.from-command-line bjam gcc-3.0 ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+
+ feature.finish-test build-request-test-temp ;
+}
diff --git a/tools/build/src/build/build_request.py b/tools/build/src/build/build_request.py
new file mode 100644
index 0000000000..118033e1e1
--- /dev/null
+++ b/tools/build/src/build/build_request.py
@@ -0,0 +1,216 @@
+# Status: being ported by Vladimir Prus
+# TODO: need to re-compare with mainline of .jam
+# Base revision: 40480
+#
+# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import b2.build.feature
+feature = b2.build.feature
+
+from b2.util.utility import *
+import b2.build.property_set as property_set
+
+def expand_no_defaults (property_sets):
+ """ Expand the given build request by combining all property_sets which don't
+ specify conflicting non-free features.
+ """
+ # First make all features and subfeatures explicit
+ expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
+
+ # Now combine all of the expanded property_sets
+ product = __x_product (expanded_property_sets)
+
+ return [property_set.create(p) for p in product]
+
+
+def __x_product (property_sets):
+ """ Return the cross-product of all elements of property_sets, less any
+ that would contain conflicting values for single-valued features.
+ """
+ x_product_seen = set()
+ return __x_product_aux (property_sets, x_product_seen)[0]
+
+def __x_product_aux (property_sets, seen_features):
+ """Returns non-conflicting combinations of property sets.
+
+ property_sets is a list of PropertySet instances. seen_features is a set of
+ features already seen at an outer level.
+
+ Returns a tuple of:
+ - a list of lists of Property instances, such that within each list no two
+ Property instances have the same feature, and no Property is for a feature
+ in seen_features.
+ - set of features we saw in property_sets
+ """
+ if not property_sets:
+ return ([], set())
+
+ properties = property_sets[0].all()
+
+ these_features = set()
+ for p in property_sets[0].non_free():
+ these_features.add(p.feature())
+
+ # Note: the algorithm as implemented here, as in the original Jam code, appears
+ # to detect conflicts based on features, not properties. For example, if the
+ # command line build request says:
+ #
+ # <a>1/<b>1 <c>1/<b>1
+ #
+ # it will decide that those two property sets conflict, because they both specify
+ # a value for 'b', and will not try building "<a>1 <c>1 <b>1", but rather two
+ # different property sets. This is a topic for future fixing, maybe.
+ if these_features & seen_features:
+
+ (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
+ return (inner_result, inner_seen | these_features)
+
+ else:
+
+ result = []
+ (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
+ if inner_result:
+ for inner in inner_result:
+ result.append(properties + inner)
+ else:
+ result.append(properties)
+
+ if inner_seen & these_features:
+ # Some of the elements in property_sets[1:] conflict with elements of
+ # property_sets[0]. Try again, this time omitting the elements of property_sets[0].
+ (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
+ result.extend(inner_result2)
+
+ return (result, inner_seen | these_features)
+
+
+
+def looks_like_implicit_value(v):
+ """Returns true if 'v' is either implicit value, or
+ the part before the first '-' symbol is implicit value."""
+ if feature.is_implicit_value(v):
+ return 1
+ else:
+ split = v.split("-")
+ if feature.is_implicit_value(split[0]):
+ return 1
+
+ return 0
+
+def from_command_line(command_line):
+ """Takes the command line tokens (such as taken from ARGV rule)
+ and constructs build request from it. Returns a list of two
+ lists. First is the set of targets specified in the command line,
+ and second is the set of requested build properties."""
+
+ targets = []
+ properties = []
+
+ for e in command_line:
+ if e[:1] != "-":
+ # Build request spec either has "=" in it, or completely
+ # consists of implicit feature values.
+ if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
+ properties += convert_command_line_element(e)
+ elif e:
+ targets.append(e)
+
+ return [targets, properties]
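+
+# Illustrative example (mirroring the commented-out Jam-style tests kept at the
+# bottom of this file): for the tokens ["debug", "gcc/runtime-link=dynamic,static"]
+# from_command_line returns no targets and property sets equivalent to debug,
+# gcc/<runtime-link>dynamic and gcc/<runtime-link>static.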
+
+# Converts one element of command line build request specification into
+# internal form.
+def convert_command_line_element(e):
+
+ result = None
+ parts = e.split("/")
+ for p in parts:
+ m = p.split("=")
+ if len(m) > 1:
+ feature = m[0]
+ values = m[1].split(",")
+ lresult = [("<%s>%s" % (feature, v)) for v in values]
+ else:
+ lresult = p.split(",")
+
+ if p.find('-') == -1:
+ # FIXME: first port property.validate
+ # property.validate cannot handle subfeatures,
+ # so we avoid the check here.
+ #for p in lresult:
+ # property.validate(p)
+ pass
+
+ if not result:
+ result = lresult
+ else:
+ result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
+
+ return [property_set.create(b2.build.feature.split(r)) for r in result]
+
+###
+### rule __test__ ( )
+### {
+### import assert feature ;
+###
+### feature.prepare-test build-request-test-temp ;
+###
+### import build-request ;
+### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
+### import errors : try catch ;
+### import feature : feature subfeature ;
+###
+### feature toolset : gcc msvc borland : implicit ;
+### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+### 3.0 3.0.1 3.0.2 : optional ;
+###
+### feature variant : debug release : implicit composite ;
+### feature inlining : on off ;
+### feature "include" : : free ;
+###
+### feature stdlib : native stlport : implicit ;
+###
+### feature runtime-link : dynamic static : symmetric ;
+###
+###
+### local r ;
+###
+### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+###
+### try ;
+### {
+###
+### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+### }
+### catch \"static\" is not a value of an implicit feature ;
+###
+###
+### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
+### assert.equal [ $(r).get-at 1 ] : target ;
+### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
+###
+### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
+###
+### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
+### gcc/<runtime-link>static ;
+###
+### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
+### borland/<runtime-link>static ;
+###
+### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
+### assert.equal [ $(r).get-at 1 ] : ;
+### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+###
+### feature.finish-test build-request-test-temp ;
+### }
+###
+###
diff --git a/tools/build/src/build/config-cache.jam b/tools/build/src/build/config-cache.jam
new file mode 100644
index 0000000000..5297dbb84b
--- /dev/null
+++ b/tools/build/src/build/config-cache.jam
@@ -0,0 +1,64 @@
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import errors ;
+import regex ;
+import path ;
+import project ;
+
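+# Returns the value previously recorded for the configuration variable 'name',
+# if any.
+#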
+rule get ( name )
+{
+ return $(.vars.$(name)) ;
+}
+
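+# Records 'value' for the configuration variable 'name' so that it can be
+# written out by 'save' and retrieved later via 'get'.
+#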
+rule set ( name : value * )
+{
+ .all-vars += $(name) ;
+ .vars.$(name) = $(value) ;
+}
+
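+# Writes all recorded variables to the cache file registered via 'load', as a
+# small Jam module that replays the corresponding 'set' calls. Does nothing if
+# no cache file has been registered.
+#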
+rule save ( )
+{
+ if $(.cache-file)
+ {
+ local cache-file-native = [ path.native $(.cache-file) ] ;
+ local target = <new-cache-file>$(cache-file-native) ;
+ local contents = "# Automatically generated by Boost.Build.\n# Do not edit.\n\nmodule config-cache {\n" ;
+ for local var in $(.all-vars)
+ {
+ local transformed ;
+ for local value in $(.vars.$(var))
+ {
+ transformed += [ regex.escape $(value) : \"\\ : \\ ] ;
+ }
+ local quoted = \"$(transformed)\" ;
+ contents += " set \"$(var)\" : $(quoted:J= ) ;\n" ;
+ }
+ contents += "}\n" ;
+ FILE_CONTENTS on $(target) = $(contents) ;
+ ALWAYS $(target) ;
+ config-cache.write $(target) ;
+ UPDATE_NOW $(target) : [ modules.peek configure : .log-fd ] : ignore-minus-n ;
+ }
+}
+
+actions write
+{
+ @($(STDOUT):E=$(FILE_CONTENTS:J=)) > "$(<)"
+}
+
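+# Registers 'cache-file' as the configuration cache location and, unless
+# --reconfigure was given on the command line, includes any previously saved
+# cache so that earlier results become available via 'get'. May only be called
+# once.
+#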
+rule load ( cache-file )
+{
+ if $(.cache-file)
+ {
+ errors.error duplicate load of cache file ;
+ }
+ cache-file = [ path.native $(cache-file) ] ;
+ if [ path.exists $(cache-file) ] && ! ( --reconfigure in [ modules.peek : ARGV ] )
+ {
+ include <old-cache-file>$(cache-file) ;
+ }
+ .cache-file = $(cache-file) ;
+}
diff --git a/tools/build/src/build/configure.jam b/tools/build/src/build/configure.jam
new file mode 100644
index 0000000000..543bade359
--- /dev/null
+++ b/tools/build/src/build/configure.jam
@@ -0,0 +1,292 @@
+# Copyright (c) 2010 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines functions to help with two main tasks:
+#
+# - Discovering build-time configuration for the purposes of adjusting the build
+# process.
+# - Reporting what is built, and how it is configured.
+
+import "class" : new ;
+import common ;
+import path ;
+import property ;
+import property-set ;
+import targets ;
+import config-cache ;
+
+
+rule log-summary ( )
+{
+}
+
+
+.width = 30 ;
+
+rule set-width ( width )
+{
+ .width = $(width) ;
+}
+
+
+# Declare that the components specified by the parameter exist.
+#
+rule register-components ( components * )
+{
+ .components += $(components) ;
+}
+
+
+# Declare that the components specified by the parameters will be built.
+#
+rule components-building ( components * )
+{
+ .built-components += $(components) ;
+}
+
+
+# Report something about a component's configuration that the user should
+# know about.
+#
+rule log-component-configuration ( component : message )
+{
+ # FIXME: Implement per-property-set logs.
+ .component-logs.$(component) += $(message) ;
+}
+
+
+rule log-check-result ( result )
+{
+ if ! $(.announced-checks)
+ {
+ ECHO "Performing configuration checks\n" ;
+ .announced-checks = 1 ;
+ }
+
+ ECHO $(result) ;
+ # FIXME: Unfinished code. Nothing seems to set .check-results at the moment.
+ #.check-results += $(result) ;
+}
+
+
+rule log-library-search-result ( library : result )
+{
+ local x = [ PAD " - $(library)" : $(.width) ] ;
+ log-check-result "$(x) : $(result)" ;
+}
+
+
+rule print-component-configuration ( )
+{
+ # FIXME: See what was intended with this initial assignment.
+ # local c = [ sequence.unique $(.components) ] ;
+
+ ECHO "\nComponent configuration:\n" ;
+ local c ;
+ for c in $(.components)
+ {
+ local s ;
+ if $(c) in $(.built-components)
+ {
+ s = "building" ;
+ }
+ else
+ {
+ s = "not building" ;
+ }
+ ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
+ for local m in $(.component-logs.$(c))
+ {
+ ECHO " -" $(m) ;
+ }
+ }
+ ECHO ;
+}
+
+
+rule print-configure-checks-summary ( )
+{
+ # FIXME: The problem with this approach is that the user sees the checks
+ # summary when all checks are done, and has no progress reporting while the
+ # checks are being executed.
+ if $(.check-results)
+ {
+ ECHO "Configuration checks summary\n" ;
+ for local r in $(.check-results)
+ {
+ ECHO $(r) ;
+ }
+ ECHO ;
+ }
+}
+
+# Attempts to build the given virtual targets, caching the result under a key
+# derived from 'what' and the property set 'ps'. Returns a non-empty value on
+# success.
+rule try-build ( targets * : ps : what : retry ? )
+{
+ local cache-name = $(what) [ $(ps).raw ] ;
+ cache-name = $(cache-name:J=-) ;
+ local value = [ config-cache.get $(cache-name) ] ;
+
+ local result ;
+ local jam-targets ;
+
+ for local t in $(targets)
+ {
+ jam-targets += [ $(t).actualize ] ;
+ }
+
+ if $(value)
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ if $(value) = true
+ {
+ .$(what)-supported.$(ps) = yes ;
+ result = true ;
+ log-check-result "$(x) : yes (cached)" ;
+ }
+ else
+ {
+ log-check-result "$(x) : no (cached)" ;
+ }
+ }
+ else if ! UPDATE_NOW in [ RULENAMES ]
+ {
+ # Cannot determine. Assume existence.
+ }
+ else
+ {
+ local x = [ PAD " - $(what)" : $(.width) ] ;
+ if [ UPDATE_NOW $(jam-targets) :
+ $(.log-fd) : ignore-minus-n : ignore-minus-q ]
+ {
+ .$(what)-supported.$(ps) = yes ;
+ result = true ;
+ log-check-result "$(x) : yes" ;
+ }
+ else
+ {
+ log-check-result "$(x) : no" ;
+ }
+ }
+ if ! $(value)
+ {
+ if $(result)
+ {
+ config-cache.set $(cache-name) : true ;
+ }
+ else
+ {
+ config-cache.set $(cache-name) : false ;
+ }
+ }
+ return $(result) ;
+}
+
+# Attempts to build the metatarget named by 'metatarget-reference' in the
+# context of 'project' with properties 'ps'.
+# Returns a non-empty value if the build succeeds.
+rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
+{
+ local result ;
+
+ if ! $(retry) && ! $(.$(what)-tested.$(ps))
+ {
+ .$(what)-tested.$(ps) = true ;
+
+ local targets = [ targets.generate-from-reference
+ $(metatarget-reference) : $(project) : $(ps) ] ;
+
+ result = [ try-build $(targets[2-]) : $(ps) : $(what) : $(retry) ] ;
+ .$(what)-supported.$(ps) = $(result) ;
+
+ return $(result) ;
+
+ }
+ else
+ {
+ return $(.$(what)-supported.$(ps)) ;
+ }
+}
+
+rule builds ( metatarget-reference : properties * : what ? : retry ? )
+{
+ # FIXME: This should not be hardcoded. Other checks might want to consider a
+ # different set of features as relevant.
+ local toolset = [ property.select <toolset> : $(properties) ] ;
+ local toolset-version-property = "<toolset-$(toolset:G=):version>" ;
+ local relevant = [ property.select <target-os> <toolset>
+ $(toolset-version-property) <address-model> <architecture> :
+ $(properties) ] ;
+ local ps = [ property-set.create $(relevant) ] ;
+ local t = [ targets.current ] ;
+ local p = [ $(t).project ] ;
+
+ if ! $(what)
+ {
+ local resolved = [ targets.resolve-reference $(metatarget-reference) : $(p) ] ;
+ local name = [ $(resolved[1]).name ] ;
+ what = "$(name) builds" ;
+ }
+
+ return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) :
+ $(retry) ] ;
+}
+
+
+# Called by Boost.Build startup code to specify the file to receive the
+# configuration check results. Should never be called by user code.
+#
+rule set-log-file ( log-file )
+{
+ path.makedirs [ path.parent $(log-file) ] ;
+ .log-fd = [ FILE_OPEN $(log-file) : "w" ] ;
+}
+
+
+# Frontend rules
+
+class check-target-builds-worker
+{
+ import configure ;
+ import property-set ;
+ import targets ;
+ import property ;
+
+ rule __init__ ( target message ? : true-properties * : false-properties * )
+ {
+ self.target = $(target) ;
+ self.message = $(message) ;
+ self.true-properties = $(true-properties) ;
+ self.false-properties = $(false-properties) ;
+ }
+
+ rule check ( properties * )
+ {
+ local chosen ;
+ if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
+ {
+ chosen = $(self.true-properties) ;
+ }
+ else
+ {
+ chosen = $(self.false-properties) ;
+ }
+ return [ property.evaluate-conditionals-in-context $(chosen) :
+ $(properties) ] ;
+ }
+}
+
+
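+# Returns a <conditional> property that, when evaluated against a property
+# set, expands to 'true-properties' if 'target' can be built in that context
+# and to 'false-properties' otherwise.
+#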
+rule check-target-builds ( target message ? : true-properties * :
+ false-properties * )
+{
+ local instance = [ new check-target-builds-worker $(target) $(message) :
+ $(true-properties) : $(false-properties) ] ;
+ return <conditional>@$(instance).check ;
+}
+
+
+IMPORT $(__name__) : check-target-builds : : check-target-builds ;
diff --git a/tools/build/v2/build/configure.py b/tools/build/src/build/configure.py
index 0426832c40..0426832c40 100644
--- a/tools/build/v2/build/configure.py
+++ b/tools/build/src/build/configure.py
diff --git a/tools/build/src/build/engine.py b/tools/build/src/build/engine.py
new file mode 100644
index 0000000000..35333eaa00
--- /dev/null
+++ b/tools/build/src/build/engine.py
@@ -0,0 +1,202 @@
+# Copyright Pedro Ferreira 2005.
+# Copyright Vladimir Prus 2007.
+# Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+bjam_interface = __import__('bjam')
+
+import operator
+import re
+
+import b2.build.property_set as property_set
+import b2.util
+
+class BjamAction:
+ """Class representing bjam action defined from Python."""
+
+ def __init__(self, action_name, function):
+ self.action_name = action_name
+ self.function = function
+
+ def __call__(self, targets, sources, property_set):
+
+ # Bjam actions defined from Python have only the command
+ # to execute, and no associated jam procedural code. So
+ # passing 'property_set' to it is not necessary.
+ bjam_interface.call("set-update-action", self.action_name,
+ targets, sources, [])
+ if self.function:
+ self.function(targets, sources, property_set)
+
+class BjamNativeAction:
+ """Class representing bjam action defined by Jam code.
+
+ We still allow associating a Python callable that will
+ be called when this action is installed on any target.
+ """
+
+ def __init__(self, action_name, function):
+ self.action_name = action_name
+ self.function = function
+
+ def __call__(self, targets, sources, property_set):
+ if self.function:
+ self.function(targets, sources, property_set)
+
+ p = []
+ if property_set:
+ p = property_set.raw()
+
+ b2.util.set_jam_action(self.action_name, targets, sources, p)
+
+action_modifiers = {"updated": 0x01,
+ "together": 0x02,
+ "ignore": 0x04,
+ "quietly": 0x08,
+ "piecemeal": 0x10,
+ "existing": 0x20}
+
+class Engine:
+ """ The abstract interface to a build engine.
+
+ For now, the naming of targets and the special handling of some
+ target variables like SEARCH and LOCATE make this class coupled
+ to the bjam engine.
+ """
+ def __init__ (self):
+ self.actions = {}
+
+ def add_dependency (self, targets, sources):
+ """Adds a dependency from 'targets' to 'sources'
+
+ Both 'targets' and 'sources' can be either list
+ of target names, or a single target name.
+ """
+ if isinstance (targets, str):
+ targets = [targets]
+ if isinstance (sources, str):
+ sources = [sources]
+
+ for target in targets:
+ for source in sources:
+ self.do_add_dependency (target, source)
+
+ def get_target_variable(self, targets, variable):
+ """Gets the value of `variable` on set on the first target in `targets`.
+
+ Args:
+ targets (str or list): one or more targets to get the variable from.
+ variable (str): the name of the variable
+
+ Returns:
+ the value of `variable` set on `targets` (list)
+
+ Example:
+
+ >>> ENGINE = get_manager().engine()
+ >>> ENGINE.set_target_variable(targets, 'MY-VAR', 'Hello World')
+ >>> ENGINE.get_target_variable(targets, 'MY-VAR')
+ ['Hello World']
+
+ Equivalent Jam code:
+
+ MY-VAR on $(targets) = "Hello World" ;
+ echo [ on $(targets) return $(MY-VAR) ] ;
+ "Hello World"
+ """
+ return bjam_interface.call('get-target-variable', targets, variable)
+
+ def set_target_variable (self, targets, variable, value, append=0):
+ """ Sets a target variable.
+
+ The 'variable' will be available to bjam when it decides
+ where to generate targets, and will also be available to the
+ updating rule for that target.
+ """
+ if isinstance (targets, str):
+ targets = [targets]
+
+ for target in targets:
+ self.do_set_target_variable (target, variable, value, append)
+
+ def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
+ """ Binds a target to the corresponding update action.
+ If target needs to be updated, the action registered
+ with action_name will be used.
+ The 'action_name' must have been previously registered
+ using either the 'register_action' or the
+ 'register_bjam_action' method.
+ """
+ assert(isinstance(properties, property_set.PropertySet))
+ if isinstance (targets, str):
+ targets = [targets]
+ self.do_set_update_action (action_name, targets, sources, properties)
+
+ def register_action (self, action_name, command, bound_list = [], flags = [],
+ function = None):
+ """Creates a new build engine action.
+
+ Creates, on the bjam side, an action named 'action_name', with
+ 'command' as the command to be executed, 'bound_list'
+ naming the list of variables bound when the command is executed,
+ and the specified flags.
+ If 'function' is not None, it should be a callable taking three
+ parameters:
+ - targets
+ - sources
+ - instance of the property_set class
+ This function will be called by set_update_action, and can
+ set additional target variables.
+ """
+ if self.actions.has_key(action_name):
+ raise "Bjam action %s is already defined" % action_name
+
+ assert(isinstance(flags, list))
+
+ bjam_flags = reduce(operator.or_,
+ (action_modifiers[flag] for flag in flags), 0)
+
+ # We allow command to be empty so that we can define 'action' as pure
+ # python function that would do some conditional logic and then relay
+ # to other actions.
+ assert command or function
+ if command:
+ bjam_interface.define_action(action_name, command, bound_list, bjam_flags)
+
+ self.actions[action_name] = BjamAction(action_name, function)
+
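+ # Example (illustrative only; the action name, command and callback below
+ # are hypothetical, and only meaningful inside a running bjam interpreter):
+ #
+ #     def note_touch(targets, sources, ps):
+ #         engine.set_target_variable(targets, "MESSAGE", "touching")
+ #
+ #     engine = Engine()
+ #     engine.register_action("common.touch", "touch \"$(<)\"",
+ #                            flags=["quietly"], function=note_touch)
+ #
+ # After registration, "common.touch" may be passed to set_update_action.
+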
+ def register_bjam_action (self, action_name, function=None):
+ """Informs self that 'action_name' is declared in bjam.
+
+ From this point, 'action_name' is a valid argument to the
+ set_update_action method. The action_name should be callable
+ in the global module of bjam.
+ """
+
+ # We allow duplicate calls to this rule for the same
+ # action name. This way, jamfile rules that take action names
+ # can just register them without specially checking if
+ # action is already registered.
+ if not self.actions.has_key(action_name):
+ self.actions[action_name] = BjamNativeAction(action_name, function)
+
+ # Overridables
+
+
+ def do_set_update_action (self, action_name, targets, sources, property_set):
+ action = self.actions.get(action_name)
+ if not action:
+ raise Exception("No action %s was registered" % action_name)
+ action(targets, sources, property_set)
+
+ def do_set_target_variable (self, target, variable, value, append):
+ if append:
+ bjam_interface.call("set-target-variable", target, variable, value, "true")
+ else:
+ bjam_interface.call("set-target-variable", target, variable, value)
+
+ def do_add_dependency (self, target, source):
+ bjam_interface.call("DEPENDS", target, source)
+
+
diff --git a/tools/build/v2/build/errors.py b/tools/build/src/build/errors.py
index d9dceefe08..d9dceefe08 100644
--- a/tools/build/v2/build/errors.py
+++ b/tools/build/src/build/errors.py
diff --git a/tools/build/src/build/feature.jam b/tools/build/src/build/feature.jam
new file mode 100644
index 0000000000..ee6abc5916
--- /dev/null
+++ b/tools/build/src/build/feature.jam
@@ -0,0 +1,1350 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import assert : * ;
+import "class" : * ;
+import indirect ;
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+import utility ;
+
+
+local rule setup ( )
+{
+ .all-attributes =
+ implicit
+ composite
+ optional
+ symmetric
+ free
+ incidental
+ path
+ dependency
+ propagated
+ link-incompatible
+ subfeature
+ order-sensitive
+ ;
+
+ .all-features = ;
+ .all-subfeatures = ;
+ .all-top-features = ; # non-subfeatures
+ .all-implicit-values = ;
+}
+setup ;
+
+
+# Prepare a fresh space to test in by moving all global variable settings into
+# the given temporary module and erasing them here.
+#
+rule prepare-test ( temp-module )
+{
+ DELETE_MODULE $(temp-module) ;
+
+ # Transfer globals to temp-module.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ modules.poke $(temp-module) : $(v) : $($(v)) ;
+ $(v) = ;
+ }
+ }
+ setup ;
+}
+
+
+# Clear out all global variables and recover all variables from the given
+# temporary module.
+#
+rule finish-test ( temp-module )
+{
+ # Clear globals.
+ for local v in [ VARNAMES feature ]
+ {
+ if [ MATCH (\\.) : $(v) ]
+ {
+ $(v) = ;
+ }
+ }
+
+ for local v in [ VARNAMES $(temp-module) ]
+ {
+ $(v) = [ modules.peek $(temp-module) : $(v) ] ;
+ }
+ DELETE_MODULE $(temp-module) ;
+}
+
+
+# Transform features by bracketing any elements which are not already bracketed
+# by "<>".
+#
+local rule grist ( features * )
+{
+ local empty = "" ;
+ return $(empty:G=$(features)) ;
+}
+
+
+# Declare a new feature with the given name, values, and attributes.
+#
+rule feature (
+ name # Feature name.
+ : values * # Allowable values - may be extended later using feature.extend.
+ : attributes * # Feature attributes (e.g. implicit, free, propagated...).
+)
+{
+ name = [ grist $(name) ] ;
+
+ local error ;
+
+ # Check for any unknown attributes.
+ if ! ( $(attributes) in $(.all-attributes) )
+ {
+ error = unknown attributes:
+ [ set.difference $(attributes) : $(.all-attributes) ] ;
+ }
+ else if $(name) in $(.all-features)
+ {
+ error = feature already defined: ;
+ }
+ else if implicit in $(attributes) && free in $(attributes)
+ {
+ error = free features cannot also be implicit ;
+ }
+ else if free in $(attributes) && propagated in $(attributes)
+ {
+ error = free features cannot be propagated ;
+ }
+ else
+ {
+ local m = [ MATCH (.*=.*) : $(values) ] ;
+ if $(m[1])
+ {
+ error = "feature value may not contain '='" ;
+ }
+ }
+
+ if $(error)
+ {
+ import errors ;
+ errors.error $(error)
+ : "in" feature declaration:
+ : feature [ errors.lol->list $(1) : $(2) : $(3) ] ;
+ }
+
+ $(name).values ?= ;
+ $(name).attributes = $(attributes) ;
+ $(name).subfeatures ?= ;
+ $(attributes).features += $(name) ;
+
+ .all-features += $(name) ;
+ if subfeature in $(attributes)
+ {
+ .all-subfeatures += $(name) ;
+ }
+ else
+ {
+ .all-top-features += $(name) ;
+ }
+ extend $(name) : $(values) ;
+}
+
+
+# Sets the default value of the given feature, overriding any previous default.
+#
+rule set-default ( feature : value )
+{
+ local f = [ grist $(feature) ] ;
+ local a = $($(f).attributes) ;
+ local bad-attribute = ;
+ if free in $(a)
+ {
+ bad-attribute = free ;
+ }
+ else if optional in $(a)
+ {
+ bad-attribute = optional ;
+ }
+ if $(bad-attribute)
+ {
+ import errors ;
+ errors.error $(bad-attribute) property $(f) cannot have a default. ;
+ }
+ if ! $(value) in $($(f).values)
+ {
+ import errors ;
+ errors.error The specified default value, '$(value)' is invalid :
+ allowed values are: $($(f).values) ;
+ }
+ $(f).default = $(value) ;
+}
+
+
+# Returns the default property values for the given features.
+#
+rule defaults ( features * )
+{
+ local result ;
+ for local f in $(features)
+ {
+ local gf = $(:E=:G=$(f)) ;
+ local a = $($(gf).attributes) ;
+ if ( free in $(a) ) || ( optional in $(a) )
+ {
+ }
+ else
+ {
+ result += $(gf)$($(gf).default) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns true iff all 'names' elements are valid features.
+#
+rule valid ( names + )
+{
+ if $(names) in $(.all-features)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the attributes of the given feature.
+#
+rule attributes ( feature )
+{
+ return $($(feature).attributes) ;
+}
+
+
+# Returns the values of the given feature.
+#
+rule values ( feature )
+{
+ return $($(:E=:G=$(feature)).values) ;
+}
+
+
+# Returns true iff 'value-string' is a value-string of an implicit feature.
+#
+rule is-implicit-value ( value-string )
+{
+ local v = [ regex.split $(value-string) - ] ;
+ local failed ;
+ if ! $(v[1]) in $(.all-implicit-values)
+ {
+ failed = true ;
+ }
+ else
+ {
+ local feature = $($(v[1]).implicit-feature) ;
+ for local subvalue in $(v[2-])
+ {
+ if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
+ {
+ failed = true ;
+ }
+ }
+ }
+
+ if ! $(failed)
+ {
+ return true ;
+ }
+}
+
+
+# Returns the implicit feature associated with the given implicit value.
+#
+rule implied-feature ( implicit-value )
+{
+ local components = [ regex.split $(implicit-value) "-" ] ;
+ local feature = $($(components[1]).implicit-feature) ;
+ if ! $(feature)
+ {
+ import errors ;
+ errors.error \"$(implicit-value)\" is not an implicit feature value ;
+ feature = "" ; # Keep testing happy; it expects a result.
+ }
+ return $(feature) ;
+}
+
+
+local rule find-implied-subfeature ( feature subvalue : value-string ? )
+{
+ # Feature should be of the form <feature-name>.
+ if $(feature) != $(feature:G)
+ {
+ import errors ;
+ errors.error invalid feature $(feature) ;
+ }
+ return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
+}
+
+
+# Given a feature and a value of one of its subfeatures, finds the name of the
+# subfeature. If value-string is supplied, looks for implied subfeatures that
+# are specific to that value of the feature.
+#
+rule implied-subfeature (
+ feature # The main feature name.
+ subvalue # The value of one of its subfeatures.
+ : value-string ? # The value of the main feature.
+)
+{
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
+ : $(value-string) ] ;
+ if ! $(subfeature)
+ {
+ value-string ?= "" ;
+ import errors ;
+ errors.error \"$(subvalue)\" is not a known subfeature value of
+ $(feature)$(value-string) ;
+ }
+ return $(subfeature) ;
+}
+
+
+# Generate an error if the feature is unknown.
+#
+local rule validate-feature ( feature )
+{
+ if ! $(feature) in $(.all-features)
+ {
+ import errors ;
+ errors.error unknown feature \"$(feature)\" ;
+ }
+}
+
+
+# Given a feature and its value or just a value corresponding to an implicit
+# feature, returns a property set consisting of all component subfeatures and
+# their values. For example all the following calls:
+#
+# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
+# expand-subfeatures-aux gcc-2.95.2-linux-x86
+#
+# return:
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+local rule expand-subfeatures-aux (
+ feature ? # Feature name or empty if value corresponds to an
+ # implicit property.
+ : value # Feature value.
+ : dont-validate ? # If set, no value string validation will be done.
+)
+{
+ if $(feature)
+ {
+ feature = $(feature) ;
+ }
+
+ if ! $(feature)
+ {
+ feature = [ implied-feature $(value) ] ;
+ }
+ else
+ {
+ validate-feature $(feature) ;
+ }
+ if ! $(dont-validate)
+ {
+ validate-value-string $(feature) $(value) ;
+ }
+
+ local components = [ regex.split $(value) "-" ] ;
+
+ # Get the top-level feature's value.
+ local value = $(components[1]:G=) ;
+
+ local result = $(components[1]:G=$(feature)) ;
+
+ local subvalues = $(components[2-]) ;
+ while $(subvalues)
+ {
+ local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
+ subvalues = $(subvalues[2-]) ;
+
+ local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
+ $(value) ] ;
+
+ # If no subfeature was found, reconstitute the value string and use that.
+ if ! $(subfeature)
+ {
+ result = $(components:J=-) ;
+ result = $(result:G=$(feature)) ;
+ subvalues = ; # Stop looping.
+ }
+ else
+ {
+ local f = [ MATCH ^<(.*)>$ : $(feature) ] ;
+ result += $(subvalue:G=$(f)-$(subfeature)) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Make all elements of properties corresponding to implicit features explicit,
+# and express all subfeature values as separate properties in their own right.
+# For example, all of the following properties
+#
+# gcc-2.95.2-linux-x86
+# <toolset>gcc-2.95.2-linux-x86
+#
+# might expand to
+#
+# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+#
+rule expand-subfeatures (
+ properties * # Property set with elements of the form
+ # <feature>value-string or just value-string in the case
+ # of implicit features.
+ : dont-validate ?
+)
+{
+ local result ;
+ for local p in $(properties)
+ {
+ # Don't expand subfeatures in subfeatures
+ if ! [ MATCH "(:)" : $(p:G) ]
+ {
+ result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for extend, below. Handles the feature case.
+#
+local rule extend-feature ( feature : values * )
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if implicit in $($(feature).attributes)
+ {
+ for local v in $(values)
+ {
+ if $($(v).implicit-feature)
+ {
+ import errors ;
+ errors.error $(v) is already associated with the
+ \"$($(v).implicit-feature)\" feature ;
+ }
+ $(v).implicit-feature = $(feature) ;
+ }
+
+ .all-implicit-values += $(values) ;
+ }
+ if ! $($(feature).values)
+ {
+ # This is the first value specified for this feature, so make it the
+ # default.
+ $(feature).default = $(values[1]) ;
+ }
+ $(feature).values += $(values) ;
+}
+
+
+# Checks that value-string is a valid value-string for the given feature.
+#
+rule validate-value-string ( feature value-string )
+{
+ if ! (
+ free in $($(feature).attributes)
+ || ( $(value-string) in $(feature).values )
+ )
+ {
+ local values = $(value-string) ;
+
+ if $($(feature).subfeatures)
+ {
+ if ! $(value-string) in $($(feature).values)
+ $($(feature).subfeatures)
+ {
+ values = [ regex.split $(value-string) - ] ;
+ }
+ }
+
+ if ! ( $(values[1]) in $($(feature).values) ) &&
+
+ # An empty value is allowed for optional features.
+ ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
+ {
+ import errors ;
+ errors.error \"$(values[1])\" is not a known value of feature
+ $(feature) : legal values: \"$($(feature).values)\" ;
+ }
+
+ for local v in $(values[2-])
+ {
+ # This will validate any subfeature values in value-string.
+ implied-subfeature $(feature) $(v) : $(values[1]) ;
+ }
+ }
+}
+
+
+# A helper that computes:
+#   * the name(s) of the module-local variable(s) used to record the
+#     correspondence between subvalue(s) and a subfeature, and
+#   * the value of that variable when such a subfeature/subvalue has been
+#     defined,
+# and returns a list consisting of the latter followed by the former.
+#
+local rule subvalue-var (
+ feature # Main feature name.
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the subfeature values are valid.
+ : subfeature # Subfeature name.
+ : subvalues * # Subfeature values.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+ if $(value-string)
+ {
+ validate-value-string $(feature) $(value-string) ;
+ }
+
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ return $(subfeature-name)
+ $(feature)$(value-string:E="")<>$(subvalues).subfeature ;
+}
+
+
+# Extends the given subfeature with the subvalues. If the optional value-string
+# is provided, the subvalues are only valid for the given value of the feature.
+# Thus, you could say that <target-platform>mingw is specific to
+# <toolset>gcc-2.95.2 as follows:
+#
+# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
+#
+rule extend-subfeature (
+ feature # The feature whose subfeature is being extended.
+
+ value-string ? # If supplied, specifies a specific value of the main
+ # feature for which the new subfeature values are valid.
+
+ : subfeature # Subfeature name.
+ : subvalues * # Additional subfeature values.
+)
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalues) ] ;
+
+ local f = [ utility.ungrist [ grist $(feature) ] ] ;
+ extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
+
+ # Provide a way to get from the given feature or property and subfeature
+ # value to the subfeature name.
+ $(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
+}
+
+
+# Returns true iff the subvalues are valid for the feature. When the optional
+# value-string is provided, returns true iff the subvalues are valid for the
+# given value of the feature.
+#
+rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
+{
+ local subfeature-vars = [ subvalue-var $(feature) $(value-string)
+ : $(subfeature) : $(subvalue) ] ;
+
+ if $($(subfeature-vars[2])) = $(subfeature-vars[1])
+ {
+ return true ;
+ }
+}
+
+
+# Can be called three ways:
+#
+# 1. extend feature : values *
+# 2. extend <feature> subfeature : values *
+# 3. extend <feature>value-string subfeature : values *
+#
+# * Form 1 adds the given values to the given feature.
+# * Forms 2 and 3 add subfeature values to the given feature.
+# * Form 3 adds the subfeature values as specific to the given property
+# value-string.
+#
+rule extend ( feature-or-property subfeature ? : values * )
+{
+ local feature ; # If a property was specified this is its feature.
+ local value-string ; # E.g., the gcc-2.95.2 part of <toolset>gcc-2.95.2.
+
+ # If a property was specified.
+ if $(feature-or-property:G) && $(feature-or-property:G=)
+ {
+ # Extract the feature and value-string, if any.
+ feature = $(feature-or-property:G) ;
+ value-string = $(feature-or-property:G=) ;
+ }
+ else
+ {
+ feature = [ grist $(feature-or-property) ] ;
+ }
+
+ # Dispatch to the appropriate handler.
+ if $(subfeature)
+ {
+ extend-subfeature $(feature) $(value-string) : $(subfeature)
+ : $(values) ;
+ }
+ else
+ {
+ # If no subfeature was specified, we do not expect to see a
+ # value-string.
+ if $(value-string)
+ {
+ import errors ;
+ errors.error can only specify a property as the first argument when
+ extending a subfeature
+ : usage:
+ : " extend" feature ":" values...
+ : " | extend" <feature>value-string subfeature ":" values... ;
+ }
+
+ extend-feature $(feature) : $(values) ;
+ }
+}
+
+
+local rule get-subfeature-name ( subfeature value-string ? )
+{
+ local prefix = $(value-string): ;
+ return $(prefix:E="")$(subfeature) ;
+}
+
+
+# Declares a subfeature.
+#
+rule subfeature (
+ feature # Root feature that is not a subfeature.
+ value-string ? # A value-string specifying which feature or subfeature
+ # values this subfeature is specific to, if any.
+ : subfeature # The name of the subfeature being declared.
+ : subvalues * # The allowed values of this subfeature.
+ : attributes * # The attributes of the subfeature.
+)
+{
+ feature = [ grist $(feature) ] ;
+ validate-feature $(feature) ;
+
+ # Add grist to the subfeature name if a value-string was supplied.
+ local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
+
+ if $(subfeature-name) in $($(feature).subfeatures)
+ {
+ import errors ;
+ errors.error \"$(subfeature)\" already declared as a subfeature of
+ \"$(feature)\" "specific to "$(value-string) ;
+ }
+ $(feature).subfeatures += $(subfeature-name) ;
+
+ # First declare the subfeature as a feature in its own right.
+ local f = [ utility.ungrist $(feature) ] ;
+ feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
+
+ # Now make sure the subfeature values are known.
+ extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
+}
+
+
+# Set components of the given composite property.
+#
+rule compose ( composite-property : component-properties * )
+{
+ local feature = $(composite-property:G) ;
+ if ! ( composite in [ attributes $(feature) ] )
+ {
+ import errors ;
+ errors.error "$(feature)" is not a composite feature ;
+ }
+
+ $(composite-property).components ?= ;
+ if $($(composite-property).components)
+ {
+ import errors ;
+ errors.error components of "$(composite-property)" already set:
+ $($(composite-property).components) ;
+ }
+
+ if $(composite-property) in $(component-properties)
+ {
+ import errors ;
+ errors.error composite property "$(composite-property)" cannot have itself as a component ;
+ }
+ $(composite-property).components = $(component-properties) ;
+}
+
+
+local rule expand-composite ( property )
+{
+ return $(property)
+ [ sequence.transform expand-composite : $($(property).components) ] ;
+}
+
+
+# Return all values of the given feature specified by the given property set.
+#
+rule get-values ( feature : properties * )
+{
+ local result ;
+
+ feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ # Use MATCH instead of :G= to get the value, in order to preserve
+ # the value intact instead of having bjam treat it as a decomposable
+ # path.
+ result += [ MATCH ">(.*)" : $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+rule free-features ( )
+{
+ return $(free.features) ;
+}
+
+
+# Expand all composite properties in the set so that all components are
+# explicitly expressed.
+#
+rule expand-composites ( properties * )
+{
+ local explicit-features = $(properties:G) ;
+ local result ;
+
+ # Now expand composite features.
+ for local p in $(properties)
+ {
+ local expanded = [ expand-composite $(p) ] ;
+
+ for local x in $(expanded)
+ {
+ if ! $(x) in $(result)
+ {
+ local f = $(x:G) ;
+
+ if $(f) in $(free.features)
+ {
+ result += $(x) ;
+ }
+ else if ! $(x) in $(properties) # x is the result of expansion
+ {
+ if ! $(f) in $(explicit-features) # not explicitly-specified
+ {
+ if $(f) in $(result:G)
+ {
+ import errors ;
+ errors.error expansions of composite features result
+ in conflicting values for $(f)
+ : values: [ get-values $(f) : $(result) ] $(x:G=)
+ : one contributing composite property was $(p) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ else if $(f) in $(result:G)
+ {
+ import errors ;
+ errors.error explicitly-specified values of non-free feature
+ $(f) conflict :
+ "existing values:" [ get-values $(f) : $(properties) ] :
+ "value from expanding " $(p) ":" $(x:G=) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Return true iff f is an ordinary subfeature of the parent-property's feature,
+# or if f is a subfeature of the parent-property's feature specific to the
+# parent-property's value.
+#
+local rule is-subfeature-of ( parent-property f )
+{
+ if subfeature in $($(f).attributes)
+ {
+ local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
+ if $(specific-subfeature)
+ {
+ # The feature has the form <topfeature-topvalue:subfeature>, e.g.
+ # <toolset-msvc:version>.
+ local feature-value = [ split-top-feature $(specific-subfeature[1])
+ ] ;
+ if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ # The feature has the form <topfeature-subfeature>, e.g.
+ # <toolset-version>
+ local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
+ if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
+ {
+ return true ;
+ }
+ }
+ }
+}
+
+
+# As for is-subfeature-of but for subproperties.
+#
+local rule is-subproperty-of ( parent-property p )
+{
+ return [ is-subfeature-of $(parent-property) $(p:G) ] ;
+}
+
+
+# Given a property, return the subset of features consisting of all ordinary
+# subfeatures of the property's feature, and all specific subfeatures of the
+# property's feature which are conditional on the property's value.
+#
+local rule select-subfeatures ( parent-property : features * )
+{
+ return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
+}
+
+
+# As for select-subfeatures but for subproperties.
+#
+local rule select-subproperties ( parent-property : properties * )
+{
+ return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
+}
+
+
+# Given a property set which may consist of composite and implicit properties
+# and combined subfeature values, returns an expanded, normalized property set
+# with all implicit features expressed explicitly, all subfeature values
+# individually expressed, and all components of composite properties expanded.
+# Non-free features directly expressed in the input properties cause any values
+# of those features due to composite feature expansion to be dropped. If two
+# values of a given non-free feature are directly expressed in the input, an
+# error is issued.
+#
+rule expand ( properties * )
+{
+ local expanded = [ expand-subfeatures $(properties) ] ;
+ return [ expand-composites $(expanded) ] ;
+}
+
+
+# Helper rule for minimize. Returns true iff property's feature is present in
+# the contents of the variable named by feature-set-var.
+#
+local rule in-features ( feature-set-var property )
+{
+ if $(property:G) in $($(feature-set-var))
+ {
+ return true ;
+ }
+}
+
+
+# Helper rule for minimize. Returns the list with the same properties, but with
+# all subfeatures moved to the end of the list.
+#
+local rule move-subfeatures-to-the-end ( properties * )
+{
+ local x1 ;
+ local x2 ;
+ for local p in $(properties)
+ {
+ if subfeature in $($(p:G).attributes)
+ {
+ x2 += $(p) ;
+ }
+ else
+ {
+ x1 += $(p) ;
+ }
+ }
+ return $(x1) $(x2) ;
+}
+
+
+# Given an expanded property set, eliminate all redundancy: properties that are
+# elements of other (composite) properties in the set will be eliminated.
+# Non-symmetric properties equal to default values will be eliminated unless
+# they override a value from some composite property. Implicit properties will
+# be expressed without feature grist, and sub-property values will be expressed
+# as elements joined to the corresponding main property.
+#
+rule minimize ( properties * )
+{
+ # Precondition checking
+ local implicits = [ set.intersection $(properties:G=) : $(properties:G) ] ;
+ if $(implicits)
+ {
+ import errors ;
+ errors.error minimize requires an expanded property set, but
+ \"$(implicits[1])\" appears to be the value of an un-expanded
+ implicit feature ;
+ }
+
+ # Remove properties implied by composite features.
+ local components = $($(properties).components) ;
+ local x = [ set.difference $(properties) : $(components) ] ;
+
+ # Handle subfeatures and implicit features.
+ x = [ move-subfeatures-to-the-end $(x) ] ;
+ local result ;
+ while $(x)
+ {
+ local p fullp = $(x[1]) ;
+ local f = $(p:G) ;
+ local v = $(p:G=) ;
+
+ # Eliminate features in implicit properties.
+ if implicit in [ attributes $(f) ]
+ {
+ p = $(v) ;
+ }
+
+ # Locate all subproperties of $(x[1]) in the property set.
+ local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
+ if $(subproperties)
+ {
+ # Reconstitute the joined property name.
+ local sorted = [ sequence.insertion-sort $(subproperties) ] ;
+ result += $(p)-$(sorted:G="":J=-) ;
+
+ x = [ set.difference $(x[2-]) : $(subproperties) ] ;
+ }
+ else
+ {
+ # Eliminate properties whose value is equal to feature's default,
+ # which are not symmetric and which do not contradict values implied
+ # by composite properties.
+
+ # Since all component properties of composites in the set have been
+ # eliminated, any remaining property whose feature is the same as a
+ # component of a composite in the set must have a non-redundant
+ # value.
+ if $(fullp) != [ defaults $(f) ]
+ || symmetric in [ attributes $(f) ]
+ || $(fullp:G) in $(components:G)
+ {
+ result += $(p) ;
+ }
+
+ x = $(x[2-]) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Combine all subproperties into their parent properties
+#
+# Requires: for every subproperty, there is a parent property. All features are
+# explicitly expressed.
+#
+# This rule probably should not be needed, but build-request.expand-no-defaults
+# is being abused for unintended purposes and it needs help.
+#
+rule compress-subproperties ( properties * )
+{
+ local all-subs ;
+ local matched-subs ;
+ local result ;
+
+ for local p in $(properties)
+ {
+ if ! $(p:G)
+ {
+ # Expecting fully-gristed properties.
+ assert.variable-not-empty p:G ;
+ }
+
+ if ! subfeature in $($(p:G).attributes)
+ {
+ local subs = [ sequence.insertion-sort
+ [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
+
+ matched-subs += $(subs) ;
+
+ local subvalues = -$(subs:G=:J=-) ;
+ subvalues ?= "" ;
+ result += $(p)$(subvalues) ;
+ }
+ else
+ {
+ all-subs += $(p) ;
+ }
+ }
+ assert.result true : set.equal $(all-subs) : $(matched-subs) ;
+ return $(result) ;
+}
+
+
+# Given an ungristed string, finds the longest prefix which is a top-level
+# feature name followed by a dash, and returns a pair consisting of the parts
+# before and after that dash. More interesting than a simple split because
+# feature names may contain dashes.
+#
+local rule split-top-feature ( feature-plus )
+{
+ local e = [ regex.split $(feature-plus) - ] ;
+ local f = $(e[1]) ;
+ local v ;
+ while $(e)
+ {
+ if <$(f)> in $(.all-top-features)
+ {
+ v = $(f) $(e[2-]:J=-) ;
+ }
+ e = $(e[2-]) ;
+ f = $(f)-$(e[1]) ;
+ }
+ return $(v) ;
+}
+
+
+# Given a set of properties, add default values for features not represented in
+# the set.
+#
+# Note: if there's an ordinary feature F1 and a composite feature F2 which
+# includes some value for F1, and both features have default values, then the
+# default value of F1 will be added (as opposed to the value in F2). This might
+# not be the right idea, e.g. consider:
+#
+# feature variant : debug ... ;
+# <variant>debug : .... <runtime-debugging>on
+# feature <runtime-debugging> : off on ;
+#
+# Here, when adding defaults for an empty property set, we'll get
+#
+# <variant>debug <runtime-debugging>off
+#
+# and that's kind of strange.
+#
+rule add-defaults ( properties * )
+{
+ for local v in $(properties:G=)
+ {
+ if $(v) in $(properties)
+ {
+ import errors ;
+ errors.error add-defaults requires explicitly specified features,
+ but \"$(v)\" appears to be the value of an un-expanded implicit
+ feature ;
+ }
+ }
+ # We don't add defaults for elements with ":" inside. This catches:
+ # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
+ # to be taken as a specified value for <variant>
+ # 2. Free properties with ":" in values. We don't care, since free
+ # properties don't have defaults.
+ local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
+ local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
+ local more = [ defaults $(missing-top) ] ;
+ properties += $(more) ;
+ xproperties += $(more) ;
+
+ # Add defaults for subfeatures of features which are present.
+ for local p in $(xproperties)
+ {
+ local s = $($(p:G).subfeatures) ;
+ local f = [ utility.ungrist $(p:G) ] ;
+ local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
+ properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
+ }
+
+ return $(properties) ;
+}
+
+
+# Given a property-set of the form
+# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
+#
+# Returns
+# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
+#
+# Note that vN...vM may contain slashes. This needs to be resilient to the
+# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
+# slash direction on NT.
+#
+rule split ( property-set )
+{
+ local pieces = [ regex.split $(property-set) [\\/] ] ;
+ local result ;
+
+ for local x in $(pieces)
+ {
+ if ( ! $(x:G) ) && $(result[-1]:G)
+ {
+ result = $(result[1--2]) $(result[-1])/$(x) ;
+ }
+ else
+ {
+ result += $(x) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Tests of module feature.
+#
+rule __test__ ( )
+{
+ # Use a fresh copy of the feature module.
+ prepare-test feature-test-temp ;
+
+ import assert ;
+ import errors : try catch ;
+
+ # These are local rules and so must be explicitly reimported into the
+ # testing module.
+ import feature : extend-feature validate-feature select-subfeatures ;
+
+ feature toolset : gcc : implicit ;
+ feature define : : free ;
+ feature runtime-link : dynamic static : symmetric ;
+ feature optimization : on off ;
+ feature variant : debug release profile : implicit composite symmetric ;
+ feature stdlib : native stlport ;
+ feature magic : : free ;
+
+ compose <variant>debug : <define>_DEBUG <optimization>off ;
+ compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ assert.result dynamic static : values <runtime-link> ;
+ assert.result dynamic static : values runtime-link ;
+
+ try ;
+ {
+ compose <variant>profile : <variant>profile ;
+ }
+ catch composite property <variant>profile cannot have itself as a component ;
+
+ extend-feature toolset : msvc metrowerks ;
+ subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
+
+ assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : gcc : version : 1.1 ;
+
+ assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
+ assert.false is-subvalue toolset : : version : yabba ;
+
+ feature yabba ;
+ subfeature yabba : version : dabba ;
+ assert.true is-subvalue yabba : : version : dabba ;
+
+ subfeature toolset gcc : platform : linux cygwin : optional ;
+
+ assert.result <toolset-gcc:version>
+ : select-subfeatures <toolset>gcc
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib> ;
+
+ subfeature stdlib : version : 3 4 : optional ;
+
+ assert.result <stdlib-version>
+ : select-subfeatures <stdlib>native
+ : <toolset-gcc:version>
+ <toolset-msvc:version>
+ <toolset-version>
+ <stdlib-version> ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures <toolset>gcc-3.0.1 ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
+ : expand-subfeatures <toolset>gcc-3.0.1-linux ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
+
+ assert.result <define>foo=x-y
+ : expand-subfeatures <define>foo=x-y ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ : expand-subfeatures gcc-3.0.1 ;
+
+ assert.result a c e
+ : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
+
+ assert.result <toolset>gcc <toolset-gcc:version>3.0.1
+ <variant>debug <define>_DEBUG <optimization>on
+ : expand gcc-3.0.1 debug <optimization>on ;
+
+ assert.result <variant>debug <define>_DEBUG <optimization>on
+ : expand debug <optimization>on ;
+
+ assert.result <optimization>on <variant>debug <define>_DEBUG
+ : expand <optimization>on debug ;
+
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults <runtime-link> <define> <optimization> ;
+
+ # Make sure defaults is resilient to missing grist.
+ assert.result <runtime-link>dynamic <optimization>on
+ : defaults runtime-link define optimization ;
+
+ feature dummy : dummy1 dummy2 ;
+ subfeature dummy : subdummy : x y z : optional ;
+
+ feature fu : fu1 fu2 : optional ;
+ subfeature fu : subfu : x y z : optional ;
+ subfeature fu : subfu2 : q r s ;
+
+ assert.result optional : attributes <fu> ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
+ <dummy>dummy1 <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO ;
+
+ assert.result <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
+ <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
+ : add-defaults <runtime-link>static <define>foobar <optimization>on
+ <toolset>gcc:<define>FOO <fu>fu1 ;
+
+ set-default <runtime-link> : static ;
+ assert.result <runtime-link>static : defaults <runtime-link> ;
+
+ assert.result gcc-3.0.1 debug <optimization>on
+ : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
+
+ assert.result gcc-3.0.1 debug <runtime-link>dynamic
+ : minimize
+ [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
+
+ assert.result gcc-3.0.1 debug
+ : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
+
+ assert.result debug <optimization>on
+ : minimize [ expand debug <optimization>on ] ;
+
+ assert.result gcc-3.0
+ : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
+
+ assert.result gcc-3.0
+ : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y/z/<a>b/c/<d>e/f ;
+
+ assert.result <x>y/z <a>b/c <d>e/f
+ : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a/b/c/<d>e/f/g/<h>i/j/k ;
+
+ assert.result a b c <d>e/f/g <h>i/j/k
+ : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
+
+ # Test error checking.
+
+ try ;
+ {
+ expand release <optimization>off <optimization>on ;
+ }
+ catch explicitly-specified values of non-free feature <optimization> conflict ;
+
+ try ;
+ {
+ validate-feature <foobar> ;
+ }
+ catch unknown feature ;
+
+ validate-value-string <toolset> gcc ;
+ validate-value-string <toolset> gcc-3.0.1 ;
+
+ try ;
+ {
+ validate-value-string <toolset> digital_mars ;
+ }
+ catch \"digital_mars\" is not a known value of <toolset> ;
+
+ try ;
+ {
+ feature foobar : : baz ;
+ }
+ catch unknown attributes: baz ;
+
+ feature feature1 ;
+ try ;
+ {
+ feature feature1 ;
+ }
+ catch feature already defined: ;
+
+ try ;
+ {
+ feature feature2 : : free implicit ;
+ }
+ catch free features cannot also be implicit ;
+
+ try ;
+ {
+ feature feature3 : : free propagated ;
+ }
+ catch free features cannot be propagated ;
+
+ try ;
+ {
+ implied-feature lackluster ;
+ }
+ catch \"lackluster\" is not an implicit feature value ;
+
+ try ;
+ {
+ implied-subfeature <toolset> 3.0.1 ;
+ }
+ catch \"3.0.1\" is not a known subfeature value of <toolset> ;
+
+ try ;
+ {
+ implied-subfeature <toolset> not-a-version : gcc ;
+ }
+ catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
+
+ # Leave a clean copy of the features module behind.
+ finish-test feature-test-temp ;
+}
diff --git a/tools/build/src/build/feature.py b/tools/build/src/build/feature.py
new file mode 100644
index 0000000000..386e49931c
--- /dev/null
+++ b/tools/build/src/build/feature.py
@@ -0,0 +1,907 @@
+# Status: ported, except for unit tests.
+# Base revision: 64488
+#
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import re
+
+from b2.util import utility, bjam_signature
+import b2.util.set
+from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
+from b2.exceptions import *
+
+__re_split_subfeatures = re.compile ('<(.*):(.*)>')
+__re_no_hyphen = re.compile ('^([^:]+)$')
+__re_slash_or_backslash = re.compile (r'[\\/]')
+
+class Feature(object):
+
+ # Map from string attribute names to integer bit flags.
+ # This will be initialized after the declaration of the class.
+ _attribute_name_to_integer = {}
+
+ def __init__(self, name, values, attributes):
+ self._name = name
+ self._values = values
+ self._default = None
+ self._attributes = 0
+ for a in attributes:
+ self._attributes = self._attributes | Feature._attribute_name_to_integer[a]
+ self._attributes_string_list = attributes
+ self._subfeatures = []
+ self._parent = None
+
+ def name(self):
+ return self._name
+
+ def values(self):
+ return self._values
+
+ def add_values(self, values):
+ self._values.extend(values)
+
+ def attributes(self):
+ return self._attributes
+
+ def set_default(self, value):
+ self._default = value
+
+ def default(self):
+ return self._default
+
+ # FIXME: remove when we fully move to using classes for features/properties
+ def attributes_string_list(self):
+ return self._attributes_string_list
+
+ def subfeatures(self):
+ return self._subfeatures
+
+ def add_subfeature(self, name):
+ self._subfeatures.append(name)
+
+ def parent(self):
+ """For subfeatures, return pair of (parent_feature, value).
+
+ Value may be None if this subfeature is not specific to any
+ value of the parent feature.
+ """
+ return self._parent
+
+ def set_parent(self, feature, value):
+ self._parent = (feature, value)
+
+ def __str__(self):
+ return self._name
+
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __all_attributes, __all_features, __implicit_features, __composite_properties
+ global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features
+ global __all_subfeatures
+
+ # The list with all attribute names.
+ __all_attributes = [ 'implicit',
+ 'composite',
+ 'optional',
+ 'symmetric',
+ 'free',
+ 'incidental',
+ 'path',
+ 'dependency',
+ 'propagated',
+ 'link-incompatible',
+ 'subfeature',
+ 'order-sensitive'
+ ]
+ i = 1
+ for a in __all_attributes:
+ setattr(Feature, a.upper(), i)
+ Feature._attribute_name_to_integer[a] = i
+ def probe(self, flag=i):
+ return getattr(self, "_attributes") & flag
+ setattr(Feature, a.replace("-", "_"), probe)
+ i = i << 1
+
+ # A map containing all features. The key is the feature name.
+ # The value is an instance of Feature class.
+ __all_features = {}
+
+ # All non-subfeatures.
+ __all_top_features = []
+
+ # Maps values to the corresponding implicit feature
+ __implicit_features = {}
+
+ # A map containing all composite properties. The key is a Property instance,
+ # and the value is a list of Property instances
+ __composite_properties = {}
+
+ __features_with_attributes = {}
+ for attribute in __all_attributes:
+ __features_with_attributes [attribute] = []
+
+ # Maps a value to the corresponding subfeature name.
+ __subfeature_from_value = {}
+
+ # All free features
+ __free_features = []
+
+ __all_subfeatures = []
+
+reset ()
+
+def enumerate ():
+ """ Returns an iterator to the features map.
+ """
+ return __all_features.iteritems ()
+
+def get(name):
+ """Return the Feature instance for the specified name.
+
+ Raises an exception if no feature with that name exists.
+ """
+ return __all_features[name]
+
+# FIXME: prepare-test/finish-test?
+
+@bjam_signature((["name"], ["values", "*"], ["attributes", "*"]))
+def feature (name, values, attributes = []):
+ """ Declares a new feature with the given name, values, and attributes.
+ name: the feature name
+ values: a sequence of the allowable values - may be extended later with feature.extend
+ attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
+ """
+ __validate_feature_attributes (name, attributes)
+
+ feature = Feature(name, [], attributes)
+ __all_features[name] = feature
+ # Temporary measure while we have not fully moved from 'gristed strings'
+ __all_features["<" + name + ">"] = feature
+
+ for attribute in attributes:
+ __features_with_attributes [attribute].append (name)
+
+ name = add_grist(name)
+
+ if 'subfeature' in attributes:
+ __all_subfeatures.append(name)
+ else:
+ __all_top_features.append(feature)
+
+ extend (name, values)
+
+ # FIXME: why is this needed?
+ if 'free' in attributes:
+ __free_features.append (name)
+
+ return feature
+
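+# Example (illustrative only; the feature names below are hypothetical):
+#
+#     feature("optimization", ["off", "speed", "space"], [])
+#     feature("toolset", ["gcc", "msvc"], ["implicit"])
+#     feature("define", [], ["free"])
+#
+# mirrors the corresponding Jam declarations such as
+# "feature optimization : off speed space ;".
+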
+@bjam_signature((["feature"], ["value"]))
+def set_default (feature, value):
+ """ Sets the default value of the given feature, overriding any previous default.
+ feature: the name of the feature
+ value: the default value to assign
+ """
+ f = __all_features[feature]
+ attributes = f.attributes()
+ bad_attribute = None
+
+ if attributes & Feature.FREE:
+ bad_attribute = "free"
+ elif attributes & Feature.OPTIONAL:
+ bad_attribute = "optional"
+
+ if bad_attribute:
+        raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, feature))
+
+ if not value in f.values():
+        raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % f.values())
+
+ f.set_default(value)
+
+def defaults(features):
+ """ Returns the default property values for the given features.
+ """
+ # FIXME: should merge feature and property modules.
+ import property
+
+ result = []
+ for f in features:
+ if not f.free() and not f.optional() and f.default():
+ result.append(property.Property(f, f.default()))
+
+ return result
+
+def valid (names):
+ """ Returns true iff all elements of names are valid features.
+ """
+ def valid_one (name): return __all_features.has_key (name)
+
+ if isinstance (names, str):
+ return valid_one (names)
+ else:
+ return all([ valid_one (name) for name in names ])
+
+def attributes (feature):
+ """ Returns the attributes of the given feature.
+ """
+ return __all_features[feature].attributes_string_list()
+
+def values (feature):
+ """ Return the values of the given feature.
+ """
+ validate_feature (feature)
+ return __all_features[feature].values()
+
+def is_implicit_value (value_string):
+ """ Returns true iff 'value_string' is a value_string
+ of an implicit feature.
+ """
+
+ if __implicit_features.has_key(value_string):
+ return __implicit_features[value_string]
+
+ v = value_string.split('-')
+
+ if not __implicit_features.has_key(v[0]):
+ return False
+
+ feature = __implicit_features[v[0]]
+
+ for subvalue in (v[1:]):
+ if not __find_implied_subfeature(feature, subvalue, v[0]):
+ return False
+
+ return True
+
+def implied_feature (implicit_value):
+ """ Returns the implicit feature associated with the given implicit value.
+ """
+ components = implicit_value.split('-')
+
+ if not __implicit_features.has_key(components[0]):
+ raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
+
+ return __implicit_features[components[0]]
+
+def __find_implied_subfeature (feature, subvalue, value_string):
+
+ #if value_string == None: value_string = ''
+
+ if not __subfeature_from_value.has_key(feature) \
+ or not __subfeature_from_value[feature].has_key(value_string) \
+ or not __subfeature_from_value[feature][value_string].has_key (subvalue):
+ return None
+
+ return __subfeature_from_value[feature][value_string][subvalue]
+
+# Given a feature and a value of one of its subfeatures, find the name
+# of the subfeature. If value-string is supplied, looks for implied
+# subfeatures that are specific to that value of feature
+# feature # The main feature name
+# subvalue # The value of one of its subfeatures
+# value-string # The value of the main feature
+
+def implied_subfeature (feature, subvalue, value_string):
+ result = __find_implied_subfeature (feature, subvalue, value_string)
+ if not result:
+ raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
+
+ return result
+
+def validate_feature (name):
+    """ Checks that name is a valid feature. Otherwise, raises an exception.
+ """
+ if not __all_features.has_key(name):
+ raise InvalidFeature ("'%s' is not a valid feature name" % name)
+ else:
+ return __all_features[name]
+
+def valid (names):
+ """ Returns true iff all elements of names are valid features.
+ """
+ def valid_one (name): return __all_features.has_key (name)
+
+ if isinstance (names, str):
+ return valid_one (names)
+ else:
+        return all([ valid_one (name) for name in names ])
+
+# Uses Property
+def __expand_subfeatures_aux (property, dont_validate = False):
+ """ Helper for expand_subfeatures.
+ Given a feature and value, or just a value corresponding to an
+ implicit feature, returns a property set consisting of all component
+ subfeatures and their values. For example:
+
+ expand_subfeatures <toolset>gcc-2.95.2-linux-x86
+ -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+ equivalent to:
+ expand_subfeatures gcc-2.95.2-linux-x86
+
+        property: A Property instance whose value may be a combined value string
+                  (e.g. 'gcc-2.95.2-linux-x86' for an implicit feature).
+        dont_validate: If True, no validation of the value string will be done.
+ """
+ f = property.feature()
+ v = property.value()
+ if not dont_validate:
+ validate_value_string(f, v)
+
+ components = v.split ("-")
+
+ v = components[0]
+
+ import property
+
+ result = [property.Property(f, components[0])]
+
+ subvalues = components[1:]
+
+ while len(subvalues) > 0:
+ subvalue = subvalues [0] # pop the head off of subvalues
+ subvalues = subvalues [1:]
+
+ subfeature = __find_implied_subfeature (f, subvalue, v)
+
+ # If no subfeature was found, reconstitute the value string and use that
+ if not subfeature:
+ return [property.Property(f, '-'.join(components))]
+
+ result.append(property.Property(subfeature, subvalue))
+
+ return result
+
+def expand_subfeatures(properties, dont_validate = False):
+ """
+ Make all elements of properties corresponding to implicit features
+ explicit, and express all subfeature values as separate properties
+ in their own right. For example, the property
+
+ gcc-2.95.2-linux-x86
+
+ might expand to
+
+ <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
+
+ properties: A sequence with elements of the form
+ <feature>value-string or just value-string in the
+ case of implicit features.
+        dont_validate: If True, no validation of the value string will be done.
+ """
+ result = []
+ for p in properties:
+ # Don't expand subfeatures in subfeatures
+ if p.feature().subfeature():
+ result.append (p)
+ else:
+ result.extend(__expand_subfeatures_aux (p, dont_validate))
+
+ return result
+
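+# Rough sketch of the expansion described above (commented out; it assumes the
+# 'toolset' feature and its version subfeature have been declared elsewhere,
+# e.g. by a toolset module, and that b2.build.property is imported as
+# 'property'):
+#
+#   p = property.create_from_string('<toolset>gcc-2.95.2')
+#   expand_subfeatures([p])
+#   # -> roughly the Property equivalent of <toolset>gcc <toolset-version>2.95.2,
+#   #    mirroring the docstring example above.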
+
+
+# rule extend was defined as below:
+ # Can be called three ways:
+ #
+ # 1. extend feature : values *
+ # 2. extend <feature> subfeature : values *
+ # 3. extend <feature>value-string subfeature : values *
+ #
+ # * Form 1 adds the given values to the given feature
+ # * Forms 2 and 3 add subfeature values to the given feature
+ # * Form 3 adds the subfeature values as specific to the given
+ # property value-string.
+ #
+ #rule extend ( feature-or-property subfeature ? : values * )
+#
+# Now, the specific rule must be called, depending on the desired operation:
+# extend_feature
+# extend_subfeature
+
+def extend (name, values):
+ """ Adds the given values to the given feature.
+ """
+ name = add_grist (name)
+ __validate_feature (name)
+ feature = __all_features [name]
+
+ if feature.implicit():
+ for v in values:
+ if __implicit_features.has_key(v):
+ raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v]))
+
+ __implicit_features[v] = feature
+
+ if len (feature.values()) == 0 and len (values) > 0:
+ # This is the first value specified for this feature,
+ # take it as default value
+ feature.set_default(values[0])
+
+ feature.add_values(values)
+
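+# Commented-out sketch of how implicit features interact with 'extend' and
+# 'implied_feature' (the 'toolset' declaration here is only illustrative):
+#
+#   feature('toolset', [], ['implicit', 'propagated', 'symmetric'])
+#   extend('toolset', ['gcc'])
+#   assert implied_feature('gcc') is get('toolset')
+#   assert is_implicit_value('gcc')
+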
+def validate_value_string (f, value_string):
+ """ Checks that value-string is a valid value-string for the given feature.
+ """
+ if f.free() or value_string in f.values():
+ return
+
+ values = [value_string]
+
+ if f.subfeatures():
+ if not value_string in f.values() and \
+ not value_string in f.subfeatures():
+ values = value_string.split('-')
+
+ # An empty value is allowed for optional features
+ if not values[0] in f.values() and \
+ (values[0] or not f.optional()):
+ raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], f.name(), f.values()))
+
+ for v in values [1:]:
+ # this will validate any subfeature values in value-string
+ implied_subfeature(f, v, values[0])
+
+
+""" Extends the given subfeature with the subvalues. If the optional
+ value-string is provided, the subvalues are only valid for the given
+ value of the feature. Thus, you could say that
+    <target-platform>mingw is specific to <toolset>gcc-2.95.2 as follows:
+
+ extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
+
+ feature: The feature whose subfeature is being extended.
+
+ value-string: If supplied, specifies a specific value of the
+ main feature for which the new subfeature values
+ are valid.
+
+ subfeature: The name of the subfeature.
+
+ subvalues: The additional values of the subfeature being defined.
+"""
+def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
+
+ feature = validate_feature(feature_name)
+
+ if value_string:
+ validate_value_string(feature, value_string)
+
+ subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
+
+    extend(subfeature_name, subvalues)
+ subfeature = __all_features[subfeature_name]
+
+ if value_string == None: value_string = ''
+
+ if not __subfeature_from_value.has_key(feature):
+ __subfeature_from_value [feature] = {}
+
+ if not __subfeature_from_value[feature].has_key(value_string):
+ __subfeature_from_value [feature][value_string] = {}
+
+ for subvalue in subvalues:
+ __subfeature_from_value [feature][value_string][subvalue] = subfeature
+
+@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"],
+ ["subvalues", "*"], ["attributes", "*"]))
+def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
+ """ Declares a subfeature.
+ feature_name: Root feature that is not a subfeature.
+ value_string: An optional value-string specifying which feature or
+ subfeature values this subfeature is specific to,
+ if any.
+ subfeature: The name of the subfeature being declared.
+ subvalues: The allowed values of this subfeature.
+ attributes: The attributes of the subfeature.
+ """
+ parent_feature = validate_feature (feature_name)
+
+ # Add grist to the subfeature name if a value-string was supplied
+ subfeature_name = __get_subfeature_name (subfeature, value_string)
+
+ if subfeature_name in __all_features[feature_name].subfeatures():
+ message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
+ message += " specific to '%s'" % value_string
+ raise BaseException (message)
+
+ # First declare the subfeature as a feature in its own right
+ f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
+ f.set_parent(parent_feature, value_string)
+
+ parent_feature.add_subfeature(f)
+
+ # Now make sure the subfeature values are known.
+ extend_subfeature (feature_name, value_string, subfeature, subvalues)
+
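+# Commented-out sketch (values are illustrative; it assumes 'toolset' is already
+# declared and 'gcc' is one of its known values): declaring a version
+# subfeature that is specific to gcc.
+#
+#   subfeature('toolset', 'gcc', 'version', ['2.95.2', '3.0'])
+#   # Declares a '<toolset-gcc:version>' subfeature whose values are only valid
+#   # when <toolset> is 'gcc'.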
+
+@bjam_signature((["composite_property_s"], ["component_properties_s", "*"]))
+def compose (composite_property_s, component_properties_s):
+ """ Sets the components of the given composite property.
+
+ All parameters are <feature>value strings
+ """
+ import property
+
+ component_properties_s = to_seq (component_properties_s)
+ composite_property = property.create_from_string(composite_property_s)
+ f = composite_property.feature()
+
+ if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property):
+ component_properties = component_properties_s
+ else:
+ component_properties = [property.create_from_string(p) for p in component_properties_s]
+
+ if not f.composite():
+ raise BaseException ("'%s' is not a composite feature" % f)
+
+    if __composite_properties.has_key(composite_property):
+ raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property])))
+
+ if composite_property in component_properties:
+ raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)
+
+ __composite_properties[composite_property] = component_properties
+
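+# Commented-out sketch of composing a 'variant'-style feature (feature names
+# are illustrative; in practice they are declared by the build system itself):
+#
+#   feature('variant', ['debug'], ['implicit', 'composite', 'propagated', 'symmetric'])
+#   feature('inlining', ['off', 'on'], ['propagated'])
+#   compose('<variant>debug', ['<inlining>off'])
+#   # expand() will now replace <variant>debug with itself plus <inlining>off.
+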
+
+def expand_composite(property):
+ result = [ property ]
+ if __composite_properties.has_key(property):
+ for p in __composite_properties[property]:
+ result.extend(expand_composite(p))
+ return result
+
+@bjam_signature((['feature'], ['properties', '*']))
+def get_values (feature, properties):
+ """ Returns all values of the given feature specified by the given property set.
+ """
+ if feature[0] != '<':
+ feature = '<' + feature + '>'
+ result = []
+ for p in properties:
+ if get_grist (p) == feature:
+ result.append (replace_grist (p, ''))
+
+ return result
+
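+# Commented-out sketch: 'get_values' works on plain gristed strings rather than
+# Property objects, roughly as follows:
+#
+#   get_values('<define>', ['<define>FOO', '<include>bar', '<define>BAZ'])
+#   # -> ['FOO', 'BAZ']
+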
+def free_features ():
+ """ Returns all free features.
+ """
+ return __free_features
+
+def expand_composites (properties):
+ """ Expand all composite properties in the set so that all components
+ are explicitly expressed.
+ """
+ explicit_features = set(p.feature() for p in properties)
+
+ result = []
+
+ # now expand composite features
+ for p in properties:
+ expanded = expand_composite(p)
+
+ for x in expanded:
+ if not x in result:
+ f = x.feature()
+
+ if f.free():
+ result.append (x)
+ elif not x in properties: # x is the result of expansion
+ if not f in explicit_features: # not explicitly-specified
+ if any(r.feature() == f for r in result):
+ raise FeatureConflict(
+ "expansions of composite features result in "
+ "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
+ (f.name(), [r.value() for r in result if r.feature() == f] + [x.value()], p))
+ else:
+ result.append (x)
+ elif any(r.feature() == f for r in result):
+ raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
+ "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
+ [r.value() for r in result if r.feature() == f], p, x.value()))
+ else:
+ result.append (x)
+
+ return result
+
+# Uses Property
+def is_subfeature_of (parent_property, f):
+ """ Return true iff f is an ordinary subfeature of the parent_property's
+ feature, or if f is a subfeature of the parent_property's feature
+ specific to the parent_property's value.
+ """
+ if not f.subfeature():
+ return False
+
+ p = f.parent()
+ if not p:
+ return False
+
+ parent_feature = p[0]
+ parent_value = p[1]
+
+ if parent_feature != parent_property.feature():
+ return False
+
+ if parent_value and parent_value != parent_property.value():
+ return False
+
+ return True
+
+def __is_subproperty_of (parent_property, p):
+ """ As is_subfeature_of, for subproperties.
+ """
+ return is_subfeature_of (parent_property, p.feature())
+
+
+# Returns true iff the subvalue is valid for the feature. When the
+# optional value-string is provided, returns true iff the subvalues
+# are valid for the given value of the feature.
+def is_subvalue(feature, value_string, subfeature, subvalue):
+
+ if not value_string:
+ value_string = ''
+
+ if not __subfeature_from_value.has_key(feature):
+ return False
+
+ if not __subfeature_from_value[feature].has_key(value_string):
+ return False
+
+ if not __subfeature_from_value[feature][value_string].has_key(subvalue):
+ return False
+
+ if __subfeature_from_value[feature][value_string][subvalue]\
+ != subfeature:
+ return False
+
+ return True
+
+
+# Uses Property
+def expand (properties):
+ """ Given a property set which may consist of composite and implicit
+ properties and combined subfeature values, returns an expanded,
+ normalized property set with all implicit features expressed
+ explicitly, all subfeature values individually expressed, and all
+ components of composite properties expanded. Non-free features
+ directly expressed in the input properties cause any values of
+ those features due to composite feature expansion to be dropped. If
+ two values of a given non-free feature are directly expressed in the
+ input, an error is issued.
+ """
+ expanded = expand_subfeatures(properties)
+ return expand_composites (expanded)
+
+# Accepts list of Property objects
+def add_defaults (properties):
+ """ Given a set of properties, add default values for features not
+ represented in the set.
+        Note: if there's an ordinary feature F1 and a composite feature
+        F2 which includes some value for F1, and both features have default values,
+        then the default value of F1 will be added, not the value in F2. This might
+        not be the right idea: consider
+
+ feature variant : debug ... ;
+ <variant>debug : .... <runtime-debugging>on
+ feature <runtime-debugging> : off on ;
+
+ Here, when adding default for an empty property set, we'll get
+
+ <variant>debug <runtime_debugging>off
+
+ and that's kind of strange.
+ """
+ result = [x for x in properties]
+
+ handled_features = set()
+ for p in properties:
+ # We don't add default for conditional properties. We don't want
+        # <variant>debug:<define>DEBUG to be taken as a specified value for <variant>
+ if not p.condition():
+ handled_features.add(p.feature())
+
+ missing_top = [f for f in __all_top_features if not f in handled_features]
+ more = defaults(missing_top)
+ result.extend(more)
+ for p in more:
+ handled_features.add(p.feature())
+
+ # Add defaults for subfeatures of features which are present
+ for p in result[:]:
+ s = p.feature().subfeatures()
+ more = defaults([s for s in p.feature().subfeatures() if not s in handled_features])
+ for p in more:
+ handled_features.add(p.feature())
+ result.extend(more)
+
+ return result
+
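+# Commented-out sketch (assuming b2.build.property is imported as 'property'):
+# with only <variant>debug specified, 'add_defaults' fills in the default value
+# of every other declared top-level feature, plus defaults for subfeatures of
+# features already present.
+#
+#   ps = [property.create_from_string('<variant>debug')]
+#   ps = add_defaults(ps)
+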
+def minimize (properties):
+ """ Given an expanded property set, eliminate all redundancy: properties
+ which are elements of other (composite) properties in the set will
+ be eliminated. Non-symmetric properties equal to default values will be
+        eliminated, unless they override a value from some composite property.
+ Implicit properties will be expressed without feature
+ grist, and sub-property values will be expressed as elements joined
+ to the corresponding main property.
+ """
+
+ # remove properties implied by composite features
+ components = []
+ for property in properties:
+ if __composite_properties.has_key (property):
+ components.extend(__composite_properties[property])
+ properties = b2.util.set.difference (properties, components)
+
+ # handle subfeatures and implicit features
+
+ # move subfeatures to the end of the list
+ properties = [p for p in properties if not p.feature().subfeature()] +\
+ [p for p in properties if p.feature().subfeature()]
+
+ result = []
+ while properties:
+ p = properties[0]
+ f = p.feature()
+
+        # locate all subproperties of p in the property set
+ subproperties = __select_subproperties (p, properties)
+
+ if subproperties:
+ # reconstitute the joined property name
+ subproperties.sort ()
+ joined = b2.build.property.Property(p.feature(), p.value() + '-' + '-'.join ([sp.value() for sp in subproperties]))
+ result.append(joined)
+
+ properties = b2.util.set.difference(properties[1:], subproperties)
+
+ else:
+ # eliminate properties whose value is equal to feature's
+ # default and which are not symmetric and which do not
+ # contradict values implied by composite properties.
+
+ # since all component properties of composites in the set
+ # have been eliminated, any remaining property whose
+ # feature is the same as a component of a composite in the
+ # set must have a non-redundant value.
+ if p.value() != f.default() or f.symmetric():
+ result.append (p)
+ #\
+ #or get_grist (fullp) in get_grist (components):
+ # FIXME: restore above
+
+
+ properties = properties[1:]
+
+ return result
+
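+# Illustrative note (values are hypothetical): an expanded set such as
+#
+#   <toolset>gcc <toolset-gcc:version>2.95.2 <variant>debug <inlining>off
+#
+# minimizes back to roughly <toolset>gcc-2.95.2 <variant>debug, assuming
+# <inlining>off is either the feature's default or implied by the composite
+# <variant>debug, and 'inlining' is not symmetric.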
+
+def split (properties):
+ """ Given a property-set of the form
+ v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
+
+ Returns
+ v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
+
+ Note that vN...vM may contain slashes. This is resilient to the
+ substitution of backslashes for slashes, since Jam, unbidden,
+ sometimes swaps slash direction on NT.
+ """
+
+ def split_one (properties):
+ pieces = re.split (__re_slash_or_backslash, properties)
+ result = []
+
+ for x in pieces:
+ if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
+ result = result [0:-1] + [ result [-1] + '/' + x ]
+ else:
+ result.append (x)
+
+ return result
+
+ if isinstance (properties, str):
+ return split_one (properties)
+
+ result = []
+ for p in properties:
+ result += split_one (p)
+ return result
+
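+# Commented-out sketch of 'split' on a raw build-request string:
+#
+#   split('debug/<define>FOO=a/b')
+#   # -> ['debug', '<define>FOO=a/b']  (the gristless tail 'b' is glued back
+#   #    onto the preceding gristed element)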
+
+def compress_subproperties (properties):
+ """ Combine all subproperties into their parent properties
+
+ Requires: for every subproperty, there is a parent property. All
+ features are explicitly expressed.
+
+ This rule probably shouldn't be needed, but
+ build-request.expand-no-defaults is being abused for unintended
+ purposes and it needs help
+ """
+ result = []
+ matched_subs = set()
+ all_subs = set()
+ for p in properties:
+ f = p.feature()
+
+ if not f.subfeature():
+ subs = __select_subproperties (p, properties)
+ if subs:
+
+ matched_subs.update(subs)
+
+ subvalues = '-'.join (sub.value() for sub in subs)
+ result.append(b2.build.property.Property(
+ p.feature(), p.value() + '-' + subvalues,
+ p.condition()))
+ else:
+ result.append(p)
+
+ else:
+ all_subs.add(p)
+
+    # TODO: these variables are used just for debugging. What's the overhead?
+ assert all_subs == matched_subs
+
+ return result
+
+######################################################################################
+# Private methods
+
+def __select_subproperties (parent_property, properties):
+ return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
+
+def __get_subfeature_name (subfeature, value_string):
+ if value_string == None:
+ prefix = ''
+ else:
+ prefix = value_string + ':'
+
+ return prefix + subfeature
+
+
+def __validate_feature_attributes (name, attributes):
+ for attribute in attributes:
+ if not attribute in __all_attributes:
+ raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
+
+ if name in __all_features:
+ raise AlreadyDefined ("feature '%s' already defined" % name)
+ elif 'implicit' in attributes and 'free' in attributes:
+ raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name)
+ elif 'free' in attributes and 'propagated' in attributes:
+ raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
+
+
+def __validate_feature (feature):
+ """ Generates an error if the feature is unknown.
+ """
+ if not __all_features.has_key (feature):
+ raise BaseException ('unknown feature "%s"' % feature)
+
+
+def __select_subfeatures (parent_property, features):
+ """ Given a property, return the subset of features consisting of all
+ ordinary subfeatures of the property's feature, and all specific
+ subfeatures of the property's feature which are conditional on the
+ property's value.
+ """
+ return [f for f in features if is_subfeature_of (parent_property, f)]
+
+# FIXME: copy over tests.
diff --git a/tools/build/src/build/generators.jam b/tools/build/src/build/generators.jam
new file mode 100644
index 0000000000..27fb224872
--- /dev/null
+++ b/tools/build/src/build/generators.jam
@@ -0,0 +1,1420 @@
+# Copyright 2002. Vladimir Prus
+# Copyright 2006. Rene Rivera
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Manages 'generators' --- objects which can transform between different target
+# types and contain the algorithm for finding a transformation from sources to
+# targets.
+#
+# The main entry point to this module is generators.construct rule. It is given
+# a list of source targets, desired target type and a set of properties. It
+# starts by selecting 'viable generators', which have any chances of producing
+# the desired target type with the required properties. Generators are ranked
+# and a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the
+# properties and list of sources. Each one selects a target which can be
+# directly consumed, and tries to convert the remaining ones to the types it can
+# consume. This is done by recursively calling 'construct' with all consumable
+# types.
+#
+# If the generator has collected all the targets it needs, it creates targets
+# corresponding to result, and returns it. When all generators have been run,
+# results of one of them are selected and returned as a result.
+#
+# It is quite possible for 'construct' to return more targets than it was asked
+# for. For example, it might be asked to generate a target of type EXE when the
+# only generator found produces both EXE and TDS (file with debug information).
+# The extra target will be returned.
+#
+# Likewise, when generator tries to convert sources to consumable types, it can
+# get more targets than it was asked for. The question is what to do with extra
+# targets. Boost.Build attempts to convert them to requested types, and attempts
+# that as early as possible. Specifically, this is done after invoking each
+# generator. TODO: An example is needed to document the rationale for trying
+# extra target conversion at that point.
+#
+# In order for the system to be able to use a specific generator instance 'when
+# needed', the instance needs to be registered with the system using
+# generators.register() or one of its related rules. Unregistered generators may
+# only be run explicitly and will not be considered by Boost.Build when
+# converting between given target types.
+
+import "class" : new ;
+import property-set ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+import virtual-target ;
+
+
+if "--debug-generators" in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+
+# Updates cached viable source target type information as needed after a new
+# target type gets defined. This is needed because if a target type is a viable
+# source target type for some generator then all of the target type's derived
+# target types should automatically be considered as viable source target types
+# for the same generator as well. Does nothing if a non-derived target type is
+# passed to it.
+#
+rule update-cached-information-with-a-new-type ( type )
+{
+ local base-type = [ type.base $(type) ] ;
+ if $(base-type)
+ {
+ for local g in $(.vstg-cached-generators)
+ {
+ if $(base-type) in $(.vstg.$(g))
+ {
+ .vstg.$(g) += $(type) ;
+ }
+ }
+
+ for local t in $(.vst-cached-types)
+ {
+ if $(base-type) in $(.vst.$(t))
+ {
+ .vst.$(t) += $(type) ;
+ }
+ }
+ }
+}
+
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+local rule invalidate-extendable-viable-source-target-type-cache ( )
+{
+ local generators-with-cached-source-types = $(.vstg-cached-generators) ;
+ .vstg-cached-generators = ;
+ for local g in $(generators-with-cached-source-types)
+ {
+ if $(.vstg.$(g)) = *
+ {
+ .vstg-cached-generators += $(g) ;
+ }
+ else
+ {
+ .vstg.$(g) = ;
+ }
+ }
+
+ local types-with-cached-source-types = $(.vst-cached-types) ;
+ .vst-cached-types = ;
+ for local t in $(types-with-cached-source-types)
+ {
+ if $(.vst.$(t)) = *
+ {
+ .vst-cached-types += $(t) ;
+ }
+ else
+ {
+ .vst.$(t) = ;
+ }
+ }
+}
+
+
+# Outputs a debug message if generators debugging is on. Each element of
+# 'message' is checked to see if it is a class instance. If so, instead of the
+# value, the result of 'str' call is output.
+#
+local rule generators.dout ( message * )
+{
+ if $(.debug)
+ {
+ ECHO [ sequence.transform utility.str : $(message) ] ;
+ }
+}
+
+
+local rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+local rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+local rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Models a generator.
+#
+class generator
+{
+ import "class" : new ;
+ import feature ;
+ import generators : indent increase-indent decrease-indent generators.dout ;
+ import utility ;
+ import path ;
+ import property ;
+ import sequence ;
+ import set ;
+ import type ;
+ import virtual-target ;
+
+ EXPORT class@generator : indent increase-indent decrease-indent
+ generators.dout ;
+
+ rule __init__ (
+ id # Identifies the generator - should be name
+ # of the rule which sets up the build
+ # actions.
+
+ composing ? # Whether generator processes each source
+ # target in turn, converting it to required
+ # types. Ordinary generators pass all
+ # sources together to the recursive
+ # generators.construct-types call.
+
+ : source-types * # Types that this generator can handle. If
+ # empty, the generator can consume anything.
+
+ : target-types-and-names + # Types the generator will create and,
+ # optionally, names for created targets.
+ # Each element should have the form
+ # type["(" name-pattern ")"], for example,
+ # obj(%_x). Generated target name will be
+ # found by replacing % with the name of
+ # source, provided an explicit name was not
+ # specified.
+
+ : requirements *
+ )
+ {
+ self.id = $(id) ;
+ self.rule-name = $(id) ;
+ self.composing = $(composing) ;
+ self.source-types = $(source-types) ;
+ self.target-types-and-names = $(target-types-and-names) ;
+ self.requirements = $(requirements) ;
+
+ for local e in $(target-types-and-names)
+ {
+ # Create three parallel lists: one with the list of target types,
+            # and two others with prefixes and postfixes to be added to the
+            # target name. We use parallel lists for prefix and postfix (as
+            # opposed to a mapping), because a given target type might occur
+            # several times, for example "H H(%_symbols)".
+ local m = [ MATCH ([^\\(]*)(\\((.*)%(.*)\\))? : $(e) ] ;
+ self.target-types += $(m[1]) ;
+ self.name-prefix += $(m[3]:E="") ;
+ self.name-postfix += $(m[4]:E="") ;
+ }
+
+ for local r in [ requirements ]
+ {
+ if $(r:G=)
+ {
+ self.property-requirements += $(r) ;
+ }
+ else
+ {
+ self.feature-requirements += $(r) ;
+ }
+ }
+
+        # Note that 'transform' here is the same as 'for_each'.
+ sequence.transform type.validate : $(self.source-types) ;
+ sequence.transform type.validate : $(self.target-types) ;
+ }
+
+ ################# End of constructor #################
+
+ rule id ( )
+ {
+ return $(self.id) ;
+ }
+
+    # Returns the list of target types the generator accepts.
+ #
+ rule source-types ( )
+ {
+ return $(self.source-types) ;
+ }
+
+ # Returns the list of target types that this generator produces. It is
+ # assumed to be always the same -- i.e. it can not change depending on some
+ # provided list of sources.
+ #
+ rule target-types ( )
+ {
+ return $(self.target-types) ;
+ }
+
+    # Returns the required properties for this generator. Properties in the
+    # returned set must be present in the build properties if this generator is
+    # to be used. If the result has a grist-only element, the build properties
+    # must include some value of that feature.
+ #
+ # XXX: remove this method?
+ #
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule set-rule-name ( rule-name )
+ {
+ self.rule-name = $(rule-name) ;
+ }
+
+ rule rule-name ( )
+ {
+ return $(self.rule-name) ;
+ }
+
+ # Returns a true value if the generator can be run with the specified
+ # properties.
+ #
+ rule match-rank ( property-set-to-match )
+ {
+ # See if generator requirements are satisfied by 'properties'. Treat a
+ # feature name in requirements (i.e. grist-only element), as matching
+ # any value of the feature.
+
+ if [ $(property-set-to-match).contains-raw $(self.property-requirements) ] &&
+ [ $(property-set-to-match).contains-features $(self.feature-requirements) ]
+ {
+ return true ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ # Returns another generator which differs from $(self) in
+ # - id
+    # - value of the <toolset> feature in properties
+ #
+ rule clone ( new-id : new-toolset-properties + )
+ {
+ local g = [ new $(__class__) $(new-id) $(self.composing) :
+ $(self.source-types) : $(self.target-types-and-names) :
+ # Note: this does not remove any subfeatures of <toolset> which
+ # might cause problems.
+ [ property.change $(self.requirements) : <toolset> ]
+ $(new-toolset-properties) ] ;
+ return $(g) ;
+ }
+
+ # Creates another generator that is the same as $(self), except that if
+    # 'base' is in the target types of $(self), 'type' will be in the target
+    # types of the new generator.
+ #
+ rule clone-and-change-target-type ( base : type )
+ {
+ local target-types ;
+ for local t in $(self.target-types-and-names)
+ {
+ local m = [ MATCH ([^\\(]*)(\\(.*\\))? : $(t) ] ;
+ if $(m) = $(base)
+ {
+ target-types += $(type)$(m[2]:E="") ;
+ }
+ else
+ {
+ target-types += $(t) ;
+ }
+ }
+
+ local g = [ new $(__class__) $(self.id) $(self.composing) :
+ $(self.source-types) : $(target-types) : $(self.requirements) ] ;
+ if $(self.rule-name)
+ {
+ $(g).set-rule-name $(self.rule-name) ;
+ }
+ return $(g) ;
+ }
+
+ # Tries to invoke this generator on the given sources. Returns a list of
+ # generated targets (instances of 'virtual-target') and optionally a set of
+ # properties to be added to the usage-requirements for all the generated
+ # targets. Returning nothing from run indicates that the generator was
+ # unable to create the target.
+ #
+ rule run
+ (
+ project # Project for which the targets are generated.
+ name ? # Used when determining the 'name' attribute for all
+ # generated targets. See the 'generated-targets' method.
+ : property-set # Desired properties for generated targets.
+ : sources + # Source targets.
+ )
+ {
+ generators.dout [ indent ] " ** generator" $(self.id) ;
+ generators.dout [ indent ] " composing:" $(self.composing) ;
+
+ if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
+ {
+ import errors : error : errors.error ;
+ errors.error "Unsupported source/source-type combination" ;
+ }
+
+ # We do not run composing generators if no name is specified. The reason
+ # is that composing generator combines several targets, which can have
+ # different names, and it cannot decide which name to give for produced
+ # target. Therefore, the name must be passed.
+ #
+        # This, in effect, means that composing generators are runnable only at
+        # the top level of a transformation graph, or if their name is passed
+        # explicitly. Thus, we disallow composing generators in the middle. For
+ # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
+ # will not be allowed as the OBJ -> STATIC_LIB generator is composing.
+ if ! $(self.composing) || $(name)
+ {
+ run-really $(project) $(name) : $(property-set) : $(sources) ;
+ }
+ }
+
+ rule run-really ( project name ? : property-set : sources + )
+ {
+ # Targets that this generator will consume directly.
+ local consumed = ;
+ # Targets that can not be consumed and will be returned as-is.
+ local bypassed = ;
+
+ if $(self.composing)
+ {
+ consumed = [ convert-multiple-sources-to-consumable-types $(project)
+ : $(property-set) : $(sources) ] ;
+ }
+ else
+ {
+ consumed = [ convert-to-consumable-types $(project) $(name)
+ : $(property-set) : $(sources) ] ;
+ }
+
+ local result ;
+ if $(consumed)
+ {
+ result = [ construct-result $(consumed) : $(project) $(name) :
+ $(property-set) ] ;
+ }
+
+ if $(result)
+ {
+ generators.dout [ indent ] " SUCCESS: " $(result) ;
+ }
+ else
+ {
+ generators.dout [ indent ] " FAILURE" ;
+ }
+ generators.dout ;
+ return $(result) ;
+ }
+
+ # Constructs the dependency graph to be returned by this generator.
+ #
+ rule construct-result
+ (
+ consumed + # Already prepared list of consumable targets.
+ # Composing generators may receive multiple sources
+ # all of which will have types matching those in
+ # $(self.source-types). Non-composing generators with
+ # multiple $(self.source-types) will receive exactly
+ # len $(self.source-types) sources with types matching
+ # those in $(self.source-types). And non-composing
+ # generators with only a single source type may
+ # receive multiple sources with all of them of the
+ # type listed in $(self.source-types).
+ : project name ?
+ : property-set # Properties to be used for all actions created here.
+ )
+ {
+ local result ;
+ # If this is a 1->1 transformation, apply it to all consumed targets in
+ # order.
+ if ! $(self.source-types[2]) && ! $(self.composing)
+ {
+ for local r in $(consumed)
+ {
+ result += [ generated-targets $(r) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ }
+ else if $(consumed)
+ {
+ result += [ generated-targets $(consumed) : $(property-set) :
+ $(project) $(name) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Determine target name from fullname (maybe including path components)
+ # Place optional prefix and postfix around basename
+ #
+ rule determine-target-name ( fullname : prefix ? : postfix ? )
+ {
+ # See if we need to add directory to the target name.
+ local dir = $(fullname:D) ;
+ local name = $(fullname:B) ;
+
+ name = $(prefix:E=)$(name) ;
+ name = $(name)$(postfix:E=) ;
+
+ if $(dir)
+ # Never append '..' to target path.
+ && ! [ MATCH .*(\\.\\.).* : $(dir) ]
+ && ! [ path.is-rooted $(dir) ]
+ {
+ # Relative path is always relative to the source directory. Retain
+ # it, so that users can have files with the same name in two
+ # different subdirectories.
+ name = $(dir)/$(name) ;
+ }
+ return $(name) ;
+ }
+
+ # Determine the name of the produced target from the names of the sources.
+ #
+ rule determine-output-name ( sources + )
+ {
+        # The simple case is when the name of a source has a single dot. Then,
+        # we take the part before the dot. Several dots can be caused by:
+        # - using a source file like a.host.cpp, or
+        # - a type whose suffix has a dot. Say, we can have a type 'host_cpp'
+        #   with extension 'host.cpp'.
+ # In the first case, we want to take the part up to the last dot. In the
+ # second case -- not sure, but for now take the part up to the last dot
+ # too.
+ name = [ utility.basename [ $(sources[1]).name ] ] ;
+ for local s in $(sources[2-])
+ {
+ if [ utility.basename [ $(s).name ] ] != $(name)
+ {
+ import errors : error : errors.error ;
+ errors.error "$(self.id): source targets have different names: cannot determine target name" ;
+ }
+ }
+ return [ determine-target-name [ $(sources[1]).name ] ] ;
+ }
+
+ # Constructs targets that are created after consuming 'sources'. The result
+ # will be the list of virtual-target, which has the same length as the
+ # 'target-types' attribute and with corresponding types.
+ #
+ # When 'name' is empty, all source targets must have the same 'name'
+ # attribute value, which will be used instead of the 'name' argument.
+ #
+ # The 'name' attribute value for each generated target will be equal to the
+ # 'name' parameter if there is no name pattern for this type. Otherwise, the
+ # '%' symbol in the name pattern will be replaced with the 'name' parameter
+ # to obtain the 'name' attribute.
+ #
+ # For example, if targets types are T1 and T2 (with name pattern "%_x"),
+ # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created
+ # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually
+ # determines the basename of a file.
+ #
+ # Note that this pattern mechanism has nothing to do with implicit patterns
+ # in make. It is a way to produce a target whose name is different than the
+ # name of its source.
+ #
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if ! $(name)
+ {
+ name = [ determine-output-name $(sources) ] ;
+ }
+
+ # Assign an action for each target.
+ local action = [ action-class ] ;
+ local a = [ class.new $(action) $(sources) : $(self.rule-name) :
+ $(property-set) ] ;
+
+ # Create generated target for each target type.
+ local targets ;
+ local pre = $(self.name-prefix) ;
+ local post = $(self.name-postfix) ;
+ for local t in $(self.target-types)
+ {
+ local generated-name = $(pre[1])$(name:BS)$(post[1]) ;
+ generated-name = $(generated-name:R=$(name:D)) ;
+ pre = $(pre[2-]) ;
+ post = $(post[2-]) ;
+
+ targets += [ class.new file-target $(generated-name) : $(t) :
+ $(project) : $(a) ] ;
+ }
+
+ return [ sequence.transform virtual-target.register : $(targets) ] ;
+ }
+
+ # Attempts to convert 'sources' to targets of types that this generator can
+ # handle. The intention is to produce the set of targets that can be used
+ # when the generator is run.
+ #
+ rule convert-to-consumable-types
+ (
+ project name ?
+ : property-set
+ : sources +
+ : only-one ? # Convert 'source' to only one of the source types. If
+                        # there is more than one possibility, report an error.
+ )
+ {
+ local _consumed ;
+ local missing-types ;
+
+ if $(sources[2])
+ {
+ # Do not know how to handle several sources yet. Just try to pass
+            # the request to another generator.
+ missing-types = $(self.source-types) ;
+ }
+ else
+ {
+ local temp = [ consume-directly $(sources) ] ;
+ if $(temp[1])
+ {
+ _consumed = $(temp[1]) ;
+ }
+ missing-types = $(temp[2-]) ;
+ }
+
+ # No need to search for transformation if some source type has consumed
+ # source and no more source types are needed.
+ if $(only-one) && $(_consumed)
+ {
+ missing-types = ;
+ }
+
+ # TODO: we should check that only one source type is created if
+ # 'only-one' is true.
+
+ if $(missing-types)
+ {
+ local transformed = [ generators.construct-types $(project) $(name)
+ : $(missing-types) : $(property-set) : $(sources) ] ;
+
+ # Add targets of right type to 'consumed'. Add others to 'bypassed'.
+ # The 'generators.construct' rule has done its best to convert
+ # everything to the required type. There is no need to rerun it on
+ # targets of different types.
+
+ # NOTE: ignoring usage requirements.
+ for local t in $(transformed[2-])
+ {
+ if [ $(t).type ] in $(missing-types)
+ {
+ _consumed += $(t) ;
+ }
+ }
+ }
+
+ return [ sequence.unique $(_consumed) ] ;
+ }
+
+ # Converts several files to consumable types. Called for composing
+ # generators only.
+ #
+ rule convert-multiple-sources-to-consumable-types ( project : property-set :
+ sources * )
+ {
+ local result ;
+ # We process each source one-by-one, trying to convert it to a usable
+ # type.
+ if ! $(self.source-types)
+ {
+            # Anything is acceptable
+ return $(sources) ;
+ }
+ else
+ {
+ local acceptible-types = [ sequence.unique
+ [ sequence.transform type.all-derived : $(self.source-types) ] ] ;
+ for local source in $(sources)
+ {
+ if ! [ $(source).type ] in $(acceptible-types)
+ {
+ local transformed = [ generators.construct-types $(project)
+ : $(self.source-types) : $(property-set) : $(source) ] ;
+ for local t in $(transformed[2-])
+ {
+ if [ $(t).type ] in $(self.source-types)
+ {
+ result += $(t) ;
+ }
+ }
+ if ! $(transformed)
+ {
+ generators.dout [ indent ] " failed to convert " $(source) ;
+ }
+ }
+ else
+ {
+ result += $(source) ;
+ }
+ }
+ return [ sequence.unique $(result) : stable ] ;
+ }
+ }
+
+ rule consume-directly ( source )
+ {
+ local real-source-type = [ $(source).type ] ;
+
+ # If there are no source types, we can consume anything.
+ local source-types = $(self.source-types) ;
+ source-types ?= $(real-source-type) ;
+
+ local result = "" ;
+ local missing-types ;
+
+ for local st in $(source-types)
+ {
+            # The 'source' is of the right type already.
+ if $(real-source-type) = $(st) || [ type.is-derived
+ $(real-source-type) $(st) ]
+ {
+ result = $(source) ;
+ }
+ else
+ {
+ missing-types += $(st) ;
+ }
+ }
+ return $(result) $(missing-types) ;
+ }
+
+    # Returns the class to be used for actions. The default implementation
+    # returns "action".
+ #
+ rule action-class ( )
+ {
+ return "action" ;
+ }
+}
+
+
+# Registers a new generator instance 'g'.
+#
+rule register ( g )
+{
+ .all-generators += $(g) ;
+
+    # A generator can produce several targets of the same type. We want a unique
+    # occurrence of that generator in .generators.$(t) in that case; otherwise,
+    # it would be tried twice and we would get a false ambiguity.
+ for local t in [ sequence.unique [ $(g).target-types ] ]
+ {
+ .generators.$(t) += $(g) ;
+ }
+
+ # Update the set of generators for toolset.
+
+    # TODO: should we check that a generator with this id is not already
+    # registered? For example, the fop.jam module intentionally declares two
+    # generators with the same id, so such a check would break it.
+ local id = [ $(g).id ] ;
+
+ # Some generators have multiple periods in their name, so a simple $(id:S=)
+ # will not generate the right toolset name. E.g. if id = gcc.compile.c++,
+ # then .generators-for-toolset.$(id:S=) will append to
+ # .generators-for-toolset.gcc.compile, which is a separate value from
+ # .generators-for-toolset.gcc. Correcting this makes generator inheritance
+ # work properly. See also inherit-generators in the toolset module.
+ local base = $(id) ;
+ while $(base:S)
+ {
+ base = $(base:B) ;
+ }
+ .generators-for-toolset.$(base) += $(g) ;
+
+
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077
+ # mailing list thread for an even more advanced idea of how we could convert
+ # Boost Build's Jamfile processing, target selection and generator selection
+ # into separate steps which would prevent these caches from ever being
+ # invalidated.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate-extendable-viable-source-target-type-cache ;
+}
+
+
+# Creates a new non-composing 'generator' class instance and registers it.
+# Returns the created instance. Rationale: the instance is returned so that it
+# is possible to first register a generator and then call its 'run' method,
+# bypassing the whole generator selection process.
+#
+rule register-standard ( id : source-types * : target-types + : requirements * )
+{
+ local g = [ new generator $(id) : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
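+# For example, a toolset module might register its compiler roughly like this
+# (the 'hypothetical' toolset and rule name are illustrative only):
+#
+#   generators.register-standard hypothetical.compile.c++ : CPP : OBJ
+#       : <toolset>hypothetical ;
+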
+
+# Creates a new composing 'generator' class instance and registers it.
+#
+rule register-composing ( id : source-types * : target-types + : requirements *
+ )
+{
+ local g = [ new generator $(id) true : $(source-types) : $(target-types) :
+ $(requirements) ] ;
+ register $(g) ;
+ return $(g) ;
+}
+
+
+# Returns all generators belonging to the given 'toolset', i.e. whose ids are
+# '$(toolset).<something>'.
+#
+rule generators-for-toolset ( toolset )
+{
+ return $(.generators-for-toolset.$(toolset)) ;
+}
+
+
+# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when
+# searching for generators that could produce a target of a certain type, both
+# those generators are among viable generators, the overridden generator is
+# immediately discarded.
+#
+# The overridden generators are discarded immediately after computing the list
+# of viable generators but before running any of them.
+#
+rule override ( overrider-id : overridee-id )
+{
+ .override.$(overrider-id) += $(overridee-id) ;
+}
+
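+# For instance (generator ids are illustrative only), preferring a specialized
+# linking generator over a generic one whenever both are viable:
+#
+#   generators.override hypothetical.big-link : hypothetical.link ;
+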
+
+# Returns a list of source types which can possibly be converted to
+# 'target-type' by some chain of generator invocations.
+#
+# More formally, takes all generators for 'target-type' and returns a union of
+# source types for those generators and result of calling itself recursively on
+# source types.
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given type.
+#
+local rule viable-source-types-real ( target-type )
+{
+ local result ;
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ local t0 = [ type.all-bases $(target-type) ] ;
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ local t = $(t0) ;
+
+ while $(t)
+ {
+ # Find all generators for the current type. Unlike
+ # 'find-viable-generators' we do not care about the property-set.
+ local generators = $(.generators.$(t[1])) ;
+ t = $(t[2-]) ;
+
+ while $(generators)
+ {
+ local g = $(generators[1]) ;
+ generators = $(generators[2-]) ;
+
+ if ! [ $(g).source-types ]
+ {
+ # Empty source types -- everything can be accepted.
+ result = * ;
+ # This will terminate this loop.
+ generators = ;
+ # This will terminate the outer loop.
+ t = ;
+ }
+
+ for local source-type in [ $(g).source-types ]
+ {
+ if ! $(source-type) in $(result)
+ {
+ # If a generator accepts a 'source-type' it will also
+ # happily accept any type derived from it.
+ for local n in [ type.all-derived $(source-type) ]
+ {
+ if ! $(n) in $(result)
+ {
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if ! $(n) in $(t0)
+ {
+ t += $(n) ;
+ }
+ result += $(n) ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-real'.
+#
+rule viable-source-types ( target-type )
+{
+ local key = .vst.$(target-type) ;
+ if ! $($(key))
+ {
+ .vst-cached-types += $(target-type) ;
+ local v = [ viable-source-types-real $(target-type) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns the list of source types which, when passed to the 'run' method of
+# 'generator', have some chance of being eventually used (probably after
+# conversion by other generators).
+#
+# Returns '*' in case any type should be considered a viable source type for the
+# given generator.
+#
+rule viable-source-types-for-generator-real ( generator )
+{
+ local source-types = [ $(generator).source-types ] ;
+ if ! $(source-types)
+ {
+ # If generator does not specify any source types, it might be a special
+ # generator like builtin.lib-generator which just relays to other
+ # generators. Return '*' to indicate that any source type is possibly
+ # OK, since we do not know for sure.
+ return * ;
+ }
+ else
+ {
+ local result ;
+ while $(source-types)
+ {
+ local s = $(source-types[1]) ;
+ source-types = $(source-types[2-]) ;
+ local viable-sources = [ generators.viable-source-types $(s) ] ;
+ if $(viable-sources) = *
+ {
+ result = * ;
+ source-types = ; # Terminate the loop.
+ }
+ else
+ {
+ result += [ type.all-derived $(s) ] $(viable-sources) ;
+ }
+ }
+ return [ sequence.unique $(result) ] ;
+ }
+}
+
+
+# Helper rule, caches the result of 'viable-source-types-for-generator-real'.
+#
+local rule viable-source-types-for-generator ( generator )
+{
+ local key = .vstg.$(generator) ;
+ if ! $($(key))
+ {
+ .vstg-cached-generators += $(generator) ;
+ local v = [ viable-source-types-for-generator-real $(generator) ] ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ $(key) = $(v) ;
+ }
+
+ if $($(key)) != none
+ {
+ return $($(key)) ;
+ }
+}
+
+
+# Returns usage requirements + list of created targets.
+#
+local rule try-one-generator-really ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local targets =
+ [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
+
+ local usage-requirements ;
+ local success ;
+
+ generators.dout [ indent ] returned $(targets) ;
+
+ if $(targets)
+ {
+ success = true ;
+
+ if [ class.is-a $(targets[1]) : property-set ]
+ {
+ usage-requirements = $(targets[1]) ;
+ targets = $(targets[2-]) ;
+ }
+ else
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ }
+
+ generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
+ generators.dout [ indent ] " " $(targets) ;
+ if $(usage-requirements)
+ {
+        generators.dout [ indent ] "  with usage requirements:" $(usage-requirements) ;
+ }
+
+ if $(success)
+ {
+ return $(usage-requirements) $(targets) ;
+ }
+}
+
+
+# Checks if generator invocation can be pruned, because it is guaranteed to
+# fail. If so, quickly returns an empty list. Otherwise, calls
+# try-one-generator-really.
+#
+local rule try-one-generator ( project name ? : generator : target-type
+ : property-set : sources * )
+{
+ local source-types ;
+ for local s in $(sources)
+ {
+ source-types += [ $(s).type ] ;
+ }
+ local viable-source-types = [ viable-source-types-for-generator $(generator)
+ ] ;
+
+ if $(source-types) && $(viable-source-types) != * &&
+ ! [ set.intersection $(source-types) : $(viable-source-types) ]
+ {
+ local id = [ $(generator).id ] ;
+ generators.dout [ indent ] " ** generator '$(id)' pruned" ;
+ #generators.dout [ indent ] "source-types" '$(source-types)' ;
+ #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
+ }
+ else
+ {
+ return [ try-one-generator-really $(project) $(name) : $(generator) :
+ $(target-type) : $(property-set) : $(sources) ] ;
+ }
+}
+
+
+rule construct-types ( project name ? : target-types + : property-set
+ : sources + )
+{
+ local result ;
+ local usage-requirements = [ property-set.empty ] ;
+ for local t in $(target-types)
+ {
+ local r = [ construct $(project) $(name) : $(t) : $(property-set) :
+ $(sources) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ }
+ }
+    # TODO: have to introduce a parameter controlling whether several types can
+    # be matched and add appropriate checks.
+
+ # TODO: need to review the documentation for 'construct' to see if it should
+    # return $(source) even if nothing can be done with it. Current docs seem
+ # to imply that, contrary to the behaviour.
+ if $(result)
+ {
+ return $(usage-requirements) $(result) ;
+ }
+ else
+ {
+ return $(usage-requirements) $(sources) ;
+ }
+}
+
+
+# Ensures all 'targets' have their type. If this is not so, exits with an error.
+#
+local rule ensure-type ( targets * )
+{
+ for local t in $(targets)
+ {
+ if ! [ $(t).type ]
+ {
+ import errors ;
+ errors.error "target" [ $(t).str ] "has no type" ;
+ }
+ }
+}
+
+
+# Returns generators which can be used to construct target of specified type
+# with specified properties. Uses the following algorithm:
+# - iterates over requested target-type and all its bases (in the order returned
+# by type.all-bases).
+# - for each type find all generators that generate that type and whose
+# requirements are satisfied by properties.
+# - if the set of generators is not empty, returns that set.
+#
+# Note: this algorithm explicitly ignores generators for base classes if there
+# is at least one generator for the requested target-type.
+#
+local rule find-viable-generators-aux ( target-type : property-set )
+{
+ # Select generators that can create the required target type.
+ local viable-generators = ;
+
+ import type ;
+ local t = $(target-type) ;
+
+ if $(.debug)
+ {
+ generators.dout [ indent ] find-viable-generators target-type= $(target-type)
+ property-set= [ $(property-set).as-path ] ;
+ generators.dout [ indent ] "trying type" $(target-type) ;
+ }
+
+ local generators = $(.generators.$(target-type)) ;
+ if $(generators)
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] "there are generators for this type" ;
+ }
+ }
+ else
+ {
+ local t = [ type.base $(target-type) ] ;
+
+ # Get the list of generators for the requested type. If no generator is
+ # registered, try base type, and so on.
+ while $(t)
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] "trying type" $(t) ;
+ }
+ if $(.generators.$(t))
+ {
+ generators.dout [ indent ] "there are generators for this type" ;
+ generators = $(.generators.$(t)) ;
+
+ # We are here because there were no generators found for
+ # target-type but there are some generators for its base type.
+ # We will try to use them, but they will produce targets of
+ # base type, not of 'target-type'. So, we clone the generators
+ # and modify the list of target types.
+ local generators2 ;
+ for local g in $(generators)
+ {
+ # generators.register adds a generator to the list of
+ # generators for toolsets, which is a bit strange, but
+ # should work. That list is only used when inheriting a
+ # toolset, which should have been done before running
+ # generators.
+ generators2 += [ $(g).clone-and-change-target-type $(t) :
+ $(target-type) ] ;
+ generators.register $(generators2[-1]) ;
+ }
+ generators = $(generators2) ;
+ t = ;
+ }
+ else
+ {
+ t = [ type.base $(t) ] ;
+ }
+ }
+ }
+
+ for local g in $(generators)
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
+ }
+
+ if [ $(g).match-rank $(property-set) ]
+ {
+ if $(.debug)
+ {
+ generators.dout [ indent ] " is viable" ;
+ }
+ viable-generators += $(g) ;
+ }
+ }
+
+ return $(viable-generators) ;
+}
+
+
+rule find-viable-generators ( target-type : property-set )
+{
+ local key = $(target-type).$(property-set) ;
+ local l = $(.fv.$(key)) ;
+ if ! $(l)
+ {
+ l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ;
+ if ! $(l)
+ {
+ l = none ;
+ }
+ .fv.$(key) = $(l) ;
+ }
+
+ if $(l) = none
+ {
+ l = ;
+ }
+
+ local viable-generators ;
+ for local g in $(l)
+ {
+ # Avoid trying the same generator twice on different levels.
+ if ! $(g) in $(.active-generators)
+ {
+ viable-generators += $(g) ;
+ }
+ else
+ {
+            generators.dout [ indent ] " generator " [ $(g).id ] "is active, discarding" ;
+ }
+ }
+
+ # Generators which override 'all'.
+ local all-overrides ;
+    # Generators which are overridden.
+ local overriden-ids ;
+ for local g in $(viable-generators)
+ {
+ local id = [ $(g).id ] ;
+ local this-overrides = $(.override.$(id)) ;
+ overriden-ids += $(this-overrides) ;
+ if all in $(this-overrides)
+ {
+ all-overrides += $(g) ;
+ }
+ }
+ if $(all-overrides)
+ {
+ viable-generators = $(all-overrides) ;
+ }
+ local result ;
+ for local g in $(viable-generators)
+ {
+ if ! [ $(g).id ] in $(overriden-ids)
+ {
+ result += $(g) ;
+ }
+ }
+
+ return $(result) ;
+}
+
+
+.construct-stack = ;
+
+
+# Attempts to construct a target by finding viable generators, running them and
+# selecting the dependency graph.
+#
+local rule construct-really ( project name ? : target-type : property-set :
+ sources * )
+{
+ viable-generators = [ find-viable-generators $(target-type) :
+ $(property-set) ] ;
+
+ generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
+ " viable generators" ;
+
+ local result ;
+ local generators-that-succeeded ;
+ for local g in $(viable-generators)
+ {
+ # This variable will be restored on exit from this scope.
+ local .active-generators = $(g) $(.active-generators) ;
+
+ local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type)
+ : $(property-set) : $(sources) ] ;
+
+ if $(r)
+ {
+ generators-that-succeeded += $(g) ;
+ if $(result)
+ {
+ ECHO "Error: ambiguity found when searching for best transformation" ;
+ ECHO "Trying to produce type '$(target-type)' from: " ;
+ for local s in $(sources)
+ {
+ ECHO " - " [ $(s).str ] ;
+ }
+ ECHO "Generators that succeeded:" ;
+ for local g in $(generators-that-succeeded)
+ {
+ ECHO " - " [ $(g).id ] ;
+ }
+ ECHO "First generator produced: " ;
+ for local t in $(result[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ ECHO "Second generator produced: " ;
+ for local t in $(r[2-])
+ {
+ ECHO " - " [ $(t).str ] ;
+ }
+ EXIT ;
+ }
+ else
+ {
+ result = $(r) ;
+ }
+ }
+ }
+
+ return $(result) ;
+}
+
+
+# Attempts to create a target of 'target-type' with 'properties' from 'sources'.
+# The 'sources' are treated as a collection of *possible* ingredients, i.e.
+# there is no obligation to consume them all.
+#
+# Returns a list of targets. When this invocation is the first instance of
+# 'construct' on the stack, returns only targets of the requested 'target-type';
+# otherwise, also returns unused sources and additionally generated targets.
+#
+# If 'top-level' is set, does not suppress generators that are already
+# used in the stack. This may be useful in cases where a generator
+# has to build a metatarget -- for example a target corresponding to a
+# built tool.
+#
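+# As a rough illustration (the target name and source list here are
+# hypothetical), a call such as
+#   [ construct $(project) hello : EXE : $(property-set) : $(cpp-sources) ]
+# would return a usage-requirements property-set followed by the generated
+# virtual targets.
+#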
+rule construct ( project name ? : target-type : property-set * : sources * : top-level ? )
+{
+    local saved-active ;
+ if $(top-level)
+ {
+ saved-active = $(.active-generators) ;
+ .active-generators = ;
+ }
+
+    if ! $(.construct-stack)
+ {
+ ensure-type $(sources) ;
+ }
+
+ .construct-stack += 1 ;
+
+ increase-indent ;
+
+ if $(.debug)
+ {
+ generators.dout [ indent ] "*** construct" $(target-type) ;
+
+ for local s in $(sources)
+ {
+ generators.dout [ indent ] " from" $(s) ;
+ }
+ generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
+ }
+
+ local result = [ construct-really $(project) $(name) : $(target-type) :
+ $(property-set) : $(sources) ] ;
+
+ decrease-indent ;
+
+ .construct-stack = $(.construct-stack[2-]) ;
+
+ if $(top-level)
+ {
+ .active-generators = $(saved-active) ;
+ }
+
+ return $(result) ;
+}
+
+# Given 'result', obtained from some generator or generators.construct, adds
+# 'raw-properties' as usage requirements to it. If the result already contains
+# usage requirements -- that is, the first element of the result is an instance
+# of the property-set class -- the existing usage requirements and
+# 'raw-properties' are combined.
+#
+rule add-usage-requirements ( result * : raw-properties * )
+{
+ if $(result)
+ {
+ if [ class.is-a $(result[1]) : property-set ]
+ {
+ return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+ }
+ else
+ {
+ return [ property-set.create $(raw-properties) ] $(result) ;
+ }
+ }
+}
+
+rule dump ( )
+{
+ for local g in $(.all-generators)
+ {
+ ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
+ }
+}
+
diff --git a/tools/build/src/build/generators.py b/tools/build/src/build/generators.py
new file mode 100644
index 0000000000..dd195a840f
--- /dev/null
+++ b/tools/build/src/build/generators.py
@@ -0,0 +1,1097 @@
+# Status: being ported by Vladimir Prus
+# Base revision: 48649
+# TODO: replace the logging with dout
+
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Manages 'generators' --- objects which can do transformation between different
+# target types and contain the algorithm for finding a transformation from
+# sources to targets.
+#
+# The main entry point to this module is the generators.construct rule. It is
+# given a list of source targets, a desired target type and a set of properties.
+# It starts by selecting 'viable generators', which have any chance of producing
+# the desired target type with the required properties. Generators are ranked and
+# a set of the most specific ones is selected.
+#
+# The most specific generators have their 'run' methods called, with the properties
+# and list of sources. Each one selects targets which can be directly consumed, and
+# tries to convert the remaining ones to the types it can consume. This is done
+# by recursively calling 'construct' with all consumable types.
+#
+# If the generator has collected all the targets it needs, it creates targets
+# corresponding to the result, and returns them. When all generators have been run,
+# the results of one of them are selected and returned as the result.
+#
+# It's quite possible that 'construct' returns more targets than it was asked for.
+# For example, it may be asked for target type EXE, but the only generator found
+# produces both EXE and TDS (debug information) files. The extra target will be returned.
+#
+# Likewise, when a generator tries to convert sources to consumable types, it can get
+# more targets than it was asked for.
+# Boost.Build attempts to convert them to requested types, and attempts as early as
+# possible. Specifically, this is done after invoking each generator. (Later I'll
+# document the rationale for trying extra target conversion at that point).
+#
+# That early conversion is not always desirable. Suppose a generator got a source of
+# type Y and must consume one target of type X_1 and one target of type X_2.
+# When converting Y to X_1, an extra target of type Y_2 is created. We should not try to
+# convert it to type X_1, because if we do so, the generator will get two targets
+# of type X_1, and will be at a loss as to which one to use. Because of that, the
+# 'construct' rule has a parameter, telling if multiple targets can be returned. If
+# the parameter is false, conversion of extra targets is not performed.
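+#
+# As a rough sketch of that flow (the generator ids, types and arguments below
+# are purely illustrative, not something this module defines):
+#
+#   register_standard('mytool.compile', ['CPP'], ['OBJ'])
+#   register_standard('mytool.link', ['OBJ'], ['EXE'])
+#   # A later construct(project, 'hello', 'EXE', prop_set, [cpp_target]) call
+#   # could then chain the two generators: CPP -> OBJ -> EXE.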
+
+
+import re
+import cStringIO
+import os.path
+
+from virtual_target import Subvariant
+import virtual_target, type, property_set, property
+from b2.util.logger import *
+from b2.util.utility import *
+from b2.util import set
+from b2.util.sequence import unique
+import b2.util.sequence as sequence
+from b2.manager import get_manager
+import b2.build.type
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
+ global __overrides, __active_generators
+ global __viable_generators_cache, __viable_source_types_cache
+ global __vstg_cached_generators, __vst_cached_types
+
+ __generators = {}
+ __type_to_generators = {}
+ __generators_for_toolset = {}
+ __overrides = {}
+
+ # TODO: can these be global?
+ __construct_stack = []
+ __viable_generators_cache = {}
+ __viable_source_types_cache = {}
+ __active_generators = []
+
+ __vstg_cached_generators = []
+ __vst_cached_types = []
+
+reset ()
+
+_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
+_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?')
+
+
+__debug = None
+__indent = ""
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-generators" in bjam.variable("ARGV")
+ return __debug
+
+def increase_indent():
+ global __indent
+ __indent += " "
+
+def decrease_indent():
+ global __indent
+ __indent = __indent[0:-4]
+
+
+# Updates cached viable source target type information as needed after a new
+# derived target type gets added. This is needed because if a target type is a
+# viable source target type for some generator then all of the target type's
+# derived target types are automatically viable as source target types for the
+# same generator. Does nothing if a non-derived target type is passed to it.
+#
+def update_cached_information_with_a_new_type(type):
+
+ base_type = b2.build.type.base(type)
+
+ if base_type:
+ for g in __vstg_cached_generators:
+ if base_type in __viable_source_types_cache.get(g, []):
+ __viable_source_types_cache[g].append(type)
+
+ for t in __vst_cached_types:
+ if base_type in __viable_source_types_cache.get(t, []):
+ __viable_source_types_cache[t].append(type)
+
+# Clears cached viable source target type information except for target types
+# and generators with all source types listed as viable. Should be called when
+# something invalidates those cached values by possibly causing some new source
+# types to become viable.
+#
+def invalidate_extendable_viable_source_target_type_cache():
+
+ global __vstg_cached_generators
+ generators_with_cached_source_types = __vstg_cached_generators
+ __vstg_cached_generators = []
+
+ for g in generators_with_cached_source_types:
+ if __viable_source_types_cache.has_key(g):
+ if __viable_source_types_cache[g] == ["*"]:
+ __vstg_cached_generators.append(g)
+ else:
+ del __viable_source_types_cache[g]
+
+ global __vst_cached_types
+ types_with_cached_sources_types = __vst_cached_types
+ __vst_cached_types = []
+ for t in types_with_cached_sources_types:
+ if __viable_source_types_cache.has_key(t):
+ if __viable_source_types_cache[t] == ["*"]:
+ __vst_cached_types.append(t)
+ else:
+ del __viable_source_types_cache[t]
+
+def dout(message):
+ if debug():
+ print __indent + message
+
+class Generator:
+ """ Creates a generator.
+ manager: the build manager.
+ id: identifies the generator
+
+ rule: the rule which sets up build actions.
+
+ composing: whether generator processes each source target in
+ turn, converting it to required types.
+ Ordinary generators pass all sources together to
+                        recursive generators.construct_types call.
+
+ source_types (optional): types that this generator can handle
+
+ target_types_and_names: types the generator will create and, optionally, names for
+ created targets. Each element should have the form
+ type["(" name-pattern ")"]
+ for example, obj(%_x). Name of generated target will be found
+ by replacing % with the name of source, provided explicit name
+ was not specified.
+
+ requirements (optional)
+
+ NOTE: all subclasses must have a similar signature for clone to work!
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
+ assert(not isinstance(source_types, str))
+ assert(not isinstance(target_types_and_names, str))
+ self.id_ = id
+ self.composing_ = composing
+ self.source_types_ = source_types
+ self.target_types_and_names_ = target_types_and_names
+ self.requirements_ = requirements
+
+ self.target_types_ = []
+ self.name_prefix_ = []
+ self.name_postfix_ = []
+
+ for e in target_types_and_names:
+ # Create three parallel lists: one with the list of target types,
+ # and two other with prefixes and postfixes to be added to target
+ # name. We use parallel lists for prefix and postfix (as opposed
+ # to mapping), because given target type might occur several times,
+ # for example "H H(%_symbols)".
+ m = _re_separate_types_prefix_and_postfix.match (e)
+
+ if not m:
+ raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))
+
+ target_type = m.group (1)
+ if not target_type: target_type = ''
+ prefix = m.group (3)
+ if not prefix: prefix = ''
+ postfix = m.group (4)
+ if not postfix: postfix = ''
+
+ self.target_types_.append (target_type)
+ self.name_prefix_.append (prefix)
+ self.name_postfix_.append (postfix)
+
+ for x in self.source_types_:
+ type.validate (x)
+
+ for x in self.target_types_:
+ type.validate (x)
+
+ def clone (self, new_id, new_toolset_properties):
+        """ Returns another generator which differs from $(self) in
+            - id
+            - value of the <toolset> feature in properties
+ """
+ return self.__class__ (new_id,
+ self.composing_,
+ self.source_types_,
+ self.target_types_and_names_,
+ # Note: this does not remove any subfeatures of <toolset>
+ # which might cause problems
+ property.change (self.requirements_, '<toolset>') + new_toolset_properties)
+
+ def clone_and_change_target_type(self, base, type):
+        """Creates another generator that is the same as $(self), except that
+        if 'base' is in the target types of $(self), 'type' will be in the
+        target types of the new generator."""
+ target_types = []
+ for t in self.target_types_and_names_:
+ m = _re_match_type.match(t)
+ assert m
+
+ if m.group(1) == base:
+ if m.group(2):
+ target_types.append(type + m.group(2))
+ else:
+ target_types.append(type)
+ else:
+ target_types.append(t)
+
+ return self.__class__(self.id_, self.composing_,
+ self.source_types_,
+ target_types,
+ self.requirements_)
+
+
+ def id(self):
+ return self.id_
+
+ def source_types (self):
+        """ Returns the list of target types the generator accepts.
+ """
+ return self.source_types_
+
+ def target_types (self):
+ """ Returns the list of target types that this generator produces.
+ It is assumed to be always the same -- i.e. it cannot change depending
+            on the list of sources.
+ """
+ return self.target_types_
+
+ def requirements (self):
+ """ Returns the required properties for this generator. Properties
+            in the returned set must be present in the build properties if this
+            generator is to be used. If the result has a grist-only element,
+            the build properties must include some value of that feature.
+ """
+ return self.requirements_
+
+ def match_rank (self, ps):
+ """ Returns true if the generator can be run with the specified
+ properties.
+ """
+ # See if generator's requirements are satisfied by
+ # 'properties'. Treat a feature name in requirements
+ # (i.e. grist-only element), as matching any value of the
+ # feature.
+ all_requirements = self.requirements ()
+
+ property_requirements = []
+ feature_requirements = []
+        # This uses strings because generator requirements allow
+ # the '<feature>' syntax without value and regular validation
+ # is not happy about that.
+ for r in all_requirements:
+ if get_value (r):
+ property_requirements.append (r)
+
+ else:
+ feature_requirements.append (r)
+
+ return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
+ and all(ps.get(get_grist(s)) for s in feature_requirements)
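+        # For illustration (the property values here are hypothetical): with
+        # requirements ['<toolset>gcc', '<variant>'], a property set containing
+        # <toolset>gcc and <variant>debug matches, while one containing
+        # <toolset>msvc does not.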
+
+ def run (self, project, name, prop_set, sources):
+ """ Tries to invoke this generator on the given sources. Returns a
+ list of generated targets (instances of 'virtual-target').
+
+ project: Project for which the targets are generated.
+
+ name: Determines the name of 'name' attribute for
+ all generated targets. See 'generated_targets' method.
+
+ prop_set: Desired properties for generated targets.
+
+ sources: Source targets.
+ """
+
+ if project.manager ().logger ().on ():
+ project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
+ project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
+
+ if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
+ raise BaseException ("Unsupported source/source_type combination")
+
+ # We don't run composing generators if no name is specified. The reason
+ # is that composing generator combines several targets, which can have
+ # different names, and it cannot decide which name to give for produced
+ # target. Therefore, the name must be passed.
+ #
+        # This, in effect, means that composing generators are runnable only
+        # at the top level of the transformation graph, or if a name is passed
+        # explicitly. Thus, we disallow composing generators in the middle. For
+        # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
+        # won't be allowed (the OBJ -> STATIC_LIB generator is composing).
+ if not self.composing_ or name:
+ return self.run_really (project, name, prop_set, sources)
+ else:
+ return []
+
+ def run_really (self, project, name, prop_set, sources):
+
+ # consumed: Targets that this generator will consume directly.
+ # bypassed: Targets that can't be consumed and will be returned as-is.
+
+ if self.composing_:
+ (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
+ else:
+ (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources)
+
+ result = []
+ if consumed:
+ result = self.construct_result (consumed, project, name, prop_set)
+ result.extend (bypassed)
+
+ if result:
+ if project.manager ().logger ().on ():
+ project.manager ().logger ().log (__name__, " SUCCESS: ", result)
+
+ else:
+ project.manager ().logger ().log (__name__, " FAILURE")
+
+ return result
+
+ def construct_result (self, consumed, project, name, prop_set):
+ """ Constructs the dependency graph that will be returned by this
+ generator.
+            consumed: Already prepared list of consumable targets.
+                      If the generator requires several source files, this will
+                      contain exactly len(self.source_types_) targets with
+                      matching types.
+                      Otherwise, it might contain several targets with the type
+                      self.source_types_[0].
+            project:
+            name:
+            prop_set: Properties to be used for all actions created here.
+ """
+ result = []
+ # If this is 1->1 transformation, apply it to all consumed targets in order.
+ if len (self.source_types_) < 2 and not self.composing_:
+
+ for r in consumed:
+ result.extend (self.generated_targets ([r], prop_set, project, name))
+
+ else:
+
+ if consumed:
+ result.extend (self.generated_targets (consumed, prop_set, project, name))
+
+ return result
+
+ def determine_target_name(self, fullname):
+ # Determine target name from fullname (maybe including path components)
+ # Place optional prefix and postfix around basename
+
+ dir = os.path.dirname(fullname)
+ name = os.path.basename(fullname)
+ idx = name.find(".")
+ if idx != -1:
+ name = name[:idx]
+
+ if dir and not ".." in dir and not os.path.isabs(dir):
+ # Relative path is always relative to the source
+ # directory. Retain it, so that users can have files
+            # with the same name in two different subdirectories.
+ name = dir + "/" + name
+
+ return name
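+        # For example: 'sub/foo.cpp' yields 'sub/foo', while an absolute path
+        # such as '/tmp/foo.cpp' yields just 'foo'.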
+
+ def determine_output_name(self, sources):
+ """Determine the name of the produced target from the
+ names of the sources."""
+
+        # The simple case is when the name of the source has a single dot.
+        # Then, we take the part before the dot. Several dots can be caused by:
+        # - Using a source file like a.host.cpp
+        # - A type whose suffix has a dot, say a type 'host_cpp' with the
+        #   suffix 'host.cpp'.
+        # In the first case, we want to take the part up to the last dot. In
+        # the second case -- not sure, but for now take the part up to the
+        # last dot too.
+ name = os.path.splitext(sources[0].name())[0]
+
+ for s in sources[1:]:
+            n2 = os.path.splitext(s.name())[0]
+ if n2 != name:
+ get_manager().errors()(
+ "%s: source targets have different names: cannot determine target name"
+ % (self.id_))
+
+ # Names of sources might include directory. We should strip it.
+ return self.determine_target_name(sources[0].name())
+
+
+ def generated_targets (self, sources, prop_set, project, name):
+ """ Constructs targets that are created after consuming 'sources'.
+            The result will be a list of virtual targets, which is the same
+            length as the 'target_types' attribute and with corresponding types.
+
+ When 'name' is empty, all source targets must have the same value of
+ the 'name' attribute, which will be used instead of the 'name' argument.
+
+ The value of 'name' attribute for each generated target will be equal to
+ the 'name' parameter if there's no name pattern for this type. Otherwise,
+ the '%' symbol in the name pattern will be replaced with the 'name' parameter
+ to obtain the 'name' attribute.
+
+            For example, if target types are T1 and T2 (with name pattern "%_x"),
+            suffixes for T1 and T2 are .t1 and .t2, and the source is foo.z, then
+            the created files would be "foo.t1" and "foo_x.t2". The 'name'
+            attribute actually determines the basename of a file.
+
+ Note that this pattern mechanism has nothing to do with implicit patterns
+            in make. It's a way to produce a target whose name is different from
+            the name of the source.
+ """
+ if not name:
+ name = self.determine_output_name(sources)
+
+ # Assign an action for each target
+ action = self.action_class()
+ a = action(project.manager(), sources, self.id_, prop_set)
+
+ # Create generated target for each target type.
+ targets = []
+ pre = self.name_prefix_
+ post = self.name_postfix_
+ for t in self.target_types_:
+ basename = os.path.basename(name)
+ generated_name = pre[0] + basename + post[0]
+ generated_name = os.path.join(os.path.dirname(name), generated_name)
+ pre = pre[1:]
+ post = post[1:]
+
+ targets.append(virtual_target.FileTarget(generated_name, t, project, a))
+
+ return [ project.manager().virtual_targets().register(t) for t in targets ]
+
+ def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
+ """ Attempts to convert 'source' to the types that this generator can
+            handle. The intention is to produce the set of targets that should be
+            used when the generator is run.
+            only_one:   convert 'source' to only one of the source types;
+                        if there's more than one possibility, report an
+ error.
+
+ Returns a pair:
+ consumed: all targets that can be consumed.
+ bypassed: all targets that cannot be consumed.
+ """
+ consumed = []
+ bypassed = []
+ missing_types = []
+
+ if len (sources) > 1:
+ # Don't know how to handle several sources yet. Just try
+            # to pass the request to another generator.
+ missing_types = self.source_types_
+
+ else:
+ (c, m) = self.consume_directly (sources [0])
+ consumed += c
+ missing_types += m
+
+ # No need to search for transformation if
+ # some source type has consumed source and
+ # no more source types are needed.
+ if only_one and consumed:
+ missing_types = []
+
+            # TODO: we should check that 'source' is converted to only one
+            # source type if 'only_one' is true.
+            # TODO: consider if the consumed/bypassed separation should
+            # be done by 'construct_types'.
+
+ if missing_types:
+ transformed = construct_types (project, name, missing_types, prop_set, sources)
+
+ # Add targets of right type to 'consumed'. Add others to
+ # 'bypassed'. The 'generators.construct' rule has done
+ # its best to convert everything to the required type.
+ # There's no need to rerun it on targets of different types.
+
+ # NOTE: ignoring usage requirements
+ for t in transformed[1]:
+ if t.type() in missing_types:
+ consumed.append(t)
+
+ else:
+ bypassed.append(t)
+
+ consumed = unique(consumed)
+ bypassed = unique(bypassed)
+
+ # remove elements of 'bypassed' that are in 'consumed'
+
+ # Suppose the target type of current generator, X is produced from
+ # X_1 and X_2, which are produced from Y by one generator.
+ # When creating X_1 from Y, X_2 will be added to 'bypassed'
+ # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
+ # But they are also in 'consumed'. We have to remove them from
+ # bypassed, so that generators up the call stack don't try to convert
+ # them.
+
+ # In this particular case, X_1 instance in 'consumed' and X_1 instance
+ # in 'bypassed' will be the same: because they have the same source and
+ # action name, and 'virtual-target.register' won't allow two different
+ # instances. Therefore, it's OK to use 'set.difference'.
+
+ bypassed = set.difference(bypassed, consumed)
+
+ return (consumed, bypassed)
+
+
+ def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
+ """ Converts several files to consumable types.
+ """
+ consumed = []
+ bypassed = []
+
+ # We process each source one-by-one, trying to convert it to
+ # a usable type.
+ for s in sources:
+ # TODO: need to check for failure on each source.
+ (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], True)
+ if not c:
+ project.manager ().logger ().log (__name__, " failed to convert ", s)
+
+ consumed.extend (c)
+ bypassed.extend (b)
+
+ return (consumed, bypassed)
+
+ def consume_directly (self, source):
+ real_source_type = source.type ()
+
+ # If there are no source types, we can consume anything
+ source_types = self.source_types()
+ if not source_types:
+ source_types = [real_source_type]
+
+ consumed = []
+ missing_types = []
+ for st in source_types:
+            # The 'source' is of the right type already.
+ if real_source_type == st or type.is_derived (real_source_type, st):
+ consumed.append (source)
+
+ else:
+ missing_types.append (st)
+
+ return (consumed, missing_types)
+
+ def action_class (self):
+        """ Returns the class to be used to create actions. The default
+            implementation returns virtual_target.Action.
+ """
+ return virtual_target.Action
+
+
+def find (id):
+ """ Finds the generator with id. Returns None if not found.
+ """
+ return __generators.get (id, None)
+
+def register (g):
+ """ Registers new generator instance 'g'.
+ """
+ id = g.id()
+
+ __generators [id] = g
+
+ # A generator can produce several targets of the
+    # same type. We want a unique occurrence of that generator
+ # in .generators.$(t) in that case, otherwise, it will
+ # be tried twice and we'll get false ambiguity.
+ for t in sequence.unique(g.target_types()):
+ __type_to_generators.setdefault(t, []).append(g)
+
+ # Update the set of generators for toolset
+
+    # TODO: should we check that a generator with this id
+    # is not already registered? For example, the fop.jam
+    # module intentionally declares two generators with the
+    # same id, so such a check would break it.
+
+ # Some generators have multiple periods in their name, so the
+ # normal $(id:S=) won't generate the right toolset name.
+ # e.g. if id = gcc.compile.c++, then
+ # .generators-for-toolset.$(id:S=) will append to
+ # .generators-for-toolset.gcc.compile, which is a separate
+ # value from .generators-for-toolset.gcc. Correcting this
+ # makes generator inheritance work properly.
+ # See also inherit-generators in module toolset
+ base = id.split ('.', 100) [0]
+
+ __generators_for_toolset.setdefault(base, []).append(g)
+
+ # After adding a new generator that can construct new target types, we need
+ # to clear the related cached viable source target type information for
+ # constructing a specific target type or using a specific generator. Cached
+ # viable source target type lists affected by this are those containing any
+ # of the target types constructed by the new generator or any of their base
+ # target types.
+ #
+ # A more advanced alternative to clearing that cached viable source target
+ # type information would be to expand it with additional source types or
+ # even better - mark it as needing to be expanded on next use.
+ #
+ # For now we just clear all the cached viable source target type information
+ # that does not simply state 'all types' and may implement a more detailed
+ # algorithm later on if it becomes needed.
+
+ invalidate_extendable_viable_source_target_type_cache()
+
+
+def register_standard (id, source_types, target_types, requirements = []):
+    """ Creates a new instance of the 'Generator' class and registers it.
+        Returns the created instance.
+        Rationale: the instance is returned so that it's possible to first register
+        a generator and then call the 'run' method on that generator, bypassing
+        all generator selection.
+ """
+ g = Generator (id, False, source_types, target_types, requirements)
+ register (g)
+ return g
+
+def register_composing (id, source_types, target_types, requirements = []):
+ g = Generator (id, True, source_types, target_types, requirements)
+ register (g)
+ return g
+
+def generators_for_toolset (toolset):
+ """ Returns all generators which belong to 'toolset'.
+ """
+ return __generators_for_toolset.get(toolset, [])
+
+def override (overrider_id, overridee_id):
+ """Make generator 'overrider-id' be preferred to
+ 'overridee-id'. If, when searching for generators
+    that could produce a target of a certain type,
+    both those generators are among the viable generators,
+ the overridden generator is immediately discarded.
+
+ The overridden generators are discarded immediately
+ after computing the list of viable generators, before
+ running any of them."""
+
+ __overrides.setdefault(overrider_id, []).append(overridee_id)
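+    # For illustration (the generator ids below are hypothetical):
+    #   override('mytool.link', 'builtin.link')
+    # would cause 'builtin.link' to be dropped from the viable generator set
+    # whenever both generators could produce the requested target type.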
+
+def __viable_source_types_real (target_type):
+    """ Returns a list of source types which can possibly be converted
+        to 'target_type' by some chain of generator invocations.
+
+        More formally, takes all generators for 'target_type' and
+        returns the union of source types for those generators and the result
+        of calling itself recursively on those source types.
+ """
+ generators = []
+
+ # 't0' is the initial list of target types we need to process to get a list
+ # of their viable source target types. New target types will not be added to
+ # this list.
+ t0 = type.all_bases (target_type)
+
+
+ # 't' is the list of target types which have not yet been processed to get a
+ # list of their viable source target types. This list will get expanded as
+ # we locate more target types to process.
+ t = t0
+
+ result = []
+ while t:
+ # Find all generators for current type.
+ # Unlike 'find_viable_generators' we don't care about prop_set.
+ generators = __type_to_generators.get (t [0], [])
+ t = t[1:]
+
+ for g in generators:
+ if not g.source_types():
+ # Empty source types -- everything can be accepted
+ result = "*"
+ # This will terminate outer loop.
+ t = None
+ break
+
+ for source_type in g.source_types ():
+ if not source_type in result:
+ # If generator accepts 'source_type' it
+ # will happily accept any type derived from it
+ all = type.all_derived (source_type)
+ for n in all:
+ if not n in result:
+
+ # Here there is no point in adding target types to
+ # the list of types to process in case they are or
+ # have already been on that list. We optimize this
+ # check by realizing that we only need to avoid the
+ # original target type's base types. Other target
+ # types that are or have been on the list of target
+ # types to process have been added to the 'result'
+ # list as well and have thus already been eliminated
+ # by the previous if.
+ if not n in t0:
+ t.append (n)
+ result.append (n)
+
+ return result
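+    # As a sketch (type names illustrative, assuming generators for CPP -> OBJ
+    # and OBJ -> EXE are registered): the viable source types for EXE would
+    # include OBJ and CPP, plus any types derived from them.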
+
+
+def viable_source_types (target_type):
+ """ Helper rule, caches the result of '__viable_source_types_real'.
+ """
+ if not __viable_source_types_cache.has_key(target_type):
+ __vst_cached_types.append(target_type)
+ __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
+ return __viable_source_types_cache [target_type]
+
+def viable_source_types_for_generator_real (generator):
+ """ Returns the list of source types, which, when passed to 'run'
+        method of 'generator', have some chance of being eventually used
+ (probably after conversion by other generators)
+ """
+ source_types = generator.source_types ()
+
+ if not source_types:
+        # If the generator does not specify any source types,
+        # it might be a special generator like builtin.lib-generator
+ # which just relays to other generators. Return '*' to
+ # indicate that any source type is possibly OK, since we don't
+ # know for sure.
+ return ['*']
+
+ else:
+ result = []
+ for s in source_types:
+ viable_sources = viable_source_types(s)
+ if viable_sources == "*":
+ result = ["*"]
+ break
+ else:
+ result.extend(type.all_derived(s) + viable_sources)
+ return unique(result)
+
+def viable_source_types_for_generator (generator):
+    """ Caches the result of 'viable_source_types_for_generator_real'.
+ """
+ if not __viable_source_types_cache.has_key(generator):
+ __vstg_cached_generators.append(generator)
+ __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
+
+ return __viable_source_types_cache[generator]
+
+def try_one_generator_really (project, name, generator, target_type, properties, sources):
+ """ Returns usage requirements + list of created targets.
+ """
+ targets = generator.run (project, name, properties, sources)
+
+ usage_requirements = []
+ success = False
+
+ dout("returned " + str(targets))
+
+ if targets:
+ success = True;
+
+ if isinstance (targets[0], property_set.PropertySet):
+ usage_requirements = targets [0]
+ targets = targets [1]
+
+ else:
+ usage_requirements = property_set.empty ()
+
+        dout(" generator " + generator.id() + " spawned ")
+ # generators.dout [ indent ] " " $(targets) ;
+# if $(usage-requirements)
+# {
+# generators.dout [ indent ] " with usage requirements:" $(x) ;
+# }
+
+ if success:
+ return (usage_requirements, targets)
+ else:
+ return None
+
+def try_one_generator (project, name, generator, target_type, properties, sources):
+ """ Checks if generator invocation can be pruned, because it's guaranteed
+ to fail. If so, quickly returns empty list. Otherwise, calls
+ try_one_generator_really.
+ """
+ source_types = []
+
+ for s in sources:
+ source_types.append (s.type ())
+
+ viable_source_types = viable_source_types_for_generator (generator)
+
+ if source_types and viable_source_types != ['*'] and\
+ not set.intersection (source_types, viable_source_types):
+ if project.manager ().logger ().on ():
+ id = generator.id ()
+ project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
+            project.manager ().logger ().log (__name__, "source_types '%s'" % source_types)
+ project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
+
+ return []
+
+ else:
+ return try_one_generator_really (project, name, generator, target_type, properties, sources)
+
+
+def construct_types (project, name, target_types, prop_set, sources):
+
+ result = []
+ usage_requirements = property_set.empty()
+
+ for t in target_types:
+ r = construct (project, name, t, prop_set, sources)
+
+ if r:
+ (ur, targets) = r
+ usage_requirements = usage_requirements.add(ur)
+ result.extend(targets)
+
+ # TODO: have to introduce parameter controlling if
+ # several types can be matched and add appropriate
+ # checks
+
+ # TODO: need to review the documentation for
+ # 'construct' to see if it should return $(source) even
+    # if nothing can be done with it. Current docs seem to
+ # imply that, contrary to the behaviour.
+ if result:
+ return (usage_requirements, result)
+
+ else:
+ return (usage_requirements, sources)
+
+def __ensure_type (targets):
+    """ Ensures all 'targets' have types. If this is not so, exits with
+        an error.
+ """
+ for t in targets:
+ if not t.type ():
+ get_manager().errors()("target '%s' has no type" % str (t))
+
+def find_viable_generators_aux (target_type, prop_set):
+ """ Returns generators which can be used to construct target of specified type
+ with specified properties. Uses the following algorithm:
+        - iterates over the requested target_type and all its bases (in the
+          order returned by type.all_bases).
+        - for each type, finds all generators that generate that type and whose
+          requirements are satisfied by properties.
+ - if the set of generators is not empty, returns that set.
+
+ Note: this algorithm explicitly ignores generators for base classes if there's
+        at least one generator for the requested target_type.
+ """
+ # Select generators that can create the required target type.
+ viable_generators = []
+ initial_generators = []
+
+ import type
+
+ # Try all-type generators first. Assume they have
+ # quite specific requirements.
+ all_bases = type.all_bases(target_type)
+
+ for t in all_bases:
+
+ initial_generators = __type_to_generators.get(t, [])
+
+ if initial_generators:
+ dout("there are generators for this type")
+ if t != target_type:
+                # We're here when no generators for target-type are found,
+ # but there are some generators for a base type.
+ # We'll try to use them, but they will produce targets of
+ # base type, not of 'target-type'. So, we clone the generators
+ # and modify the list of target types.
+ generators2 = []
+ for g in initial_generators[:]:
+ # generators.register adds generator to the list of generators
+ # for toolsets, which is a bit strange, but should work.
+ # That list is only used when inheriting toolset, which
+                    # should have been done before generators are run.
+ ng = g.clone_and_change_target_type(t, target_type)
+ generators2.append(ng)
+ register(ng)
+
+ initial_generators = generators2
+ break
+
+ for g in initial_generators:
+ dout("trying generator " + g.id()
+ + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
+
+ m = g.match_rank(prop_set)
+ if m:
+ dout(" is viable")
+ viable_generators.append(g)
+
+ return viable_generators
+
+def find_viable_generators (target_type, prop_set):
+ key = target_type + '.' + str (prop_set)
+
+ l = __viable_generators_cache.get (key, None)
+ if not l:
+ l = []
+
+ if not l:
+ l = find_viable_generators_aux (target_type, prop_set)
+
+ __viable_generators_cache [key] = l
+
+ viable_generators = []
+ for g in l:
+ # Avoid trying the same generator twice on different levels.
+ # TODO: is this really used?
+ if not g in __active_generators:
+ viable_generators.append (g)
+ else:
+ dout(" generator %s is active, discarding" % g.id())
+
+ # Generators which override 'all'.
+ all_overrides = []
+
+    # Generators which are overridden.
+ overriden_ids = []
+
+ for g in viable_generators:
+ id = g.id ()
+
+ this_overrides = __overrides.get (id, [])
+
+ if this_overrides:
+ overriden_ids.extend (this_overrides)
+ if 'all' in this_overrides:
+ all_overrides.append (g)
+
+ if all_overrides:
+ viable_generators = all_overrides
+
+ return [g for g in viable_generators if not g.id() in overriden_ids]
+
+def __construct_really (project, name, target_type, prop_set, sources):
+ """ Attempts to construct target by finding viable generators, running them
+ and selecting the dependency graph.
+ """
+ viable_generators = find_viable_generators (target_type, prop_set)
+
+ result = []
+
+ dout(" *** %d viable generators" % len (viable_generators))
+
+ generators_that_succeeded = []
+
+ for g in viable_generators:
+ __active_generators.append(g)
+ r = try_one_generator (project, name, g, target_type, prop_set, sources)
+ del __active_generators[-1]
+
+ if r:
+ generators_that_succeeded.append(g)
+ if result:
+ output = cStringIO.StringIO()
+ print >>output, "ambiguity found when searching for best transformation"
+ print >>output, "Trying to produce type '%s' from: " % (target_type)
+ for s in sources:
+ print >>output, " - " + s.str()
+ print >>output, "Generators that succeeded:"
+ for g in generators_that_succeeded:
+ print >>output, " - " + g.id()
+ print >>output, "First generator produced: "
+ for t in result[1:]:
+ print >>output, " - " + str(t)
+ print >>output, "Second generator produced:"
+ for t in r[1:]:
+ print >>output, " - " + str(t)
+ get_manager().errors()(output.getvalue())
+ else:
+ result = r;
+
+ return result;
+
+
+def construct (project, name, target_type, prop_set, sources, top_level=False):
+ """ Attempts to create target of 'target-type' with 'properties'
+ from 'sources'. The 'sources' are treated as a collection of
+        *possible* ingredients -- i.e. it is not required to consume
+ them all. If 'multiple' is true, the rule is allowed to return
+ several targets of 'target-type'.
+
+        Returns a list of targets. When this invocation is the first instance of
+ 'construct' in stack, returns only targets of requested 'target-type',
+ otherwise, returns also unused sources and additionally generated
+ targets.
+
+ If 'top-level' is set, does not suppress generators that are already
+ used in the stack. This may be useful in cases where a generator
+ has to build a metatarget -- for example a target corresponding to
+ built tool.
+ """
+
+ global __active_generators
+ if top_level:
+ saved_active = __active_generators
+ __active_generators = []
+
+ global __construct_stack
+ if not __construct_stack:
+ __ensure_type (sources)
+
+ __construct_stack.append (1)
+
+ increase_indent ()
+
+ if project.manager().logger().on():
+ dout( "*** construct " + target_type)
+
+ for s in sources:
+ dout(" from " + str(s))
+
+ project.manager().logger().log (__name__, " properties: ", prop_set.raw ())
+
+ result = __construct_really(project, name, target_type, prop_set, sources)
+
+ decrease_indent()
+
+ __construct_stack = __construct_stack [1:]
+
+ if top_level:
+ __active_generators = saved_active
+
+ return result
+
+def add_usage_requirements (result, raw_properties):
+ if result:
+ if isinstance (result[0], property_set.PropertySet):
+ return (result[0].add_raw(raw_properties), result[1])
+ else:
+            return (property_set.create(raw_properties), result)
+ #if [ class.is-a $(result[1]) : property-set ]
+ #{
+ # return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
+ #}
+ #else
+ #{
+ # return [ property-set.create $(raw-properties) ] $(result) ;
+ #}
diff --git a/tools/build/v2/build/project.ann.py b/tools/build/src/build/project.ann.py
index 349f549550..349f549550 100644
--- a/tools/build/v2/build/project.ann.py
+++ b/tools/build/src/build/project.ann.py
diff --git a/tools/build/src/build/project.jam b/tools/build/src/build/project.jam
new file mode 100644
index 0000000000..c9a090982c
--- /dev/null
+++ b/tools/build/src/build/project.jam
@@ -0,0 +1,1228 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented by:
+# - a module where all the Jamfile content lives.
+# - an instance of 'project-attributes' class.
+# (given a module name, can be obtained using the 'attributes' rule)
+# - an instance of 'project-target' class (from targets.jam)
+# (given a module name, can be obtained using the 'target' rule)
+#
+# Typically, projects are created as result of loading a Jamfile, which is done
+# by rules 'load' and 'initialize', below. First, a module is prepared and a new
+# project-attributes instance is created. Some rules necessary for all projects
+# are added to the module (see the 'project-rules' module). Default project
+# attributes are set (inheriting parent project attributes, if it exists). After
+# that the Jamfile is read. It can declare its own attributes using the
+# 'project' rule which will be combined with any already set.
+#
+# The 'project' rule can also declare a project id which will be associated with
+# the project module.
+#
+# Besides Jamfile projects, we also support 'standalone' projects created by
+# calling 'initialize' in an arbitrary module and not specifying the project's
+# location. After the call, the module can call the 'project' rule, declare main
+# targets and behave as a regular project except that, since it is not
+# associated with any location, it should only declare prebuilt targets.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
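+#
+# As a simple illustration, a Jamroot handled by this machinery might contain
+# nothing more than (the project id and property below are only examples):
+#   project example : requirements <threading>multi ;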
+
+import "class" : new ;
+import modules ;
+import path ;
+import print ;
+import property-set ;
+import sequence ;
+
+
+.debug-loading = [ MATCH ^(--debug-loading)$ : [ modules.peek : ARGV ] ] ;
+
+
+# Loads the Jamfile at the given location. After loading, the project global file
+# and Jamfiles needed by the requested one will be loaded recursively. If the
+# Jamfile at that location is loaded already, does nothing. Returns the project
+# module for the Jamfile.
+#
+rule load ( jamfile-location )
+{
+ local module-name = [ module-name $(jamfile-location) ] ;
+ # If Jamfile is already loaded, do not try again.
+ if ! $(module-name) in $(.jamfile-modules)
+ {
+ if $(.debug-loading)
+ {
+ ECHO Loading Jamfile at '$(jamfile-location)' ;
+ }
+
+ load-jamfile $(jamfile-location) : $(module-name) ;
+
+        # We want to make sure that child projects are loaded only after parent
+        # projects. In particular, because parent projects define attributes
+        # which are then inherited by children, we do not want children to
+        # be loaded before the parent has defined everything.
+ #
+ # While "build-project" and "use-project" can potentially refer to child
+ # projects from parent projects, we do not immediately load child
+ # projects when seeing those attributes. Instead, we record the minimal
+ # information to be used only later.
+ load-used-projects $(module-name) ;
+ }
+ return $(module-name) ;
+}
+
+
+rule load-used-projects ( module-name )
+{
+ local used = [ modules.peek $(module-name) : .used-projects ] ;
+ local location = [ attribute $(module-name) location ] ;
+ while $(used)
+ {
+ local id = $(used[1]) ;
+ local where = [ path.make $(used[2]) ] ;
+ register-id $(id) : [ load [ path.root $(where) $(location) ] ] ;
+ used = $(used[3-]) ;
+ }
+}
+
+
+# Note the use of character groups, as opposed to listing 'Jamroot' and
+# 'jamroot'. With the latter, we would get duplicate matches on Windows and
+# would have to eliminate duplicates.
+JAMROOT ?= [ modules.peek : JAMROOT ] ;
+JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
+
+
+# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
+#
+rule load-parent ( location )
+{
+ local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ;
+ if ! $(found)
+ {
+ import errors ;
+ errors.error Could not find parent "for" project at '$(location)' :
+ Did not find Jamfile.jam or Jamroot.jam "in" any parent directory. ;
+ }
+ return [ load $(found[1]:D) ] ;
+}
+
+
+# Returns the project module corresponding to the given project-id or plain
+# directory name. Returns nothing if such a project can not be found.
+#
+rule find ( name : current-location )
+{
+ local project-module ;
+
+ # Try interpreting name as project id.
+ if [ path.is-rooted $(name) ]
+ {
+ project-module = $($(name).jamfile-module) ;
+ }
+
+ if ! $(project-module)
+ {
+ local location = [ path.root [ path.make $(name) ] $(current-location) ]
+ ;
+
+ # If no project is registered for the given location, try to load it.
+ # First see if we have a Jamfile. If not, then see if we might have a
+ # project root willing to act as a Jamfile. In that case, project root
+ # must be placed in the directory referred to by id.
+
+ project-module = [ module-name $(location) ] ;
+ if ! $(project-module) in $(.jamfile-modules)
+ {
+ if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
+ {
+ project-module = [ load $(location) ] ;
+ }
+ else
+ {
+ project-module = ;
+ }
+ }
+ }
+
+ return $(project-module) ;
+}
+
+
+# Returns the name of the module corresponding to 'jamfile-location'. If no
+# module corresponds to that location yet, associates the default module name
+# with that location.
+#
+rule module-name ( jamfile-location )
+{
+ if ! $(.module.$(jamfile-location))
+ {
+ # Root the path, so that locations are always unambiguous. Without this,
+ # we can not decide if '../../exe/program1' and '.' are the same paths.
+ local normalized = [ path.root $(jamfile-location) [ path.pwd ] ] ;
+
+ # Quick & dirty fix to get the same module name when we supply two
+ # equivalent location paths, e.g. 'd:\Foo' & 'D:\fOo\bar\..' on Windows.
+ # Note that our current implementation will not work correctly if the
+ # given location references an empty folder, but in that case any later
+ # attempt to load a Jamfile from this location will fail anyway.
+ # FIXME: Implement this cleanly. Support for this type of path
+ # normalization already exists internally in Boost Jam and the current
+ # fix relies on the GLOB builtin rule using that support. Most likely we
+ # just need to add a new builtin rule to do this explicitly.
+ normalized = [ NORMALIZE_PATH $(normalized) ] ;
+ local glob-result = [ GLOB [ path.native $(normalized) ] : * ] ;
+ if $(glob-result)
+ {
+ normalized = $(glob-result[1]:D) ;
+ }
+ .module.$(jamfile-location) = Jamfile<$(normalized)> ;
+ }
+ return $(.module.$(jamfile-location)) ;
+}
+
+
+# Default patterns to search for the Jamfiles to use for build declarations.
+#
+JAMFILE = [ modules.peek : JAMFILE ] ;
+JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
+
+
+# Find the Jamfile at the given location. This returns the exact names of all
+# the Jamfiles in the given directory. The optional parent-root argument causes
+# this to search not the given directory but the ones above it up to the
+# parent-root directory.
+#
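+# As an example of how multiple matches are resolved below: a directory
+# containing both 'Jamfile.v2' and 'Jamfile' quietly uses 'Jamfile.v2', while
+# two otherwise unrelated Jamfiles trigger a warning and the first one found is
+# used.
+#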
+rule find-jamfile (
+ dir # The directory(s) to look for a Jamfile.
+ parent-root ? # Optional flag indicating to search for the parent Jamfile.
+ : no-errors ?
+ )
+{
+ # Glob for all the possible Jamfiles according to the match pattern.
+ #
+ local jamfile-glob = ;
+ if $(parent-root)
+ {
+ if ! $(.parent-jamfile.$(dir))
+ {
+ .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE)
+ ] ;
+ }
+ jamfile-glob = $(.parent-jamfile.$(dir)) ;
+ }
+ else
+ {
+ if ! $(.jamfile.$(dir))
+ {
+ .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
+ }
+ jamfile-glob = $(.jamfile.$(dir)) ;
+
+ }
+
+ local jamfile-to-load = $(jamfile-glob) ;
+ # Multiple Jamfiles found in the same place. Warn about this and ensure we
+ # use only one of them. As a temporary convenience measure, if there is
+ # Jamfile.v2 among found files, suppress the warning and use it.
+ #
+ if $(jamfile-to-load[2-])
+ {
+ local v2-jamfiles = [ MATCH ^(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)$ :
+ $(jamfile-to-load) ] ;
+
+ if $(v2-jamfiles) && ! $(v2-jamfiles[2])
+ {
+ jamfile-to-load = $(v2-jamfiles) ;
+ }
+ else
+ {
+ local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
+ ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
+ "Loading the first one: '$(jamfile)'." ;
+ }
+
+ jamfile-to-load = $(jamfile-to-load[1]) ;
+ }
+
+ # Could not find it, error.
+ #
+ if ! $(no-errors) && ! $(jamfile-to-load)
+ {
+ import errors ;
+ errors.error Unable to load Jamfile.
+ : Could not find a Jamfile in directory '$(dir)'.
+ : Attempted to find it with pattern '$(JAMFILE:J=" ")'.
+ : Please consult the documentation at 'http://www.boost.org'. ;
+ }
+
+ return $(jamfile-to-load) ;
+}
+
+
+# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
+# the file as indicated by the JAMFILE patterns. Effect of calling this rule
+# twice with the same 'dir' is undefined.
+#
+local rule load-jamfile ( dir : jamfile-module )
+{
+ # See if the Jamfile is where it should be.
+ #
+ local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ;
+ if ! $(jamfile-to-load)
+ {
+ jamfile-to-load = [ find-jamfile $(dir) ] ;
+ }
+
+ if $(jamfile-to-load[2])
+ {
+ import errors ;
+ errors.error "Multiple Jamfiles found at '$(dir)'" :
+ "Filenames are: " $(jamfile-to-load:D=) ;
+ }
+
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load the parent Jamfile, which might contain
+ # a 'use-project' or a 'project.load' call, causing a second attempt to load
+ # the same project we are loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing things up.
+ if ! $(jamfile-module) in $(.jamfile-modules)
+ {
+ local previous-project = $(.current-project) ;
+
+ # Initialize the Jamfile module before loading.
+ initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ] :
+ $(jamfile-to-load:BS) ;
+
+ if ! $(jamfile-module) in $(.jamfile-modules)
+ {
+ .jamfile-modules += $(jamfile-module) ;
+
+ local saved-project = $(.current-project) ;
+
+ mark-as-user $(jamfile-module) ;
+ modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ]
+ : . ;
+ if [ MATCH ^($(JAMROOT))$ : $(jamfile-to-load:BS) ]
+ {
+ jamfile = [ find-jamfile $(dir) : no-errors ] ;
+ if $(jamfile)
+ {
+ load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
+ }
+ }
+
+ # Now do some checks.
+ if $(.current-project) != $(saved-project)
+ {
+ import errors ;
+ errors.error
+ The value of the .current-project variable has magically
+ : changed after loading a Jamfile. This means some of the
+ : targets might be defined in the wrong project.
+ : after loading $(jamfile-module)
+ : expected value $(saved-project)
+ : actual value $(.current-project) ;
+ }
+
+ end-load $(previous-project) ;
+
+ if $(.global-build-dir)
+ {
+ if [ attribute $(jamfile-module) location ] && ! [ attribute
+ $(jamfile-module) id ]
+ {
+ local project-root = [ attribute $(jamfile-module)
+ project-root ] ;
+ if $(project-root) = $(dir)
+ {
+ ECHO "warning: the --build-dir option was specified" ;
+ ECHO "warning: but Jamroot at '$(dir)'" ;
+ ECHO "warning: specified no project id" ;
+ ECHO "warning: the --build-dir option will be ignored" ;
+ }
+ }
+ }
+ }
+ }
+}
+
+
+# Called when done loading a project module. Restores the current project to its
+# previous value and does some additional checking to make sure our 'currently
+# loaded project' identifier does not get left with an invalid value.
+#
+rule end-load ( previous-project ? )
+{
+ if ! $(.current-project)
+ {
+ import errors ;
+ errors.error Ending project loading requested when there was no project
+ currently being loaded. ;
+ }
+
+ if ! $(previous-project) && $(.saved-current-project)
+ {
+ import errors ;
+ errors.error Ending project loading requested with no 'previous project'
+ when there were other projects still marked as being loaded
+ recursively. ;
+ }
+
+ .current-project = $(previous-project) ;
+}
+
+
+rule mark-as-user ( module-name )
+{
+ if USER_MODULE in [ RULENAMES ]
+ {
+ USER_MODULE $(module-name) ;
+ }
+}
+
+
+rule load-aux ( module-name : file )
+{
+ mark-as-user $(module-name) ;
+
+ module $(module-name)
+ {
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+}
+
+
+.global-build-dir = [ MATCH ^--build-dir=(.*)$ : [ modules.peek : ARGV ] ] ;
+if $(.global-build-dir)
+{
+ # If the option is specified several times, take the last value.
+ .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
+}
+
+
+# Initialize the module for a project.
+#
+rule initialize (
+ module-name # The name of the project module.
+ : location ? # The location (directory) of the project to initialize. If
+ # not specified, a standalone project will be initialized.
+ : basename ?
+ )
+{
+ if $(.debug-loading)
+ {
+ ECHO "Initializing project '$(module-name)'" ;
+ }
+
+ local jamroot ;
+
+ local parent-module ;
+ if $(module-name) = test-config
+ {
+ # No parent.
+ }
+ else if $(module-name) = site-config
+ {
+ parent-module = test-config ;
+ }
+ else if $(module-name) = user-config
+ {
+ parent-module = site-config ;
+ }
+ else if $(module-name) = project-config
+ {
+ parent-module = user-config ;
+ }
+ else if $(location) && ! [ MATCH ^($(JAMROOT))$ : $(basename) ]
+ {
+ # We search for parent/jamroot only if this is a jamfile project, i.e.
+        # if it is not a standalone or a jamroot project.
+ parent-module = [ load-parent $(location) ] ;
+ }
+ else if $(location)
+ {
+ # We have a jamroot project. Inherit from user-config (or project-config
+ # if it exists).
+ if $(project-config.attributes)
+ {
+ parent-module = project-config ;
+ }
+ else
+ {
+ parent-module = user-config ;
+ }
+ jamroot = true ;
+ }
+
+ # TODO: need to consider if standalone projects can do anything but define
+ # prebuilt targets. If so, we need to give them a more sensible "location",
+ # so that source paths are correct.
+ location ?= "" ;
+ # Create the module for the Jamfile first.
+ module $(module-name)
+ {
+ }
+
+ # load-parent can end up loading this module again. Make sure this is not
+ # duplicated.
+ if ! $($(module-name).attributes)
+ {
+ $(module-name).attributes = [ new project-attributes $(location)
+ $(module-name) ] ;
+ local attributes = $($(module-name).attributes) ;
+
+ if $(location)
+ {
+ $(attributes).set source-location : [ path.make $(location) ] :
+ exact ;
+ }
+ else
+ {
+ local cfgs = project site test user ;
+ if ! $(module-name) in $(cfgs)-config
+ {
+ # This is a standalone project with known location. Set its
+ # source location so it can declare targets. This is needed so
+ # you can put a .jam file with your sources and use it via
+ # 'using'. Standard modules (in the 'tools' subdir) may not
+ # assume source dir is set.
+ local s = [ modules.binding $(module-name) ] ;
+ if ! $(s)
+ {
+ import errors ;
+ errors.error Could not determine project location
+ $(module-name) ;
+ }
+ $(attributes).set source-location : $(s:D) : exact ;
+ }
+ }
+
+ $(attributes).set requirements : [ property-set.empty ] : exact ;
+ $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
+
+ # Import rules common to all project modules from project-rules module,
+ # defined at the end of this file.
+ local rules = [ RULENAMES project-rules ] ;
+ IMPORT project-rules : $(rules) : $(module-name) : $(rules) ;
+
+ if $(parent-module)
+ {
+ inherit-attributes $(module-name) : $(parent-module) ;
+ $(attributes).set parent-module : $(parent-module) : exact ;
+ }
+
+ if $(jamroot)
+ {
+ $(attributes).set project-root : $(location) : exact ;
+ if ! $(.first-project-root)
+ {
+ .first-project-root = $(module-name) ;
+ }
+ }
+
+ local parent ;
+ if $(parent-module)
+ {
+ parent = [ target $(parent-module) ] ;
+ }
+
+ if ! $(.target.$(module-name))
+ {
+ local requirements = [ attribute $(module-name) requirements ] ;
+ .target.$(module-name) = [ new project-target $(module-name) :
+ $(module-name) $(parent) : $(requirements) ] ;
+
+ if $(.debug-loading)
+ {
+ ECHO Assigned project target $(.target.$(module-name)) to
+ '$(module-name)' ;
+ }
+ }
+ }
+
+ .current-project = [ target $(module-name) ] ;
+}
+
+
+# Make 'project-module' inherit attributes of project root and parent module.
+#
+rule inherit-attributes ( project-module : parent-module )
+{
+ local attributes = $($(project-module).attributes) ;
+ local pattributes = [ attributes $(parent-module) ] ;
+ # The parent module might be a locationless configuration module.
+ if [ modules.binding $(parent-module) ]
+ {
+ $(attributes).set parent :
+ [ path.parent [ path.make [ modules.binding $(parent-module) ] ] ] ;
+ }
+ $(attributes).set project-root :
+ [ $(pattributes).get project-root ] : exact ;
+ $(attributes).set default-build :
+ [ $(pattributes).get default-build ] ;
+ $(attributes).set requirements :
+ [ $(pattributes).get requirements ] : exact ;
+ $(attributes).set usage-requirements :
+ [ $(pattributes).get usage-requirements ] : exact ;
+
+ local parent-build-dir = [ $(pattributes).get build-dir ] ;
+ if $(parent-build-dir)
+ {
+ # Have to compute relative path from parent dir to our dir. Convert both
+ # paths to absolute, since we cannot find relative path from ".." to
+ # ".".
+
+ local location = [ attribute $(project-module) location ] ;
+ local parent-location = [ attribute $(parent-module) location ] ;
+
+ local pwd = [ path.pwd ] ;
+ local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
+ local our-dir = [ path.root $(location) $(pwd) ] ;
+ $(attributes).set build-dir : [ path.join $(parent-build-dir)
+ [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
+ }
+}
+
+
+# Returns whether the given string is a valid registered project id.
+#
+rule is-registered-id ( id )
+{
+ return $($(id).jamfile-module) ;
+}
+
+
+# Associate the given id with the given project module. Returns the possibly
+# corrected project id.
+#
+rule register-id ( id : module )
+{
+ id = [ path.root $(id) / ] ;
+
+ if [ MATCH (//) : $(id) ]
+ {
+ import errors ;
+ errors.user-error Project id may not contain two consecutive slash
+ characters (project id: '$(id)'). ;
+ }
+
+ local orig-module = $($(id).jamfile-module) ;
+ if $(orig-module) && $(orig-module) != $(module)
+ {
+ local new-file = [ modules.peek $(module) : __file__ ] ;
+ local new-location = [ project.attribute $(module) location ] ;
+
+ local orig-file = [ modules.peek $(orig-module) : __file__ ] ;
+ local orig-main-id = [ project.attribute $(orig-module) id ] ;
+ local orig-location = [ project.attribute $(orig-module) location ] ;
+ local orig-project = [ target $(orig-module) ] ;
+ local orig-name = [ $(orig-project).name ] ;
+
+ import errors ;
+ errors.user-error Attempt to redeclare already registered project id
+ '$(id)'.
+ : Original project:
+ : " " Name: $(orig-name:E=---)
+ : " " Module: $(orig-module)
+ : " " Main id: $(orig-main-id:E=---)
+ : " " File: $(orig-file:E=---)
+ : " " Location: $(orig-location:E=---)
+ : New project:
+ : " " Module: $(module)
+ : " " File: $(new-file:E=---)
+ : " " Location: $(new-location:E=---) ;
+ }
+
+ $(id).jamfile-module = $(module) ;
+ return $(id) ;
+}
+
+
+# Class keeping all the attributes of a project.
+#
+# The standard attributes are "id", "location", "project-root", "parent",
+# "requirements", "default-build", "source-location" and "projects-to-build".
+#
+class project-attributes
+{
+ import path ;
+ import print ;
+ import project ;
+ import property ;
+ import property-set ;
+ import sequence ;
+
+ rule __init__ ( location project-module )
+ {
+ self.location = $(location) ;
+ self.project-module = $(project-module) ;
+ }
+
+ # Set the named attribute from the specification given by the user. The
+ # value actually set may be different.
+ #
+ rule set ( attribute : specification *
+ : exact ? # Sets value from 'specification' without any processing.
+ )
+ {
+ if $(exact)
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ else if $(attribute) = "requirements"
+ {
+ local result = [ property-set.refine-from-user-input
+ $(self.requirements) : $(specification)
+ : $(self.project-module) : $(self.location) ] ;
+
+ if $(result[1]) = "@error"
+ {
+ import errors : error : errors.error ;
+ errors.error Requirements for project at '$(self.location)'
+ conflict with parent's. : Explanation: $(result[2-]) ;
+ }
+
+ self.requirements = $(result) ;
+ }
+ else if $(attribute) = "usage-requirements"
+ {
+ local unconditional ;
+ for local p in $(specification)
+ {
+ local split = [ property.split-conditional $(p) ] ;
+ split ?= nothing $(p) ;
+ unconditional += $(split[2]) ;
+ }
+
+ local non-free = [ property.remove free : $(unconditional) ] ;
+ if $(non-free)
+ {
+ import errors : error : errors.error ;
+ errors.error usage-requirements $(specification) have non-free
+ properties $(non-free) ;
+ }
+ local t = [ property.translate-paths $(specification) :
+ $(self.location) ] ;
+ if $(self.usage-requirements)
+ {
+ self.usage-requirements = [ property-set.create
+ [ $(self.usage-requirements).raw ] $(t) ] ;
+ }
+ else
+ {
+ self.usage-requirements = [ property-set.create $(t) ] ;
+ }
+ }
+ else if $(attribute) = "default-build"
+ {
+ self.default-build = [ property.make $(specification) ] ;
+ }
+ else if $(attribute) = "source-location"
+ {
+ self.source-location = ;
+ for local src-path in $(specification)
+ {
+ self.source-location += [ path.root [ path.make $(src-path) ]
+ $(self.location) ] ;
+ }
+ }
+ else if $(attribute) = "build-dir"
+ {
+ self.build-dir = [ path.root [ path.make $(specification) ]
+ $(self.location) ] ;
+ }
+ else if $(attribute) = "id"
+ {
+ self.id = [ project.register-id $(specification) :
+ $(self.project-module) ] ;
+ }
+ else if ! $(attribute) in "default-build" "location" "parent"
+ "projects-to-build" "project-root" "source-location"
+ {
+ import errors : error : errors.error ;
+ errors.error Invalid project attribute '$(attribute)' specified for
+ project at '$(self.location)' ;
+ }
+ else
+ {
+ self.$(attribute) = $(specification) ;
+ }
+ }
+
+ # Returns the value of the given attribute.
+ #
+ rule get ( attribute )
+ {
+ return $(self.$(attribute)) ;
+ }
+
+ # Returns whether these attributes belong to a Jamroot project module.
+ #
+ rule is-jamroot ( )
+ {
+ if $(self.location) && $(self.project-root) = $(self.location)
+ {
+ return true ;
+ }
+ }
+
+ # Prints the project attributes.
+ #
+ rule print ( )
+ {
+ local id = '$(self.id)' ;
+ print.section $(id:E=(none)) ;
+ print.list-start ;
+ print.list-item "Parent project:" $(self.parent:E=(none)) ;
+ print.list-item "Requirements:" [ $(self.requirements).raw ] ;
+ print.list-item "Default build:" $(self.default-build) ;
+ print.list-item "Source location:" $(self.source-location) ;
+ print.list-item "Projects to build:" [ sequence.insertion-sort
+ $(self.projects-to-build) ] ;
+ print.list-end ;
+ }
+}
+
+
+# Returns the build directory for standalone projects
+#
+rule standalone-build-dir ( )
+{
+ project = [ target $(.first-project-root) ] ;
+ return [ path.join [ $(project).build-dir ] standalone ] ;
+}
+
+# Returns the project which is currently being loaded.
+#
+rule current ( )
+{
+ if ! $(.current-project)
+ {
+ import errors ;
+ errors.error Reference to the project currently being loaded requested
+ when there was no project module being loaded. ;
+ }
+ return $(.current-project) ;
+}
+
+
+# Temporarily changes the current project to 'project'. Should be followed by
+# 'pop-current'.
+#
+rule push-current ( project )
+{
+ .saved-current-project += $(.current-project) ;
+ .current-project = $(project) ;
+}
+
+
+rule pop-current ( )
+{
+ .current-project = $(.saved-current-project[-1]) ;
+ .saved-current-project = $(.saved-current-project[1--2]) ;
+}
+
+
+# Returns the project-attribute instance for the specified Jamfile module.
+#
+rule attributes ( project )
+{
+ return $($(project).attributes) ;
+}
+
+
+# Returns the value of the specified attribute in the specified Jamfile module.
+#
+rule attribute ( project attribute )
+{
+ return [ $($(project).attributes).get $(attribute) ] ;
+}
+
+
+# Returns whether a project module is one of Boost Build's configuration
+# modules.
+#
+rule is-config-module ( project )
+{
+ local cfgs = project site test user ;
+ if $(project) in $(cfgs)-config
+ {
+ return true ;
+ }
+}
+
+
+# Returns whether a project module is a Jamroot project module.
+#
+rule is-jamroot-module ( project )
+{
+ return [ $($(project).attributes).is-jamroot ] ;
+}
+
+
+# Returns a project's parent jamroot module. Returns nothing if there is no such
+# module, i.e. if this is a standalone project or one of the internal Boost
+# Build configuration projects.
+#
+rule get-jamroot-module ( project )
+{
+ local jamroot-location = [ attribute $(project) project-root ] ;
+ if $(jamroot-location)
+ {
+ return [ module-name $(jamroot-location) ] ;
+ }
+}
+
+
+# Returns the project target corresponding to the 'project-module'.
+#
+rule target ( project-module )
+{
+ if ! $(.target.$(project-module))
+ {
+ import errors ;
+ errors.user-error Project target requested but not yet assigned for
+ module '$(project-module)'. ;
+ }
+ return $(.target.$(project-module)) ;
+}
+
+
+# Defines a Boost.Build extension project. Such extensions usually contain
+# library targets and features that can be used by many people. Even though
+# extensions are really projects, they can be initialized as a module would be
+# with the "using" (project.project-rules.using) mechanism.
+#
+rule extension ( id : options * : * )
+{
+ # The caller is a standalone module for the extension.
+ local mod = [ CALLER_MODULE ] ;
+
+ # We need to do the rest within the extension module.
+ module $(mod)
+ {
+ import path ;
+
+ # Find the root project.
+ local root-project = [ project.current ] ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+
+ # Create the project data, and bring in the project rules into the
+ # module.
+ project.initialize $(__name__) : [ path.join [ project.attribute
+ $(root-project) location ] ext $(1:L) ] ;
+
+ # Create the project itself, i.e. the attributes. All extensions are
+ # created in the "/ext" project space.
+ project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ;
+ local attributes = [ project.attributes $(__name__) ] ;
+
+ # Inherit from the root project of whomever is defining us.
+ project.inherit-attributes $(__name__) : $(root-project) ;
+ $(attributes).set parent-module : $(root-project) : exact ;
+ }
+}
+
+
+rule glob-internal ( project : wildcards + : excludes * : rule-name )
+{
+ local location = [ $(project).get source-location ] ;
+
+ local result ;
+ local paths = [ path.$(rule-name) $(location) :
+ [ sequence.transform path.make : $(wildcards) ] :
+ [ sequence.transform path.make : $(excludes) ] ] ;
+ if $(wildcards:D) || $(rule-name) != glob
+ {
+ # The paths we have found are relative to the current directory, but the
+ # names specified in the sources list are assumed to be relative to the
+ # source directory of the corresponding project. So, just make the names
+ # absolute.
+ for local p in $(paths)
+ {
+ # If the path is below source location, use relative path.
+ # Otherwise, use full path just to avoid any ambiguities.
+ local rel = [ path.relative $(p) $(location) : no-error ] ;
+ if $(rel) = not-a-child
+ {
+ result += [ path.root $(p) [ path.pwd ] ] ;
+ }
+ else
+ {
+ result += $(rel) ;
+ }
+ }
+ }
+ else
+ {
+ # There were no wildcards in the directory path, so the files are all in
+ # the source directory of the project. Just drop the directory, instead
+ # of making paths absolute.
+ result = $(paths:D="") ;
+ }
+
+ return $(result) ;
+}
+
+
+# This module defines rules common to all projects.
+#
+module project-rules
+{
+ import modules ;
+
+ rule using ( toolset-module : * )
+ {
+ import toolset ;
+
+ local saved-project = [ modules.peek project : .current-project ] ;
+
+ # Temporarily change the search path so the module referred to by
+ # 'using' can be placed in the same directory as Jamfile. User will
+ # expect the module to be found even though the directory is not in
+ # BOOST_BUILD_PATH.
+ local x = [ modules.peek : BOOST_BUILD_PATH ] ;
+ local caller = [ CALLER_MODULE ] ;
+ local caller-location = [ modules.binding $(caller) ] ;
+ modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ;
+ toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ;
+ modules.poke : BOOST_BUILD_PATH : $(x) ;
+
+ # The above might have clobbered .current-project in case it caused a
+ # new project instance to be created (which would then automatically
+ # get set as the 'current' project). Restore the correct value so any
+ # main targets declared after this do not get mapped to the loaded
+ # module's project.
+ modules.poke project : .current-project : $(saved-project) ;
+ }
+
+ rule import ( * : * : * )
+ {
+ local caller = [ CALLER_MODULE ] ;
+ local saved-project = [ modules.peek project : .current-project ] ;
+ module $(caller)
+ {
+ modules.import $(1) : $(2) : $(3) ;
+ }
+
+ # The above might have clobbered .current-project in case it caused a
+ # new project instance to be created (which would then automatically
+ # get set as the 'current' project). Restore the correct value so any
+ # main targets declared after this do not get mapped to the loaded
+ # module's project.
+ modules.poke project : .current-project : $(saved-project) ;
+ }
+
+ rule project ( id ? : options * : * )
+ {
+ import path ;
+ import project ;
+
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+ if $(id)
+ {
+ $(attributes).set id : $(id) ;
+ }
+
+ local explicit-build-dir ;
+
+ for n in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ {
+ local option = $($(n)) ;
+ if $(option)
+ {
+ $(attributes).set $(option[1]) : $(option[2-]) ;
+ }
+ if $(option[1]) = "build-dir"
+ {
+ explicit-build-dir = [ path.make $(option[2-]) ] ;
+ }
+ }
+
+ # If '--build-dir' is specified, change the build dir for the project.
+ local global-build-dir = [ modules.peek project : .global-build-dir ] ;
+
+ if $(global-build-dir)
+ {
+ local location = [ $(attributes).get location ] ;
+ # A project with an empty location is a 'standalone' project, such as
+ # user-config or qt. It has no build dir. If we try to set a build dir
+ # for user-config, we would then try to inherit it, with either
+ # weird or wrong consequences.
+ if $(location) && $(location) = [ $(attributes).get project-root ]
+ {
+ # Re-read the project id, since it might have been modified a
+ # bit when setting the project's id attribute, e.g. might have
+ # been prefixed by a slash if it was not already.
+ id = [ $(attributes).get id ] ;
+ # This is Jamroot.
+ if $(id)
+ {
+ if $(explicit-build-dir) &&
+ [ path.is-rooted $(explicit-build-dir) ]
+ {
+ import errors ;
+ errors.user-error Absolute directory specified via
+ 'build-dir' project attribute : Do not know how to
+ combine that with the --build-dir option. ;
+ }
+ # Strip the leading slash from id.
+ local rid = [ MATCH ^/(.*) : $(id) ] ;
+ local p = [ path.join $(global-build-dir) $(rid)
+ $(explicit-build-dir) ] ;
+
+ $(attributes).set build-dir : $(p) : exact ;
+ }
+ }
+ else
+ {
+ # Not Jamroot.
+ if $(explicit-build-dir)
+ {
+ import errors ;
+ errors.user-error When --build-dir is specified, the
+ 'build-dir' project : attribute is allowed only for
+ top-level 'project' invocations ;
+ }
+ }
+ }
+ }
+
+ # Declare and set a project global constant. Project global constants are
+ # normal variables but should not be changed. They are applied to every
+ # child Jamfile.
+ #
+ rule constant ( name : value + )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) ;
+ }
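+
+ # A hypothetical Jamroot usage sketch (the constant name and value are
+ # illustrative only); the constant becomes visible in every child Jamfile:
+ #
+ #   constant VERSION : 1.57.0 ;
+ #   exe app : app.cpp : <define>APP_VERSION=$(VERSION) ;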
+
+ # Declare and set a project global constant, whose value is a path. The path
+ # is adjusted to be relative to the invocation directory. The given value
+ # path is taken to be either absolute, or relative to this project root.
+ #
+ rule path-constant ( name : value + )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local p = [ project.target $(caller) ] ;
+ $(p).add-constant $(name) : $(value) : path ;
+ }
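+
+ # A hypothetical usage sketch (names are illustrative): the value is
+ # adjusted to be relative to the invocation directory, so it can be used
+ # safely from any child Jamfile:
+ #
+ #   path-constant TOP : . ;
+ #   project : requirements <include>$(TOP)/include ;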
+
+ rule use-project ( id : where )
+ {
+ # See comment in 'load' for explanation.
+ local caller = [ CALLER_MODULE ] ;
+ modules.poke $(caller) : .used-projects : [ modules.peek $(caller) :
+ .used-projects ] $(id) $(where) ;
+ }
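+
+ # A hypothetical Jamroot usage sketch (id and path are illustrative); the
+ # pair is only recorded here and resolved later during project loading:
+ #
+ #   use-project /my-lib : ../my-lib/build ;
+ #   exe app : app.cpp /my-lib//mylib ;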
+
+ rule build-project ( dir )
+ {
+ import project ;
+ local caller = [ CALLER_MODULE ] ;
+ local attributes = [ project.attributes $(caller) ] ;
+ local now = [ $(attributes).get projects-to-build ] ;
+ $(attributes).set projects-to-build : $(now) $(dir) ;
+ }
+
+ rule explicit ( target-names * )
+ {
+ import project ;
+ # If 'explicit' is used in a helper rule defined in Jamroot and
+ # inherited by children, then most of the time we want 'explicit' to
+ # operate on the Jamfile where the helper rule is invoked.
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-explicit $(n) ;
+ }
+ }
+
+ rule always ( target-names * )
+ {
+ import project ;
+ local t = [ project.current ] ;
+ for local n in $(target-names)
+ {
+ $(t).mark-target-as-always $(n) ;
+ }
+ }
+
+ rule glob ( wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob ] ;
+ }
+
+ rule glob-tree ( wildcards + : excludes * )
+ {
+ import project ;
+ if $(wildcards:D) || $(excludes:D)
+ {
+ import errors ;
+ errors.user-error The patterns to 'glob-tree' may not include
+ directory ;
+ }
+ return [ project.glob-internal [ project.current ] : $(wildcards) :
+ $(excludes) : glob-tree ] ;
+ }
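+
+ # Hypothetical usage sketches (file names are illustrative): 'glob' matches
+ # relative to the project's source directory, while 'glob-tree' recurses
+ # into subdirectories but accepts only plain file patterns:
+ #
+ #   lib z : [ glob src/*.cpp : src/main.cpp ] ;
+ #   install headers : [ glob-tree *.hpp ] ;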
+
+ # Calculates conditional requirements for multiple requirements at once.
+ # This is a shorthand to reduce duplication and to keep an inline
+ # declarative syntax. For example:
+ #
+ # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
+ # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
+ #
+ rule conditional ( condition + : requirements * )
+ {
+ local condition = $(condition:J=,) ;
+ if [ MATCH (:) : $(condition) ]
+ {
+ return $(condition)$(requirements) ;
+ }
+ else
+ {
+ return $(condition):$(requirements) ;
+ }
+ }
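+
+ # For the example above, this expands (illustratively) to:
+ #   <toolset>gcc,<variant>debug:<define>DEBUG_EXCEPTION
+ #   <toolset>gcc,<variant>debug:<define>DEBUG_TRACE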
+
+ rule option ( name : value )
+ {
+ local m = [ CALLER_MODULE ] ;
+ local cfgs = project site test user ;
+ if ! $(m) in $(cfgs)-config
+ {
+ import errors ;
+ errors.error The 'option' rule may only be used "in" Boost Build
+ configuration files. ;
+ }
+ import option ;
+ option.set $(name) : $(value) ;
+ }
+}
diff --git a/tools/build/src/build/project.py b/tools/build/src/build/project.py
new file mode 100644
index 0000000000..71bc33fb38
--- /dev/null
+++ b/tools/build/src/build/project.py
@@ -0,0 +1,1148 @@
+# Status: ported.
+# Base revision: 64488
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements project representation and loading. Each project is represented
+# by:
+# - a module where all the Jamfile content lives.
+# - an instance of 'project-attributes' class.
+# (given a module name, can be obtained using the 'attributes' rule)
+# - an instance of 'project-target' class (from targets.jam)
+# (given a module name, can be obtained using the 'target' rule)
+#
+# Typically, projects are created as a result of loading a Jamfile, which is
+# done by the rules 'load' and 'initialize', below. First, a module for the
+# Jamfile is loaded and a new project-attributes instance is created. Some
+# rules necessary for the project are added to the module (see the
+# 'project-rules' module at the bottom of this file). Default project
+# attributes are set (inheriting attributes of the parent project, if it
+# exists). After that the Jamfile is read. It can declare its own attributes
+# using the 'project' rule, which will be combined with any attributes already
+# set.
+#
+# The 'project' rule can also declare a project id which will be associated
+# with the project module.
+#
+# There can also be 'standalone' projects. They are created by calling
+# 'initialize' on an arbitrary module and not specifying their location. After
+# the call, the module can call the 'project' rule, declare main targets and
+# behave as a regular project except that, since it is not associated with any
+# location, it should only declare prebuilt targets.
+#
+# The list of all loaded Jamfiles is stored in the .project-locations variable.
+# It is possible to obtain a module name for a location using the 'module-name'
+# rule. Standalone projects are not recorded and can only be referenced using
+# their project id.
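+
+# A minimal usage sketch (assuming an already constructed build manager; the
+# variable names below are illustrative only):
+#
+#   registry = ProjectRegistry(manager, global_build_dir=None)
+#   mname = registry.load(".")           # load the Jamfile in the cwd
+#   attrs = registry.attributes(mname)   # ProjectAttributes instance
+#   target = registry.target(mname)      # ProjectTarget for the project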
+
+import b2.util.path
+from b2.build import property_set, property
+from b2.build.errors import ExceptionWithUserContext
+import b2.build.targets
+
+import bjam
+import b2
+
+import re
+import sys
+import pkgutil
+import os
+import string
+import imp
+import traceback
+import b2.util.option as option
+
+from b2.util import record_jam_to_value_mapping, qualify_jam_action
+
+class ProjectRegistry:
+
+ def __init__(self, manager, global_build_dir):
+ self.manager = manager
+ self.global_build_dir = global_build_dir
+ self.project_rules_ = ProjectRules(self)
+
+ # The target corresponding to the project being loaded now
+ self.current_project = None
+
+ # The set of names of loaded project modules
+ self.jamfile_modules = {}
+
+ # Mapping from location to module name
+ self.location2module = {}
+
+ # Mapping from project id to project module
+ self.id2module = {}
+
+ # Map from Jamfile directory to parent Jamfile/Jamroot
+ # location.
+ self.dir2parent_jamfile = {}
+
+ # Map from directory to the name of Jamfile in
+ # that directory (or None).
+ self.dir2jamfile = {}
+
+ # Map from project module to attributes object.
+ self.module2attributes = {}
+
+ # Map from project module to target for the project
+ self.module2target = {}
+
+ # Map from names to Python modules, for modules loaded
+ # via 'using' and 'import' rules in Jamfiles.
+ self.loaded_tool_modules_ = {}
+
+ self.loaded_tool_module_path_ = {}
+
+ # Map from project target to the list of
+ # (id,location) pairs corresponding to all 'use-project'
+ # invocations.
+ # TODO: should not have a global map, keep this
+ # in ProjectTarget.
+ self.used_projects = {}
+
+ self.saved_current_project = []
+
+ self.JAMROOT = self.manager.getenv("JAMROOT")
+
+ # Note the use of character groups, as opposed to listing
+ # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
+ # matches on windows and would have to eliminate duplicates.
+ if not self.JAMROOT:
+ self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]
+
+ # Default patterns to search for the Jamfiles to use for build
+ # declarations.
+ self.JAMFILE = self.manager.getenv("JAMFILE")
+
+ if not self.JAMFILE:
+ self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
+ "[Jj]amfile.jam"]
+
+ self.__python_module_cache = {}
+
+
+ def load (self, jamfile_location):
+ """Loads jamfile at the given location. After loading, project global
+ file and jamfile needed by the loaded one will be loaded recursively.
+ If the jamfile at that location is loaded already, does nothing.
+ Returns the project module for the Jamfile."""
+
+ absolute = os.path.join(os.getcwd(), jamfile_location)
+ absolute = os.path.normpath(absolute)
+ jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)
+
+ mname = self.module_name(jamfile_location)
+ # If Jamfile is already loaded, do not try again.
+ if not mname in self.jamfile_modules:
+
+ if "--debug-loading" in self.manager.argv():
+ print "Loading Jamfile at '%s'" % jamfile_location
+
+ self.load_jamfile(jamfile_location, mname)
+
+ # We want to make sure that child projects are loaded only
+ # after parent projects. In particular, because parent projects
+ # define attributes which are inherited by children, and we do not
+ # want children to be loaded before parents have defined everything.
+ #
+ # While "build-project" and "use-project" can potentially refer
+ # to child projects from parent projects, we do not immediately
+ # load child projects when seeing those attributes. Instead,
+ # we record the minimal information that will be used only later.
+
+ self.load_used_projects(mname)
+
+ return mname
+
+ def load_used_projects(self, module_name):
+ # local used = [ modules.peek $(module-name) : .used-projects ] ;
+ used = self.used_projects[module_name]
+
+ location = self.attribute(module_name, "location")
+ for u in used:
+ id = u[0]
+ where = u[1]
+
+ self.use(id, os.path.join(location, where))
+
+ def load_parent(self, location):
+ """Loads parent of Jamfile at 'location'.
+ Issues an error if nothing is found."""
+
+ found = b2.util.path.glob_in_parents(
+ location, self.JAMROOT + self.JAMFILE)
+
+ if not found:
+ print "error: Could not find parent for project at '%s'" % location
+ print "error: Did not find Jamfile or project-root.jam in any parent directory."
+ sys.exit(1)
+
+ return self.load(os.path.dirname(found[0]))
+
+ def find(self, name, current_location):
+ """Given 'name' which can be project-id or plain directory name,
+ return project module corresponding to that id or directory.
+ Returns nothing of project is not found."""
+
+ project_module = None
+
+ # Try interpreting name as project id.
+ if name[0] == '/':
+ project_module = self.id2module.get(name)
+
+ if not project_module:
+ location = os.path.join(current_location, name)
+ # If no project is registered for the given location, try to
+ # load it. First see if we have a Jamfile. If not, we might have a
+ # project root willing to act as a Jamfile. In that case, the project
+ # root must be placed in the directory referred to by the id.
+
+ project_module = self.module_name(location)
+ if not project_module in self.jamfile_modules:
+ if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
+ project_module = self.load(location)
+ else:
+ project_module = None
+
+ return project_module
+
+ def module_name(self, jamfile_location):
+ """Returns the name of module corresponding to 'jamfile-location'.
+ If no module corresponds to location yet, associates default
+ module name with that location."""
+ module = self.location2module.get(jamfile_location)
+ if not module:
+ # Root the path, so that locations are always unambiguous.
+ # Without this, we can't decide if '../../exe/program1' and '.'
+ # are the same paths, or not.
+ jamfile_location = os.path.realpath(
+ os.path.join(os.getcwd(), jamfile_location))
+ module = "Jamfile<%s>" % jamfile_location
+ self.location2module[jamfile_location] = module
+ return module
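+
+ # For example (illustrative): module_name("libs/foo") yields a name such as
+ # "Jamfile</absolute/path/to/libs/foo>", and repeated calls for the same
+ # location return the same name.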
+
+ def find_jamfile (self, dir, parent_root=0, no_errors=0):
+ """Find the Jamfile at the given location. This returns the
+ exact names of all the Jamfiles in the given directory. The optional
+ parent-root argument causes this to search not the given directory
+ but the ones above it up to the directory given in it."""
+
+ # Glob for all the possible Jamfiles according to the match pattern.
+ #
+ jamfile_glob = None
+ if parent_root:
+ parent = self.dir2parent_jamfile.get(dir)
+ if not parent:
+ parent = b2.util.path.glob_in_parents(dir,
+ self.JAMFILE)
+ self.dir2parent_jamfile[dir] = parent
+ jamfile_glob = parent
+ else:
+ jamfile = self.dir2jamfile.get(dir)
+ if not jamfile:
+ jamfile = b2.util.path.glob([dir], self.JAMFILE)
+ self.dir2jamfile[dir] = jamfile
+ jamfile_glob = jamfile
+
+ if len(jamfile_glob) > 1:
+ # Multiple Jamfiles found in the same place. Warn about this.
+ # And ensure we use only one of them.
+ # As a temporary convenience measure, if there's a Jamfile.v2 among
+ # the found files, suppress the warning and use it.
+ #
+ pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
+ v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
+ if len(v2_jamfiles) == 1:
+ jamfile_glob = v2_jamfiles
+ else:
+ print """warning: Found multiple Jamfiles at '%s'!""" % (dir)
+ for j in jamfile_glob:
+ print " -", j
+ print "Loading the first one"
+
+ # Could not find it, error.
+ if not no_errors and not jamfile_glob:
+ self.manager.errors()(
+ """Unable to load Jamfile.
+Could not find a Jamfile in directory '%s'
+Attempted to find it with pattern '%s'.
+Please consult the documentation at 'http://boost.org/boost-build2'."""
+ % (dir, string.join(self.JAMFILE)))
+
+ if jamfile_glob:
+ return jamfile_glob[0]
+
+ def load_jamfile(self, dir, jamfile_module):
+ """Load a Jamfile at the given directory. Returns nothing.
+ Will attempt to load the file as indicated by the JAMFILE patterns.
+ The effect of calling this rule twice with the same 'dir' is undefined."""
+
+ # See if the Jamfile is where it should be.
+ is_jamroot = False
+ jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
+ if not jamfile_to_load:
+ jamfile_to_load = self.find_jamfile(dir)
+ else:
+ if len(jamfile_to_load) > 1:
+ self.manager.errors()("Multiple Jamfiles found at '%s'\n"
+ "Filenames are: %s"
+ % (dir, [os.path.basename(j) for j in jamfile_to_load]))
+
+ is_jamroot = True
+ jamfile_to_load = jamfile_to_load[0]
+
+ dir = os.path.dirname(jamfile_to_load)
+ if not dir:
+ dir = "."
+
+ self.used_projects[jamfile_module] = []
+
+ # Now load the Jamfile in its own context.
+ # The call to 'initialize' may load the parent Jamfile, which might have
+ # a 'use-project' statement that causes a second attempt to load the
+ # same project we're loading now. Checking inside .jamfile-modules
+ # prevents that second attempt from messing up.
+ if not jamfile_module in self.jamfile_modules:
+ self.jamfile_modules[jamfile_module] = True
+
+ # Initialize the jamfile module before loading.
+ #
+ self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
+
+ saved_project = self.current_project
+
+ bjam.call("load", jamfile_module, jamfile_to_load)
+ basename = os.path.basename(jamfile_to_load)
+
+ if is_jamroot:
+ jamfile = self.find_jamfile(dir, no_errors=True)
+ if jamfile:
+ bjam.call("load", jamfile_module, jamfile)
+
+ # Now do some checks
+ if self.current_project != saved_project:
+ self.manager.errors()(
+"""The value of the .current-project variable
+has magically changed after loading a Jamfile.
+This means some of the targets might be defined in the wrong project.
+after loading %s
+expected value %s
+actual value %s""" % (jamfile_module, saved_project, self.current_project))
+
+ if self.global_build_dir:
+ id = self.attributeDefault(jamfile_module, "id", None)
+ project_root = self.attribute(jamfile_module, "project-root")
+ location = self.attribute(jamfile_module, "location")
+
+ if location and project_root == dir:
+ # This is Jamroot
+ if not id:
+ # FIXME: go via errors module, so that contexts are
+ # shown?
+ print "warning: the --build-dir option was specified"
+ print "warning: but Jamroot at '%s'" % dir
+ print "warning: specified no project id"
+ print "warning: the --build-dir option will be ignored"
+
+
+ def load_standalone(self, jamfile_module, file):
+ """Loads 'file' as standalone project that has no location
+ associated with it. This is mostly useful for user-config.jam,
+ which should be able to define targets, but although it has
+ some location in filesystem, we do not want any build to
+ happen in user's HOME, for example.
+
+ The caller is required to never call this method twice on
+ the same file.
+ """
+
+ self.used_projects[jamfile_module] = []
+ bjam.call("load", jamfile_module, file)
+ self.load_used_projects(jamfile_module)
+
+ def is_jamroot(self, basename):
+ match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
+ if match:
+ return 1
+ else:
+ return 0
+
+ def initialize(self, module_name, location=None, basename=None):
+ """Initialize the module for a project.
+
+ module-name is the name of the project module.
+ location is the location (directory) of the project to initialize.
+ If not specified, a standalone project will be initialized.
+ """
+
+ if "--debug-loading" in self.manager.argv():
+ print "Initializing project '%s'" % module_name
+
+ # TODO: need to consider if standalone projects can do anything but define
+ # prebuilt targets. If so, we need to give them a more sensible "location", so
+ # that source paths are correct.
+ if not location:
+ location = ""
+
+ attributes = ProjectAttributes(self.manager, location, module_name)
+ self.module2attributes[module_name] = attributes
+
+ python_standalone = False
+ if location:
+ attributes.set("source-location", [location], exact=1)
+ elif not module_name in ["test-config", "site-config", "user-config", "project-config"]:
+ # This is a standalone project with known location. Set source location
+ # so that it can declare targets. This is intended so that you can put
+ # a .jam file in your sources and use it via 'using'. Standard modules
+ # (in 'tools' subdir) may not assume source dir is set.
+ attributes.set("source-location", self.loaded_tool_module_path_[module_name], exact=1)
+ python_standalone = True
+
+ attributes.set("requirements", property_set.empty(), exact=True)
+ attributes.set("usage-requirements", property_set.empty(), exact=True)
+ attributes.set("default-build", property_set.empty(), exact=True)
+ attributes.set("projects-to-build", [], exact=True)
+ attributes.set("project-root", None, exact=True)
+ attributes.set("build-dir", None, exact=True)
+
+ self.project_rules_.init_project(module_name, python_standalone)
+
+ jamroot = False
+
+ parent_module = None
+ if module_name == "test-config":
+ # No parent
+ pass
+ elif module_name == "site-config":
+ parent_module = "test-config"
+ elif module_name == "user-config":
+ parent_module = "site-config"
+ elif module_name == "project-config":
+ parent_module = "user-config"
+ elif location and not self.is_jamroot(basename):
+ # We search for a parent/project-root only if a Jamfile was
+ # specified, i.e. if the project is not standalone.
+ parent_module = self.load_parent(location)
+ else:
+ # It's either a jamroot or a standalone project.
+ # If it's a jamroot, inherit from user-config.
+ if location:
+ # If the project-config module exists, inherit from it.
+ if self.module2attributes.has_key("project-config"):
+ parent_module = "project-config"
+ else:
+ parent_module = "user-config" ;
+
+ jamroot = True ;
+
+ if parent_module:
+ self.inherit_attributes(module_name, parent_module)
+ attributes.set("parent-module", parent_module, exact=1)
+
+ if jamroot:
+ attributes.set("project-root", location, exact=1)
+
+ parent = None
+ if parent_module:
+ parent = self.target(parent_module)
+
+ if not self.module2target.has_key(module_name):
+ target = b2.build.targets.ProjectTarget(self.manager,
+ module_name, module_name, parent,
+ self.attribute(module_name, "requirements"),
+ # FIXME: why we need to pass this? It's not
+ # passed in jam code.
+ self.attribute(module_name, "default-build"))
+ self.module2target[module_name] = target
+
+ self.current_project = self.target(module_name)
+
+ def inherit_attributes(self, project_module, parent_module):
+ """Make 'project-module' inherit attributes of project
+ root and parent module."""
+
+ attributes = self.module2attributes[project_module]
+ pattributes = self.module2attributes[parent_module]
+
+ # Parent module might be locationless user-config.
+ # FIXME:
+ #if [ modules.binding $(parent-module) ]
+ #{
+ # $(attributes).set parent : [ path.parent
+ # [ path.make [ modules.binding $(parent-module) ] ] ] ;
+ # }
+
+ attributes.set("project-root", pattributes.get("project-root"), exact=True)
+ attributes.set("default-build", pattributes.get("default-build"), exact=True)
+ attributes.set("requirements", pattributes.get("requirements"), exact=True)
+ attributes.set("usage-requirements",
+ pattributes.get("usage-requirements"), exact=1)
+
+ parent_build_dir = pattributes.get("build-dir")
+
+ if parent_build_dir:
+ # Have to compute relative path from parent dir to our dir
+ # Convert both paths to absolute, since we cannot
+ # find relative path from ".." to "."
+
+ location = attributes.get("location")
+ parent_location = pattributes.get("location")
+
+ our_dir = os.path.join(os.getcwd(), location)
+ parent_dir = os.path.join(os.getcwd(), parent_location)
+
+ build_dir = os.path.join(parent_build_dir,
+ os.path.relpath(our_dir, parent_dir))
+ attributes.set("build-dir", build_dir, exact=True)
+
+ def register_id(self, id, module):
+ """Associate the given id with the given project module."""
+ self.id2module[id] = module
+
+ def current(self):
+ """Returns the project which is currently being loaded."""
+ return self.current_project
+
+ def set_current(self, c):
+ self.current_project = c
+
+ def push_current(self, project):
+ """Temporary changes the current project to 'project'. Should
+ be followed by 'pop-current'."""
+ self.saved_current_project.append(self.current_project)
+ self.current_project = project
+
+ def pop_current(self):
+ self.current_project = self.saved_current_project[-1]
+ del self.saved_current_project[-1]
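+
+ # Illustrative pairing ('other_project' is a placeholder); callers are
+ # expected to balance the two calls:
+ #
+ #   registry.push_current(other_project)
+ #   ...  # declare targets against 'other_project'
+ #   registry.pop_current()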
+
+ def attributes(self, project):
+ """Returns the project-attribute instance for the
+ specified jamfile module."""
+ return self.module2attributes[project]
+
+ def attribute(self, project, attribute):
+ """Returns the value of the specified attribute in the
+ specified jamfile module."""
+ try:
+ return self.module2attributes[project].get(attribute)
+ except KeyError:
+ raise BaseException("No attribute '%s' for project %s" % (attribute, project))
+
+ def attributeDefault(self, project, attribute, default):
+ """Returns the value of the specified attribute in the
+ specified jamfile module."""
+ return self.module2attributes[project].getDefault(attribute, default)
+
+ def target(self, project_module):
+ """Returns the project target corresponding to the 'project-module'."""
+ if not self.module2target.has_key(project_module):
+ self.module2target[project_module] = \
+ b2.build.targets.ProjectTarget(project_module, project_module,
+ self.attribute(project_module, "requirements"))
+
+ return self.module2target[project_module]
+
+ def use(self, id, location):
+ # Use/load a project.
+ saved_project = self.current_project
+ project_module = self.load(location)
+ declared_id = self.attributeDefault(project_module, "id", "")
+
+ if not declared_id or declared_id != id:
+ # The project at 'location' either has no id or
+ # that id is not equal to the 'id' parameter.
+ if self.id2module.has_key(id) and self.id2module[id] != project_module:
+ self.manager.errors()(
+"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location))
+ self.id2module[id] = project_module
+
+ self.current_project = saved_project
+
+ def add_rule(self, name, callable):
+ """Makes rule 'name' available to all subsequently loaded Jamfiles.
+
+ Calling that rule will relay to 'callable'."""
+ self.project_rules_.add_rule(name, callable)
+
+ def project_rules(self):
+ return self.project_rules_
+
+ def glob_internal(self, project, wildcards, excludes, rule_name):
+ location = project.get("source-location")[0]
+
+ result = []
+ callable = b2.util.path.__dict__[rule_name]
+
+ paths = callable([location], wildcards, excludes)
+ has_dir = 0
+ for w in wildcards:
+ if os.path.dirname(w):
+ has_dir = 1
+ break
+
+ if has_dir or rule_name != "glob":
+ result = []
+ # The paths we've found are relative to the current directory,
+ # but the names specified in the sources list are assumed to
+ # be relative to the source directory of the corresponding
+ # project. Either translate them or make them absolute.
+
+ for p in paths:
+ rel = os.path.relpath(p, location)
+ # If the path is below source location, use relative path.
+ if not ".." in rel:
+ result.append(rel)
+ else:
+ # Otherwise, use full path just to avoid any ambiguities.
+ result.append(os.path.abspath(p))
+
+ else:
+ # There were no directories in the wildcards, so the files are all
+ # in the source directory of the project. Just drop the
+ # directory, instead of making paths absolute.
+ result = [os.path.basename(p) for p in paths]
+
+ return result
+
+ def __build_python_module_cache(self):
+ """Recursively walks through the b2/src subdirectories and
+ creates an index of base module name to package name. The
+ index is stored within self.__python_module_cache and allows
+ for an O(1) module lookup.
+
+ For example, given the base module name `toolset`,
+ self.__python_module_cache['toolset'] will return
+ 'b2.build.toolset'
+
+ pkgutil.walk_packages() will find any python package
+ provided a directory contains an __init__.py. This has the
+ added benefit of allowing libraries to be installed and
+ automatically available within the contrib directory.
+
+ *Note*: pkgutil.walk_packages() will import any subpackage
+ in order to access its __path__ variable. Meaning:
+ any initialization code will be run if the package hasn't
+ already been imported.
+ """
+ cache = {}
+ for importer, mname, ispkg in pkgutil.walk_packages(b2.__path__, prefix='b2.'):
+ basename = mname.split('.')[-1]
+ # since the jam code is only going to have "import toolset ;"
+ # it doesn't matter if there are separately named "b2.build.toolset" and
+ # "b2.contrib.toolset" as it is impossible to know which the user is
+ # referring to.
+ if basename in cache:
+ self.manager.errors()('duplicate module name "{0}" '
+ 'found in boost-build path'.format(basename))
+ cache[basename] = mname
+ self.__python_module_cache = cache
+
+ def load_module(self, name, extra_path=None):
+ """Load a Python module that should be useable from Jamfiles.
+
+ There are generally two types of modules Jamfiles might want to
+ use:
+ - Core Boost.Build. Those are imported using plain names, e.g.
+ 'toolset', so this function checks if we have a module named
+ b2.package.module already.
+ - Python modules in the same directory as the Jamfile. We do not
+ want to even temporarily add the Jamfile's directory to sys.path,
+ since then we might get naming conflicts between standard
+ Python modules and those.
+ """
+ # See if we loaded module of this name already
+ existing = self.loaded_tool_modules_.get(name)
+ if existing:
+ return existing
+
+ # check the extra path as well as any paths outside
+ # of the b2 package and import the module if it exists
+ b2_path = os.path.normpath(b2.__path__[0])
+ # normalize the pathing in the BOOST_BUILD_PATH.
+ # this allows for using startswith() to determine
+ # if a path is a subdirectory of the b2 root_path
+ paths = [os.path.normpath(p) for p in self.manager.boost_build_path()]
+ # remove all paths that start with b2's root_path
+ paths = [p for p in paths if not p.startswith(b2_path)]
+ # add any extra paths, when given
+ paths.extend(extra_path or [])
+
+ try:
+ # find_module is used so that the pyc's can be used.
+ # an ImportError is raised if not found
+ f, location, description = imp.find_module(name, paths)
+ mname = name + "__for_jamfile"
+ self.loaded_tool_module_path_[mname] = location
+ module = imp.load_module(mname, f, location, description)
+ self.loaded_tool_modules_[name] = module
+ return module
+ except ImportError:
+ # if the module is not found in the b2 package,
+ # this error will be handled later
+ pass
+
+ # the cache is created here due to possibly importing packages
+ # that end up calling get_manager() which might fail
+ if not self.__python_module_cache:
+ self.__build_python_module_cache()
+
+ underscore_name = name.replace('-', '_')
+ # check to see if the module is within the b2 package
+ # and already loaded
+ mname = self.__python_module_cache.get(underscore_name)
+ if mname in sys.modules:
+ return sys.modules[mname]
+ # otherwise, if the module name is within the cache,
+ # the module exists within the BOOST_BUILD_PATH,
+ # load it.
+ elif mname:
+ # __import__ can be used here since the module
+ # is guaranteed to be found under the `b2` namespace.
+ __import__(mname)
+ module = sys.modules[mname]
+ self.loaded_tool_modules_[name] = module
+ self.loaded_tool_module_path_[mname] = module.__file__
+ return module
+
+ self.manager.errors()("Cannot find module '%s'" % name)
+
+
+
+# FIXME:
+# Defines a Boost.Build extension project. Such extensions usually
+# contain library targets and features that can be used by many people.
+# Even though extensions are really projects, they can be initialized as
+# a module would be with the "using" (project.project-rules.using)
+# mechanism.
+#rule extension ( id : options * : * )
+#{
+# # The caller is a standalone module for the extension.
+# local mod = [ CALLER_MODULE ] ;
+#
+# # We need to do the rest within the extension module.
+# module $(mod)
+# {
+# import path ;
+#
+# # Find the root project.
+# local root-project = [ project.current ] ;
+# root-project = [ $(root-project).project-module ] ;
+# while
+# [ project.attribute $(root-project) parent-module ] &&
+# [ project.attribute $(root-project) parent-module ] != user-config
+# {
+# root-project = [ project.attribute $(root-project) parent-module ] ;
+# }
+#
+# # Create the project data, and bring in the project rules
+# # into the module.
+# project.initialize $(__name__) :
+# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
+#
+# # Create the project itself, i.e. the attributes.
+# # All extensions are created in the "/ext" project space.
+# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+# local attributes = [ project.attributes $(__name__) ] ;
+#
+# # Inherit from the root project of whomever is defining us.
+# project.inherit-attributes $(__name__) : $(root-project) ;
+# $(attributes).set parent-module : $(root-project) : exact ;
+# }
+#}
+
+
+class ProjectAttributes:
+ """Class keeping all the attributes of a project.
+
+ The standard attributes are "id", "location", "project-root", "parent",
+ "requirements", "default-build", "source-location" and "projects-to-build".
+ """
+
+ def __init__(self, manager, location, project_module):
+ self.manager = manager
+ self.location = location
+ self.project_module = project_module
+ self.attributes = {}
+ self.usage_requirements = None
+
+ def set(self, attribute, specification, exact=False):
+ """Set the named attribute from the specification given by the user.
+ The value actually set may be different."""
+
+ if exact:
+ self.__dict__[attribute] = specification
+
+ elif attribute == "requirements":
+ self.requirements = property_set.refine_from_user_input(
+ self.requirements, specification,
+ self.project_module, self.location)
+
+ elif attribute == "usage-requirements":
+ unconditional = []
+ for p in specification:
+ split = property.split_conditional(p)
+ if split:
+ unconditional.append(split[1])
+ else:
+ unconditional.append(p)
+
+ non_free = property.remove("free", unconditional)
+ if non_free:
+ self.manager.errors()("usage-requirements %s have non-free properties %s"
+ % (specification, non_free))
+
+ t = property.translate_paths(
+ property.create_from_strings(specification, allow_condition=True),
+ self.location)
+
+ existing = self.__dict__.get("usage-requirements")
+ if existing:
+ new = property_set.create(existing.all() + t)
+ else:
+ new = property_set.create(t)
+ self.__dict__["usage-requirements"] = new
+
+
+ elif attribute == "default-build":
+ self.__dict__["default-build"] = property_set.create(specification)
+
+ elif attribute == "source-location":
+ source_location = []
+ for path in specification:
+ source_location.append(os.path.join(self.location, path))
+ self.__dict__["source-location"] = source_location
+
+ elif attribute == "build-dir":
+ self.__dict__["build-dir"] = os.path.join(self.location, specification[0])
+
+ elif attribute == "id":
+ id = specification[0]
+ if id[0] != '/':
+ id = "/" + id
+ self.manager.projects().register_id(id, self.project_module)
+ self.__dict__["id"] = id
+
+ elif not attribute in ["default-build", "location",
+ "source-location", "parent",
+ "projects-to-build", "project-root"]:
+ self.manager.errors()(
+"""Invalid project attribute '%s' specified
+for project at '%s'""" % (attribute, self.location))
+ else:
+ self.__dict__[attribute] = specification
+
+ def get(self, attribute):
+ return self.__dict__[attribute]
+
+ def getDefault(self, attribute, default):
+ return self.__dict__.get(attribute, default)
+
+ def dump(self):
+ """Prints the project attributes."""
+ id = self.get("id")
+ if not id:
+ id = "(none)"
+ else:
+ id = id[0]
+
+ parent = self.get("parent")
+ if not parent:
+ parent = "(none)"
+ else:
+ parent = parent[0]
+
+ print "'%s'" % id
+ print "Parent project:%s", parent
+ print "Requirements:%s", self.get("requirements")
+ print "Default build:%s", string.join(self.get("debuild-build"))
+ print "Source location:%s", string.join(self.get("source-location"))
+ print "Projects to build:%s", string.join(self.get("projects-to-build").sort());
+
+class ProjectRules:
+ """Class keeping all rules that are made available to Jamfile."""
+
+ def __init__(self, registry):
+ self.registry = registry
+ self.manager_ = registry.manager
+ self.rules = {}
+ self.local_names = [x for x in self.__class__.__dict__
+ if x not in ["__init__", "init_project", "add_rule",
+ "error_reporting_wrapper", "add_rule_for_type", "reverse"]]
+ self.all_names_ = [x for x in self.local_names]
+
+ def _import_rule(self, bjam_module, name, callable):
+ if hasattr(callable, "bjam_signature"):
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature)
+ else:
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable))
+
+
+ def add_rule_for_type(self, type):
+ rule_name = type.lower().replace("_", "-")
+
+ def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []):
+ return self.manager_.targets().create_typed_target(
+ type, self.registry.current(), name[0], sources,
+ requirements, default_build, usage_requirements)
+
+ self.add_rule(rule_name, xpto)
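+
+ # For example (illustrative): add_rule_for_type("EXE") makes an 'exe' rule
+ # available to Jamfiles, creating an EXE-typed target in the current project.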
+
+ def add_rule(self, name, callable):
+ self.rules[name] = callable
+ self.all_names_.append(name)
+
+ # Add the new rule at global bjam scope. This might not be ideal; it is
+ # done because if a jamroot does 'import foo' where foo calls add_rule,
+ # we need to import the new rule into the jamroot scope, and
+ # I'm too lazy to do this now.
+ self._import_rule("", name, callable)
+
+ def all_names(self):
+ return self.all_names_
+
+ def call_and_report_errors(self, callable, *args, **kw):
+ result = None
+ try:
+ self.manager_.errors().push_jamfile_context()
+ result = callable(*args, **kw)
+ except ExceptionWithUserContext, e:
+ e.report()
+ except Exception, e:
+ try:
+ self.manager_.errors().handle_stray_exception (e)
+ except ExceptionWithUserContext, e:
+ e.report()
+ finally:
+ self.manager_.errors().pop_jamfile_context()
+
+ return result
+
+ def make_wrapper(self, callable):
+ """Given a free-standing function 'callable', return a new
+ callable that will call 'callable' and report all exceptions,
+ using 'call_and_report_errors'."""
+ def wrapper(*args, **kw):
+ return self.call_and_report_errors(callable, *args, **kw)
+ return wrapper
+
+ def init_project(self, project_module, python_standalone=False):
+
+ if python_standalone:
+ m = sys.modules[project_module]
+
+ for n in self.local_names:
+ if n != "import_":
+ setattr(m, n, getattr(self, n))
+
+ for n in self.rules:
+ setattr(m, n, self.rules[n])
+
+ return
+
+ for n in self.local_names:
+ # Using 'getattr' here gives us a bound method,
+ # while using self.__dict__[r] would give an unbound one.
+ v = getattr(self, n)
+ if callable(v):
+ if n == "import_":
+ n = "import"
+ else:
+ n = string.replace(n, "_", "-")
+
+ self._import_rule(project_module, n, v)
+
+ for n in self.rules:
+ self._import_rule(project_module, n, self.rules[n])
+
+ def project(self, *args):
+
+ jamfile_module = self.registry.current().project_module()
+ attributes = self.registry.attributes(jamfile_module)
+
+ id = None
+ if args and args[0]:
+ id = args[0][0]
+ args = args[1:]
+
+ if id:
+ attributes.set('id', [id])
+
+ explicit_build_dir = None
+ for a in args:
+ if a:
+ attributes.set(a[0], a[1:], exact=0)
+ if a[0] == "build-dir":
+ explicit_build_dir = a[1]
+
+ # If '--build-dir' is specified, change the build dir for the project.
+ if self.registry.global_build_dir:
+
+ location = attributes.get("location")
+ # A project with an empty location is a 'standalone' project, like
+ # user-config or qt. It has no build dir.
+ # If we try to set a build dir for user-config, we'll then
+ # try to inherit it, with either weird or wrong consequences.
+ if location and location == attributes.get("project-root"):
+ # Re-read the project id, since it might have been changed in
+ # the project's attributes.
+ id = attributes.get('id')
+
+ # This is Jamroot.
+ if id:
+ if explicit_build_dir and os.path.isabs(explicit_build_dir):
+ self.registry.manager.errors()(
+"""Absolute directory specified via 'build-dir' project attribute
+Don't know how to combine that with the --build-dir option.""")
+
+ rid = id
+ if rid[0] == '/':
+ rid = rid[1:]
+
+ p = os.path.join(self.registry.global_build_dir, rid)
+ if explicit_build_dir:
+ p = os.path.join(p, explicit_build_dir)
+ attributes.set("build-dir", p, exact=1)
+ elif explicit_build_dir:
+ self.registry.manager.errors()(
+"""When --build-dir is specified, the 'build-dir'
+attribute is allowed only for top-level 'project' invocations""")
+
+ def constant(self, name, value):
+ """Declare and set a project global constant.
+ Project global constants are normal variables but should
+ not be changed. They are applied to every child Jamfile."""
+ m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>"
+ self.registry.current().add_constant(name[0], value)
+
+ def path_constant(self, name, value):
+ """Declare and set a project global constant, whose value is a path. The
+ path is adjusted to be relative to the invocation directory. The given
+ value path is taken to be either absolute, or relative to this project
+ root."""
+ if len(value) > 1:
+            self.registry.manager.errors()("path constant should have one element")
+ self.registry.current().add_constant(name[0], value[0], path=1)
+
+ def use_project(self, id, where):
+ # See comment in 'load' for explanation why we record the
+ # parameters as opposed to loading the project now.
+        m = self.registry.current().project_module()
+ self.registry.used_projects[m].append((id[0], where[0]))
+
+ def build_project(self, dir):
+ assert(isinstance(dir, list))
+ jamfile_module = self.registry.current().project_module()
+ attributes = self.registry.attributes(jamfile_module)
+ now = attributes.get("projects-to-build")
+ attributes.set("projects-to-build", now + dir, exact=True)
+
+ def explicit(self, target_names):
+ self.registry.current().mark_targets_as_explicit(target_names)
+
+ def always(self, target_names):
+        self.registry.current().mark_targets_as_always(target_names)
+
+ def glob(self, wildcards, excludes=None):
+ return self.registry.glob_internal(self.registry.current(),
+ wildcards, excludes, "glob")
+
+ def glob_tree(self, wildcards, excludes=None):
+ bad = 0
+ for p in wildcards:
+ if os.path.dirname(p):
+ bad = 1
+
+ if excludes:
+ for p in excludes:
+ if os.path.dirname(p):
+ bad = 1
+
+ if bad:
+            self.registry.manager.errors()(
+"The patterns to 'glob-tree' may not include directories")
+ return self.registry.glob_internal(self.registry.current(),
+ wildcards, excludes, "glob_tree")
+
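+    # A minimal illustrative sketch of the 'glob-tree' restriction above (the
+    # argument values are hypothetical): glob_tree(["*.cpp"], ["test*.cpp"])
+    # would be accepted, while glob_tree(["src/*.cpp"]) would be rejected
+    # because that pattern contains a directory component.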
+
+ def using(self, toolset, *args):
+        # The module referred to by 'using' can be placed in
+        # the same directory as the Jamfile, and the user
+        # will expect the module to be found even though
+        # the directory is not in BOOST_BUILD_PATH.
+        # So temporarily change the search path.
+ current = self.registry.current()
+ location = current.get('location')
+
+ m = self.registry.load_module(toolset[0], [location])
+ if not m.__dict__.has_key("init"):
+ self.registry.manager.errors()(
+ "Tool module '%s' does not define the 'init' method" % toolset[0])
+ m.init(*args)
+
+ # The above might have clobbered .current-project. Restore the correct
+ # value.
+ self.registry.set_current(current)
+
+ def import_(self, name, names_to_import=None, local_names=None):
+
+ name = name[0]
+ py_name = name
+ if py_name == "os":
+ py_name = "os_j"
+ jamfile_module = self.registry.current().project_module()
+ attributes = self.registry.attributes(jamfile_module)
+ location = attributes.get("location")
+
+ saved = self.registry.current()
+
+ m = self.registry.load_module(py_name, [location])
+
+ for f in m.__dict__:
+ v = m.__dict__[f]
+ f = f.replace("_", "-")
+ if callable(v):
+ qn = name + "." + f
+ self._import_rule(jamfile_module, qn, v)
+ record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v)
+
+
+ if names_to_import:
+ if not local_names:
+ local_names = names_to_import
+
+ if len(names_to_import) != len(local_names):
+ self.registry.manager.errors()(
+"""The number of names to import and local names do not match.""")
+
+ for n, l in zip(names_to_import, local_names):
+ self._import_rule(jamfile_module, l, m.__dict__[n])
+
+ self.registry.set_current(saved)
+
+ def conditional(self, condition, requirements):
+ """Calculates conditional requirements for multiple requirements
+        at once. This is a shorthand to reduce duplication and to
+ keep an inline declarative syntax. For example:
+
+ lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
+ <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
+ """
+
+ c = string.join(condition, ",")
+ if c.find(":") != -1:
+ return [c + r for r in requirements]
+ else:
+ return [c + ":" + r for r in requirements]
+
+ def option(self, name, value):
+ name = name[0]
+ if not name in ["site-config", "user-config", "project-config"]:
+            get_manager().errors()("The 'option' rule may be used only in site-config, user-config or project-config")
+
+ option.set(name, value[0])
diff --git a/tools/build/src/build/property-set.jam b/tools/build/src/build/property-set.jam
new file mode 100644
index 0000000000..55cb556453
--- /dev/null
+++ b/tools/build/src/build/property-set.jam
@@ -0,0 +1,517 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import feature ;
+import path ;
+import project ;
+import property ;
+import sequence ;
+import set ;
+import option ;
+
+# Class for storing a set of properties.
+#
+# There is 1<->1 correspondence between identity and value. No two instances
+# of the class are equal. To maintain this property, the 'property-set.create'
+# rule should be used to create new instances. Instances are immutable.
+#
+# Each property is classified with regard to its effect on build results.
+# Incidental properties have no effect on build results, from Boost.Build's
+# point of view. Others are either free or non-free, and we refer to non-free
+# ones as 'base'. Each property belongs to exactly one of those categories.
+#
+# It is possible to get a list of properties belonging to each category as
+# well as a list of properties with a specific attribute.
+#
+# Several operations, like refine and as-path, are provided. They all use
+# caching whenever possible.
+#
+class property-set
+{
+ import errors ;
+ import feature ;
+ import path ;
+ import property ;
+ import property-set ;
+ import set ;
+
+ rule __init__ ( raw-properties * )
+ {
+ self.raw = $(raw-properties) ;
+
+ for local p in $(raw-properties)
+ {
+ if ! $(p:G)
+ {
+ errors.error "Invalid property: '$(p)'" ;
+ }
+ }
+ }
+
+ # Returns Jam list of stored properties.
+ #
+ rule raw ( )
+ {
+ return $(self.raw) ;
+ }
+
+ rule str ( )
+ {
+ return "[" $(self.raw) "]" ;
+ }
+
+ # Returns properties that are neither incidental nor free.
+ #
+ rule base ( )
+ {
+ if ! $(self.base-initialized)
+ {
+ init-base ;
+ }
+ return $(self.base) ;
+ }
+
+ # Returns free properties which are not incidental.
+ #
+ rule free ( )
+ {
+ if ! $(self.base-initialized)
+ {
+ init-base ;
+ }
+ return $(self.free) ;
+ }
+
+ # Returns dependency properties.
+ #
+ rule dependency ( )
+ {
+ if ! $(self.dependency-initialized)
+ {
+ init-dependency ;
+ }
+ return $(self.dependency) ;
+ }
+
+ rule non-dependency ( )
+ {
+ if ! $(self.dependency-initialized)
+ {
+ init-dependency ;
+ }
+ return $(self.non-dependency) ;
+ }
+
+ rule conditional ( )
+ {
+ if ! $(self.conditional-initialized)
+ {
+ init-conditional ;
+ }
+ return $(self.conditional) ;
+ }
+
+ rule non-conditional ( )
+ {
+ if ! $(self.conditional-initialized)
+ {
+ init-conditional ;
+ }
+ return $(self.non-conditional) ;
+ }
+
+ # Returns incidental properties.
+ #
+ rule incidental ( )
+ {
+ if ! $(self.base-initialized)
+ {
+ init-base ;
+ }
+ return $(self.incidental) ;
+ }
+
+ rule refine ( ps )
+ {
+ if ! $(self.refined.$(ps))
+ {
+ local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
+ if $(r[1]) != "@error"
+ {
+ self.refined.$(ps) = [ property-set.create $(r) ] ;
+ }
+ else
+ {
+ self.refined.$(ps) = $(r) ;
+ }
+ }
+ return $(self.refined.$(ps)) ;
+ }
+
+ rule expand ( )
+ {
+ if ! $(self.expanded)
+ {
+ self.expanded = [ property-set.create [ feature.expand $(self.raw) ]
+ ] ;
+ }
+ return $(self.expanded) ;
+ }
+
+ rule expand-composites ( )
+ {
+ if ! $(self.composites)
+ {
+ self.composites = [ property-set.create
+ [ feature.expand-composites $(self.raw) ] ] ;
+ }
+ return $(self.composites) ;
+ }
+
+ rule evaluate-conditionals ( context ? )
+ {
+ context ?= $(__name__) ;
+ if ! $(self.evaluated.$(context))
+ {
+ self.evaluated.$(context) = [ property-set.create
+ [ property.evaluate-conditionals-in-context $(self.raw) : [
+ $(context).raw ] ] ] ;
+ }
+ return $(self.evaluated.$(context)) ;
+ }
+
+ rule propagated ( )
+ {
+ if ! $(self.propagated-ps)
+ {
+ local result ;
+ for local p in $(self.raw)
+ {
+ if propagated in [ feature.attributes $(p:G) ]
+ {
+ result += $(p) ;
+ }
+ }
+ self.propagated-ps = [ property-set.create $(result) ] ;
+ }
+ return $(self.propagated-ps) ;
+ }
+
+ rule add-defaults ( )
+ {
+ if ! $(self.defaults)
+ {
+ self.defaults = [ property-set.create
+ [ feature.add-defaults $(self.raw) ] ] ;
+ }
+ return $(self.defaults) ;
+ }
+
+ rule as-path ( )
+ {
+ if ! $(self.as-path)
+ {
+ self.as-path = [ property.as-path [ base ] ] ;
+ }
+ return $(self.as-path) ;
+ }
+
+ # Computes the path to be used for a target with the given properties.
+ # Returns a list of
+ # - the computed path
+ # - if the path is relative to the build directory, a value of 'true'.
+ #
+ rule target-path ( )
+ {
+ if ! $(self.target-path)
+ {
+ # The <location> feature can be used to explicitly change the
+ # location of generated targets.
+ local l = [ get <location> ] ;
+ if $(l)
+ {
+ self.target-path = $(l) ;
+ }
+ else
+ {
+ local p = [ property-set.hash-maybe [ as-path ] ] ;
+
+                # A really ugly hack. The Boost regression test system
+                # requires specific target paths, and it seems that changing
+                # it to handle other directory layouts is really hard. For
+                # that reason, we teach V2 to do the things the regression
+                # system requires. The value of '<location-prefix>' is
+                # prepended to the path.
+ local prefix = [ get <location-prefix> ] ;
+ if $(prefix)
+ {
+ self.target-path = [ path.join $(prefix) $(p) ] ;
+ }
+ else
+ {
+ self.target-path = $(p) ;
+ }
+ if ! $(self.target-path)
+ {
+ self.target-path = . ;
+ }
+ # The path is relative to build dir.
+ self.target-path += true ;
+ }
+ }
+ return $(self.target-path) ;
+ }
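+    # Illustrative sketch (assuming path hashing is not enabled): for a
+    # property set whose as-path form is "gcc/debug", target-path returns
+    # [ "gcc/debug" true ] ; adding <location-prefix>bin would give
+    # [ "bin/gcc/debug" true ] , while an explicit <location>out gives just
+    # [ "out" ] , i.e. a path that is not relative to the build directory.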
+
+ rule add ( ps )
+ {
+ if ! $(self.added.$(ps))
+ {
+ self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ]
+ ;
+ }
+ return $(self.added.$(ps)) ;
+ }
+
+ rule add-raw ( properties * )
+ {
+ return [ add [ property-set.create $(properties) ] ] ;
+ }
+
+ # Returns all values of 'feature'.
+ #
+ rule get ( feature )
+ {
+ if ! $(self.map-built)
+ {
+ # For each feature, create a member var and assign all values to it.
+ # Since all regular member vars start with 'self', there will be no
+ # conflicts between names.
+ self.map-built = true ;
+ for local v in $(self.raw)
+ {
+ $(v:G) += $(v:G=) ;
+ }
+ }
+ return $($(feature)) ;
+ }
+
+ # Returns true if the property-set contains all the
+ # specified properties.
+ #
+ rule contains-raw ( properties * )
+ {
+ if $(properties) in $(self.raw)
+ {
+ return true ;
+ }
+ }
+
+ # Returns true if the property-set has values for
+ # all the specified features
+ #
+ rule contains-features ( features * )
+ {
+ if $(features) in $(self.raw:G)
+ {
+ return true ;
+ }
+ }
+
+ # private
+
+ rule init-base ( )
+ {
+ for local p in $(self.raw)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ # A feature can be both incidental and free, in which case we add it
+ # to incidental.
+ if incidental in $(att)
+ {
+ self.incidental += $(p) ;
+ }
+ else if free in $(att)
+ {
+ self.free += $(p) ;
+ }
+ else
+ {
+ self.base += $(p) ;
+ }
+ }
+ self.base-initialized = true ;
+ }
+
+ rule init-dependency ( )
+ {
+ for local p in $(self.raw)
+ {
+ if dependency in [ feature.attributes $(p:G) ]
+ {
+ self.dependency += $(p) ;
+ }
+ else
+ {
+ self.non-dependency += $(p) ;
+ }
+ }
+ self.dependency-initialized = true ;
+ }
+
+ rule init-conditional ( )
+ {
+ for local p in $(self.raw)
+ {
+ # TODO: Note that non-conditional properties may contain colon (':')
+ # characters as well, e.g. free or indirect properties. Indirect
+ # properties for example contain a full Jamfile path in their value
+ # which on Windows file systems contains ':' as the drive separator.
+ if [ MATCH (:) : $(p:G=) ]
+ {
+ self.conditional += $(p) ;
+ }
+ else
+ {
+ self.non-conditional += $(p) ;
+ }
+ }
+ self.conditional-initialized = true ;
+ }
+}
+
+
+# Creates a new 'property-set' instance for the given raw properties or returns
+# an already existing one.
+#
+rule create ( raw-properties * )
+{
+ raw-properties = [ sequence.unique
+ [ sequence.insertion-sort $(raw-properties) ] ] ;
+
+ local key = $(raw-properties:J=-:E=) ;
+
+ if ! $(.ps.$(key))
+ {
+ .ps.$(key) = [ new property-set $(raw-properties) ] ;
+ }
+ return $(.ps.$(key)) ;
+}
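+# Illustrative sketch: because the raw properties are sorted and de-duplicated
+# before the cache lookup, the following two calls would be expected to return
+# the very same property-set instance:
+#
+#   local a = [ property-set.create <rtti>off <optimization>on ] ;
+#   local b = [ property-set.create <optimization>on <rtti>off ] ;
+#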
+NATIVE_RULE property-set : create ;
+
+if [ HAS_NATIVE_RULE class@property-set : get : 1 ]
+{
+ NATIVE_RULE class@property-set : get ;
+}
+
+if [ HAS_NATIVE_RULE class@property-set : contains-features : 1 ]
+{
+ NATIVE_RULE class@property-set : contains-features ;
+}
+
+# Creates a new 'property-set' instance after checking that all properties are
+# valid and converting implicit properties into gristed form.
+#
+rule create-with-validation ( raw-properties * )
+{
+ property.validate $(raw-properties) ;
+ return [ create [ property.make $(raw-properties) ] ] ;
+}
+
+
+# Creates a property-set from the input given by the user, in the context of
+# 'jamfile-module' at 'location'.
+#
+rule create-from-user-input ( raw-properties * : jamfile-module location )
+{
+ local project-id = [ project.attribute $(jamfile-module) id ] ;
+ project-id ?= [ path.root $(location) [ path.pwd ] ] ;
+ return [ property-set.create [ property.translate $(raw-properties)
+ : $(project-id) : $(location) : $(jamfile-module) ] ] ;
+}
+
+
+# Refines requirements with requirements provided by the user. Specially handles
+# "-<property>value" syntax in specification to remove given requirements.
+# - parent-requirements -- property-set object with requirements to refine.
+# - specification -- string list of requirements provided by the user.
+# - project-module -- module to which context indirect features will be
+# bound.
+# - location -- path to which path features are relative.
+#
+rule refine-from-user-input ( parent-requirements : specification * :
+ project-module : location )
+{
+ if ! $(specification)
+ {
+ return $(parent-requirements) ;
+ }
+ else
+ {
+ local add-requirements ;
+ local remove-requirements ;
+
+ for local r in $(specification)
+ {
+ local m = [ MATCH "^-(.*)" : $(r) ] ;
+ if $(m)
+ {
+ remove-requirements += $(m) ;
+ }
+ else
+ {
+ add-requirements += $(r) ;
+ }
+ }
+
+ if $(remove-requirements)
+ {
+ # Need to create a property set, so that path features and indirect
+ # features are translated just like they are in project
+ # requirements.
+ local ps = [ property-set.create-from-user-input
+ $(remove-requirements) : $(project-module) $(location) ] ;
+
+ parent-requirements = [ property-set.create
+ [ set.difference [ $(parent-requirements).raw ]
+ : [ $(ps).raw ] ] ] ;
+ specification = $(add-requirements) ;
+ }
+
+ local requirements = [ property-set.create-from-user-input
+ $(specification) : $(project-module) $(location) ] ;
+
+ return [ $(parent-requirements).refine $(requirements) ] ;
+ }
+}
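+# Illustrative sketch (with hypothetical requirements): if the parent
+# requirements contain <define>FOO, a user specification of
+# "-<define>FOO <define>BAR" first strips <define>FOO via set.difference and
+# then refines with <define>BAR, so the result contains <define>BAR but no
+# longer <define>FOO.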
+
+
+# Returns a property-set with an empty set of properties.
+#
+rule empty ( )
+{
+ if ! $(.empty)
+ {
+ .empty = [ create ] ;
+ }
+ return $(.empty) ;
+}
+
+
+if [ option.get hash : : yes ] = yes
+{
+ rule hash-maybe ( path ? )
+ {
+ path ?= "" ;
+ return [ MD5 $(path) ] ;
+ }
+}
+else
+{
+ rule hash-maybe ( path ? )
+ {
+ return $(path) ;
+ }
+}
diff --git a/tools/build/src/build/property.jam b/tools/build/src/build/property.jam
new file mode 100644
index 0000000000..ff28dfd202
--- /dev/null
+++ b/tools/build/src/build/property.jam
@@ -0,0 +1,905 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import feature ;
+import indirect ;
+import path ;
+import regex ;
+import string ;
+import sequence ;
+import set ;
+import utility ;
+
+
+# Refines 'properties' by overriding any non-free and non-conditional properties
+# for which a different value is specified in 'requirements'. Returns the
+# resulting list of properties.
+#
+rule refine ( properties * : requirements * )
+{
+ local result ;
+ local unset ;
+
+ # Collect all non-free features in requirements
+ for local r in $(requirements)
+ {
+ # Do not consider conditional requirements.
+ if ! [ MATCH (:) : $(r:G=) ] && ! free in [ feature.attributes $(r:G) ]
+ {
+ unset += $(r:G) ;
+ }
+ }
+
+ # Remove properties that are overridden by requirements
+ for local p in $(properties)
+ {
+ if [ MATCH (:) : $(p:G=) ] || ! $(p:G) in $(unset)
+ {
+ result += $(p) ;
+ }
+ }
+
+ return [ sequence.unique $(result) $(requirements) ] ;
+}
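+# Illustrative sketch, mirroring the __test__ cases below: refining
+# "<toolset>gcc <optimization>off" with "<optimization>on" overrides the
+# non-free <optimization> value and yields "<toolset>gcc <optimization>on",
+# while a free property such as <define>FOO is simply appended.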
+
+
+# Removes all conditional properties whose conditions are not met. For those
+# with met conditions, removes the condition. Properties in conditions are
+# looked up in 'context'.
+#
+rule evaluate-conditionals-in-context ( properties * : context * )
+{
+ local base ;
+ local conditionals ;
+ for local p in $(properties)
+ {
+ if [ MATCH (:<) : $(p) ]
+ {
+ conditionals += $(p) ;
+ }
+ else
+ {
+ base += $(p) ;
+ }
+ }
+
+ local result = $(base) ;
+ for local p in $(conditionals)
+ {
+ # Separate condition and property.
+ local s = [ MATCH ^(.*):(<.*) : $(p) ] ;
+ # Split condition into individual properties.
+ local condition = [ regex.split $(s[1]) "," ] ;
+ # Evaluate condition.
+ if ! [ MATCH ^(!).* : $(condition:G=) ]
+ {
+ # Only positive checks
+ if $(condition) in $(context)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ else
+ {
+ # Have negative checks
+ local fail ;
+ while $(condition)
+ {
+ local c = $(condition[1]) ;
+ local m = [ MATCH ^!(.*) : $(c) ] ;
+ if $(m)
+ {
+ local p = $(m:G=$(c:G)) ;
+ if $(p) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ else
+ {
+ if ! $(c) in $(context)
+ {
+ fail = true ;
+ c = ;
+ }
+ }
+ condition = $(condition[2-]) ;
+ }
+ if ! $(fail)
+ {
+ result += $(s[2]) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+rule expand-subfeatures-in-conditions ( properties * )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local s = [ MATCH ^(.*):(<.*) : $(p) ] ;
+ if ! $(s)
+ {
+ result += $(p) ;
+ }
+ else
+ {
+ local condition = $(s[1]) ;
+ local value = $(s[2]) ;
+ # Condition might include several elements.
+ condition = [ regex.split $(condition) "," ] ;
+ local e ;
+ for local c in $(condition)
+ {
+ # It is common for a condition to include a toolset or
+ # subfeatures that have not been defined. In that case we want
+ # the condition to simply 'never be satisfied' and validation
+ # would only produce a spurious error so we prevent it by
+ # passing 'true' as the second parameter.
+ e += [ feature.expand-subfeatures $(c) : true ] ;
+ }
+ if $(e) = $(condition)
+ {
+ # (todo)
+ # This is just an optimization and possibly a premature one at
+ # that.
+ # (todo) (12.07.2008.) (Jurko)
+ result += $(p) ;
+ }
+ else
+ {
+ result += $(e:J=,):$(value) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Helper for as-path, below. Orders properties with the implicit ones first, and
+# within the two sections in alphabetical order of feature name.
+#
+local rule path-order ( x y )
+{
+ if $(y:G) && ! $(x:G)
+ {
+ return true ;
+ }
+ else if $(x:G) && ! $(y:G)
+ {
+ return ;
+ }
+ else
+ {
+ if ! $(x:G)
+ {
+ x = [ feature.expand-subfeatures $(x) ] ;
+ y = [ feature.expand-subfeatures $(y) ] ;
+ }
+
+ if $(x[1]) < $(y[1])
+ {
+ return true ;
+ }
+ }
+}
+
+
+local rule abbreviate-dashed ( string )
+{
+ local r ;
+ for local part in [ regex.split $(string) - ]
+ {
+ r += [ string.abbreviate $(part) ] ;
+ }
+ return $(r:J=-) ;
+}
+
+
+local rule identity ( string )
+{
+ return $(string) ;
+}
+
+
+if --abbreviate-paths in [ modules.peek : ARGV ]
+{
+ .abbrev = abbreviate-dashed ;
+}
+else
+{
+ .abbrev = identity ;
+}
+
+
+# Returns a path representing the given expanded property set.
+#
+rule as-path ( properties * )
+{
+ local entry = .result.$(properties:J=-) ;
+
+ if ! $($(entry))
+ {
+ # Trim redundancy.
+ properties = [ feature.minimize $(properties) ] ;
+
+ # Sort according to path-order.
+ properties = [ sequence.insertion-sort $(properties) : path-order ] ;
+
+ local components ;
+ for local p in $(properties)
+ {
+ if $(p:G)
+ {
+ local f = [ utility.ungrist $(p:G) ] ;
+ p = $(f)-$(p:G=) ;
+ }
+ components += [ $(.abbrev) $(p) ] ;
+ }
+
+ $(entry) = $(components:J=/) ;
+ }
+
+ return $($(entry)) ;
+}
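+# Illustrative sketch, using the feature definitions from __test__ below:
+# [ as-path <toolset>gcc <optimization>off <rtti>off <variant>debug ] yields
+# "gcc/debug/rtti-off" -- implicit values come first, the remaining features
+# follow in path order, and the redundant <optimization>off implied by
+# <variant>debug is minimized away.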
+
+
+# Exit with error if property is not valid.
+#
+local rule validate1 ( property )
+{
+ local msg ;
+ if $(property:G)
+ {
+ local feature = $(property:G) ;
+ local value = $(property:G=) ;
+
+ if ! [ feature.valid $(feature) ]
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "unknown feature '$(feature)'" ;
+ }
+ else if $(value) && ! free in [ feature.attributes $(feature) ]
+ {
+ feature.validate-value-string $(feature) $(value) ;
+ }
+ else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
+ {
+ # Ungrist for better error messages.
+ feature = [ utility.ungrist $(property:G) ] ;
+ msg = "No value specified for feature '$(feature)'" ;
+ }
+ }
+ else
+ {
+ local feature = [ feature.implied-feature $(property) ] ;
+ feature.validate-value-string $(feature) $(property) ;
+ }
+ if $(msg)
+ {
+ import errors ;
+ errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
+ }
+}
+
+
+rule validate ( properties * )
+{
+ for local p in $(properties)
+ {
+ validate1 $(p) ;
+ }
+}
+
+
+rule validate-property-sets ( property-sets * )
+{
+ for local s in $(property-sets)
+ {
+ validate [ feature.split $(s) ] ;
+ }
+}
+
+
+# Expands any implicit property values in the given property 'specification' so
+# they explicitly state their feature.
+#
+rule make ( specification * )
+{
+ local result ;
+ for local e in $(specification)
+ {
+ if $(e:G)
+ {
+ result += $(e) ;
+ }
+ else if [ feature.is-implicit-value $(e) ]
+ {
+ local feature = [ feature.implied-feature $(e) ] ;
+ result += $(feature)$(e) ;
+ }
+ else
+ {
+ import errors ;
+ errors.error "'$(e)' is not a valid property specification" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that do not
+# have their attributes listed in 'attributes'.
+#
+rule remove ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a property set containing all the elements in 'properties' that have
+# their attributes listed in 'attributes'.
+#
+rule take ( attributes + : properties * )
+{
+ local result ;
+ for local e in $(properties)
+ {
+ if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Selects properties corresponding to any of the given features.
+#
+rule select ( features * : properties * )
+{
+ local result ;
+
+ # Add any missing angle brackets.
+ local empty = "" ;
+ features = $(empty:G=$(features)) ;
+
+ for local p in $(properties)
+ {
+ if $(p:G) in $(features)
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns a modified version of properties with all values of the given feature
+# replaced by the given value. If 'value' is empty the feature will be removed.
+#
+rule change ( properties * : feature value ? )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ if $(p:G) = $(feature)
+ {
+ result += $(value:G=$(feature)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'property' is a conditional property, returns the condition and the
+# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
+# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
+# string.
+#
+rule split-conditional ( property )
+{
+ return [ MATCH "^(.+):(<.+)" : $(property) ] ;
+}
+
+
+rule translate-path-value ( value : path )
+{
+ local t ;
+ for local v in [ regex.split $(value) "&&" ]
+ {
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
+ }
+ return $(t:TJ="&&") ;
+}
+
+rule translate-dependency-value ( value : project-id : project-location )
+{
+ local split-target = [ regex.match ^(.*)//(.*) : $(value) ] ;
+ if $(split-target)
+ {
+ local rooted = [ path.root [ path.make $(split-target[1]) ]
+ [ path.root $(project-location) [ path.pwd ] ] ] ;
+ return $(rooted)//$(split-target[2]) ;
+ }
+ else if [ path.is-rooted $(value) ]
+ {
+ return $(value) ;
+ }
+ else
+ {
+ return $(project-id)//$(value) ;
+ }
+}
+
+rule translate-indirect-value ( rulename : context-module )
+{
+ if [ MATCH "^([^%]*)%([^%]+)$" : $(rulename) ]
+ {
+ # Rule is already in the 'indirect-rule' format.
+ return @$(rulename) ;
+ }
+ else
+ {
+ local v ;
+ if ! [ MATCH "([.])" : $(rulename) ]
+ {
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
+ rulename = $(context-module).$(rulename) ;
+ }
+ v = [ indirect.make $(rulename) : $(context-module) ] ;
+ return @$(v) ;
+ }
+
+}
+
+# Equivalent to calling all of:
+# translate-path
+# translate-indirect
+# translate-dependency
+# expand-subfeatures-in-conditions
+# make
+#
+rule translate ( properties * : project-id : project-location : context-module )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition property ;
+
+ if $(split)
+ {
+ condition = $(split[1]) ;
+ property = $(split[2]) ;
+
+ local e ;
+ for local c in [ regex.split $(condition) "," ]
+ {
+ e += [ feature.expand-subfeatures $(c) : true ] ;
+ }
+
+ condition = $(e:J=,): ;
+ }
+ else
+ {
+ property = $(p) ;
+ }
+
+ local feature = $(property:G) ;
+ if ! $(feature)
+ {
+ if [ feature.is-implicit-value $(property) ]
+ {
+ feature = [ feature.implied-feature $(property) ] ;
+ result += $(condition:E=)$(feature)$(property) ;
+ }
+ else
+ {
+ import errors ;
+ errors.error "'$(property)' is not a valid property specification" ;
+ }
+ } else {
+ local attributes = [ feature.attributes $(feature) ] ;
+ local value ;
+ # Only free features should be translated
+ if free in $(attributes)
+ {
+ if path in $(attributes)
+ {
+ value = [ translate-path-value $(property:G=) : $(project-location) ] ;
+ result += $(condition:E=)$(feature)$(value) ;
+ }
+ else if dependency in $(attributes)
+ {
+ value = [ translate-dependency-value $(property:G=) : $(project-id) : $(project-location) ] ;
+ result += $(condition:E=)$(feature)$(value) ;
+ }
+ else
+ {
+ local m = [ MATCH ^@(.+) : $(property:G=) ] ;
+ if $(m)
+ {
+ value = [ translate-indirect-value $(m) : $(context-module) ] ;
+ result += $(condition:E=)$(feature)$(value) ;
+ }
+ else
+ {
+ result += $(condition:E=)$(property) ;
+ }
+ }
+ }
+ else
+ {
+ result += $(condition:E=)$(property) ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+# Interpret all path properties in 'properties' as relative to 'path'. The
+# property values are assumed to be in system-specific form, and will be
+# translated into normalized form.
+#
+rule translate-paths ( properties * : path )
+{
+ local result ;
+ for local p in $(properties)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = $(split[1]): ;
+ p = $(split[2]) ;
+ }
+
+ if path in [ feature.attributes $(p:G) ]
+ {
+ local values = [ regex.split $(p:TG=) "&&" ] ;
+ local t ;
+ for local v in $(values)
+ {
+ t += [ path.root [ path.make $(v) ] $(path) ] ;
+ }
+ t = $(t:J="&&") ;
+ result += $(condition)$(t:TG=$(p:G)) ;
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
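+# Illustrative sketch (with a hypothetical project path): translating
+# <include>detail relative to "libs/foo" yields <include>libs/foo/detail,
+# already-rooted values are left alone, and values joined with "&&" are
+# translated element-wise and re-joined.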
+
+
+# Assumes that all feature values that start with '@' are names of rules, used
+# in 'context-module'. Such rules can be either local to the module or global.
+# Converts such values into 'indirect-rule' format (see indirect.jam), so they
+# can be called from other modules. Does nothing for such values that are
+# already in the 'indirect-rule' format.
+#
+rule translate-indirect ( specification * : context-module )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local m = [ MATCH ^@(.+) : $(p:G=) ] ;
+ if $(m)
+ {
+ local v ;
+ if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
+ {
+ # Rule is already in the 'indirect-rule' format.
+ v = $(m) ;
+ }
+ else
+ {
+ if ! [ MATCH "([.])" : $(m) ]
+ {
+ # This is an unqualified rule name. The user might want to
+ # set flags on this rule name and toolset.flag
+ # auto-qualifies it. Need to do the same here so flag
+ # setting works. We can arrange for toolset.flag to *not*
+ # auto-qualify the argument but then two rules defined in
+ # two Jamfiles would conflict.
+ m = $(context-module).$(m) ;
+ }
+ v = [ indirect.make $(m) : $(context-module) ] ;
+ }
+
+ v = @$(v) ;
+ result += $(v:G=$(p:G)) ;
+ }
+ else
+ {
+ result += $(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Binds all dependency properties in a list relative to the given project.
+# Targets with absolute paths will be left unchanged and targets which have a
+# project specified will have the path to the project interpreted relative to
+# the specified location.
+#
+rule translate-dependencies ( specification * : project-id : location )
+{
+ local result ;
+ for local p in $(specification)
+ {
+ local split = [ split-conditional $(p) ] ;
+ local condition = "" ;
+ if $(split)
+ {
+ condition = $(split[1]): ;
+ p = $(split[2]) ;
+ }
+ if dependency in [ feature.attributes $(p:G) ]
+ {
+ local split-target = [ regex.match ^(.*)//(.*) : $(p:G=) ] ;
+ if $(split-target)
+ {
+ local rooted = [ path.root [ path.make $(split-target[1]) ]
+ [ path.root $(location) [ path.pwd ] ] ] ;
+ result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
+ }
+ else if [ path.is-rooted $(p:G=) ]
+ {
+ result += $(condition)$(p) ;
+ }
+ else
+ {
+ result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
+ }
+ }
+ else
+ {
+ result += $(condition)$(p) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Class maintaining a property set -> string mapping.
+#
+class property-map
+{
+ import numbers ;
+ import sequence ;
+
+ rule __init__ ( )
+ {
+ self.next-flag = 1 ;
+ }
+
+ # Associate 'value' with 'properties'.
+ #
+ rule insert ( properties * : value )
+ {
+ self.all-flags += self.$(self.next-flag) ;
+ self.$(self.next-flag) = $(value) $(properties) ;
+
+ self.next-flag = [ numbers.increment $(self.next-flag) ] ;
+ }
+
+ # Returns the value associated with 'properties' or any subset of it. If
+ # more than one subset has a value assigned to it, returns the value for the
+ # longest subset, if it is unique.
+ #
+ rule find ( property-set )
+ {
+ # First find all matches.
+ local matches ;
+ local match-ranks ;
+ for local i in $(self.all-flags)
+ {
+ local list = $($(i)) ;
+ if [ $(property-set).contains-raw $(list[2-]) ]
+ {
+ matches += $(list[1]) ;
+ match-ranks += [ sequence.length $(list) ] ;
+ }
+ }
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
+ if $(best[2])
+ {
+ import errors : error : errors.error ;
+ errors.error "Ambiguous key $(properties:J= :E=)" ;
+ }
+ return $(best) ;
+ }
+
+ # Returns the value associated with 'properties'. If 'value' parameter is
+ # given, replaces the found value.
+ #
+ rule find-replace ( properties * : value ? )
+ {
+ # First find all matches.
+ local matches ;
+ local match-ranks ;
+ for local i in $(self.all-flags)
+ {
+ if $($(i)[2-]) in $(properties)
+ {
+ matches += $(i) ;
+ match-ranks += [ sequence.length $($(i)) ] ;
+ }
+ }
+ local best = [ sequence.select-highest-ranked $(matches)
+ : $(match-ranks) ] ;
+ if $(best[2])
+ {
+ import errors : error : errors.error ;
+ errors.error "Ambiguous key $(properties:J= :E=)" ;
+ }
+ local original = $($(best)[1]) ;
+ if $(value)-is-set
+ {
+ $(best) = $(value) $($(best)[2-]) ;
+ }
+ return $(original) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+ import feature ;
+
+ # Local rules must be explicitly re-imported.
+ import property : path-order abbreviate-dashed ;
+
+ feature.prepare-test property-test-temp ;
+
+ feature.feature toolset : gcc : implicit symmetric ;
+ feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
+ 3.0.2 : optional ;
+ feature.feature define : : free ;
+ feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
+ feature.feature optimization : on off ;
+ feature.feature variant : debug release : implicit composite symmetric ;
+ feature.feature rtti : on off : link-incompatible ;
+
+ feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
+ feature.compose <variant>release : <define>NDEBUG <optimization>on ;
+
+ validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+
+ assert.true path-order $(test-space) debug <define>foo ;
+ assert.false path-order $(test-space) <define>foo debug ;
+ assert.true path-order $(test-space) gcc debug ;
+ assert.false path-order $(test-space) debug gcc ;
+ assert.true path-order $(test-space) <optimization>on <rtti>on ;
+ assert.false path-order $(test-space) <rtti>on <optimization>on ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
+ : refine <toolset>gcc <rtti>off
+ : <define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <optimization>on
+ : refine <toolset>gcc <optimization>off
+ : <optimization>on
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off
+ : refine <toolset>gcc : <rtti>off : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
+ : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+ : $(test-space) ;
+
+ assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+ : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+ : $(test-space) ;
+
+ assert.result <define>MY_RELEASE
+ : evaluate-conditionals-in-context
+ <variant>release,<rtti>off:<define>MY_RELEASE
+ : <toolset>gcc <variant>release <rtti>off ;
+
+ assert.result debug
+ : as-path <optimization>off <variant>debug
+ : $(test-space) ;
+
+ assert.result gcc/debug/rtti-off
+ : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
+ : $(test-space) ;
+
+ assert.result optmz-off : abbreviate-dashed optimization-off ;
+ assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
+
+ try ;
+ validate <feature>value : $(test-space) ;
+ catch "Invalid property '<feature>value': unknown feature 'feature'." ;
+
+ try ;
+ validate <rtti>default : $(test-space) ;
+ catch \"default\" is not a known value of feature <rtti> ;
+
+ validate <define>WHATEVER : $(test-space) ;
+
+ try ;
+ validate <rtti> : $(test-space) ;
+ catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
+
+ try ;
+ validate value : $(test-space) ;
+ catch \"value\" is not an implicit feature value ;
+
+ assert.result-set-equal <rtti>on
+ : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
+
+ assert.result-set-equal <include>a
+ : select include : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a
+ : select include bar : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <include>a <toolset>gcc
+ : select include <bar> <toolset> : <include>a <toolset>gcc ;
+
+ assert.result-set-equal <toolset>kylix <include>a
+ : change <toolset>gcc <include>a : <toolset> kylix ;
+
+ pm = [ new property-map ] ;
+ $(pm).insert <toolset>gcc : o ;
+ $(pm).insert <toolset>gcc <os>NT : obj ;
+ $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
+
+ assert.equal o : [ $(pm).find-replace <toolset>gcc ] ;
+
+ assert.equal obj : [ $(pm).find-replace <toolset>gcc <os>NT ] ;
+
+ try ;
+ $(pm).find-replace <toolset>gcc <os>NT <os>CYGWIN ;
+ catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
+
+ # Test ordinary properties.
+ assert.result : split-conditional <toolset>gcc ;
+
+ # Test properties with ":".
+ assert.result : split-conditional <define>FOO=A::B ;
+
+ # Test conditional feature.
+ assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+ : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
+
+ feature.finish-test property-test-temp ;
+}
diff --git a/tools/build/src/build/property.py b/tools/build/src/build/property.py
new file mode 100644
index 0000000000..dab83c7c88
--- /dev/null
+++ b/tools/build/src/build/property.py
@@ -0,0 +1,611 @@
+# Status: ported, except for tests and --abbreviate-paths.
+# Base revision: 64070
+#
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import re
+from b2.util.utility import *
+from b2.build import feature
+from b2.util import sequence, qualify_jam_action
+import b2.util.set
+from b2.manager import get_manager
+
+__re_two_ampersands = re.compile ('&&')
+__re_comma = re.compile (',')
+__re_split_condition = re.compile ('(.*):(<.*)')
+__re_split_conditional = re.compile (r'(.+):<(.+)')
+__re_colon = re.compile (':')
+__re_has_condition = re.compile (r':<')
+__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
+
+__not_applicable_feature='not-applicable-in-this-context'
+feature.feature(__not_applicable_feature, [], ['free'])
+
+class Property(object):
+
+ __slots__ = ('_feature', '_value', '_condition')
+
+ def __init__(self, f, value, condition = []):
+ if type(f) == type(""):
+ f = feature.get(f)
+ # At present, single property has a single value.
+ assert type(value) != type([])
+ assert(f.free() or value.find(':') == -1)
+ self._feature = f
+ self._value = value
+ self._condition = condition
+
+ def feature(self):
+ return self._feature
+
+ def value(self):
+ return self._value
+
+ def condition(self):
+ return self._condition
+
+ def to_raw(self):
+ result = "<" + self._feature.name() + ">" + str(self._value)
+ if self._condition:
+ result = ",".join(str(p) for p in self._condition) + ':' + result
+ return result
+
+ def __str__(self):
+ return self.to_raw()
+
+ def __hash__(self):
+ # FIXME: consider if this class should be value-is-identity one
+ return hash((self._feature, self._value, tuple(self._condition)))
+
+ def __cmp__(self, other):
+ return cmp((self._feature, self._value, self._condition),
+ (other._feature, other._value, other._condition))
+
+
+def create_from_string(s, allow_condition=False,allow_missing_value=False):
+
+ condition = []
+ import types
+ if not isinstance(s, types.StringType):
+ print type(s)
+ if __re_has_condition.search(s):
+
+ if not allow_condition:
+ raise BaseException("Conditional property is not allowed in this context")
+
+ m = __re_separate_condition_and_property.match(s)
+ condition = m.group(1)
+ s = m.group(2)
+
+ # FIXME: break dependency cycle
+ from b2.manager import get_manager
+
+ feature_name = get_grist(s)
+ if not feature_name:
+ if feature.is_implicit_value(s):
+ f = feature.implied_feature(s)
+ value = s
+ else:
+ raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
+ else:
+ if feature.valid(feature_name):
+ f = feature.get(feature_name)
+ value = get_value(s)
+ else:
+ # In case feature name is not known, it is wrong to do a hard error.
+ # Feature sets change depending on the toolset. So e.g.
+ # <toolset-X:version> is an unknown feature when using toolset Y.
+ #
+ # Ideally we would like to ignore this value, but most of
+ # Boost.Build code expects that we return a valid Property. For this
+ # reason we use a sentinel <not-applicable-in-this-context> feature.
+ #
+ # The underlying cause for this problem is that python port Property
+ # is more strict than its Jam counterpart and must always reference
+ # a valid feature.
+ f = feature.get(__not_applicable_feature)
+ value = s
+
+ if not value and not allow_missing_value:
+ get_manager().errors()("Invalid property '%s' -- no value specified" % s)
+
+
+ if condition:
+ condition = [create_from_string(x) for x in condition.split(',')]
+
+ return Property(f, value, condition)
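+# Illustrative sketch (assuming the <define> and <toolset> features are
+# defined, with 'gcc' as a toolset value):
+#
+#   p = create_from_string('<define>FOO')   # a Property for <define> / 'FOO'
+#   q = create_from_string('<toolset>gcc:<define>BAR', allow_condition=True)
+#   q.condition()   # [<toolset>gcc] as a list of Property objects
+#   q.to_raw()      # '<toolset>gcc:<define>BAR'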
+
+def create_from_strings(string_list, allow_condition=False):
+
+ return [create_from_string(s, allow_condition) for s in string_list]
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __results
+
+ # A cache of results from as_path
+ __results = {}
+
+reset ()
+
+
+def path_order (x, y):
+ """ Helper for as_path, below. Orders properties with the implicit ones
+ first, and within the two sections in alphabetical order of feature
+ name.
+ """
+ if x == y:
+ return 0
+
+ xg = get_grist (x)
+ yg = get_grist (y)
+
+ if yg and not xg:
+ return -1
+
+ elif xg and not yg:
+ return 1
+
+ else:
+ if not xg:
+ x = feature.expand_subfeatures([x])
+ y = feature.expand_subfeatures([y])
+
+ if x < y:
+ return -1
+ elif x > y:
+ return 1
+ else:
+ return 0
+
+def identify(string):
+ return string
+
+# Uses Property
+def refine (properties, requirements):
+ """ Refines 'properties' by overriding any non-free properties
+ for which a different value is specified in 'requirements'.
+ Conditional requirements are just added without modification.
+ Returns the resulting list of properties.
+ """
+ # The result has no duplicates, so we store it in a set
+ result = set()
+
+ # Records all requirements.
+ required = {}
+
+ # All the elements of requirements should be present in the result
+ # Record them so that we can handle 'properties'.
+ for r in requirements:
+ # Don't consider conditional requirements.
+ if not r.condition():
+ required[r.feature()] = r
+
+ for p in properties:
+ # Skip conditional properties
+ if p.condition():
+ result.add(p)
+ # No processing for free properties
+ elif p.feature().free():
+ result.add(p)
+ else:
+ if required.has_key(p.feature()):
+ result.add(required[p.feature()])
+ else:
+ result.add(p)
+
+ return sequence.unique(list(result) + requirements)
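+# Illustrative sketch: refining [<toolset>gcc, <optimization>off] with
+# [<optimization>on] drops the old <optimization>off value and keeps
+# <toolset>gcc together with the required <optimization>on, mirroring the
+# behaviour of the Jam-level 'refine' rule tested in property.jam.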
+
+def translate_paths (properties, path):
+ """ Interpret all path properties in 'properties' as relative to 'path'
+ The property values are assumed to be in system-specific form, and
+ will be translated into normalized form.
+ """
+ result = []
+
+ for p in properties:
+
+ if p.feature().path():
+ values = __re_two_ampersands.split(p.value())
+
+ new_value = "&&".join(os.path.join(path, v) for v in values)
+
+ if new_value != p.value():
+ result.append(Property(p.feature(), new_value, p.condition()))
+ else:
+ result.append(p)
+
+ else:
+ result.append (p)
+
+ return result
+
+def translate_indirect(properties, context_module):
+ """Assumes that all feature values that start with '@' are
+ names of rules, used in 'context-module'. Such rules can be
+    either local to the module or global. Qualifies local rules
+ with the name of the module."""
+ result = []
+ for p in properties:
+ if p.value()[0] == '@':
+ q = qualify_jam_action(p.value()[1:], context_module)
+ get_manager().engine().register_bjam_action(q)
+ result.append(Property(p.feature(), '@' + q, p.condition()))
+ else:
+ result.append(p)
+
+ return result
+
+def validate (properties):
+ """ Exit with error if any of the properties is not valid.
+ properties may be a single property or a sequence of properties.
+ """
+
+ if isinstance (properties, str):
+ __validate1 (properties)
+ else:
+ for p in properties:
+ __validate1 (p)
+
+def expand_subfeatures_in_conditions (properties):
+
+ result = []
+ for p in properties:
+
+ if not p.condition():
+ result.append(p)
+ else:
+ expanded = []
+ for c in p.condition():
+
+ if c.feature().name().startswith("toolset") or c.feature().name() == "os":
+                    # It is common that the condition includes a toolset
+                    # which was never defined, or mentions subfeatures which
+                    # were never defined. In that case, validation will
+                    # only produce a spurious error, so don't validate.
+ expanded.extend(feature.expand_subfeatures ([c], True))
+ else:
+ expanded.extend(feature.expand_subfeatures([c]))
+
+ result.append(Property(p.feature(), p.value(), expanded))
+
+ return result
+
+# FIXME: this should go
+def split_conditional (property):
+    """ If 'property' is a conditional property, returns the
+    condition and the property, e.g.
+    <variant>debug,<toolset>gcc:<inlining>full will become
+    <variant>debug,<toolset>gcc <inlining>full.
+    Otherwise, returns None.
+ """
+ m = __re_split_conditional.match (property)
+
+ if m:
+ return (m.group (1), '<' + m.group (2))
+
+ return None
+
+
+def select (features, properties):
+ """ Selects properties which correspond to any of the given features.
+ """
+ result = []
+
+ # add any missing angle brackets
+ features = add_grist (features)
+
+ return [p for p in properties if get_grist(p) in features]
+
+def validate_property_sets (sets):
+ for s in sets:
+ validate(s.all())
+
+def evaluate_conditionals_in_context (properties, context):
+    """ Removes all conditional properties whose conditions are not met.
+    For those with met conditions, removes the condition. Properties
+    in conditions are looked up in 'context'.
+ """
+ base = []
+ conditional = []
+
+ for p in properties:
+ if p.condition():
+ conditional.append (p)
+ else:
+ base.append (p)
+
+ result = base[:]
+ for p in conditional:
+
+ # Evaluate condition
+ # FIXME: probably inefficient
+ if all(x in context for x in p.condition()):
+ result.append(Property(p.feature(), p.value()))
+
+ return result
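+# Illustrative sketch: with a context of
+# [<toolset>gcc, <variant>release, <rtti>off], the conditional property
+# <variant>release,<rtti>off:<define>MY_RELEASE has its whole condition
+# satisfied, so an unconditional <define>MY_RELEASE ends up in the result;
+# this mirrors the corresponding test in property.jam.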
+
+
+def change (properties, feature, value = None):
+ """ Returns a modified version of properties with all values of the
+ given feature replaced by the given value.
+ If 'value' is None the feature will be removed.
+ """
+ result = []
+
+ feature = add_grist (feature)
+
+ for p in properties:
+ if get_grist (p) == feature:
+ if value:
+ result.append (replace_grist (value, feature))
+
+ else:
+ result.append (p)
+
+ return result
+
+
+################################################################
+# Private functions
+
+def __validate1 (property):
+ """ Exit with error if property is not valid.
+ """
+ msg = None
+
+ if not property.feature().free():
+ feature.validate_value_string (property.feature(), property.value())
+
+
+###################################################################
+# Still to port.
+# Original lines are prefixed with "# "
+#
+#
+# import utility : ungrist ;
+# import sequence : unique ;
+# import errors : error ;
+# import feature ;
+# import regex ;
+# import sequence ;
+# import set ;
+# import path ;
+# import assert ;
+#
+#
+
+
+# rule validate-property-sets ( property-sets * )
+# {
+# for local s in $(property-sets)
+# {
+# validate [ feature.split $(s) ] ;
+# }
+# }
+#
+
+def remove(attributes, properties):
+    """Returns a property set which includes all the elements
+    in 'properties' that do not have their attributes listed in 'attributes'."""
+
+ result = []
+ for e in properties:
+ attributes_new = feature.attributes(get_grist(e))
+ has_common_features = 0
+ for a in attributes_new:
+ if a in attributes:
+ has_common_features = 1
+ break
+
+ if not has_common_features:
+            result.append(e)
+
+ return result
+
+
+def take(attributes, properties):
+    """Returns a property set which includes all
+    properties in 'properties' that have any of 'attributes'."""
+ result = []
+ for e in properties:
+ if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
+ result.append(e)
+ return result
+
+def translate_dependencies(properties, project_id, location):
+
+ result = []
+ for p in properties:
+
+ if not p.feature().dependency():
+ result.append(p)
+ else:
+ v = p.value()
+ m = re.match("(.*)//(.*)", v)
+ if m:
+ rooted = m.group(1)
+ if rooted[0] == '/':
+ # Either project id or absolute Linux path, do nothing.
+ pass
+ else:
+ rooted = os.path.join(os.getcwd(), location, rooted)
+
+ result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition()))
+
+ elif os.path.isabs(v):
+ result.append(p)
+ else:
+ result.append(Property(p.feature(), project_id + "//" + v, p.condition()))
+
+ return result
+
+
+class PropertyMap:
+ """ Class which maintains a property set -> string mapping.
+ """
+ def __init__ (self):
+ self.__properties = []
+ self.__values = []
+
+ def insert (self, properties, value):
+ """ Associate value with properties.
+ """
+ self.__properties.append(properties)
+ self.__values.append(value)
+
+ def find (self, properties):
+ """ Return the value associated with properties
+ or any subset of it. If more than one
+        subset has a value assigned to it, return the
+ value for the longest subset, if it's unique.
+ """
+ return self.find_replace (properties)
+
+ def find_replace(self, properties, value=None):
+ matches = []
+ match_ranks = []
+
+ for i in range(0, len(self.__properties)):
+ p = self.__properties[i]
+
+ if b2.util.set.contains (p, properties):
+ matches.append (i)
+ match_ranks.append(len(p))
+
+ best = sequence.select_highest_ranked (matches, match_ranks)
+
+ if not best:
+ return None
+
+ if len (best) > 1:
+ raise NoBestMatchingAlternative ()
+
+ best = best [0]
+
+ original = self.__values[best]
+
+ if value:
+ self.__values[best] = value
+
+ return original
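+# Illustrative sketch of the subset lookup above (property strings are
+# hypothetical):
+#
+#   pm = PropertyMap()
+#   pm.insert(['<toolset>gcc'], 'o')
+#   pm.insert(['<toolset>gcc', '<os>NT'], 'obj')
+#   pm.find(['<toolset>gcc', '<os>NT'])   # 'obj' -- the longest subset wins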
+
+# local rule __test__ ( )
+# {
+# import errors : try catch ;
+# import feature ;
+# import feature : feature subfeature compose ;
+#
+# # local rules must be explicitly re-imported
+# import property : path-order ;
+#
+# feature.prepare-test property-test-temp ;
+#
+# feature toolset : gcc : implicit symmetric ;
+# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
+# 3.0 3.0.1 3.0.2 : optional ;
+# feature define : : free ;
+# feature runtime-link : dynamic static : symmetric link-incompatible ;
+# feature optimization : on off ;
+# feature variant : debug release : implicit composite symmetric ;
+# feature rtti : on off : link-incompatible ;
+#
+# compose <variant>debug : <define>_DEBUG <optimization>off ;
+# compose <variant>release : <define>NDEBUG <optimization>on ;
+#
+# import assert ;
+# import "class" : new ;
+#
+# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
+#
+# assert.result <toolset>gcc <rtti>off <define>FOO
+# : refine <toolset>gcc <rtti>off
+# : <define>FOO
+# : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc <optimization>on
+# : refine <toolset>gcc <optimization>off
+# : <optimization>on
+# : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc <rtti>off
+# : refine <toolset>gcc : <rtti>off : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
+# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
+# : $(test-space)
+# ;
+#
+# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
+# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
+# : $(test-space)
+# ;
+#
+# assert.result <define>MY_RELEASE
+# : evaluate-conditionals-in-context
+# <variant>release,<rtti>off:<define>MY_RELEASE
+# : <toolset>gcc <variant>release <rtti>off
+#
+# ;
+#
+# try ;
+# validate <feature>value : $(test-space) ;
+# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
+#
+# try ;
+# validate <rtti>default : $(test-space) ;
+# catch \"default\" is not a known value of feature <rtti> ;
+#
+# validate <define>WHATEVER : $(test-space) ;
+#
+# try ;
+# validate <rtti> : $(test-space) ;
+# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
+#
+# try ;
+# validate value : $(test-space) ;
+# catch "value" is not a value of an implicit feature ;
+#
+#
+# assert.result <rtti>on
+# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
+#
+# assert.result <include>a
+# : select include : <include>a <toolset>gcc ;
+#
+# assert.result <include>a
+# : select include bar : <include>a <toolset>gcc ;
+#
+# assert.result <include>a <toolset>gcc
+# : select include <bar> <toolset> : <include>a <toolset>gcc ;
+#
+# assert.result <toolset>kylix <include>a
+# : change <toolset>gcc <include>a : <toolset> kylix ;
+#
+# # Test ordinary properties
+# assert.result
+# : split-conditional <toolset>gcc
+# ;
+#
+# # Test properties with ":"
+# assert.result
+# : split-conditional <define>FOO=A::B
+# ;
+#
+# # Test conditional feature
+# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
+# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
+# ;
+#
+# feature.finish-test property-test-temp ;
+# }
+#
+
diff --git a/tools/build/src/build/property_set.py b/tools/build/src/build/property_set.py
new file mode 100644
index 0000000000..6b3643045f
--- /dev/null
+++ b/tools/build/src/build/property_set.py
@@ -0,0 +1,460 @@
+# Status: ported.
+# Base revision: 40480
+
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import hashlib
+
+from b2.util.utility import *
+import property, feature
+import b2.build.feature
+from b2.exceptions import *
+from b2.util.sequence import unique
+from b2.util.set import difference
+from b2.util import cached
+
+from b2.manager import get_manager
+
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __cache
+
+ # A cache of property sets
+ # TODO: use a map of weak refs?
+ __cache = {}
+
+reset ()
+
+
+def create (raw_properties = []):
+ """ Creates a new 'PropertySet' instance for the given raw properties,
+ or returns an already existing one.
+ """
+ # FIXME: propagate to callers.
+ if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
+ x = raw_properties
+ else:
+ x = [property.create_from_string(ps) for ps in raw_properties]
+ x.sort()
+ x = unique (x)
+
+ # FIXME: can we do better, e.g. by directly computing
+ # hash value of the list?
+ key = tuple(x)
+
+ if not __cache.has_key (key):
+ __cache [key] = PropertySet(x)
+
+ return __cache [key]
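+# Illustrative sketch (assuming the <optimization> and <rtti> features are
+# defined): since instances are cached by the sorted property list, two calls
+# with the same properties in a different order would be expected to return
+# the identical PropertySet object:
+#
+#   a = create(['<optimization>on', '<rtti>off'])
+#   b = create(['<rtti>off', '<optimization>on'])
+#   assert a is b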
+
+def create_with_validation (raw_properties):
+    """ Creates a new 'PropertySet' instance after checking
+ that all properties are valid and converting implicit
+ properties into gristed form.
+ """
+ properties = [property.create_from_string(s) for s in raw_properties]
+ property.validate(properties)
+
+ return create(properties)
+
+def empty ():
+    """ Returns a PropertySet with an empty set of properties.
+ """
+ return create ()
+
+def create_from_user_input(raw_properties, jamfile_module, location):
+ """Creates a property-set from the input given by the user, in the
+ context of 'jamfile-module' at 'location'"""
+
+ properties = property.create_from_strings(raw_properties, True)
+ properties = property.translate_paths(properties, location)
+ properties = property.translate_indirect(properties, jamfile_module)
+
+ project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None)
+ if not project_id:
+ project_id = os.path.abspath(location)
+ properties = property.translate_dependencies(properties, project_id, location)
+ properties = property.expand_subfeatures_in_conditions(properties)
+ return create(properties)
+
+
+def refine_from_user_input(parent_requirements, specification, jamfile_module,
+ location):
+ """Refines requirements with requirements provided by the user.
+ Specially handles "-<property>value" syntax in specification
+ to remove given requirements.
+ - parent-requirements -- property-set object with requirements
+ to refine
+    - specification -- string list of requirements provided by the user
+ - project-module -- the module to which context indirect features
+ will be bound.
+ - location -- the path to which path features are relative."""
+
+
+ if not specification:
+ return parent_requirements
+
+
+ add_requirements = []
+ remove_requirements = []
+
+ for r in specification:
+ if r[0] == '-':
+ remove_requirements.append(r[1:])
+ else:
+ add_requirements.append(r)
+
+ if remove_requirements:
+ # Need to create property set, so that path features
+ # and indirect features are translated just like they
+ # are in project requirements.
+ ps = create_from_user_input(remove_requirements,
+ jamfile_module, location)
+
+ parent_requirements = create(difference(parent_requirements.all(),
+ ps.all()))
+ specification = add_requirements
+
+ requirements = create_from_user_input(specification,
+ jamfile_module, location)
+
+ return parent_requirements.refine(requirements)
+
+class PropertySet:
+ """ Class for storing a set of properties.
+ - there's 1<->1 correspondence between identity and value. No
+ two instances of the class are equal. To maintain this property,
+        the module-level 'create' function should be used to create new instances.
+ Instances are immutable.
+
+        - each property is classified with regard to its effect on build
+        results. Incidental properties have no effect on build results, from
+        Boost.Build's point of view. Others are either free or non-free, and
+        we call the non-free ones 'base'. Each property belongs to exactly one
+        of those categories, and it is possible to get the list of properties
+        in each category.
+
+ In addition, it's possible to get list of properties with specific
+ attribute.
+
+ - several operations, like and refine and as_path are provided. They all use
+ caching whenever possible.
+ """
+ def __init__ (self, properties = []):
+
+
+ raw_properties = []
+ for p in properties:
+ raw_properties.append(p.to_raw())
+
+ self.all_ = properties
+ self.all_raw_ = raw_properties
+ self.all_set_ = set(properties)
+
+ self.incidental_ = []
+ self.free_ = []
+ self.base_ = []
+ self.dependency_ = []
+ self.non_dependency_ = []
+ self.conditional_ = []
+ self.non_conditional_ = []
+ self.propagated_ = []
+ self.link_incompatible = []
+
+ # A cache of refined properties.
+ self.refined_ = {}
+
+ # A cache of property sets created by adding properties to this one.
+ self.added_ = {}
+
+ # Cache for the default properties.
+ self.defaults_ = None
+
+ # Cache for the expanded properties.
+ self.expanded_ = None
+
+ # Cache for the expanded composite properties
+ self.composites_ = None
+
+ # Cache for property set with expanded subfeatures
+ self.subfeatures_ = None
+
+ # Cache for the property set containing propagated properties.
+ self.propagated_ps_ = None
+
+        # A map from features to their values.
+ self.feature_map_ = None
+
+ # A tuple (target path, is relative to build directory)
+ self.target_path_ = None
+
+ self.as_path_ = None
+
+ # A cache for already evaluated sets.
+ self.evaluated_ = {}
+
+ for p in raw_properties:
+ if not get_grist (p):
+ raise BaseException ("Invalid property: '%s'" % p)
+
+ att = feature.attributes (get_grist (p))
+
+ if 'propagated' in att:
+ self.propagated_.append (p)
+
+ if 'link_incompatible' in att:
+ self.link_incompatible.append (p)
+
+ for p in properties:
+
+ # A feature can be both incidental and free,
+ # in which case we add it to incidental.
+ if p.feature().incidental():
+ self.incidental_.append(p)
+ elif p.feature().free():
+ self.free_.append(p)
+ else:
+ self.base_.append(p)
+
+ if p.condition():
+ self.conditional_.append(p)
+ else:
+ self.non_conditional_.append(p)
+
+ if p.feature().dependency():
+ self.dependency_.append (p)
+ else:
+ self.non_dependency_.append (p)
+
+
+ def all(self):
+ return self.all_
+
+ def raw (self):
+ """ Returns the list of stored properties.
+ """
+ return self.all_raw_
+
+ def __str__(self):
+ return ' '.join(str(p) for p in self.all_)
+
+ def base (self):
+ """ Returns properties that are neither incidental nor free.
+ """
+ return self.base_
+
+ def free (self):
+ """ Returns free properties which are not dependency properties.
+ """
+ return self.free_
+
+ def non_free(self):
+ return self.base_ + self.incidental_
+
+ def dependency (self):
+ """ Returns dependency properties.
+ """
+ return self.dependency_
+
+ def non_dependency (self):
+ """ Returns properties that are not dependencies.
+ """
+ return self.non_dependency_
+
+ def conditional (self):
+ """ Returns conditional properties.
+ """
+ return self.conditional_
+
+ def non_conditional (self):
+ """ Returns properties that are not conditional.
+ """
+ return self.non_conditional_
+
+ def incidental (self):
+ """ Returns incidental properties.
+ """
+ return self.incidental_
+
+ def refine (self, requirements):
+ """ Refines this set's properties using the requirements passed as an argument.
+ """
+ assert isinstance(requirements, PropertySet)
+ if not self.refined_.has_key (requirements):
+ r = property.refine(self.all_, requirements.all_)
+
+ self.refined_[requirements] = create(r)
+
+ return self.refined_[requirements]
+
+ def expand (self):
+ if not self.expanded_:
+ expanded = feature.expand(self.all_)
+ self.expanded_ = create(expanded)
+ return self.expanded_
+
+ def expand_subfeatures(self):
+ if not self.subfeatures_:
+ self.subfeatures_ = create(feature.expand_subfeatures(self.all_))
+ return self.subfeatures_
+
+ def evaluate_conditionals(self, context=None):
+ if not context:
+ context = self
+
+ if not self.evaluated_.has_key(context):
+            # FIXME: figure out why the call messes up the first parameter.
+ self.evaluated_[context] = create(
+ property.evaluate_conditionals_in_context(self.all(), context))
+
+ return self.evaluated_[context]
+
+ def propagated (self):
+ if not self.propagated_ps_:
+ self.propagated_ps_ = create (self.propagated_)
+ return self.propagated_ps_
+
+ def add_defaults (self):
+        # FIXME: this caching is invalidated when new features
+        # are declared inside non-root Jamfiles.
+ if not self.defaults_:
+ expanded = feature.add_defaults(self.all_)
+ self.defaults_ = create(expanded)
+ return self.defaults_
+
+ def as_path (self):
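+        # Illustrative example (hypothetical property values): assuming that
+        # <toolset> and <variant> are implicit features, the properties
+        # <toolset>gcc <variant>debug <threading>multi would produce the path
+        # "gcc/debug/threading-multi".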
+ if not self.as_path_:
+
+ def path_order (p1, p2):
+
+ i1 = p1.feature().implicit()
+ i2 = p2.feature().implicit()
+
+ if i1 != i2:
+ return i2 - i1
+ else:
+ return cmp(p1.feature().name(), p2.feature().name())
+
+ # trim redundancy
+ properties = feature.minimize(self.base_)
+
+ # sort according to path_order
+ properties.sort (path_order)
+
+ components = []
+ for p in properties:
+ if p.feature().implicit():
+ components.append(p.value())
+ else:
+ components.append(p.feature().name() + "-" + p.value())
+
+ self.as_path_ = '/'.join (components)
+
+ return self.as_path_
+
+ def target_path (self):
+        """ Computes the target path that should be used for
+            a target with these properties.
+            Returns a tuple of
+            - the computed path
+            - a flag that is true if the path is relative
+              to the build directory.
+        """
+ if not self.target_path_:
+ # The <location> feature can be used to explicitly
+ # change the location of generated targets
+ l = self.get ('<location>')
+ if l:
+ computed = l[0]
+ is_relative = False
+
+ else:
+ p = self.as_path()
+ if hash_maybe:
+ p = hash_maybe(p)
+
+                # Really, an ugly hack. The Boost regression test system
+                # requires specific target paths, and it seems that changing it
+                # to handle other directory layouts is really hard. For that
+                # reason, we teach V2 to do the things the regression system
+                # requires. The value of '<location-prefix>' is prepended to
+                # the path.
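+                # For example (hypothetical values): with <location-prefix>stage
+                # and an as_path() of "gcc/debug", the computed path becomes
+                # "stage/gcc/debug".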
+ prefix = self.get ('<location-prefix>')
+
+ if prefix:
+ if len (prefix) > 1:
+ raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)
+
+ computed = os.path.join(prefix[0], p)
+
+ else:
+ computed = p
+
+ if not computed:
+ computed = "."
+
+ is_relative = True
+
+ self.target_path_ = (computed, is_relative)
+
+ return self.target_path_
+
+ def add (self, ps):
+        """ Creates a new property set containing the properties in this one,
+            plus those of the property set passed as an argument.
+ """
+ if not self.added_.has_key(ps):
+ self.added_[ps] = create(self.all_ + ps.all())
+ return self.added_[ps]
+
+ def add_raw (self, properties):
+ """ Creates a new property set containing the properties in this one,
+ plus the ones passed as argument.
+ """
+ return self.add (create (properties))
+
+
+ def get (self, feature):
+ """ Returns all values of 'feature'.
+ """
+ if type(feature) == type([]):
+ feature = feature[0]
+ if not isinstance(feature, b2.build.feature.Feature):
+ feature = b2.build.feature.get(feature)
+
+ if not self.feature_map_:
+ self.feature_map_ = {}
+
+ for v in self.all_:
+ if not self.feature_map_.has_key(v.feature()):
+ self.feature_map_[v.feature()] = []
+ self.feature_map_[v.feature()].append(v.value())
+
+ return self.feature_map_.get(feature, [])
+
+ @cached
+ def get_properties(self, feature):
+ """Returns all contained properties associated with 'feature'"""
+
+ if not isinstance(feature, b2.build.feature.Feature):
+ feature = b2.build.feature.get(feature)
+
+ result = []
+ for p in self.all_:
+ if p.feature() == feature:
+ result.append(p)
+ return result
+
+ def __contains__(self, item):
+ return item in self.all_set_
+
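+# Shortens target paths when requested: if --hash is passed on the command
+# line, target_path() replaces the as_path() result with its MD5 digest (see
+# hash_maybe below).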
+def hash(p):
+ m = hashlib.md5()
+ m.update(p)
+ return m.hexdigest()
+
+hash_maybe = hash if "--hash" in bjam.variable("ARGV") else None
+
diff --git a/tools/build/src/build/readme.txt b/tools/build/src/build/readme.txt
new file mode 100644
index 0000000000..b15055b8e5
--- /dev/null
+++ b/tools/build/src/build/readme.txt
@@ -0,0 +1,11 @@
+Copyright 2001, 2002 Dave Abrahams
+Copyright 2002 Vladimir Prus
+Distributed under the Boost Software License, Version 1.0.
+(See accompanying file LICENSE_1_0.txt or copy at
+http://www.boost.org/LICENSE_1_0.txt)
+
+Development code for the new build system. To run unit tests for the jam code, execute:
+
+ bjam --debug --build-system=test
+
+Comprehensive tests require Python. See ../test/readme.txt
diff --git a/tools/build/src/build/scanner.jam b/tools/build/src/build/scanner.jam
new file mode 100644
index 0000000000..ed5507029a
--- /dev/null
+++ b/tools/build/src/build/scanner.jam
@@ -0,0 +1,163 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements scanners: objects computing implicit dependencies for files, such
+# as includes in C++.
+#
+# A scanner has a regular expression used to find the dependencies, some data
+# needed to interpret those dependencies (e.g., include paths), and code which
+# establishes the needed relationships between actual jam targets.
+#
+# Scanner objects are created by actions when they try to actualize virtual
+# targets, passed to the virtual-target.actualize() method and are then
+# associated with actual targets. It is possible to use several scanners for a
+# single virtual-target. For example, a single source file might be compiled
+# twice - each time using a different include path. In this case, two separate
+# actual targets will be created, each having a scanner of its own.
+#
+# Typically, scanners are created from target type and the action's properties,
+# using the rule 'get' in this module. Directly creating scanners is not
+# recommended, as it might create multiple equivalent but different instances,
+# and lead to unnecessary actual target duplication. However, actions can also
+# create scanners in a special way, instead of relying on just the target type.
+
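+# A minimal usage sketch (illustrative only; "my-scanner" and the pattern are
+# hypothetical):
+#
+#   class my-scanner : common-scanner
+#   {
+#       rule pattern ( )
+#       {
+#           return "^[ \t]*#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
+#       }
+#   }
+#   scanner.register my-scanner : include ;
+#
+# An action that wants implicit dependency scanning would then obtain an
+# instance with [ scanner.get my-scanner : $(property-set) ] and pass it to
+# virtual-target.actualize(), which eventually calls scanner.install on the
+# actual target.
+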
+import "class" : new ;
+import property ;
+import property-set ;
+import virtual-target ;
+
+# Base scanner class.
+#
+class scanner
+{
+ rule __init__ ( )
+ {
+ }
+
+ # Returns a pattern to use for scanning.
+ #
+ rule pattern ( )
+ {
+ import errors : error : errors.error ;
+        errors.error "method must be overridden" ;
+ }
+
+ # Establish necessary relationship between targets, given an actual target
+    # being scanned and a list of pattern matches in that file.
+ #
+ rule process ( target : matches * )
+ {
+ import errors : error : errors.error ;
+        errors.error "method must be overridden" ;
+ }
+}
+
+
+# Registers a new scanner class, specifying a set of properties relevant to
+# this scanner. The constructor for that class should have one parameter: a
+# list of properties.
+#
+rule register ( scanner-class : relevant-properties * )
+{
+ .registered += $(scanner-class) ;
+ .relevant-properties.$(scanner-class) = $(relevant-properties) ;
+}
+
+
+# Common scanner class, usable when there is only one kind of includes (unlike
+# C, where "" and <> includes have different search paths).
+#
+class common-scanner : scanner
+{
+ import scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+ ISFILE $(matches) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
+
+
+# Returns an instance of a previously registered scanner, with the specified
+# properties.
+#
+rule get ( scanner-class : property-set )
+{
+ if ! $(scanner-class) in $(.registered)
+ {
+ import errors ;
+        errors.error "attempt to get an unregistered scanner" ;
+ }
+
+ local r = $(.rv-cache.$(property-set)) ;
+ if ! $(r)
+ {
+ r = [ property-set.create
+ [ property.select $(.relevant-properties.$(scanner-class)) :
+ [ $(property-set).raw ] ] ] ;
+ .rv-cache.$(property-set) = $(r) ;
+ }
+
+ if ! $(scanner.$(scanner-class).$(r:J=-))
+ {
+ local s = [ new $(scanner-class) [ $(r).raw ] ] ;
+ scanner.$(scanner-class).$(r:J=-) = $(s) ;
+ }
+ return $(scanner.$(scanner-class).$(r:J=-)) ;
+}
+
+
+# Installs the specified scanner on the actual target 'target'.
+#
+rule install ( scanner : target )
+{
+ HDRSCAN on $(target) = [ $(scanner).pattern ] ;
+ SCANNER on $(target) = $(scanner) ;
+ HDRRULE on $(target) = scanner.hdrrule ;
+
+ # Scanner reflects differences in properties affecting binding of 'target',
+    # which will be known when processing includes for it, and gives information
+ # on how to interpret different include types (e.g. quoted vs. those in
+ # angle brackets in C files).
+ HDRGRIST on $(target) = $(scanner) ;
+}
+
+
+# Propagate scanner settings from 'including-target' to 'targets'.
+#
+rule propagate ( scanner : targets * : including-target )
+{
+ HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
+ SCANNER on $(targets) = $(scanner) ;
+ HDRRULE on $(targets) = scanner.hdrrule ;
+ HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
+}
+
+
+rule hdrrule ( target : matches * : binding )
+{
+ local scanner = [ on $(target) return $(SCANNER) ] ;
+ $(scanner).process $(target) : $(matches) : $(binding) ;
+}
+
+
+# hdrrule must be available at global scope so it can be invoked by header
+# scanning.
+#
+IMPORT scanner : hdrrule : : scanner.hdrrule ;
diff --git a/tools/build/v2/build/scanner.py b/tools/build/src/build/scanner.py
index 19f1431d47..19f1431d47 100644
--- a/tools/build/v2/build/scanner.py
+++ b/tools/build/src/build/scanner.py
diff --git a/tools/build/src/build/targets.jam b/tools/build/src/build/targets.jam
new file mode 100644
index 0000000000..44c8fc9e45
--- /dev/null
+++ b/tools/build/src/build/targets.jam
@@ -0,0 +1,1698 @@
+# Copyright Vladimir Prus 2002.
+# Copyright Rene Rivera 2006.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports 'abstract' targets, which are targets explicitly defined in a
+# Jamfile.
+#
+# Abstract targets are represented by classes derived from 'abstract-target'
+# class. The first abstract target is 'project-target', which is created for
+# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module
+# (see project.jam).
+#
+# Project targets keep a list of 'main-target' instances. A main target is what
+# the user explicitly defines in a Jamfile. It is possible to have several
+# definitions for a main target, for example to have different lists of sources
+# for different platforms. So, main targets keep a list of alternatives.
+#
+# Each alternative is an instance of 'abstract-target'. When a main target
+# subvariant is defined by some rule, that rule will decide what class to use,
+# create an instance of that class and add it to the list of alternatives for
+# the main target.
+#
+# Rules supplied by the build system will use only targets derived from the
+# 'basic-target' class, which will provide some default behaviour. There will be
+# different classes derived from it such as 'make-target', created by the 'make'
+# rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
+#
+# +--------------------------+
+# | abstract-target |
+# +==========================+
+# | name |
+# | project |
+# | |
+# | generate(properties) = 0 |
+# +-------------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------+-----+-------------------------------+
+# | | |
+# | | |
+# +-----------+----------+ +------+------+ +-------+------+
+# | project-target | | main-target | | basic-target |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----+ generate |<>------------->| generate |
+# | main-target | +-------------+ | construct = 0|
+# +----------------------+ +-------+------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+-----------------+-----------------+------------------+
+# | | | |
+# | | | |
+# ... ---+-----+ +-------+------+ +------+------+ +-------+------+
+# | | typed-target | | make-target | | stage-target |
+# . +==============+ +=============+ +==============+
+# . | construct | | construct | | construct |
+# +--------------+ +-------------+ +--------------+
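+#
+# For example (illustrative), a Jamfile containing
+#
+#   exe hello : hello.cpp : <variant>debug ;
+#   exe hello : hello_optimized.cpp : <variant>release ;
+#
+# yields one 'main-target' named "hello" holding two 'typed-target'
+# alternatives; the alternative whose requirements are satisfied by the build
+# request is picked by main-target.select-alternatives below.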
+
+import assert ;
+import build-request ;
+import "class" : new ;
+import feature ;
+import indirect ;
+import path ;
+import property ;
+import property-set ;
+import sequence ;
+import set ;
+import toolset ;
+
+
+# Base class for all abstract targets.
+#
+class abstract-target
+{
+ import assert ;
+ import "class" ;
+ import errors ;
+ import project ;
+
+ rule __init__ ( name # Name of the target in Jamfile.
+ : project-target # The project target to which this one belongs.
+ )
+ {
+ # Note: it might seem that we don't need either name or project at all.
+ # However, there are places where we really need it. One example is
+ # error messages which should name problematic targets. Another is
+ # setting correct paths for sources and generated files.
+
+ self.name = $(name) ;
+ self.project = $(project-target) ;
+ self.location = [ errors.nearest-user-location ] ;
+ }
+
+ # Returns the name of this target.
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Returns the project for this target.
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Return the location where the target was declared.
+ rule location ( )
+ {
+ return $(self.location) ;
+ }
+
+ # Returns a user-readable name for this target.
+ rule full-name ( )
+ {
+ local location = [ $(self.project).get location ] ;
+ return $(location)/$(self.name) ;
+ }
+
+ # Generates virtual targets for this abstract target using the specified
+ # properties, unless a different value of some feature is required by the
+ # target.
+ # On success, returns:
+ # - a property-set with the usage requirements to be applied to dependants
+ # - a list of produced virtual targets, which may be empty.
+ # If 'property-set' is empty, performs the default build of this target, in
+ # a way specific to the derived class.
+ #
+ rule generate ( property-set )
+ {
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ rule rename ( new-name )
+ {
+ self.name = $(new-name) ;
+ }
+}
+
+
+if --debug-building in [ modules.peek : ARGV ]
+{
+ modules.poke : .debug-building : true ;
+}
+
+
+rule indent ( )
+{
+ return $(.indent:J="") ;
+}
+
+
+rule increase-indent ( )
+{
+ .indent += " " ;
+}
+
+
+rule decrease-indent ( )
+{
+ .indent = $(.indent[2-]) ;
+}
+
+
+# Project target class (derived from 'abstract-target').
+#
+# This class has the following responsibilities:
+# - Maintaining a list of main targets in this project and building them.
+#
+# Main targets are constructed in two stages:
+#  - When a Jamfile is read, a number of calls to 'add-alternative' are made. At
+# that time, alternatives can also be renamed to account for inline targets.
+# - The first time 'main-target' or 'has-main-target' rule is called, all
+# alternatives are enumerated and main targets are created.
+#
+class project-target : abstract-target
+{
+ import project ;
+ import targets ;
+ import path ;
+ import print ;
+ import property-set ;
+ import set ;
+ import sequence ;
+ import "class" : new ;
+
+ rule __init__ ( name : project-module parent-project ?
+ : requirements * : default-build * )
+ {
+ abstract-target.__init__ $(name) : $(__name__) ;
+
+ self.project-module = $(project-module) ;
+ self.location = [ project.attribute $(project-module) location ] ;
+ self.requirements = $(requirements) ;
+ self.default-build = $(default-build) ;
+
+ if $(parent-project)
+ {
+ inherit $(parent-project) ;
+ }
+ }
+
+ # This is needed only by the 'make' rule. Need to find a way to make 'make'
+ # work without this method.
+ #
+ rule project-module ( )
+ {
+ return $(self.project-module) ;
+ }
+
+ rule get ( attribute )
+ {
+ return [ project.attribute $(self.project-module) $(attribute) ] ;
+ }
+
+ rule build-dir ( )
+ {
+ if ! $(self.build-dir)
+ {
+ self.build-dir = [ get build-dir ] ;
+ if ! $(self.build-dir)
+ {
+ local location = [ $(self.project).get location ] ;
+ if $(location)
+ {
+ self.build-dir = [ path.join $(location) bin ] ;
+ }
+ else
+ {
+ local id = [ get id ] ;
+ if $(id)
+ {
+ local rid = [ MATCH ^/(.*) : $(id) ] ;
+ self.build-dir = [ path.join [ project.standalone-build-dir ] $(rid) ] ;
+ }
+ else
+ {
+ errors.error "Could not create build-dir for standalone project $(self.project-module:E=)."
+ : "Missing project id" ;
+ }
+ }
+ }
+ }
+ return $(self.build-dir) ;
+ }
+
+ # Generates all possible targets contained in this project.
+ #
+ rule generate ( property-set * )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "building project" [ name ]
+ " ('$(__name__)') with" [ $(property-set).raw ] ;
+ targets.increase-indent ;
+ }
+
+ local usage-requirements = [ property-set.empty ] ;
+ local targets ;
+
+ for local t in [ targets-to-build ]
+ {
+ local g = [ $(t).generate $(property-set) ] ;
+ usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
+ targets += $(g[2-]) ;
+ }
+ targets.decrease-indent ;
+ return $(usage-requirements) [ sequence.unique $(targets) ] ;
+ }
+
+ # Computes and returns a list of abstract-target instances which must be
+ # built when this project is built.
+ #
+ rule targets-to-build ( )
+ {
+ local result ;
+
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ # Collect all main targets here, except for "explicit" ones.
+ for local t in $(self.main-targets)
+ {
+ if ! [ $(t).name ] in $(self.explicit-targets)
+ {
+ result += $(t) ;
+ }
+ }
+
+ # Collect all projects referenced via "projects-to-build" attribute.
+ local self-location = [ get location ] ;
+ for local pn in [ get projects-to-build ]
+ {
+ result += [ find $(pn)/ ] ;
+ }
+
+ return $(result) ;
+ }
+
+    # Add 'target' to the list of targets in this project that should be built
+    # only by explicit request.
+ #
+ rule mark-target-as-explicit ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.explicit-targets += $(target-name) ;
+ }
+
+ rule mark-target-as-always ( target-name * )
+ {
+ # Record the name of the target, not instance, since this rule is called
+ # before main target instances are created.
+ self.always-targets += $(target-name) ;
+ }
+
+ # Add new target alternative
+ #
+ rule add-alternative ( target-instance )
+ {
+ if $(self.built-main-targets)
+ {
+ import errors : error : errors.error ;
+ errors.error add-alternative called when main targets are already
+ created. : in project [ full-name ] ;
+ }
+ self.alternatives += $(target-instance) ;
+ }
+
+ # Returns a 'main-target' class instance corresponding to 'name'.
+ #
+ rule main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+ return $(self.main-target.$(name)) ;
+ }
+
+ # Returns whether a main target with the specified name exists.
+ #
+ rule has-main-target ( name )
+ {
+ if ! $(self.built-main-targets)
+ {
+ build-main-targets ;
+ }
+
+ if $(self.main-target.$(name))
+ {
+ return true ;
+ }
+ }
+
+ # Worker function for the find rule not implementing any caching and simply
+ # returning nothing in case the target can not be found.
+ #
+ rule find-really ( id )
+ {
+ local result ;
+ local current-location = [ get location ] ;
+
+ local split = [ MATCH ^(.*)//(.*)$ : $(id) ] ;
+ local project-part = $(split[1]) ;
+ local target-part = $(split[2]) ;
+
+ local extra-error-message ;
+ if $(project-part)
+ {
+ # There is an explicitly specified project part in id. Looks up the
+ # project and passes the request to it.
+ local pm = [ project.find $(project-part) : $(current-location) ] ;
+ if $(pm)
+ {
+ project-target = [ project.target $(pm) ] ;
+ result = [ $(project-target).find $(target-part) : no-error ] ;
+ }
+ else
+ {
+ extra-error-message = could not resolve project reference
+ '$(project-part)' ;
+ if ! [ path.is-rooted $(project-part) ]
+ {
+ local rooted = [ path.root $(project-part) / ] ;
+ if $(rooted) && [ project.is-registered-id $(rooted) ]
+ {
+ extra-error-message += - possibly missing a leading
+ slash ('/') character. ;
+ }
+ }
+ }
+ }
+ else
+ {
+ # Interpret target-name as name of main target. Need to do this
+ # before checking for file. Consider the following scenario with a
+ # toolset not modifying its executable's names, e.g. gcc on
+ # Unix-like platforms:
+ #
+ # exe test : test.cpp ;
+ # install s : test : <location>. ;
+ #
+ # After the first build we would have a target named 'test' in the
+ # Jamfile and a file named 'test' on the disk. We need the target to
+ # override the file.
+ result = [ main-target $(id) ] ;
+
+ # Interpret id as an existing file reference.
+ if ! $(result)
+ {
+ result = [ new file-reference [ path.make $(id) ] :
+ $(self.project) ] ;
+ if ! [ $(result).exists ]
+ {
+ result = ;
+ }
+ }
+
+ # Interpret id as project-id.
+ if ! $(result)
+ {
+ local project-module = [ project.find $(id) :
+ $(current-location) ] ;
+ if $(project-module)
+ {
+ result = [ project.target $(project-module) ] ;
+ }
+ }
+ }
+
+ return $(result:E="") $(extra-error-message) ;
+ }
+
+ # Find and return the target with the specified id, treated relative to
+ # self. Id may specify either a target or a file name with the target taking
+ # priority. May report an error or return nothing if the target is not found
+ # depending on the 'no-error' parameter.
+ #
+ rule find ( id : no-error ? )
+ {
+ local v = $(.id.$(id)) ;
+ local extra-error-message ;
+ if ! $(v)
+ {
+ local r = [ find-really $(id) ] ;
+ v = $(r[1]) ;
+ extra-error-message = $(r[2-]) ;
+ if ! $(v)
+ {
+ v = none ;
+ }
+ .id.$(id) = $(v) ;
+ }
+
+ if $(v) != none
+ {
+ return $(v) ;
+ }
+ else if ! $(no-error)
+ {
+ local current-location = [ get location ] ;
+ import errors : user-error : errors.user-error ;
+ errors.user-error Unable to find file or target named
+ : " " '$(id)'
+ : referred to from project at
+ : " " '$(current-location)'
+ : $(extra-error-message) ;
+ }
+ }
+
+ rule build-main-targets ( )
+ {
+ self.built-main-targets = true ;
+ for local a in $(self.alternatives)
+ {
+ local name = [ $(a).name ] ;
+ local target = $(self.main-target.$(name)) ;
+ if ! $(target)
+ {
+ local t = [ new main-target $(name) : $(self.project) ] ;
+ self.main-target.$(name) = $(t) ;
+ self.main-targets += $(t) ;
+ target = $(self.main-target.$(name)) ;
+ }
+
+ if $(name) in $(self.always-targets)
+ {
+ $(a).always ;
+ }
+
+ $(target).add-alternative $(a) ;
+ }
+ }
+
+ # Accessor, add a constant.
+ #
+ rule add-constant (
+ name # Variable name of the constant.
+ : value + # Value of the constant.
+ : type ? # Optional type of value.
+ )
+ {
+ switch $(type)
+ {
+ case path :
+ local r ;
+ for local v in $(value)
+ {
+ local l = $(self.location) ;
+ if ! $(l)
+ {
+                    # Projects corresponding to config files do not have a
+                    # 'location' attribute, but do have a source location. It
+                    # might be more reasonable to make every project have a
+                    # location and use some other approach to prevent buildable
+                    # targets in config files, but that has been left for
+                    # later.
+ l = [ get source-location ] ;
+ }
+ v = [ path.root [ path.make $(v) ] $(l) ] ;
+ # Now make the value absolute path.
+ v = [ path.root $(v) [ path.pwd ] ] ;
+ # Constants should be in platform-native form.
+ v = [ path.native $(v) ] ;
+ r += $(v) ;
+ }
+ value = $(r) ;
+ }
+ if ! $(name) in $(self.constants)
+ {
+ self.constants += $(name) ;
+ }
+ self.constant.$(name) = $(value) ;
+ # Inject the constant in the scope of the Jamroot module.
+ modules.poke $(self.project-module) : $(name) : $(value) ;
+ }
+
+ rule inherit ( parent )
+ {
+ for local c in [ modules.peek $(parent) : self.constants ]
+ {
+ # No need to pass the type. Path constants were converted to
+ # absolute paths already by parent.
+ add-constant $(c) : [ modules.peek $(parent) : self.constant.$(c) ]
+ ;
+ }
+
+ # Import rules from parent.
+ local this-module = [ project-module ] ;
+ local parent-module = [ $(parent).project-module ] ;
+ # Do not import rules coming from 'project-rules' as they must be
+ # imported localized.
+ local user-rules = [ set.difference
+ [ RULENAMES $(parent-module) ] :
+ [ RULENAMES project-rules ] ] ;
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules)
+ ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+}
+
+
+# Helper rules to detect cycles in main target references.
+#
+local rule start-building ( main-target-instance )
+{
+ if $(main-target-instance) in $(.targets-being-built)
+ {
+ local names ;
+ for local t in $(.targets-being-built) $(main-target-instance)
+ {
+ names += [ $(t).full-name ] ;
+ }
+
+ import errors ;
+ errors.error "Recursion in main target references"
+        : "the following targets are currently being built:"
+ : $(names) ;
+ }
+ .targets-being-built += $(main-target-instance) ;
+}
+
+
+local rule end-building ( main-target-instance )
+{
+ .targets-being-built = $(.targets-being-built[1--2]) ;
+}
+
+
+# A named top-level target in Jamfile.
+#
+class main-target : abstract-target
+{
+ import assert ;
+ import feature ;
+ import print ;
+ import property-set ;
+ import sequence ;
+ import targets : start-building end-building ;
+
+ rule __init__ ( name : project )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+ }
+
+ # Add a new alternative for this target
+ rule add-alternative ( target )
+ {
+ local d = [ $(target).default-build ] ;
+ if $(self.alternatives) && ( $(self.default-build) != $(d) )
+ {
+ import errors : error : errors.error ;
+ errors.error "default build must be identical in all alternatives"
+ : "main target is" [ full-name ]
+ : "with" [ $(d).raw ]
+ : "differing from previous default build"
+ [ $(self.default-build).raw ] ;
+ }
+ else
+ {
+ self.default-build = $(d) ;
+ }
+ self.alternatives += $(target) ;
+ }
+
+ # Returns the best viable alternative for this property-set. See the
+ # documentation for selection rules.
+ #
+ local rule select-alternatives ( property-set debug ? )
+ {
+ # When selecting alternatives we have to consider defaults, for example:
+ # lib l : l.cpp : <variant>debug ;
+ # lib l : l_opt.cpp : <variant>release ;
+ # will not work unless we add default value <variant>debug.
+        property-set = [ $(property-set).add-defaults ] ;
+
+ # The algorithm: we keep the current best viable alternative. When we
+ # encounter a new best viable alternative, we compare it with the
+ # current one.
+
+ local best ;
+ local best-properties ;
+
+ if $(self.alternatives[2-])
+ {
+ local bad ;
+ local worklist = $(self.alternatives) ;
+ while $(worklist) && ! $(bad)
+ {
+ local v = $(worklist[1]) ;
+ local properties = [ $(v).match $(property-set) $(debug) ] ;
+
+ if $(properties) != no-match
+ {
+ if ! $(best)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ if $(properties) = $(best-properties)
+ {
+ bad = true ;
+ }
+ else if $(properties) in $(best-properties)
+ {
+ # Do nothing, this alternative is worse
+ }
+ else if $(best-properties) in $(properties)
+ {
+ best = $(v) ;
+ best-properties = $(properties) ;
+ }
+ else
+ {
+ bad = true ;
+ }
+ }
+ }
+ worklist = $(worklist[2-]) ;
+ }
+ if ! $(bad)
+ {
+ return $(best) ;
+ }
+ }
+ else
+ {
+ return $(self.alternatives) ;
+ }
+ }
+
+ rule apply-default-build ( property-set )
+ {
+ return [ targets.apply-default-build $(property-set) :
+ $(self.default-build) ] ;
+ }
+
+ # Select an alternative for this main target, by finding all alternatives
+ # whose requirements are satisfied by 'properties' and picking the one with
+ # the longest requirements set. Returns the result of calling 'generate' on
+ # that alternative.
+ #
+ rule generate ( property-set )
+ {
+ start-building $(__name__) ;
+
+ # We want composite properties in the build request to act as if all the
+ # properties they expand to have been explicitly specified.
+ property-set = [ $(property-set).expand ] ;
+
+ local all-property-sets = [ apply-default-build $(property-set) ] ;
+ local usage-requirements = [ property-set.empty ] ;
+ local result ;
+ for local p in $(all-property-sets)
+ {
+ local r = [ generate-really $(p) ] ;
+ if $(r)
+ {
+ usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
+ result += $(r[2-]) ;
+ }
+ }
+ end-building $(__name__) ;
+ return $(usage-requirements) [ sequence.unique $(result) ] ;
+ }
+
+    # Generates the main target with the given property set and returns a list
+    # whose first element is a property-set object containing the usage
+    # requirements of the generated targets, with the generated virtual targets
+    # in the remaining elements. It is possible that no targets are generated.
+ #
+ local rule generate-really ( property-set )
+ {
+ local best-alternatives = [ select-alternatives $(property-set) ] ;
+ if ! $(best-alternatives)
+ {
+ ECHO "error: No best alternative for" [ full-name ] ;
+ select-alternatives $(property-set) debug ;
+ return [ property-set.empty ] ;
+ }
+ else
+ {
+ # Now return virtual targets for the only alternative.
+ return [ $(best-alternatives).generate $(property-set) ] ;
+ }
+ }
+
+ rule rename ( new-name )
+ {
+ abstract-target.rename $(new-name) ;
+ for local a in $(self.alternatives)
+ {
+ $(a).rename $(new-name) ;
+ }
+ }
+}
+
+
+# Abstract target referring to a source file. This is an artificial entity
+# allowing the sources of a target to be represented using a list of
+# abstract-target instances.
+#
+class file-reference : abstract-target
+{
+ import virtual-target ;
+ import property-set ;
+ import path ;
+
+ rule __init__ ( file : project )
+ {
+ abstract-target.__init__ $(file) : $(project) ;
+ }
+
+ rule generate ( properties )
+ {
+ return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
+ [ location ] : $(self.project) ] ;
+ }
+
+ # Returns true if the referred file really exists.
+ rule exists ( )
+ {
+ location ;
+ return $(self.file-path) ;
+ }
+
+ # Returns the location of target. Needed by 'testing.jam'.
+ rule location ( )
+ {
+ if ! $(self.file-location)
+ {
+ local source-location = [ $(self.project).get source-location ] ;
+ for local src-dir in $(source-location)
+ {
+ if ! $(self.file-location)
+ {
+ local location = [ path.root $(self.name) $(src-dir) ] ;
+ if [ CHECK_IF_FILE [ path.native $(location) ] ]
+ {
+ self.file-location = $(src-dir) ;
+ self.file-path = $(location) ;
+ }
+ }
+ }
+ }
+ return $(self.file-location) ;
+ }
+}
+
+
+# Given a target-reference, made in context of 'project', returns the
+# abstract-target instance that is referred to, as well as properties explicitly
+# specified for this reference.
+#
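+# For example (illustrative), the reference "foo//bar/<link>static" is split
+# into the target id "foo//bar" and the override property <link>static; the id
+# is then resolved via the project's 'find' rule.
+#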
+rule resolve-reference ( target-reference : project )
+{
+ # Separate target name from properties override.
+ local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ;
+ local id = $(split[1]) ;
+ if ! $(split) || ! $(id)
+ {
+ error "Malformed target reference $(target-reference)" ;
+ }
+ local sproperties = ;
+ if $(split[3])
+ {
+ sproperties = [ property.make [ feature.split $(split[3]) ] ] ;
+ sproperties = [ feature.expand-composites $(sproperties) ] ;
+ }
+
+ # Find the target.
+ local target = [ $(project).find $(id) ] ;
+
+ return $(target) [ property-set.create $(sproperties) ] ;
+}
+
+
+# Attempts to generate the target given by target reference, which can refer
+# both to a main target or to a file. Returns a list consisting of
+# - usage requirements
+# - generated virtual targets, if any
+#
+rule generate-from-reference (
+ target-reference # Target reference.
+ : project # Project where the reference is made.
+ : property-set # Properties of the main target that makes the reference.
+)
+{
+ local r = [ resolve-reference $(target-reference) : $(project) ] ;
+ local target = $(r[1]) ;
+ local sproperties = $(r[2]) ;
+
+ # Take properties which should be propagated and refine them with
+ # source-specific requirements.
+ local propagated = [ $(property-set).propagated ] ;
+ local rproperties = [ $(propagated).refine $(sproperties) ] ;
+ if $(rproperties[1]) = "@error"
+ {
+ import errors ;
+ errors.error
+            "When building" $(target-reference) "with properties"
+            [ $(property-set).raw ] :
+            "Invalid properties specified for" $(target-reference) ":"
+ $(rproperties[2-]) ;
+ }
+ return [ $(target).generate $(rproperties) ] ;
+}
+
+
+rule apply-default-build ( property-set : default-build )
+{
+ # 1. First, see what properties from default-build are already present in
+ # property-set.
+
+ local raw = [ $(property-set).raw ] ;
+ local specified-features = $(raw:G) ;
+
+ local defaults-to-apply ;
+ for local d in [ $(default-build).raw ]
+ {
+ if ! $(d:G) in $(specified-features)
+ {
+ defaults-to-apply += $(d) ;
+ }
+ }
+
+ # 2. If there are any defaults to be applied, form a new build request. Pass
+ # it through to 'expand-no-defaults' since default-build might contain
+ # "release debug" resulting in two property-sets.
+ local result ;
+ if $(defaults-to-apply)
+ {
+ # We have to compress subproperties here to prevent property lists like:
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to a cross-product property combination. That may be an indication
+ # that build-request.expand-no-defaults is the wrong rule to use here.
+ properties = [ build-request.expand-no-defaults
+ [ feature.compress-subproperties $(raw) ] $(defaults-to-apply) ] ;
+
+ if $(properties)
+ {
+ for local p in $(properties)
+ {
+ result += [ property-set.create
+ [ feature.expand [ feature.split $(p) ] ] ] ;
+ }
+ }
+ else
+ {
+ result = [ property-set.empty ] ;
+ }
+ }
+ else
+ {
+ result = $(property-set) ;
+ }
+ return $(result) ;
+}
+
+
+# Given a build request and requirements, return properties common to dependency
+# build request and target requirements.
+#
+# TODO: Document exactly what 'common properties' are, whether they should
+# include default property values, whether they should contain any conditional
+# properties or should those be already processed, etc. See whether there are
+# any differences between use cases with empty and non-empty build-request as
+# well as with requirements containing and those not containing any non-free
+# features.
+#
+rule common-properties ( build-request requirements )
+{
+ # For optimization, we add free requirements directly, without using a
+ # complex algorithm. This gives the complex algorithm a better chance of
+ # caching results.
+ local free = [ $(requirements).free ] ;
+ local non-free = [ property-set.create [ $(requirements).base ]
+ [ $(requirements).incidental ] ] ;
+
+ local key = .rp.$(build-request)-$(non-free) ;
+ if ! $($(key))
+ {
+ $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
+ }
+ return [ $($(key)).add-raw $(free) ] ;
+}
+
+
+# Given a 'context' -- a set of already present properties, and 'requirements',
+# decide which extra properties should be applied to 'context'. For conditional
+# requirements, this means evaluating the condition. For indirect conditional
+# requirements, this means calling a rule. Ordinary requirements are always
+# applied.
+#
+# Handles the situation where evaluating one conditional requirement affects
+# conditions of another conditional requirements, such as:
+# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+#
+# If 'what' is 'refined' returns context refined with new requirements. If
+# 'what' is 'added' returns just the requirements to be applied.
+#
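+# For example (illustrative): with the conditional requirements
+#   <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+# and a context already containing <toolset>gcc, the first pass adds
+# <variant>release, the next pass additionally yields <define>RELEASE, and the
+# pass after that produces the same result, so the loop below terminates.
+#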
+rule evaluate-requirements ( requirements : context : what )
+{
+ # Apply non-conditional requirements. It is possible that further
+ # conditional requirement change a value set by non-conditional
+ # requirements. For example:
+ #
+ # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
+ #
+ # I am not sure if this should be an error, or not, especially given that
+ #
+ # <threading>single
+ #
+ # might come from project's requirements.
+
+ local unconditional = [ feature.expand [ $(requirements).non-conditional ] ]
+ ;
+
+ local raw = [ $(context).raw ] ;
+ raw = [ property.refine $(raw) : $(unconditional) ] ;
+
+ # We have collected properties that surely must be present in common
+ # properties. We now try to figure out what other properties should be added
+ # in order to satisfy rules (4)-(6) from the docs.
+
+ local conditionals = [ $(requirements).conditional ] ;
+    # The 'count' variable has one element for each conditional feature and for
+    # each occurrence of the '<conditional>' feature. It is used as a loop
+    # counter: on each iteration of the loop we remove one element, and the
+    # property set should stabilize before we are done. It is assumed that
+    # #conditionals iterations should be enough for properties to propagate
+    # along conditions in any direction.
+ local count = $(conditionals) [ $(requirements).get <conditional> ]
+ and-once-more ;
+
+ local added-requirements ;
+
+ local current = $(raw) ;
+
+ # It is assumed that ordinary conditional requirements can not add
+ # <conditional> properties (a.k.a. indirect conditional properties), and
+ # that rules referred to by <conditional> properties can not add new
+ # <conditional> properties. So the list of indirect conditionals does not
+ # change.
+ local indirect = [ $(requirements).get <conditional> ] ;
+ indirect = [ MATCH ^@(.*) : $(indirect) ] ;
+
+ local ok ;
+ while $(count)
+ {
+ # Evaluate conditionals in context of current properties.
+ local e = [ property.evaluate-conditionals-in-context $(conditionals) :
+ $(current) ] ;
+
+ # Evaluate indirect conditionals.
+ for local i in $(indirect)
+ {
+ local t = [ current ] ;
+ local p = [ $(t).project ] ;
+ local new = [ indirect.call $(i) $(current) ] ;
+ e += [ property.translate-paths $(new) : [ $(p).location ] ] ;
+ }
+
+ if $(e) = $(added-requirements)
+ {
+ # If we got the same result, we have found the final properties.
+ count = ;
+ ok = true ;
+ }
+ else
+ {
+ # Oops, conditional evaluation results have changed. Also 'current'
+ # contains leftovers from a previous evaluation. Recompute 'current'
+ # using initial properties and conditional requirements.
+ added-requirements = $(e) ;
+ current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
+ }
+ count = $(count[2-]) ;
+ }
+ if ! $(ok)
+ {
+ import errors ;
+ errors.error Can not evaluate conditional properties $(conditionals) ;
+ }
+
+ if $(what) = added
+ {
+ return [ property-set.create $(unconditional) $(added-requirements) ] ;
+ }
+ else if $(what) = refined
+ {
+ return [ property-set.create $(current) ] ;
+ }
+ else
+ {
+ import errors ;
+ errors.error "Invalid value of the 'what' parameter." ;
+ }
+}
+
+
+rule common-properties2 ( build-request requirements )
+{
+ # This guarantees that default properties are present in the result, unless
+    # they are overridden by some requirement. FIXME: There is a possibility that
+ # we have added <foo>bar, which is composite and expands to <foo2>bar2, but
+ # default value of <foo2> is not bar2, in which case it is not clear what to
+ # do.
+ #
+ build-request = [ $(build-request).add-defaults ] ;
+ # Features added by 'add-defaults' can be composite and expand to features
+ # without default values -- which therefore have not been added yet. It
+ # could be clearer/faster to expand only newly added properties but that is
+ # not critical.
+ build-request = [ $(build-request).expand ] ;
+
+ return [ evaluate-requirements $(requirements) : $(build-request) :
+ refined ] ;
+}
+
+
+rule push-target ( target )
+{
+ .targets = $(target) $(.targets) ;
+}
+
+rule pop-target ( )
+{
+ .targets = $(.targets[2-]) ;
+}
+
+# Return the metatarget that is currently being generated.
+rule current ( )
+{
+ return $(.targets[1]) ;
+}
+
+
+# Implements the most standard way of constructing main target alternative from
+# sources. Allows sources to be either file or other main target and handles
+# generation of those dependency targets.
+#
+class basic-target : abstract-target
+{
+ import build-request ;
+ import build-system ;
+ import "class" : new ;
+ import feature ;
+ import property ;
+ import property-set ;
+ import sequence ;
+ import set ;
+ import targets ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ abstract-target.__init__ $(name) : $(project) ;
+
+ self.sources = $(sources) ;
+ if ! $(requirements)
+ {
+ requirements = [ property-set.empty ] ;
+ }
+ self.requirements = $(requirements) ;
+ if ! $(default-build)
+ {
+ default-build = [ property-set.empty ] ;
+ }
+ self.default-build = $(default-build) ;
+ if ! $(usage-requirements)
+ {
+ usage-requirements = [ property-set.empty ] ;
+ }
+ self.usage-requirements = $(usage-requirements) ;
+
+ if $(sources:G)
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error properties found "in" the 'sources' parameter
+ "for" [ full-name ] ;
+ }
+ }
+
+ rule always ( )
+ {
+ self.always = 1 ;
+ }
+
+ # Returns the list of abstract-targets which are used as sources. The extra
+ # properties specified for sources are not represented. The only user for
+ # this rule at the moment is the "--dump-tests" feature of the test system.
+ #
+ rule sources ( )
+ {
+ if ! $(self.source-targets)
+ {
+ for local s in $(self.sources)
+ {
+ self.source-targets += [ targets.resolve-reference $(s) :
+ $(self.project) ] ;
+ }
+ }
+ return $(self.source-targets) ;
+ }
+
+ rule requirements ( )
+ {
+ return $(self.requirements) ;
+ }
+
+ rule default-build ( )
+ {
+ return $(self.default-build) ;
+ }
+
+ # Returns the alternative condition for this alternative, if the condition
+ # is satisfied by 'property-set'.
+ #
+ rule match ( property-set debug ? )
+ {
+ # The condition is composed of all base non-conditional properties. It
+ # is not clear if we should expand 'self.requirements' or not. For one
+ # thing, it would be nice to be able to put
+ # <toolset>msvc-6.0
+ # in requirements. On the other hand, if we have <variant>release as a
+ # condition it does not make sense to require <optimization>full to be
+ # in the build request just to select this variant.
+ local bcondition = [ $(self.requirements).base ] ;
+ local ccondition = [ $(self.requirements).conditional ] ;
+ local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
+ if $(debug)
+ {
+ ECHO " next alternative: required properties:"
+ $(condition:E=(empty)) ;
+ }
+
+ if $(condition) in [ $(property-set).raw ]
+ {
+ if $(debug)
+ {
+ ECHO " matched" ;
+ }
+ return $(condition) ;
+ }
+ else
+ {
+ if $(debug)
+ {
+ ECHO " not matched" ;
+ }
+ return no-match ;
+ }
+ }
+
+ # Takes a target reference, which might be either target id or a dependency
+ # property, and generates that target using 'property-set' as a build
+ # request.
+ #
+ # The results are added to the variable called 'result-var'. Usage
+ # requirements are added to the variable called 'usage-requirements-var'.
+ #
+ rule generate-dependencies ( dependencies * : property-set : result-var
+ usage-requirements-var )
+ {
+ for local dependency in $(dependencies)
+ {
+ local grist = $(dependency:G) ;
+ local id = $(dependency:G=) ;
+ local result = [ targets.generate-from-reference $(id) :
+ $(self.project) : $(property-set) ] ;
+
+ $(result-var) += $(result[2-]:G=$(grist)) ;
+ $(usage-requirements-var) += [ $(result[1]).raw ] ;
+ }
+ }
+
+ # Determines final build properties, generates sources, and calls
+ # 'construct'. This method should not be overridden.
+ #
+ rule generate ( property-set )
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ local fn = [ full-name ] ;
+ ECHO [ targets.indent ] "Building target '$(fn)'" ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] Build request: $(property-set)
+ [ $(property-set).raw ] ;
+ local cf = [ build-system.command-line-free-features ] ;
+ ECHO [ targets.indent ] Command line free features: [ $(cf).raw ] ;
+ ECHO [ targets.indent ] Target requirements:
+ [ $(self.requirements).raw ] ;
+ }
+ targets.push-target $(__name__) ;
+
+        # Apply free features from the command line. If the user said
+        #   define=FOO
+        # they most likely want this define to be set for all compiles. Do
+        # this before the check for an already built target.
+ property-set = [ $(property-set).refine
+ [ build-system.command-line-free-features ] ] ;
+
+ if ! $(self.generated.$(property-set))
+ {
+ local rproperties = [ targets.common-properties $(property-set)
+ $(self.requirements) ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Common properties: "
+ [ $(rproperties).raw ] ;
+ }
+
+ if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get
+ <build> ] != no )
+ {
+ local source-targets ;
+ local properties = [ $(rproperties).non-dependency ] ;
+ local usage-requirements ;
+
+ generate-dependencies [ $(rproperties).dependency ] :
+ $(rproperties) : properties usage-requirements ;
+
+ generate-dependencies $(self.sources) : $(rproperties) :
+ source-targets usage-requirements ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO ;
+ ECHO [ targets.indent ] "Usage requirements for"
+ $(self.name)": " $(usage-requirements) ;
+ }
+
+ rproperties = [ property-set.create $(properties)
+ $(usage-requirements) ] ;
+ usage-requirements = [ property-set.create $(usage-requirements)
+ ] ;
+
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Build properties: "
+ [ $(rproperties).raw ] ;
+ }
+
+ local extra = [ $(rproperties).get <source> ] ;
+ source-targets += $(extra:G=) ;
+ # We might get duplicate sources, for example if we link to two
+ # libraries having the same <library> usage requirement. Use
+ # stable sort, since for some targets the order is important,
+ # e.g. RUN_PY targets need a python source to come first.
+ source-targets = [ sequence.unique $(source-targets) : stable ]
+ ;
+
+ local result = [ construct $(self.name) : $(source-targets) :
+ $(rproperties) ] ;
+
+ if $(result)
+ {
+ local gur = $(result[1]) ;
+ result = $(result[2-]) ;
+
+ if $(self.always)
+ {
+ for local t in $(result)
+ {
+ $(t).always ;
+ }
+ }
+
+ local s = [ create-subvariant $(result)
+ : [ virtual-target.recent-targets ]
+ : $(property-set) : $(source-targets)
+ : $(rproperties) : $(usage-requirements) ] ;
+ virtual-target.clear-recent-targets ;
+
+ local ur = [ compute-usage-requirements $(s) ] ;
+ ur = [ $(ur).add $(gur) ] ;
+ $(s).set-usage-requirements $(ur) ;
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ }
+
+ self.generated.$(property-set) = $(ur) $(result) ;
+ }
+ }
+ else
+ {
+ if $(rproperties[1]) = "@error"
+ {
+ ECHO [ targets.indent ] "Skipping build of:" [ full-name ]
+ "cannot compute common properties" ;
+ }
+ else if [ $(rproperties).get <build> ] = no
+ {
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+ }
+ else
+ {
+ ECHO [ targets.indent ] "Skipping build of: " [ full-name ]
+ " unknown reason" ;
+ }
+
+ # We are here either because there has been an error computing
+ # properties or there is <build>no in properties. In the latter
+ # case we do not want any diagnostic. In the former case, we
+ # need diagnostics. FIXME
+
+ # If this target fails to build, add <build>no to properties to
+ # cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it is not clear if that is a good idea anyway. The alias
+ # target, for example, should not fail to build if a
+ # dependency fails.
+ self.generated.$(property-set) = [ property-set.create <build>no
+ ] ;
+ }
+ }
+ else
+ {
+ if [ modules.peek : .debug-building ]
+ {
+ ECHO [ targets.indent ] "Already built" ;
+ local ur = $(self.generated.$(property-set)) ;
+ ur = $(ur[0]) ;
+ targets.increase-indent ;
+ ECHO [ targets.indent ] "Usage requirements from"
+ $(self.name)": " [ $(ur).raw ] ;
+ targets.decrease-indent ;
+ }
+ }
+
+ targets.pop-target ;
+ targets.decrease-indent ;
+ return $(self.generated.$(property-set)) ;
+ }
+
+ # Given the set of generated targets, and refined build properties,
+ # determines and sets appropriate usage requirements on those targets.
+ #
+ rule compute-usage-requirements ( subvariant )
+ {
+ local rproperties = [ $(subvariant).build-properties ] ;
+ xusage-requirements = [ targets.evaluate-requirements
+ $(self.usage-requirements) : $(rproperties) : added ] ;
+
+ # We generate all dependency properties and add them, as well as their
+ # usage requirements, to the result.
+ local extra ;
+ generate-dependencies [ $(xusage-requirements).dependency ] :
+ $(rproperties) : extra extra ;
+
+ local result = [ property-set.create
+ [ $(xusage-requirements).non-dependency ] $(extra) ] ;
+
+ # Propagate usage requirements we got from sources, except for the
+ # <pch-header> and <pch-file> features.
+ #
+ # That feature specifies which pch file to use, and should apply only to
+ # direct dependents. Consider:
+ #
+ # pch pch1 : ...
+ # lib lib1 : ..... pch1 ;
+ # pch pch2 :
+ # lib lib2 : pch2 lib1 ;
+ #
+ # Here, lib2 should not get <pch-header> property from pch1.
+ #
+ # Essentially, when those two features are in usage requirements, they
+ # are propagated only to direct dependents. We might need a more general
+ # mechanism, but for now, only those two features are special.
+ #
+ # TODO - Actually there are more possible candidates like for instance
+ # when listing static library X as a source for another static library.
+ # Then static library X will be added as a <source> property to the
+ # second library's usage requirements but those requirements should last
+ # only up to the first executable or shared library that actually links
+ # to it.
+ local raw = [ $(subvariant).sources-usage-requirements ] ;
+ raw = [ $(raw).raw ] ;
+ raw = [ property.change $(raw) : <pch-header> ] ;
+ raw = [ property.change $(raw) : <pch-file> ] ;
+ return [ $(result).add [ property-set.create $(raw) ] ] ;
+ }
+
+ # Creates new subvariant instances for 'targets'.
+ # 'root-targets' - virtual targets to be returned to dependants
+ # 'all-targets' - virtual targets created while building this main target
+ # 'build-request' - property-set instance with requested build properties
+ #
+ local rule create-subvariant ( root-targets * : all-targets * :
+ build-request : sources * : rproperties : usage-requirements )
+ {
+ for local e in $(root-targets)
+ {
+ $(e).root true ;
+ }
+
+ # Process all virtual targets that will be created if this main target
+ # is created.
+ local s = [ new subvariant $(__name__) : $(build-request) : $(sources) :
+ $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
+ for local v in $(all-targets)
+ {
+ if ! [ $(v).creating-subvariant ]
+ {
+ $(v).creating-subvariant $(s) ;
+ }
+ }
+ return $(s) ;
+ }
+
+ # Constructs virtual targets for this abstract target and the dependency
+ # graph. Returns a usage-requirements property-set and a list of virtual
+    # targets. Should be overridden in derived classes.
+ #
+ rule construct ( name : source-targets * : properties * )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+class typed-target : basic-target
+{
+ import generators ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+
+ self.type = $(type) ;
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local r = [ generators.construct $(self.project) $(name:S=)
+ : $(self.type)
+ : [ property-set.create [ $(property-set).raw ]
+ <main-target-type>$(self.type) ]
+ : $(source-targets) : true ] ;
+ if ! $(r)
+ {
+ ECHO "warn: Unable to construct" [ full-name ] ;
+
+            # Are there any top-level generators for this type/property set?
+ if ! [ generators.find-viable-generators $(self.type) :
+ $(property-set) ]
+ {
+ ECHO "error: no generators were found for type '$(self.type)'" ;
+ ECHO "error: and the requested properties" ;
+ ECHO "error: make sure you've configured the needed tools" ;
+ ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
+ EXIT "To debug this problem, try the --debug-generators option."
+ ;
+ }
+ }
+ return $(r) ;
+ }
+}
+
+
+# Return the list of sources to use, if main target rule is invoked with
+# 'sources'. If there are any objects in 'sources', they are treated as main
+# target instances, and the names of such targets are adjusted to be
+# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if
+# a non-empty value is passed as the 'no-renaming' parameter.
+#
+rule main-target-sources ( sources * : main-target-name : no-renaming ? )
+{
+ local result ;
+ for local t in $(sources)
+ {
+ if [ class.is-instance $(t) ]
+ {
+ local name = [ $(t).name ] ;
+ if ! $(no-renaming)
+ {
+ name = $(main-target-name)__$(name) ;
+ $(t).rename $(name) ;
+ }
+ # Inline targets are not built by default.
+ local p = [ $(t).project ] ;
+ $(p).mark-target-as-explicit $(name) ;
+ result += $(name) ;
+ }
+ else
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns the requirements to use when declaring a main target, obtained by
+# translating all specified property paths and refining project requirements
+# with the ones specified for the target.
+#
+rule main-target-requirements (
+ specification * # Properties explicitly specified for the main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local requirements = [ property-set.refine-from-user-input
+ [ $(project).get requirements ] : $(specification) :
+ [ $(project).project-module ] : [ $(project).get location ] ] ;
+ if $(requirements[1]) = "@error"
+ {
+ import errors ;
+ errors.error "Conflicting requirements for target:" $(requirements) ;
+ }
+ return [ $(requirements).add [ toolset.requirements ] ] ;
+}
+
+
+# Returns the usage requirements to use when declaring a main target, which are
+# obtained by translating all specified property paths and adding project's
+# usage requirements.
+#
+rule main-target-usage-requirements (
+ specification * # Use-properties explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local project-usage-requirements = [ $(project).get usage-requirements ] ;
+
+ # We do not use 'refine-from-user-input' because:
+ # - I am not sure if removing parent's usage requirements makes sense
+ # - refining usage requirements is not needed, since usage requirements are
+ # always free.
+ local usage-requirements = [ property-set.create-from-user-input
+ $(specification)
+ : [ $(project).project-module ] [ $(project).get location ] ] ;
+
+ return [ $(project-usage-requirements).add $(usage-requirements) ] ;
+}
+
+
+# Return the default build value to use when declaring a main target, which is
+# obtained by using the specified value if not empty and parent's default build
+# attribute otherwise.
+#
+rule main-target-default-build (
+ specification * # Default build explicitly specified for a main target.
+ : project # Project where the main target is to be declared.
+)
+{
+ local result ;
+ if $(specification)
+ {
+ result = $(specification) ;
+ }
+ else
+ {
+ result = [ $(project).get default-build ] ;
+ }
+ return [ property-set.create-with-validation $(result) ] ;
+}
+
+
+# Registers the specified target as a main target alternative and returns it.
+#
+rule main-target-alternative ( target )
+{
+ local ptarget = [ $(target).project ] ;
+ $(ptarget).add-alternative $(target) ;
+ return $(target) ;
+}
+
+
+# Creates a metatarget with the specified properties, using 'klass' as the
+# class. The 'name', 'sources', 'requirements', 'default-build' and
+# 'usage-requirements' are assumed to be in the form specified by the user in
+# the Jamfile corresponding to 'project'.
+#
+rule create-metatarget ( klass : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [ targets.main-target-alternative [ new $(klass) $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ] ;
+}
+
+
+# Creates a typed-target with the specified properties. The 'name', 'sources',
+# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
+# the form specified by the user in the Jamfile corresponding to 'project'.
+#
+rule create-typed-target ( type : project : name : sources * : requirements * :
+ default-build * : usage-requirements * )
+{
+ return [ targets.main-target-alternative [ new typed-target $(name) :
+ $(project) : $(type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ] ;
+}
diff --git a/tools/build/src/build/targets.py b/tools/build/src/build/targets.py
new file mode 100644
index 0000000000..acf10e4fdd
--- /dev/null
+++ b/tools/build/src/build/targets.py
@@ -0,0 +1,1401 @@
+# Status: ported.
+# Base revision: 64488
+
+# Copyright Vladimir Prus 2002-2007.
+# Copyright Rene Rivera 2006.
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
+#
+# Abstract targets are represented by classes derived from 'AbstractTarget' class.
+# The first abstract target is 'project_target', which is created for each
+# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
+# (see project.jam).
+#
+# Project targets keep a list of 'MainTarget' instances.
+# A main target is what the user explicitly defines in a Jamfile. It is
+# possible to have several definitions for a main target, for example to have
+# different lists of sources for different platforms. So, main targets
+# keep a list of alternatives.
+#
+# Each alternative is an instance of 'AbstractTarget'. When a main target
+# subvariant is defined by some rule, that rule will decide what class to
+# use, create an instance of that class and add it to the list of alternatives
+# for the main target.
+#
+# Rules supplied by the build system will use only targets derived
+# from 'BasicTarget' class, which will provide some default behaviour.
+# There will be two classes derived from it, 'make-target', created by the
+# 'make' rule, and 'TypedTarget', created by rules such as 'exe' and 'dll'.
+
+#
+# +------------------------+
+# |AbstractTarget |
+# +========================+
+# |name |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project_target | | MainTarget | | BasicTarget |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# | main-target | +-------------+ | construct = 0|
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
+# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
+# | | TypedTarget | | make-target | | stage-target |
+# . +==============+ +=============+ +==============+
+# . | construct | | construct | | construct |
+# +--------------+ +-------------+ +--------------+
+
+import re
+import os.path
+import sys
+
+from b2.manager import get_manager
+
+from b2.util.utility import *
+import property, project, virtual_target, property_set, feature, generators, toolset
+from virtual_target import Subvariant
+from b2.exceptions import *
+from b2.util.sequence import unique
+from b2.util import path, bjam_signature
+from b2.build.errors import user_error_checkpoint
+
+import b2.build.build_request as build_request
+
+import b2.util.set
+_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
+
+class TargetRegistry:
+
+ def __init__ (self):
+ # All targets that are currently being built.
+        # The key is id(target); the value is the target object itself.
+ self.targets_being_built_ = {}
+
+ # Current indent for debugging messages
+ self.indent_ = ""
+
+ self.debug_building_ = "--debug-building" in bjam.variable("ARGV")
+
+ self.targets_ = []
+
+ def main_target_alternative (self, target):
+        """ Registers the specified target as a main target alternative.
+ Returns 'target'.
+ """
+ target.project ().add_alternative (target)
+ return target
+
+ def main_target_sources (self, sources, main_target_name, no_renaming=0):
+ """Return the list of sources to use, if main target rule is invoked
+ with 'sources'. If there are any objects in 'sources', they are treated
+        as main target instances, and the names of such targets are adjusted to
+ be '<name_of_this_target>__<name_of_source_target>'. Such renaming
+        is disabled if a non-empty value is passed for the 'no_renaming' parameter."""
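+        # For instance (a hypothetical Jamfile fragment; the names are
+        # illustrative only), a declaration such as
+        #
+        #     exe app : app.cpp [ obj helper : helper.cpp ] ;
+        #
+        # causes the inline 'obj' target to be renamed 'app__helper' and
+        # marked explicit, so it is built only as a dependency of 'app'.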
+ result = []
+
+ for t in sources:
+
+ t = b2.util.jam_to_value_maybe(t)
+
+ if isinstance (t, AbstractTarget):
+ name = t.name ()
+
+ if not no_renaming:
+ name = main_target_name + '__' + name
+ t.rename (name)
+
+ # Inline targets are not built by default.
+ p = t.project()
+ p.mark_targets_as_explicit([name])
+ result.append(name)
+
+ else:
+ result.append (t)
+
+ return result
+
+
+ def main_target_requirements(self, specification, project):
+        """Returns the requirements to use when declaring a main target,
+        which are obtained by
+        - translating all specified property paths, and
+        - refining project requirements with the ones specified for the target.
+
+        'specification' are the properties explicitly specified for the
+        main target.
+        'project' is the project where the main target is to be declared."""
+
+ specification.extend(toolset.requirements())
+
+ requirements = property_set.refine_from_user_input(
+ project.get("requirements"), specification,
+ project.project_module(), project.get("location"))
+
+ return requirements
+
+ def main_target_usage_requirements (self, specification, project):
+        """ Returns the usage requirements to use when declaring a main target,
+ which are obtained by
+ - translating all specified property paths, and
+ - adding project's usage requirements
+ specification: Use-properties explicitly specified for a main target
+ project: Project where the main target is to be declared
+ """
+ project_usage_requirements = project.get ('usage-requirements')
+
+        # We do not use 'refine_from_user_input' because:
+        # - I am not sure if removing the parent's usage requirements makes sense
+        # - refining usage requirements is not needed, since usage requirements
+        #   are always free.
+ usage_requirements = property_set.create_from_user_input(
+ specification, project.project_module(), project.get("location"))
+
+ return project_usage_requirements.add (usage_requirements)
+
+ def main_target_default_build (self, specification, project):
+ """ Return the default build value to use when declaring a main target,
+ which is obtained by using specified value if not empty and parent's
+ default build attribute otherwise.
+ specification: Default build explicitly specified for a main target
+ project: Project where the main target is to be declared
+ """
+ if specification:
+ return property_set.create_with_validation(specification)
+ else:
+ return project.get ('default-build')
+
+ def start_building (self, main_target_instance):
+        """ Helper method to detect cycles in main target references.
+ """
+ if self.targets_being_built_.has_key(id(main_target_instance)):
+ names = []
+ for t in self.targets_being_built_.values() + [main_target_instance]:
+ names.append (t.full_name())
+
+            get_manager().errors()("Recursion in main target references\n"
+                + "\n".join(names))
+
+ self.targets_being_built_[id(main_target_instance)] = main_target_instance
+
+ def end_building (self, main_target_instance):
+ assert (self.targets_being_built_.has_key (id (main_target_instance)))
+ del self.targets_being_built_ [id (main_target_instance)]
+
+ def create_typed_target (self, type, project, name, sources, requirements, default_build, usage_requirements):
+ """ Creates a TypedTarget with the specified properties.
+ The 'name', 'sources', 'requirements', 'default_build' and
+ 'usage_requirements' are assumed to be in the form specified
+        by the user in the Jamfile corresponding to 'project'.
+ """
+ return self.main_target_alternative (TypedTarget (name, project, type,
+ self.main_target_sources (sources, name),
+ self.main_target_requirements (requirements, project),
+ self.main_target_default_build (default_build, project),
+ self.main_target_usage_requirements (usage_requirements, project)))
+
+ def increase_indent(self):
+ self.indent_ += " "
+
+ def decrease_indent(self):
+ self.indent_ = self.indent_[0:-4]
+
+ def logging(self):
+ return self.debug_building_
+
+ def log(self, message):
+ if self.debug_building_:
+ print self.indent_ + message
+
+ def push_target(self, target):
+ self.targets_.append(target)
+
+ def pop_target(self):
+ self.targets_ = self.targets_[:-1]
+
+ def current(self):
+ return self.targets_[0]
+
+
+class GenerateResult:
+
+ def __init__ (self, ur=None, targets=None):
+ if not targets:
+ targets = []
+
+ self.__usage_requirements = ur
+ self.__targets = targets
+ assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets)
+
+ if not self.__usage_requirements:
+ self.__usage_requirements = property_set.empty ()
+
+ def usage_requirements (self):
+ return self.__usage_requirements
+
+ def targets (self):
+ return self.__targets
+
+ def extend (self, other):
+ assert (isinstance (other, GenerateResult))
+
+ self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ())
+ self.__targets.extend (other.targets ())
+
+class AbstractTarget:
+ """ Base class for all abstract targets.
+ """
+ def __init__ (self, name, project, manager = None):
+        """ name:    name of the target
+            project: the project target to which this one belongs
+            manager: the Manager object; if None, project.manager() is used
+ """
+ assert (isinstance (project, ProjectTarget))
+ # Note: it might seem that we don't need either name or project at all.
+ # However, there are places where we really need it. One example is error
+ # messages which should name problematic targets. Another is setting correct
+ # paths for sources and generated files.
+
+ # Why allow manager to be specified? Because otherwise project target could not derive
+ # from this class.
+ if manager:
+ self.manager_ = manager
+ else:
+ self.manager_ = project.manager ()
+
+ self.name_ = name
+ self.project_ = project
+
+ def manager (self):
+ return self.manager_
+
+ def name (self):
+ """ Returns the name of this target.
+ """
+ return self.name_
+
+ def project (self):
+ """ Returns the project for this target.
+ """
+ return self.project_
+
+ def location (self):
+ """ Return the location where the target was declared.
+ """
+ return self.location_
+
+ def full_name (self):
+ """ Returns a user-readable name for this target.
+ """
+ location = self.project ().get ('location')
+ return location + '/' + self.name_
+
+ def generate (self, property_set):
+ """ Takes a property set. Generates virtual targets for this abstract
+ target, using the specified properties, unless a different value of some
+ feature is required by the target.
+ On success, returns a GenerateResult instance with:
+ - a property_set with the usage requirements to be
+ applied to dependents
+ - a list of produced virtual targets, which may be
+ empty.
+ If 'property_set' is empty, performs default build of this
+ target, in a way specific to derived class.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def rename (self, new_name):
+ self.name_ = new_name
+
+class ProjectTarget (AbstractTarget):
+ """ Project target class (derived from 'AbstractTarget')
+
+        This class has the following responsibilities:
+        - maintaining a list of main targets in this project and
+          building them
+
+ Main targets are constructed in two stages:
+ - When Jamfile is read, a number of calls to 'add_alternative' is made.
+ At that time, alternatives can also be renamed to account for inline
+ targets.
+ - The first time 'main-target' or 'has-main-target' rule is called,
+          all alternatives are enumerated and main targets are created.
+ """
+ def __init__ (self, manager, name, project_module, parent_project, requirements, default_build):
+ AbstractTarget.__init__ (self, name, self, manager)
+
+ self.project_module_ = project_module
+ self.location_ = manager.projects().attribute (project_module, 'location')
+ self.requirements_ = requirements
+ self.default_build_ = default_build
+
+ self.build_dir_ = None
+
+ # A cache of IDs
+ self.ids_cache_ = {}
+
+        # True if main targets have already been built.
+ self.built_main_targets_ = False
+
+ # A list of the registered alternatives for this project.
+ self.alternatives_ = []
+
+ # A map from main target name to the target corresponding
+ # to it.
+ self.main_target_ = {}
+
+ # Targets marked as explicit.
+ self.explicit_targets_ = set()
+
+ # Targets marked as always
+ self.always_targets_ = set()
+
+ # The constants defined for this project.
+ self.constants_ = {}
+
+        # Whether targets for all main targets have already been created.
+ self.built_main_targets_ = 0
+
+ if parent_project:
+ self.inherit (parent_project)
+
+
+ # TODO: This is needed only by the 'make' rule. Need to find the
+ # way to make 'make' work without this method.
+ def project_module (self):
+ return self.project_module_
+
+ def get (self, attribute):
+ return self.manager().projects().attribute(
+ self.project_module_, attribute)
+
+ def build_dir (self):
+ if not self.build_dir_:
+ self.build_dir_ = self.get ('build-dir')
+ if not self.build_dir_:
+ self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin')
+
+ return self.build_dir_
+
+ def generate (self, ps):
+ """ Generates all possible targets contained in this project.
+ """
+ self.manager_.targets().log(
+ "Building project '%s' with '%s'" % (self.name (), str(ps)))
+ self.manager_.targets().increase_indent ()
+
+ result = GenerateResult ()
+
+ for t in self.targets_to_build ():
+ g = t.generate (ps)
+ result.extend (g)
+
+ self.manager_.targets().decrease_indent ()
+ return result
+
+ def targets_to_build (self):
+ """ Computes and returns a list of AbstractTarget instances which
+ must be built when this project is built.
+ """
+ result = []
+
+ if not self.built_main_targets_:
+ self.build_main_targets ()
+
+ # Collect all main targets here, except for "explicit" ones.
+ for n, t in self.main_target_.iteritems ():
+ if not t.name () in self.explicit_targets_:
+ result.append (t)
+
+ # Collect all projects referenced via "projects-to-build" attribute.
+ self_location = self.get ('location')
+ for pn in self.get ('projects-to-build'):
+ result.append (self.find(pn + "/"))
+
+ return result
+
+ def mark_targets_as_explicit (self, target_names):
+        """Add 'target_names' to the list of targets in this project
+        that should be built only by explicit request."""
+
+        # Record the names of the targets, not the instances, since this
+        # method is called before the main target instances are created.
+ self.explicit_targets_.update(target_names)
+
+ def mark_targets_as_always(self, target_names):
+ self.always_targets_.update(target_names)
+
+ def add_alternative (self, target_instance):
+ """ Add new target alternative.
+ """
+ if self.built_main_targets_:
+ raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())
+
+ self.alternatives_.append (target_instance)
+
+ def main_target (self, name):
+ if not self.built_main_targets_:
+ self.build_main_targets()
+
+ return self.main_target_[name]
+
+ def has_main_target (self, name):
+ """Tells if a main target with the specified name exists."""
+ if not self.built_main_targets_:
+ self.build_main_targets()
+
+ return self.main_target_.has_key(name)
+
+ def create_main_target (self, name):
+ """ Returns a 'MainTarget' class instance corresponding to the 'name'.
+ """
+ if not self.built_main_targets_:
+ self.build_main_targets ()
+
+        return self.main_target_.get (name, None)
+
+
+ def find_really(self, id):
+ """ Find and return the target with the specified id, treated
+ relative to self.
+ """
+ result = None
+ current_location = self.get ('location')
+
+ __re_split_project_target = re.compile (r'(.*)//(.*)')
+ split = __re_split_project_target.match (id)
+
+ project_part = None
+ target_part = None
+
+ if split:
+ project_part = split.group (1)
+ target_part = split.group (2)
+
+ project_registry = self.project_.manager ().projects ()
+
+ extra_error_message = ''
+ if project_part:
+            # There is an explicit project part in the id. Look up the
+            # project and pass the request to it.
+ pm = project_registry.find (project_part, current_location)
+
+ if pm:
+ project_target = project_registry.target (pm)
+ result = project_target.find (target_part, no_error=1)
+
+ else:
+                extra_error_message = "error: could not find project '%s'" % project_part
+
+ else:
+            # Interpret the target name as the name of a main target.
+            # Need to do this before checking for a file. Consider this:
+ #
+ # exe test : test.cpp ;
+ # install s : test : <location>. ;
+ #
+            # After the first build we will have a target 'test' in the Jamfile
+            # and a file 'test' on disk. We need the target to override the file.
+
+ result = None
+ if self.has_main_target(id):
+ result = self.main_target(id)
+
+ if not result:
+ result = FileReference (self.manager_, id, self.project_)
+ if not result.exists ():
+                    # The file does not actually exist. Reset 'result' so
+                    # that an error is issued.
+ result = None
+
+
+ if not result:
+ # Interpret id as project-id
+ project_module = project_registry.find (id, current_location)
+ if project_module:
+ result = project_registry.target (project_module)
+
+ return result
+
+ def find (self, id, no_error = False):
+ v = self.ids_cache_.get (id, None)
+
+ if not v:
+ v = self.find_really (id)
+ self.ids_cache_ [id] = v
+
+ if v or no_error:
+ return v
+
+ raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location')))
+
+
+ def build_main_targets (self):
+ self.built_main_targets_ = True
+
+ for a in self.alternatives_:
+ name = a.name ()
+ if not self.main_target_.has_key (name):
+ t = MainTarget (name, self.project_)
+ self.main_target_ [name] = t
+
+ if name in self.always_targets_:
+ a.always()
+
+ self.main_target_ [name].add_alternative (a)
+
+ def add_constant(self, name, value, path=0):
+ """Adds a new constant for this project.
+
+        The constant will be available for use in the Jamfile
+        module for this project. If 'path' is true,
+        the constant will be interpreted relative
+        to the location of the project.
+ """
+
+ if path:
+ l = self.location_
+ if not l:
+                # Projects corresponding to config files do not have a
+                # 'location' attribute, but do have a source location.
+                # It might be more reasonable to make every project have
+                # a location and use some other approach to prevent buildable
+                # targets in config files, but that is for later.
+ l = get('source-location')
+
+ value = os.path.join(l, value)
+ # Now make the value absolute path. Constants should be in
+ # platform-native form.
+ value = os.path.normpath(os.path.join(os.getcwd(), value))
+
+ self.constants_[name] = value
+ bjam.call("set-variable", self.project_module(), name, value)
+
+ def inherit(self, parent_project):
+ for c in parent_project.constants_:
+ # No need to pass the type. Path constants were converted to
+ # absolute paths already by parent.
+ self.add_constant(c, parent_project.constants_[c])
+
+ # Import rules from parent
+ this_module = self.project_module()
+ parent_module = parent_project.project_module()
+
+ rules = bjam.call("RULENAMES", parent_module)
+ if not rules:
+ rules = []
+ user_rules = [x for x in rules
+ if x not in self.manager().projects().project_rules().all_names()]
+ if user_rules:
+ bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
+
+class MainTarget (AbstractTarget):
+ """ A named top-level target in Jamfile.
+ """
+ def __init__ (self, name, project):
+ AbstractTarget.__init__ (self, name, project)
+ self.alternatives_ = []
+ self.default_build_ = property_set.empty ()
+
+ def add_alternative (self, target):
+ """ Add a new alternative for this target.
+ """
+ d = target.default_build ()
+
+ if self.alternatives_ and self.default_build_ != d:
+ get_manager().errors()("default build must be identical in all alternatives\n"
+ "main target is '%s'\n"
+ "with '%s'\n"
+ "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ()))
+
+ else:
+ self.default_build_ = d
+
+ self.alternatives_.append (target)
+
+ def __select_alternatives (self, property_set, debug):
+        """ Returns the best viable alternative for this property_set.
+ See the documentation for selection rules.
+ # TODO: shouldn't this be 'alternative' (singular)?
+ """
+ # When selecting alternatives we have to consider defaults,
+ # for example:
+ # lib l : l.cpp : <variant>debug ;
+ # lib l : l_opt.cpp : <variant>release ;
+ # won't work unless we add default value <variant>debug.
+ property_set = property_set.add_defaults ()
+
+ # The algorithm: we keep the current best viable alternative.
+        # When we find a new viable alternative, we compare it
+        # with the current best.
+ best = None
+ best_properties = None
+
+ if len (self.alternatives_) == 0:
+ return None
+
+ if len (self.alternatives_) == 1:
+ return self.alternatives_ [0]
+
+ if debug:
+ print "Property set for selection:", property_set
+
+ for v in self.alternatives_:
+ properties = v.match (property_set, debug)
+
+ if properties is not None:
+ if not best:
+ best = v
+ best_properties = properties
+
+ else:
+ if b2.util.set.equal (properties, best_properties):
+ return None
+
+ elif b2.util.set.contains (properties, best_properties):
+ # Do nothing, this alternative is worse
+ pass
+
+ elif b2.util.set.contains (best_properties, properties):
+ best = v
+ best_properties = properties
+
+ else:
+ return None
+
+ return best
+
+ def apply_default_build (self, property_set):
+ return apply_default_build(property_set, self.default_build_)
+
+ def generate (self, ps):
+        """ Selects an alternative for this main target by finding all
+        alternatives whose requirements are satisfied by 'ps' and picking the
+        one with the longest requirements set.
+ Returns the result of calling 'generate' on that alternative.
+ """
+ self.manager_.targets ().start_building (self)
+
+        # We want composite properties in the build request to act as if
+        # all the properties they expand to were explicitly specified.
+ ps = ps.expand ()
+
+ all_property_sets = self.apply_default_build (ps)
+
+ result = GenerateResult ()
+
+ for p in all_property_sets:
+ result.extend (self.__generate_really (p))
+
+ self.manager_.targets ().end_building (self)
+
+ return result
+
+ def __generate_really (self, prop_set):
+        """ Generates the main target with the given property set and returns
+        the result for the best matching alternative: a property_set with the
+        usage requirements of the generated targets, together with the
+        generated virtual targets themselves. It is possible that no targets
+        are generated.
+ """
+ best_alternative = self.__select_alternatives (prop_set, debug=0)
+
+ if not best_alternative:
+ # FIXME: revive.
+ # self.__select_alternatives(prop_set, debug=1)
+ self.manager_.errors()(
+ "No best alternative for '%s'.\n"
+ % (self.full_name(),))
+
+ result = best_alternative.generate (prop_set)
+
+ # Now return virtual targets for the only alternative
+ return result
+
+ def rename(self, new_name):
+ AbstractTarget.rename(self, new_name)
+ for a in self.alternatives_:
+ a.rename(new_name)
+
+class FileReference (AbstractTarget):
+ """ Abstract target which refers to a source file.
+        This is an artificial entity; it is useful so that the sources of
+        a target can be represented as a list of abstract target instances.
+ """
+ def __init__ (self, manager, file, project):
+ AbstractTarget.__init__ (self, file, project)
+ self.file_location_ = None
+
+ def generate (self, properties):
+ return GenerateResult (None, [
+ self.manager_.virtual_targets ().from_file (
+ self.name_, self.location(), self.project_) ])
+
+ def exists (self):
+ """ Returns true if the referred file really exists.
+ """
+ if self.location ():
+ return True
+ else:
+ return False
+
+ def location (self):
+ # Returns the location of target. Needed by 'testing.jam'
+ if not self.file_location_:
+ source_location = self.project_.get('source-location')
+
+ for src_dir in source_location:
+ location = os.path.join(src_dir, self.name())
+ if os.path.isfile(location):
+ self.file_location_ = src_dir
+ self.file_path = location
+ break
+
+ return self.file_location_
+
+def resolve_reference(target_reference, project):
+    """ Given a target_reference, made in the context of 'project', returns
+    the AbstractTarget instance that is referred to, as well as the
+    properties explicitly specified for this reference.
+ """
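+    # For example (illustrative only), a reference such as
+    #
+    #     /boost//program_options/<link>static
+    #
+    # resolves to the target named '/boost//program_options', with the extra
+    # property <link>static applying to this particular reference.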
+ # Separate target name from properties override
+ split = _re_separate_target_from_properties.match (target_reference)
+ if not split:
+ raise BaseException ("Invalid reference: '%s'" % target_reference)
+
+ id = split.group (1)
+
+ sproperties = []
+
+ if split.group (3):
+ sproperties = property.create_from_strings(feature.split(split.group(3)))
+ sproperties = feature.expand_composites(sproperties)
+
+ # Find the target
+ target = project.find (id)
+
+ return (target, property_set.create(sproperties))
+
+def generate_from_reference(target_reference, project, property_set):
+    """ Attempts to generate the target given by the target reference, which
+    can refer either to a main target or to a file.
+ Returns a list consisting of
+ - usage requirements
+ - generated virtual targets, if any
+ target_reference: Target reference
+ project: Project where the reference is made
+ property_set: Properties of the main target that makes the reference
+ """
+ target, sproperties = resolve_reference(target_reference, project)
+
+ # Take properties which should be propagated and refine them
+ # with source-specific requirements.
+ propagated = property_set.propagated()
+ rproperties = propagated.refine(sproperties)
+
+ return target.generate(rproperties)
+
+
+
+class BasicTarget (AbstractTarget):
+    """ Implements the most standard way of constructing a main target
+        alternative from sources. Allows sources to be either files or
+        other main targets and handles the generation of those dependency
+        targets.
+ """
+ def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None):
+ AbstractTarget.__init__ (self, name, project)
+
+ for s in sources:
+ if get_grist (s):
+ raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
+
+ self.sources_ = sources
+
+ if not requirements: requirements = property_set.empty ()
+ self.requirements_ = requirements
+
+ if not default_build: default_build = property_set.empty ()
+ self.default_build_ = default_build
+
+ if not usage_requirements: usage_requirements = property_set.empty ()
+ self.usage_requirements_ = usage_requirements
+
+ # A cache for resolved references
+ self.source_targets_ = None
+
+ # A cache for generated targets
+ self.generated_ = {}
+
+ # A cache for build requests
+ self.request_cache = {}
+
+        # The result of 'capture_user_context' has everything. For example, if
+        # this target is declared as a result of loading a Jamfile which was
+        # loaded when building target B which was requested from A, then we
+        # will have A, B and the Jamroot location in the context. Most of the
+        # time we only care about the Jamroot location.
+ self.user_context_ = self.manager_.errors().capture_user_context()[-1:]
+
+ self.always_ = False
+
+ def always(self):
+ self.always_ = True
+
+ def sources (self):
+ """ Returns the list of AbstractTargets which are used as sources.
+ The extra properties specified for sources are not represented.
+        The only use of this method at the moment is the '--dump-tests'
+        feature of the test system.
+ """
+ if self.source_targets_ == None:
+ self.source_targets_ = []
+ for s in self.sources_:
+ self.source_targets_.append(resolve_reference(s, self.project_)[0])
+
+ return self.source_targets_
+
+ def requirements (self):
+ return self.requirements_
+
+ def default_build (self):
+ return self.default_build_
+
+ def common_properties (self, build_request, requirements):
+ """ Given build request and requirements, return properties
+ common to dependency build request and target build
+ properties.
+ """
+ # For optimization, we add free unconditional requirements directly,
+        # without using the complex algorithm.
+        # This gives the complex algorithm a better chance of caching results.
+        # The exact effect of this "optimization" is no longer clear.
+ free_unconditional = []
+ other = []
+ for p in requirements.all():
+ if p.feature().free() and not p.condition() and p.feature().name() != 'conditional':
+ free_unconditional.append(p)
+ else:
+ other.append(p)
+ other = property_set.create(other)
+
+ key = (build_request, other)
+ if not self.request_cache.has_key(key):
+ self.request_cache[key] = self.__common_properties2 (build_request, other)
+
+ return self.request_cache[key].add_raw(free_unconditional)
+
+ # Given 'context' -- a set of already present properties, and 'requirements',
+ # decide which extra properties should be applied to 'context'.
+ # For conditional requirements, this means evaluating condition. For
+ # indirect conditional requirements, this means calling a rule. Ordinary
+ # requirements are always applied.
+ #
+    # Handles the situation where evaluating one conditional requirement
+    # affects the condition of another conditional requirement, for example:
+ #
+ # <toolset>gcc:<variant>release <variant>release:<define>RELEASE
+ #
+ # If 'what' is 'refined' returns context refined with new requirements.
+ # If 'what' is 'added' returns just the requirements that must be applied.
+ def evaluate_requirements(self, requirements, context, what):
+ # Apply non-conditional requirements.
+        # It is possible that further conditional requirements change
+        # a value set by non-conditional requirements. For example:
+ #
+ # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
+ #
+ # I'm not sure if this should be an error, or not, especially given that
+ #
+ # <threading>single
+ #
+ # might come from project's requirements.
+ unconditional = feature.expand(requirements.non_conditional())
+
+ context = context.refine(property_set.create(unconditional))
+
+ # We've collected properties that surely must be present in common
+ # properties. We now try to figure out what other properties
+ # should be added in order to satisfy rules (4)-(6) from the docs.
+
+ conditionals = property_set.create(requirements.conditional())
+
+ # It's supposed that #conditionals iterations
+ # should be enough for properties to propagate along conditions in any
+ # direction.
+ max_iterations = len(conditionals.all()) +\
+ len(requirements.get("<conditional>")) + 1
+
+ added_requirements = []
+ current = context
+
+ # It's assumed that ordinary conditional requirements can't add
+ # <indirect-conditional> properties, and that rules referred
+ # by <indirect-conditional> properties can't add new
+ # <indirect-conditional> properties. So the list of indirect conditionals
+ # does not change.
+ indirect = requirements.get("<conditional>")
+
+ ok = 0
+ for i in range(0, max_iterations):
+
+ e = conditionals.evaluate_conditionals(current).all()[:]
+
+ # Evaluate indirect conditionals.
+ for i in indirect:
+ i = b2.util.jam_to_value_maybe(i)
+ if callable(i):
+ # This is Python callable, yeah.
+ e.extend(i(current))
+ else:
+ # Name of bjam function. Because bjam is unable to handle
+ # list of Property, pass list of strings.
+ br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
+ if br:
+ e.extend(property.create_from_strings(br))
+
+ if e == added_requirements:
+ # If we got the same result, we've found final properties.
+ ok = 1
+ break
+ else:
+                # Oops, the results of evaluating the conditionals have changed.
+                # Also, 'current' contains leftovers from the previous evaluation.
+ # Recompute 'current' using initial properties and conditional
+ # requirements.
+ added_requirements = e
+ current = context.refine(property_set.create(feature.expand(e)))
+
+ if not ok:
+ self.manager().errors()("Can't evaluate conditional properties "
+ + str(conditionals))
+
+
+ if what == "added":
+ return property_set.create(unconditional + added_requirements)
+ elif what == "refined":
+ return current
+ else:
+            self.manager().errors()("Invalid value of the 'what' parameter")
+
+ def __common_properties2(self, build_request, requirements):
+ # This guarantees that default properties are present
+        # in the result, unless they are overridden by some requirement.
+        # TODO: There is a possibility that we've added <foo>bar, which is
+        # composite and expands to <foo2>bar2, but the default value of <foo2>
+        # is not bar2, in which case it is not clear what to do.
+ #
+ build_request = build_request.add_defaults()
+        # Features added by 'add_defaults' can be composite and expand
+        # to features without default values -- so they are not added yet.
+ # It could be clearer/faster to expand only newly added properties
+ # but that's not critical.
+ build_request = build_request.expand()
+
+ return self.evaluate_requirements(requirements, build_request,
+ "refined")
+
+ def match (self, property_set, debug):
+ """ Returns the alternative condition for this alternative, if
+ the condition is satisfied by 'property_set'.
+ """
+ # The condition is composed of all base non-conditional properties.
+ # It's not clear if we should expand 'self.requirements_' or not.
+ # For one thing, it would be nice to be able to put
+ # <toolset>msvc-6.0
+ # in requirements.
+ # On the other hand, if we have <variant>release in condition it
+ # does not make sense to require <optimization>full to be in
+ # build request just to select this variant.
+ bcondition = self.requirements_.base ()
+ ccondition = self.requirements_.conditional ()
+ condition = b2.util.set.difference (bcondition, ccondition)
+
+ if debug:
+ print " next alternative: required properties:", [str(p) for p in condition]
+
+ if b2.util.set.contains (condition, property_set.all()):
+
+ if debug:
+ print " matched"
+
+ return condition
+
+ else:
+ return None
+
+
+ def generate_dependency_targets (self, target_ids, property_set):
+ targets = []
+ usage_requirements = []
+ for id in target_ids:
+
+ result = generate_from_reference(id, self.project_, property_set)
+ targets += result.targets()
+ usage_requirements += result.usage_requirements().all()
+
+ return (targets, usage_requirements)
+
+ def generate_dependency_properties(self, properties, ps):
+        """ Takes dependency properties, whose values are target references,
+        and generates those targets using 'ps' as the build request.
+
+ Returns a tuple (result, usage_requirements).
+ """
+ result_properties = []
+ usage_requirements = []
+ for p in properties:
+
+ result = generate_from_reference(p.value(), self.project_, ps)
+
+ for t in result.targets():
+ result_properties.append(property.Property(p.feature(), t))
+
+ usage_requirements += result.usage_requirements().all()
+
+ return (result_properties, usage_requirements)
+
+
+
+
+ @user_error_checkpoint
+ def generate (self, ps):
+ """ Determines final build properties, generates sources,
+ and calls 'construct'. This method should not be
+ overridden.
+ """
+ self.manager_.errors().push_user_context(
+ "Generating target " + self.full_name(), self.user_context_)
+
+ if self.manager().targets().logging():
+ self.manager().targets().log(
+ "Building target '%s'" % self.name_)
+ self.manager().targets().increase_indent ()
+ self.manager().targets().log(
+ "Build request: '%s'" % str (ps.raw ()))
+ cf = self.manager().command_line_free_features()
+ self.manager().targets().log(
+ "Command line free features: '%s'" % str (cf.raw ()))
+ self.manager().targets().log(
+                "Target requirements: '%s'" % str (self.requirements().raw ()))
+
+ self.manager().targets().push_target(self)
+
+ if not self.generated_.has_key(ps):
+
+            # Apply free features from the command line. If the user said
+            #     define=FOO
+            # they most likely want this define to be set for all compiles.
+ ps = ps.refine(self.manager().command_line_free_features())
+ rproperties = self.common_properties (ps, self.requirements_)
+
+ self.manager().targets().log(
+ "Common properties are '%s'" % str (rproperties))
+
+ if rproperties.get("<build>") != ["no"]:
+
+ result = GenerateResult ()
+
+ properties = rproperties.non_dependency ()
+
+ (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties)
+ properties += p
+ assert all(isinstance(p, property.Property) for p in properties)
+ usage_requirements = u
+
+ (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties)
+ usage_requirements += u
+
+ self.manager_.targets().log(
+ "Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements))
+
+ # FIXME:
+
+ rproperties = property_set.create(properties + usage_requirements)
+ usage_requirements = property_set.create (usage_requirements)
+
+ self.manager_.targets().log(
+ "Build properties: '%s'" % str(rproperties))
+
+ source_targets += rproperties.get('<source>')
+
+                # We might get duplicate sources, for example if we link to
+                # two libraries which have the same <library> in their usage
+                # requirements.
+                # Use a stable sort, since for some targets the order is
+                # important, e.g. the RUN_PY target needs the Python source
+                # to come first.
+ source_targets = unique(source_targets, stable=True)
+
+ # FIXME: figure why this call messes up source_targets in-place
+ result = self.construct (self.name_, source_targets[:], rproperties)
+
+ if result:
+ assert len(result) == 2
+ gur = result [0]
+ result = result [1]
+
+ if self.always_:
+ for t in result:
+ t.always()
+
+ s = self.create_subvariant (
+ result,
+ self.manager().virtual_targets().recent_targets(), ps,
+ source_targets, rproperties, usage_requirements)
+ self.manager().virtual_targets().clear_recent_targets()
+
+ ur = self.compute_usage_requirements (s)
+ ur = ur.add (gur)
+ s.set_usage_requirements (ur)
+
+ self.manager_.targets().log (
+ "Usage requirements from '%s' are '%s'" %
+ (self.name(), str(rproperties)))
+
+ self.generated_[ps] = GenerateResult (ur, result)
+ else:
+ self.generated_[ps] = GenerateResult (property_set.empty(), [])
+ else:
+ # If we just see <build>no, we cannot produce any reasonable
+ # diagnostics. The code that adds this property is expected
+ # to explain why a target is not built, for example using
+ # the configure.log-component-configuration function.
+
+ # If this target fails to build, add <build>no to properties
+ # to cause any parent target to fail to build. Except that it
+ # - does not work now, since we check for <build>no only in
+ # common properties, but not in properties that came from
+ # dependencies
+ # - it's not clear if that's a good idea anyway. The alias
+ # target, for example, should not fail to build if a dependency
+ # fails.
+ self.generated_[ps] = GenerateResult(
+ property_set.create(["<build>no"]), [])
+ else:
+ self.manager().targets().log ("Already built")
+
+ self.manager().targets().pop_target()
+ self.manager().targets().decrease_indent()
+
+ return self.generated_[ps]
+
+ def compute_usage_requirements (self, subvariant):
+ """ Given the set of generated targets, and refined build
+        properties, determines and sets appropriate usage requirements
+ on those targets.
+ """
+ rproperties = subvariant.build_properties ()
+        xusage_requirements = self.evaluate_requirements(
+ self.usage_requirements_, rproperties, "added")
+
+ # We generate all dependency properties and add them,
+ # as well as their usage requirements, to result.
+ (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
+ extra = r1 + r2
+
+ result = property_set.create (xusage_requirements.non_dependency () + extra)
+
+ # Propagate usage requirements we've got from sources, except
+ # for the <pch-header> and <pch-file> features.
+ #
+ # That feature specifies which pch file to use, and should apply
+ # only to direct dependents. Consider:
+ #
+ # pch pch1 : ...
+ # lib lib1 : ..... pch1 ;
+ # pch pch2 :
+ # lib lib2 : pch2 lib1 ;
+ #
+ # Here, lib2 should not get <pch-header> property from pch1.
+ #
+ # Essentially, when those two features are in usage requirements,
+ # they are propagated only to direct dependents. We might need
+ # a more general mechanism, but for now, only those two
+ # features are special.
+ removed_pch = filter(lambda prop: prop.feature().name() not in ['<pch-header>', '<pch-file>'], subvariant.sources_usage_requirements().all())
+ result = result.add(property_set.PropertySet(removed_pch))
+
+ return result
+
+ def create_subvariant (self, root_targets, all_targets,
+ build_request, sources,
+ rproperties, usage_requirements):
+        """Creates new Subvariant instances for 'targets'.
+        - 'root_targets' - virtual targets to be returned to dependents
+        - 'all_targets' - all virtual targets created while building this
+          main target
+        - 'build_request' - property_set instance with the requested build
+          properties"""
+
+ for e in root_targets:
+ e.root (True)
+
+ s = Subvariant (self, build_request, sources,
+ rproperties, usage_requirements, all_targets)
+
+ for v in all_targets:
+ if not v.creating_subvariant():
+ v.creating_subvariant(s)
+
+ return s
+
+ def construct (self, name, source_targets, properties):
+        """ Constructs the virtual targets for this abstract target and
+        the dependency graph. Returns a tuple consisting of the usage
+        requirements and the list of virtual targets.
+        Should be overridden in derived classes.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+
+class TypedTarget (BasicTarget):
+ import generators
+
+ def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
+ BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
+ self.type_ = type
+
+ def __jam_repr__(self):
+ return b2.util.value_to_jam(self)
+
+ def type (self):
+ return self.type_
+
+ def construct (self, name, source_targets, prop_set):
+
+ r = generators.construct (self.project_, os.path.splitext(name)[0],
+ self.type_,
+ prop_set.add_raw(['<main-target-type>' + self.type_]),
+ source_targets, True)
+
+ if not r:
+ print "warning: Unable to construct '%s'" % self.full_name ()
+
+ # Are there any top-level generators for this type/property set.
+ if not generators.find_viable_generators (self.type_, prop_set):
+ print "error: no generators were found for type '" + self.type_ + "'"
+ print "error: and the requested properties"
+ print "error: make sure you've configured the needed tools"
+ print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
+
+ print "To debug this problem, try the --debug-generators option."
+ sys.exit(1)
+
+ return r
+
+def apply_default_build(property_set, default_build):
+ # 1. First, see what properties from default_build
+ # are already present in property_set.
+
+ specified_features = set(p.feature() for p in property_set.all())
+
+ defaults_to_apply = []
+ for d in default_build.all():
+ if not d.feature() in specified_features:
+ defaults_to_apply.append(d)
+
+    # 2. If there are any defaults to be applied, form the new
+    #    build request. Pass it through 'expand_no_defaults', since
+    #    default_build might contain "release debug", which will
+ # result in two property_sets.
+ result = []
+ if defaults_to_apply:
+
+ # We have to compress subproperties here to prevent
+ # property lists like:
+ #
+ # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
+ #
+ # from being expanded into:
+ #
+ # <toolset-msvc:version>7.1/<threading>multi
+ # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
+ #
+ # due to cross-product property combination. That may
+ # be an indication that
+ # build_request.expand-no-defaults is the wrong rule
+ # to use here.
+ compressed = feature.compress_subproperties(property_set.all())
+
+ result = build_request.expand_no_defaults(
+ b2.build.property_set.create(feature.expand([p])) for p in (compressed + defaults_to_apply))
+
+ else:
+ result.append (property_set)
+
+ return result
+
+
+def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ TypedTarget(name, project, type,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+
+def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ klass(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+def metatarget_function_for_class(class_):
+
+ @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"]))
+ def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []):
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ class_(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+ return create_metatarget
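+
+
+# A hypothetical front-end module could use the helper above to expose a
+# metatarget class (one with the BasicTarget constructor signature) as a
+# Jamfile rule, roughly along these lines ('MyTarget' and "my-target" are
+# illustrative names only):
+#
+#     my_rule = metatarget_function_for_class(MyTarget)
+#     get_manager().projects().add_rule("my-target", my_rule)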
diff --git a/tools/build/src/build/toolset.jam b/tools/build/src/build/toolset.jam
new file mode 100644
index 0000000000..a942cd9060
--- /dev/null
+++ b/tools/build/src/build/toolset.jam
@@ -0,0 +1,603 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for toolset definition.
+
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import path ;
+import property ;
+import regex ;
+import sequence ;
+import set ;
+import property-set ;
+import order ;
+import "class" : new ;
+
+
+.flag-no = 1 ;
+
+.ignore-requirements = ;
+
+# This is used only for testing, to make sure we do not get random extra
+# elements in paths.
+if --ignore-toolset-requirements in [ modules.peek : ARGV ]
+{
+ .ignore-requirements = 1 ;
+}
+
+
+# Initializes an additional toolset-like module. First loads the
+# 'toolset-module' and then calls its 'init' rule with the trailing arguments.
+#
+rule using ( toolset-module : * )
+{
+ import $(toolset-module) ;
+ $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
+ ;
+}
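+
+# For example (illustrative), a user configuration file typically contains a
+# line such as
+#
+#   using gcc : 4.9 : g++-4.9 ;
+#
+# which, via the 'using' rule exposed to Jamfiles by the project module, ends
+# up here: the gcc module is imported and gcc.init is called with the
+# remaining arguments.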
+
+
+# Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
+# converted to '<toolset>gcc/<toolset-version>3.2'.
+#
+local rule normalize-condition ( property-sets * )
+{
+ local result ;
+ for local p in $(property-sets)
+ {
+ local split = [ feature.split $(p) ] ;
+        local expanded = [ feature.expand-subfeatures $(split) ] ;
+ result += $(expanded:J=/) ;
+ }
+ return $(result) ;
+}
+
+
+# Specifies if the 'flags' rule should check that the invoking module is the
+# same as the module we are setting the flag for. 'v' can be either 'checked' or
+# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore
+# the setting that was in effect before calling this rule.
+#
+rule push-checking-for-flags-module ( v )
+{
+ .flags-module-checking = $(v) $(.flags-module-checking) ;
+}
+
+rule pop-checking-for-flags-module ( )
+{
+ .flags-module-checking = $(.flags-module-checking[2-]) ;
+}
+
+
+# Specifies the flags (variables) that must be set on targets under certain
+# conditions, described by arguments.
+#
+rule flags (
+ rule-or-module # If contains a dot, should be a rule name. The flags will
+ # be applied when that rule is used to set up build
+ # actions.
+ #
+ # If does not contain dot, should be a module name. The
+ # flag will be applied for all rules in that module. If
+ # module for rule is different from the calling module, an
+ # error is issued.
+
+ variable-name # Variable that should be set on target.
+ condition * : # A condition when this flag should be applied. Should be a
+ # set of property sets. If one of those property sets is
+ # contained in the build properties, the flag will be used.
+ # Implied values are not allowed: "<toolset>gcc" should be
+ # used, not just "gcc". Subfeatures, like in
+ # "<toolset>gcc-3.2" are allowed. If left empty, the flag
+ # will be used unconditionally.
+ #
+                     # Property sets may use value-less properties ('<a>' vs.
+                     # '<a>value') to match absent properties. This allows one to
+ # separately match:
+ #
+ # <architecture>/<address-model>64
+ # <architecture>ia64/<address-model>
+ #
+ # Where both features are optional. Without this syntax
+ # we would be forced to define "default" values.
+
+ values * : # The value to add to variable. If <feature> is specified,
+ # then the value of 'feature' will be added.
+ unchecked ? # If value 'unchecked' is passed, will not test that flags
+ # are set for the calling module.
+ : hack-hack ? # For
+ # flags rule OPTIONS <cxx-abi> : -model ansi
+ # Treat <cxx-abi> as condition
+ # FIXME: ugly hack.
+)
+{
+ local caller = [ CALLER_MODULE ] ;
+ if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
+ && [ MATCH "(Jamfile<.*)" : $(caller) ]
+ {
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+ # Jamfile module (this will be considered as rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule-or-module = $(caller).$(rule-or-module) ;
+ }
+ else
+ {
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ if $(unchecked) != unchecked
+ && $(.flags-module-checking[1]) != unchecked
+ && $(module_) != $(caller)
+ {
+ errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
+ }
+ }
+
+ if $(condition) && ! $(condition:G=) && ! $(hack-hack)
+ {
+        # We have a condition in the form '<feature>', that is, without a
+        # value. That is the older syntax:
+ # flags gcc.link RPATH <dll-path> ;
+ # for compatibility, convert it to
+ # flags gcc.link RPATH : <dll-path> ;
+ values = $(condition) ;
+ condition = ;
+ }
+
+ if $(condition)
+ {
+ property.validate-property-sets $(condition) ;
+ condition = [ normalize-condition $(condition) ] ;
+ }
+
+ add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
+}
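+
+# For example (illustrative), a toolset module such as gcc.jam can request that
+# the OPTIONS variable carry -fPIC for its compile actions whenever
+# <link>shared is present in the build properties:
+#
+#   flags gcc.compile OPTIONS <link>shared : -fPIC ;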
+
+
+# Adds a new flag setting with the specified values. Does no checking.
+#
+local rule add-flag ( rule-or-module : variable-name : condition * : values * )
+{
+ .$(rule-or-module).flags += $(.flag-no) ;
+
+ # Store all flags for a module.
+ local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
+ .module-flags.$(module_) += $(.flag-no) ;
+ # Store flag-no -> rule-or-module mapping.
+ .rule-or-module.$(.flag-no) = $(rule-or-module) ;
+
+ .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
+ .$(rule-or-module).values.$(.flag-no) += $(values) ;
+ .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
+
+ .flag-no = [ numbers.increment $(.flag-no) ] ;
+}
+
+
+# Returns the first element of 'property-sets' which is a subset of
+# 'properties' or an empty list if no such element exists.
+#
+rule find-property-subset ( property-sets * : properties * )
+{
+ # Cut property values off.
+ local prop-keys = $(properties:G) ;
+
+ local result ;
+ for local s in $(property-sets)
+ {
+ if ! $(result)
+ {
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+
+ local set = [ feature.split $(s) ] ;
+
+ # Find the set of features that
+ # - have no property specified in required property set
+ # - are omitted in the build property set.
+ local default-props ;
+ for local i in $(set)
+ {
+ # If $(i) is a value-less property it should match default value
+ # of an optional property. See the first line in the example
+ # below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
+ {
+ default-props += $(i) ;
+ }
+ }
+
+ if $(set) in $(properties) $(default-props)
+ {
+ result = $(s) ;
+ }
+ }
+ }
+ return $(result) ;
+}
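+
+# For example (illustrative feature values only):
+#
+#     find-property-subset <inlining>/<optimization>speed <rtti>off
+#         : <toolset>gcc <optimization>speed ;
+#
+# returns <inlining>/<optimization>speed: the value-less <inlining> matches
+# because no <inlining> property appears in the build properties and
+# <optimization>speed is present, while <rtti>off is absent, so the second
+# property set does not match.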
+
+
+# Returns a value to be added to some flag for some target based on the flag's
+# value definition and the given target's property set.
+#
+rule handle-flag-value ( value * : properties * )
+{
+ local result ;
+ if $(value:G)
+ {
+ local matches = [ property.select $(value) : $(properties) ] ;
+ local order ;
+ for local p in $(matches)
+ {
+ local att = [ feature.attributes $(p:G) ] ;
+ if dependency in $(att)
+ {
+ # The value of a dependency feature is a target and needs to be
+ # actualized.
+ result += [ $(p:G=).actualize ] ;
+ }
+ else if path in $(att) || free in $(att)
+ {
+ local values ;
+ # Treat features with && in the value specially -- each
+ # &&-separated element is considered a separate value. This is
+ # needed to handle searched libraries or include paths, which
+ # may need to be in a specific order.
+ if ! [ MATCH (&&) : $(p:G=) ]
+ {
+ values = $(p:G=) ;
+ }
+ else
+ {
+ values = [ regex.split $(p:G=) "&&" ] ;
+ }
+ if path in $(att)
+ {
+ values = [ sequence.transform path.native : $(values) ] ;
+ }
+ result += $(values) ;
+ if $(values[2])
+ {
+ if ! $(order)
+ {
+ order = [ new order ] ;
+ }
+ local prev ;
+ for local v in $(values)
+ {
+ if $(prev)
+ {
+ $(order).add-pair $(prev) $(v) ;
+ }
+ prev = $(v) ;
+ }
+ }
+ }
+ else
+ {
+ result += $(p:G=) ;
+ }
+ }
+ if $(order)
+ {
+ result = [ $(order).order [ sequence.unique $(result) : stable ] ] ;
+ DELETE_MODULE $(order) ;
+ }
+ }
+ else
+ {
+ result += $(value) ;
+ }
+ return $(result) ;
+}
+
+
+# Given a rule name and a property set, returns a list of interleaved variable
+# names and values which must be set on targets for that rule/property-set
+# combination.
+#
+rule set-target-variables-aux ( rule-or-module : property-set )
+{
+ local result ;
+ properties = [ $(property-set).raw ] ;
+ for local f in $(.$(rule-or-module).flags)
+ {
+ local variable = $(.$(rule-or-module).variable.$(f)) ;
+ local condition = $(.$(rule-or-module).condition.$(f)) ;
+ local values = $(.$(rule-or-module).values.$(f)) ;
+
+ if ! $(condition) ||
+ [ find-property-subset $(condition) : $(properties) ]
+ {
+ local processed ;
+ for local v in $(values)
+ {
+ # The value might be <feature-name> so needs special treatment.
+ processed += [ handle-flag-value $(v) : $(properties) ] ;
+ }
+ for local r in $(processed)
+ {
+ result += $(variable) $(r) ;
+ }
+ }
+ }
+
+ # Strip away last dot separated part and recurse.
+ local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
+ }
+ return $(result) ;
+}
+
+rule relevant-features ( rule-or-module )
+{
+ local result ;
+ if ! $(.relevant-features.$(rule-or-module))
+ {
+ for local f in $(.$(rule-or-module).flags)
+ {
+ local condition = $(.$(rule-or-module).condition.$(f)) ;
+ local values = $(.$(rule-or-module).values.$(f)) ;
+
+ for local c in $(condition)
+ {
+ for local p in [ feature.split $(c) ]
+ {
+ if $(p:G)
+ {
+ result += $(p:G) ;
+ }
+ else
+ {
+ local temp = [ feature.expand-subfeatures $(p) ] ;
+ result += $(temp:G) ;
+ }
+ }
+ }
+
+ for local v in $(values)
+ {
+ if $(v:G)
+ {
+ result += $(v:G) ;
+ }
+ }
+ }
+
+ # Strip away last dot separated part and recurse.
+ local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
+ if $(next)
+ {
+ result += [ relevant-features $(next[1]) ] ;
+ }
+ result = [ sequence.unique $(result) ] ;
+ if $(result[1]) = ""
+ {
+ result = $(result) ;
+ }
+ .relevant-features.$(rule-or-module) = $(result) ;
+ return $(result) ;
+ }
+ else
+ {
+ return $(.relevant-features.$(rule-or-module)) ;
+ }
+}
+
+rule filter-property-set ( rule-or-module : property-set )
+{
+ local key = .filtered.property-set.$(rule-or-module).$(property-set) ;
+ if ! $($(key))
+ {
+ local relevant = [ relevant-features $(rule-or-module) ] ;
+ local result ;
+ for local p in [ $(property-set).raw ]
+ {
+ if $(p:G) in $(relevant)
+ {
+ result += $(p) ;
+ }
+ }
+ $(key) = [ property-set.create $(result) ] ;
+ }
+ return $($(key)) ;
+}
+
+rule set-target-variables ( rule-or-module targets + : property-set )
+{
+ property-set = [ filter-property-set $(rule-or-module) : $(property-set) ] ;
+ local key = .stv.$(rule-or-module).$(property-set) ;
+ local settings = $($(key)) ;
+ if ! $(settings)
+ {
+ settings = [ set-target-variables-aux $(rule-or-module) :
+ $(property-set) ] ;
+
+ if ! $(settings)
+ {
+ settings = none ;
+ }
+ $(key) = $(settings) ;
+ }
+
+ if $(settings) != none
+ {
+ local var-name = ;
+ for local name-or-value in $(settings)
+ {
+ if $(var-name)
+ {
+ $(var-name) on $(targets) += $(name-or-value) ;
+ var-name = ;
+ }
+ else
+ {
+ var-name = $(name-or-value) ;
+ }
+ }
+ }
+}
+
+
+# Make toolset 'toolset', defined in a module of the same name, inherit from
+# 'base'.
+# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
+# Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
+# property in requires is adjusted too.
+# 3. All flags are inherited.
+# 4. All rules are imported.
+#
+rule inherit ( toolset : base )
+{
+ import $(base) ;
+ inherit-generators $(toolset) : $(base) ;
+ inherit-flags $(toolset) : $(base) ;
+ inherit-rules $(toolset) : $(base) ;
+}
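+
+# A derived toolset module would typically call this as (hypothetical module
+# name, mirroring how real toolsets derive from gcc):
+#
+#     import toolset ;
+#     toolset.inherit my-gcc-variant : gcc ;
+#
+# after which my-gcc-variant picks up gcc's generators, flags and rules and
+# only needs to override what actually differs.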
+
+
+rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
+{
+ properties ?= <toolset>$(toolset) ;
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ for local g in $(base-generators)
+ {
+ local id = [ $(g).id ] ;
+
+ if ! $(id) in $(generators-to-ignore)
+ {
+ # Some generator names have multiple periods in their name, so
+ # $(id:B=$(toolset)) does not generate the right new-id name. E.g.
+ # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
+ # not what we want. Manually parse the base and suffix. If there is
+ # a better way to do this, I would love to see it. See also the
+ # register() rule in the generators module.
+ local base = $(id) ;
+ local suffix = "" ;
+ while $(base:S)
+ {
+ suffix = $(base:S)$(suffix) ;
+ base = $(base:B) ;
+ }
+ local new-id = $(toolset)$(suffix) ;
+
+ generators.register [ $(g).clone $(new-id) : $(properties) ] ;
+ }
+ }
+}
+
+
+# Brings all flag definitions from the 'base' toolset into the 'toolset'
+# toolset. Flag definitions whose conditions make use of properties in
+# 'prohibited-properties' are ignored. Note that a property is not the same as
+# a feature: <debug-symbols>on and <debug-symbols>off are distinct properties,
+# so blocking one of them does not block the other one.
+#
+# The flag conditions are not altered at all, so if a condition includes a name,
+# or version of a base toolset, it will not ever match the inheriting toolset.
+# When such flag settings must be inherited, define a rule in base toolset
+# module and call it as needed.
+#
+rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
+{
+ for local f in $(.module-flags.$(base))
+ {
+ local rule-or-module = $(.rule-or-module.$(f)) ;
+ if ( [ set.difference
+ $(.$(rule-or-module).condition.$(f)) :
+ $(prohibited-properties) ]
+ || ! $(.$(rule-or-module).condition.$(f))
+ ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
+ {
+ local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
+ local new-rule-or-module ;
+ if $(rule_)
+ {
+ new-rule-or-module = $(toolset).$(rule_) ;
+ }
+ else
+ {
+ new-rule-or-module = $(toolset) ;
+ }
+
+ add-flag
+ $(new-rule-or-module)
+ : $(.$(rule-or-module).variable.$(f))
+ : $(.$(rule-or-module).condition.$(f))
+ : $(.$(rule-or-module).values.$(f)) ;
+ }
+ }
+}
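+
+# Example (hypothetical toolset name): inherit gcc's flags except those
+# conditioned on 64-bit addressing, and skip anything affecting OPTIONS:
+#
+#     inherit-flags my-toolset : gcc : <address-model>64 : OPTIONS ;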
+
+
+rule inherit-rules ( toolset : base : localize ? )
+{
+ # It appears that "action" creates a local rule.
+ local base-generators = [ generators.generators-for-toolset $(base) ] ;
+ local rules ;
+ for local g in $(base-generators)
+ {
+ rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
+ }
+ rules = [ sequence.unique $(rules) ] ;
+ IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
+ IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
+}
+
+.requirements = [ property-set.empty ] ;
+
+# Return the list of global 'toolset requirements'. Those requirements will be
+# automatically added to the requirements of any main target.
+#
+rule requirements ( )
+{
+ return $(.requirements) ;
+}
+
+
+# Adds elements to the list of global 'toolset requirements'. The requirements
+# will be automatically added to the requirements for all main targets, as if
+# they were specified literally. For best results, all requirements added should
+# be conditional or indirect conditional.
+#
+rule add-requirements ( requirements * )
+{
+ if ! $(.ignore-requirements)
+ {
+ requirements = [ property.translate-indirect $(requirements) : [ CALLER_MODULE ] ] ;
+ requirements = [ property.expand-subfeatures-in-conditions $(requirements) ] ;
+ requirements = [ property.make $(requirements) ] ;
+ .requirements = [ $(.requirements).add-raw $(requirements) ] ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
+ assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
+ assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
+
+ local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
+ assert.result <a>/<b> : find-property-subset $(p-set) : ;
+ assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
+ assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
+ assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
+}
diff --git a/tools/build/src/build/toolset.py b/tools/build/src/build/toolset.py
new file mode 100644
index 0000000000..e969123d44
--- /dev/null
+++ b/tools/build/src/build/toolset.py
@@ -0,0 +1,399 @@
+# Status: being ported by Vladimir Prus
+# Base revision: 40958
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+""" Support for toolset definition.
+"""
+
+# Explicit imports for names used below.
+import re
+import os
+import bjam
+
+import feature, property, generators, property_set
+import b2.util.set
+from b2.util import cached, qualify_jam_action
+from b2.util.utility import *
+from b2.util import bjam_signature
+from b2.manager import get_manager
+
+__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
+__re_two_ampersands = re.compile ('(&&)')
+__re_first_segment = re.compile ('([^.]*).*')
+__re_first_group = re.compile (r'[^.]*\.(.*)')
+
+# A single toolset flag: the mechanism for setting a variable value. It
+# specifies that when certain properties are present in the build property
+# set, certain values should be appended to some variable.
+#
+# A flag applies to a specific action in a specific module. The list of all
+# flags for a module is stored, and each flag additionally records the name
+# of the rule it applies to.
+class Flag:
+
+ def __init__(self, variable_name, values, condition, rule = None):
+ self.variable_name = variable_name
+ self.values = values
+ self.condition = condition
+ self.rule = rule
+
+ def __str__(self):
+ return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\
+ ", " + str(self.condition) + ", " + str(self.rule) + ")")
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __module_flags, __flags, __stv
+
+ # Mapping from module name to a list of all flags that apply
+ # to either that module directly, or to any rule in that module.
+ # Each element of the list is Flag instance.
+ # So, for module named xxx this might contain flags for 'xxx',
+ # for 'xxx.compile', for 'xxx.compile.c++', etc.
+ __module_flags = {}
+
+ # Mapping from specific rule or module name to a list of Flag instances
+ # that apply to that name.
+ # Say, it might contain flags for 'xxx.compile.c++'. If there are
+ # entries for module name 'xxx', they are flags for 'xxx' itself,
+ # not including any rules in that module.
+ __flags = {}
+
+ # A cache for variable settings. The key is generated from the rule name and the properties.
+ __stv = {}
+
+reset ()
+
+# FIXME: --ignore-toolset-requirements
+def using(toolset_module, *args):
+ loaded_toolset_module = get_manager().projects().load_module(toolset_module, [os.getcwd()])
+ loaded_toolset_module.init(*args)
+
+# FIXME push-checking-for-flags-module ....
+# FIXME: investigate existing uses of 'hack-hack' parameter
+# in jam code.
+
+@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
+ ["values", "*"]))
+def flags(rule_or_module, variable_name, condition, values = []):
+ """ Specifies the flags (variables) that must be set on targets under certain
+ conditions, described by arguments.
+ rule_or_module: If contains dot, should be a rule name.
+ The flags will be applied when that rule is
+ used to set up build actions.
+
+ If does not contain dot, should be a module name.
+ The flags will be applied for all rules in that
+ module.
+ If module for rule is different from the calling
+ module, an error is issued.
+
+ variable_name: Variable that should be set on target
+
+ condition A condition when this flag should be applied.
+ Should be set of property sets. If one of
+ those property sets is contained in build
+ properties, the flag will be used.
+ Implied values are not allowed:
+ "<toolset>gcc" should be used, not just
+ "gcc". Subfeatures, like in "<toolset>gcc-3.2",
+ are allowed. If left empty, the flag will
+ always be used.
+
+ Property sets may use value-less properties
+ ('<a>' vs. '<a>value') to match absent
+ properties. This makes it possible to
+ separately match
+
+ <architecture>/<address-model>64
+ <architecture>ia64/<address-model>
+
+ where both features are optional. Without this
+ syntax we'd be forced to define "default" values.
+
+ values: The value to add to the variable. If <feature>
+ is specified, then the value of 'feature'
+ will be added.
+ """
+ caller = bjam.caller()
+ if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
+ # Unqualified rule name, used inside Jamfile. Most likely used with
+ # 'make' or 'notfile' rules. This prevents setting flags on the entire
+ # Jamfile module (this will be considered as rule), but who cares?
+ # Probably, 'flags' rule should be split into 'flags' and
+ # 'flags-on-module'.
+ rule_or_module = qualify_jam_action(rule_or_module, caller)
+ else:
+ # FIXME: revive checking that we don't set flags for a different
+ # module unintentionally
+ pass
+
+ if condition and not replace_grist (condition, ''):
+ # We have condition in the form '<feature>', that is, without
+ # value. That's a previous syntax:
+ #
+ # flags gcc.link RPATH <dll-path> ;
+ # for compatibility, convert it to
+ # flags gcc.link RPATH : <dll-path> ;
+ values = [ condition ]
+ condition = None
+
+ if condition:
+ transformed = []
+ for c in condition:
+ # FIXME: 'split' might be too raw a tool here.
+ pl = [property.create_from_string(s,False,True) for s in c.split('/')]
+ pl = feature.expand_subfeatures(pl);
+ transformed.append(property_set.create(pl))
+ condition = transformed
+
+ property.validate_property_sets(condition)
+
+ __add_flag (rule_or_module, variable_name, condition, values)
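+
+# Illustrative call of the 'flags' function above (hypothetical toolset and
+# variable names), mirroring the Jam-level rule it ports:
+#
+#   flags('my-toolset.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
+#
+# appends -O3 to OPTIONS on targets built with <optimization>speed.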
+
+def set_target_variables (manager, rule_or_module, targets, ps):
+ """
+ """
+ settings = __set_target_variables_aux(manager, rule_or_module, ps)
+
+ if settings:
+ for s in settings:
+ for target in targets:
+ manager.engine ().set_target_variable (target, s [0], s[1], True)
+
+def find_satisfied_condition(conditions, ps):
+ """Returns the first element of 'property-sets' which is a subset of
+ 'properties', or an empty list if no such element exists."""
+
+ features = set(p.feature() for p in ps.all())
+
+ for condition in conditions:
+
+ found_all = True
+ for i in condition.all():
+
+ found = False
+ if i.value():
+ found = i.value() in ps.get(i.feature())
+ else:
+ # Handle value-less properties like '<architecture>' (compare with
+ # '<architecture>x86').
+ # If $(i) is a value-less property it should match default
+ # value of an optional property. See the first line in the
+ # example below:
+ #
+ # property set properties result
+ # <a> <b>foo <b>foo match
+ # <a> <b>foo <a>foo <b>foo no match
+ # <a>foo <b>foo <b>foo no match
+ # <a>foo <b>foo <a>foo <b>foo match
+ found = not i.feature() in features
+
+ found_all = found_all and found
+
+ if found_all:
+ return condition
+
+ return None
+
+
+def register (toolset):
+ """ Registers a new toolset.
+ """
+ feature.extend('toolset', [toolset])
+
+def inherit_generators (toolset, properties, base, generators_to_ignore = []):
+ if not properties:
+ properties = [replace_grist (toolset, '<toolset>')]
+
+ base_generators = generators.generators_for_toolset(base)
+
+ for g in base_generators:
+ id = g.id()
+
+ if not id in generators_to_ignore:
+ # Some generator names have multiple periods in their name, so
+ # $(id:B=$(toolset)) doesn't generate the right new_id name.
+ # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
+ # which is not what we want. Manually parse the base and suffix
+ # (if there's a better way to do this, I'd love to see it.)
+ # See also register in module generators.
+ (base, suffix) = split_action_id(id)
+
+ new_id = toolset + '.' + suffix
+
+ generators.register(g.clone(new_id, properties))
+
+def inherit_flags(toolset, base, prohibited_properties = []):
+ """Brings all flag definitions from the 'base' toolset into the 'toolset'
+ toolset. Flag definitions whose conditions make use of properties in
+ 'prohibited-properties' are ignored. Note that a property is not the same as a
+ feature: <debug-symbols>on and <debug-symbols>off are distinct properties, so
+ blocking one of them does not block the other one.
+
+ The flag conditions are not altered at all, so if a condition includes a name,
+ or version of a base toolset, it won't ever match the inheriting toolset. When
+ such flag settings must be inherited, define a rule in base toolset module and
+ call it as needed."""
+ for f in __module_flags.get(base, []):
+
+ if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
+ match = __re_first_group.match(f.rule)
+ rule_ = None
+ if match:
+ rule_ = match.group(1)
+
+ new_rule_or_module = ''
+
+ if rule_:
+ new_rule_or_module = toolset + '.' + rule_
+ else:
+ new_rule_or_module = toolset
+
+ __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
+
+def inherit_rules (toolset, base):
+ pass
+ # FIXME: do something about this.
+# base_generators = generators.generators_for_toolset (base)
+
+# import action
+
+# ids = []
+# for g in base_generators:
+# (old_toolset, id) = split_action_id (g.id ())
+# ids.append (id) ;
+
+# new_actions = []
+
+# engine = get_manager().engine()
+ # FIXME: do this!
+# for action in engine.action.values():
+# pass
+# (old_toolset, id) = split_action_id(action.action_name)
+#
+# if old_toolset == base:
+# new_actions.append ((id, value [0], value [1]))
+#
+# for a in new_actions:
+# action.register (toolset + '.' + a [0], a [1], a [2])
+
+ # TODO: how to deal with this?
+# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ;
+# # Import the rules to the global scope
+# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
+# }
+#
+
+######################################################################################
+# Private functions
+
+@cached
+def __set_target_variables_aux (manager, rule_or_module, ps):
+ """ Given a rule name and a property set, returns a list of tuples of
+ variable names and values, which must be set on targets for that
+ rule/properties combination.
+ """
+ result = []
+
+ for f in __flags.get(rule_or_module, []):
+
+ if not f.condition or find_satisfied_condition (f.condition, ps):
+ processed = []
+ for v in f.values:
+ # The value might be <feature-name> so needs special
+ # treatment.
+ processed += __handle_flag_value (manager, v, ps)
+
+ for r in processed:
+ result.append ((f.variable_name, r))
+
+ # strip away last dot separated part and recurse.
+ next = __re_split_last_segment.match(rule_or_module)
+
+ if next:
+ result.extend(__set_target_variables_aux(
+ manager, next.group(1), ps))
+
+ return result
+
+def __handle_flag_value (manager, value, ps):
+ result = []
+
+ if get_grist (value):
+ f = feature.get(value)
+ values = ps.get(f)
+
+ for value in values:
+
+ if f.dependency():
+ # the value of a dependency feature is a target
+ # and must be actualized
+ result.append(value.actualize())
+
+ elif f.path() or f.free():
+
+ # Treat features with && in the value
+ # specially -- each &&-separated element is considered
+ # a separate value. This is needed to handle searched
+ # libraries, which must be in a specific order.
+ if not __re_two_ampersands.search(value):
+ result.append(value)
+
+ else:
+ result.extend(value.split ('&&'))
+ else:
+ result.append (value)  # value from the property set, already ungristed
+ else:
+ result.append (value)
+
+ return result
+
+def __add_flag (rule_or_module, variable_name, condition, values):
+ """ Adds a new flag setting with the specified values.
+ Does no checking.
+ """
+ f = Flag(variable_name, values, condition, rule_or_module)
+
+ # Grab the name of the module
+ m = __re_first_segment.match (rule_or_module)
+ assert m
+ module = m.group(1)
+
+ __module_flags.setdefault(module, []).append(f)
+ __flags.setdefault(rule_or_module, []).append(f)
+
+__requirements = []
+
+def requirements():
+ """Return the list of global 'toolset requirements'.
+ Those requirements will be automatically added to the requirements of any main target."""
+ return __requirements
+
+def add_requirements(requirements):
+ """Adds elements to the list of global 'toolset requirements'. The requirements
+ will be automatically added to the requirements for all main targets, as if
+ they were specified literally. For best results, all requirements added should
+ be conditional or indirect conditional."""
+
+ #if ! $(.ignore-requirements)
+ #{
+ __requirements.extend(requirements)
+ #}
+
+# Make toolset 'toolset', defined in a module of the same name,
+# inherit from 'base'.
+# 1. The 'init' rule from 'base' is imported into 'toolset' with full
+# name. Another 'init' is called, which forwards to the base one.
+# 2. All generators from 'base' are cloned. The ids are adjusted and
+# the <toolset> property in requires is adjusted too.
+# 3. All flags are inherited.
+# 4. All rules are imported.
+def inherit(toolset, base):
+ get_manager().projects().load_module(base, [])
+
+ inherit_generators(toolset, [], base)
+ inherit_flags(toolset, base)
+ inherit_rules(toolset, base)
diff --git a/tools/build/src/build/type.jam b/tools/build/src/build/type.jam
new file mode 100644
index 0000000000..e8cc44ebbb
--- /dev/null
+++ b/tools/build/src/build/type.jam
@@ -0,0 +1,401 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Deals with target type declaration and defines target class which supports
+# typed targets.
+
+import "class" : new ;
+import feature ;
+import generators : * ;
+import os ;
+import project ;
+import property ;
+import scanner ;
+
+# The following import would create a circular dependency:
+# project -> project-root -> builtin -> type -> targets -> project
+# import targets ;
+
+# The feature is optional so it would never get added implicitly. It is used
+# only for internal purposes and in all cases we want to use it explicitly.
+feature.feature target-type : : composite optional ;
+
+feature.feature main-target-type : : optional incidental ;
+feature.feature base-target-type : : composite optional free ;
+
+
+# Registers a target type, possibly derived from a 'base-type'. Providing a list
+# of 'suffixes' here is a shortcut for separately calling the register-suffixes
+# rule with the given suffixes and the set-generated-target-suffix rule with the
+# first given suffix.
+#
+rule register ( type : suffixes * : base-type ? )
+{
+ # Type names cannot contain hyphens, because when used as feature-values
+ # they would be interpreted as composite features which need to be
+ # decomposed.
+ switch $(type)
+ {
+ case *-* :
+ import errors ;
+ errors.error "type name \"$(type)\" contains a hyphen" ;
+ }
+
+ if $(type) in $(.types)
+ {
+ import errors ;
+ errors.error "Type $(type) is already registered." ;
+ }
+
+ {
+ .types += $(type) ;
+ .base.$(type) = $(base-type) ;
+ .derived.$(base-type) += $(type) ;
+ .bases.$(type) = $(type) $(.bases.$(base-type)) ;
+
+ # Store suffixes for generated targets.
+ .suffixes.$(type) = [ new property-map ] ;
+
+ # Store prefixes for generated targets (e.g. "lib" for library).
+ .prefixes.$(type) = [ new property-map ] ;
+
+ if $(suffixes)-is-defined
+ {
+ # Specify mapping from suffixes to type.
+ register-suffixes $(suffixes) : $(type) ;
+ # By default, generated targets of 'type' will use the first of
+ # 'suffixes'. This may be overridden.
+ set-generated-target-suffix $(type) : : $(suffixes[1]) ;
+ }
+
+ feature.extend target-type : $(type) ;
+ feature.extend main-target-type : $(type) ;
+ feature.extend base-target-type : $(type) ;
+
+ feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
+ feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
+
+ # We used to declare the main target rule only when a 'main' parameter
+ # has been specified. However, it is hard to decide that a type will
+ # *never* need a main target rule and so from time to time we needed to
+ # make yet another type 'main'. So now a main target rule is defined for
+ # each type.
+ main-rule-name = [ type-to-rule-name $(type) ] ;
+ .main-target-type.$(main-rule-name) = $(type) ;
+ IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
+
+ # Adding a new derived type affects generator selection so we need to
+ # make the generator selection module update any of its cached
+ # information related to a new derived type being defined.
+ generators.update-cached-information-with-a-new-type $(type) ;
+ }
+}
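+
+# Illustrative registrations (the real ones live in the individual type
+# modules):
+#
+#     type.register CPP : cpp cxx cc ;
+#     type.register STATIC_LIB : a : LIB ;
+#
+# The second call derives STATIC_LIB from LIB and makes "a" its default
+# generated-target suffix.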
+
+
+# Given a type, returns the name of the main target rule which creates targets
+# of that type.
+#
+rule type-to-rule-name ( type )
+{
+ # Lowercase everything. Convert underscores to dashes.
+ import regex ;
+ local n = [ regex.split $(type:L) "_" ] ;
+ return $(n:J=-) ;
+}
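+
+# For example, [ type-to-rule-name STATIC_LIB ] returns "static-lib", which is
+# the name under which main-target-rule gets imported for that type.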
+
+
+# Given a main target rule name, returns the type for which it creates targets.
+#
+rule type-from-rule-name ( rule-name )
+{
+ return $(.main-target-type.$(rule-name)) ;
+}
+
+
+# Specifies that files with a suffix from 'suffixes' be recognized as targets of
+# type 'type'. Issues an error if a different type is already specified for any
+# of the suffixes.
+#
+rule register-suffixes ( suffixes + : type )
+{
+ for local s in $(suffixes)
+ {
+ if ! $(.type.$(s))
+ {
+ .type.$(s) = $(type) ;
+ }
+ else if $(.type.$(s)) != $(type)
+ {
+ import errors ;
+ errors.error Attempting to specify multiple types for suffix
+ \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
+ }
+ }
+}
+
+
+# Returns true iff type has been registered.
+#
+rule registered ( type )
+{
+ if $(type) in $(.types)
+ {
+ return true ;
+ }
+}
+
+
+# Issues an error if 'type' is unknown.
+#
+rule validate ( type )
+{
+ if ! [ registered $(type) ]
+ {
+ import errors ;
+ errors.error "Unknown target type $(type)" ;
+ }
+}
+
+
+# Sets a scanner class that will be used for this 'type'.
+#
+rule set-scanner ( type : scanner )
+{
+ validate $(type) ;
+ .scanner.$(type) = $(scanner) ;
+}
+
+
+# Returns a scanner instance appropriate to 'type' and 'properties'.
+#
+rule get-scanner ( type : property-set )
+{
+ if $(.scanner.$(type))
+ {
+ return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
+ }
+}
+
+
+# Returns a base type for the given type or nothing in case the given type is
+# not derived.
+#
+rule base ( type )
+{
+ return $(.base.$(type)) ;
+}
+
+
+# Returns the given type and all of its base types in order of their distance
+# from type.
+#
+rule all-bases ( type )
+{
+ return $(.bases.$(type)) ;
+}
+
+
+# Returns the given type and all of its derived types in order of their distance
+# from type.
+#
+rule all-derived ( type )
+{
+ local result = $(type) ;
+ for local d in $(.derived.$(type))
+ {
+ result += [ all-derived $(d) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
+# indirect base.
+#
+rule is-derived ( type base )
+{
+ if $(base) in $(.bases.$(type))
+ {
+ return true ;
+ }
+}
+
+# Returns true if 'type' is either derived from or is equal to 'base'.
+#
+# TODO: It might be that is-derived and is-subtype were meant to be different
+# rules - one returning true for type = base and one not, but as currently
+# implemented they are actually the same. Clean this up.
+#
+rule is-subtype ( type base )
+{
+ return [ is-derived $(type) $(base) ] ;
+}
+
+
+
+
+# Sets a file suffix to be used when generating a target of 'type' with the
+# specified properties. Can be called with no properties if no suffix has
+# already been specified for the 'type'. The 'suffix' parameter can be an empty
+# string ("") to indicate that no suffix should be used.
+#
+# Note that this does not cause files with 'suffix' to be automatically
+# recognized as being of 'type'. Two different types can use the same suffix for
+# their generated files but only one type can be auto-detected for a file with
+# that suffix. The user should explicitly specify which one using the
+# register-suffixes rule.
+#
+rule set-generated-target-suffix ( type : properties * : suffix )
+{
+ set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
+
+# Change the suffix previously registered for this type/properties combination.
+# If suffix is not yet specified, sets it.
+#
+rule change-generated-target-suffix ( type : properties * : suffix )
+{
+ change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
+}
+
+
+# Returns the suffix used when generating a file of 'type' with the given
+# properties.
+#
+rule generated-target-suffix ( type : property-set )
+{
+ return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
+}
+
+
+# Sets a target prefix that should be used when generating targets of 'type'
+# with the specified properties. Can be called with empty properties if no
+# prefix for 'type' has been specified yet.
+#
+# The 'prefix' parameter can be an empty string ("") to indicate that no prefix
+# should be used.
+#
+# Usage example: library names use the "lib" prefix on unix.
+#
+rule set-generated-target-prefix ( type : properties * : prefix )
+{
+ set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
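+
+# For example, a library type set up roughly like this (illustrative, not the
+# exact builtin declarations):
+#
+#     type.set-generated-target-prefix STATIC_LIB : <target-os>linux : lib ;
+#     type.set-generated-target-suffix STATIC_LIB : <target-os>linux : a ;
+#
+# makes a static library target named "hello" produce "libhello.a" when built
+# for <target-os>linux.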
+
+
+# Change the prefix previously registered for this type/properties combination.
+# If prefix is not yet specified, sets it.
+#
+rule change-generated-target-prefix ( type : properties * : prefix )
+{
+ change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
+}
+
+
+rule generated-target-prefix ( type : property-set )
+{
+ return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
+}
+
+
+# Common rules for prefix/suffix provisioning follow.
+
+local rule set-generated-target-ps ( ps : type : properties * : psval )
+{
+ $(.$(ps)es.$(type)).insert $(properties) : $(psval) ;
+}
+
+
+local rule change-generated-target-ps ( ps : type : properties * : psval )
+{
+ local prev = [ $(.$(ps)es.$(type)).find-replace $(properties) : $(psval) ] ;
+ if ! $(prev)
+ {
+ set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
+ }
+}
+
+
+# Returns either prefix or suffix (as indicated by 'ps') that should be used
+# when generating a target of 'type' with the specified properties. Parameter
+# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
+# 'type', returns prefix/suffix for base type, if any.
+#
+local rule generated-target-ps ( ps : type : property-set )
+{
+ local result ;
+ local found ;
+ while $(type) && ! $(found)
+ {
+ result = [ $(.$(ps)es.$(type)).find $(property-set) ] ;
+ # If the prefix/suffix is explicitly set to an empty string, we consider
+ # prefix/suffix to be found. If we were not to compare with "", there
+ # would be no way to specify an empty prefix/suffix.
+ if $(result)-is-defined
+ {
+ found = true ;
+ }
+ type = $(.base.$(type)) ;
+ }
+ if $(result) = ""
+ {
+ result = ;
+ }
+ return $(result) ;
+}
+
+
+# Returns the file type given its name. If there are several dots in the
+# filename, tries each suffix. E.g. for the name "file.so.1.2" the suffixes
+# "2", "1", and "so" will be tried.
+#
+rule type ( filename )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ filename = $(filename:L) ;
+ }
+ local type ;
+ while ! $(type) && $(filename:S)
+ {
+ local suffix = $(filename:S) ;
+ type = $(.type$(suffix)) ;
+ filename = $(filename:S=) ;
+ }
+ return $(type) ;
+}
+
+
+# Rule used to construct all main targets. Note that this rule gets imported
+# into the global namespace under different alias names and the exact target
+# type to construct is selected based on the alias used to actually invoke this
+# rule.
+#
+rule main-target-rule ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ # First discover the required target type based on the exact alias used to
+ # invoke this rule.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = $(bt[4]) ;
+ local target-type = [ type-from-rule-name $(rulename) ] ;
+
+ # This is a circular module dependency and so must be imported here.
+ import targets ;
+
+ return [ targets.create-typed-target $(target-type) : [ project.current ] :
+ $(name) : $(sources) : $(requirements) : $(default-build) :
+ $(usage-requirements) ] ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ # TODO: Add tests for all the is-derived, is-base & related type relation
+ # checking rules.
+}
diff --git a/tools/build/v2/build/type.py b/tools/build/src/build/type.py
index e815739f40..e815739f40 100644
--- a/tools/build/v2/build/type.py
+++ b/tools/build/src/build/type.py
diff --git a/tools/build/src/build/version.jam b/tools/build/src/build/version.jam
new file mode 100644
index 0000000000..fa8fb3a568
--- /dev/null
+++ b/tools/build/src/build/version.jam
@@ -0,0 +1,165 @@
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import numbers ;
+
+
+.major = "2014" ;
+.minor = "03" ;
+
+
+rule boost-build ( )
+{
+ return "$(.major).$(.minor)-svn" ;
+}
+
+
+rule print ( )
+{
+ if [ verify-engine-version ]
+ {
+ ECHO "Boost.Build" [ boost-build ] ;
+ }
+}
+
+
+rule verify-engine-version ( )
+{
+ local v = [ modules.peek : JAM_VERSION ] ;
+
+ if $(v[1]) != $(.major) || $(v[2]) != $(.minor)
+ {
+ local argv = [ modules.peek : ARGV ] ;
+ local e = $(argv[1]) ;
+ local l = [ modules.binding version ] ;
+ l = $(l:D) ;
+ l = $(l:D) ;
+ ECHO "warning: mismatched versions of Boost.Build engine and core" ;
+ ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ;
+ ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ;
+ }
+ else
+ {
+ return true ;
+ }
+}
+
+
+# Utility rule for testing whether all elements in a sequence are equal to 0.
+#
+local rule is-all-zeroes ( sequence * )
+{
+ local result = "true" ;
+ for local e in $(sequence)
+ {
+ if $(e) != "0"
+ {
+ result = "" ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns "true" if the first version is less than the second one.
+#
+rule version-less ( lhs + : rhs + )
+{
+ numbers.check $(lhs) ;
+ numbers.check $(rhs) ;
+
+ local done ;
+ local result ;
+
+ while ! $(done) && $(lhs) && $(rhs)
+ {
+ if [ numbers.less $(lhs[1]) $(rhs[1]) ]
+ {
+ done = "true" ;
+ result = "true" ;
+ }
+ else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
+ {
+ done = "true" ;
+ }
+ else
+ {
+ lhs = $(lhs[2-]) ;
+ rhs = $(rhs[2-]) ;
+ }
+ }
+ if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
+ {
+ result = "true" ;
+ }
+
+ return $(result) ;
+}
+
+
+# Returns "true" if the current JAM version version is at least the given
+# version.
+#
+rule check-jam-version ( version + )
+{
+ local version-tag = $(version:J=.) ;
+ if ! $(version-tag)
+ {
+ import errors ;
+ errors.error Invalid version specifier: : $(version:E="(undefined)") ;
+ }
+
+ if ! $(.jam-version-check.$(version-tag))-is-defined
+ {
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ if ! $(jam-version)
+ {
+ import errors ;
+ errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
+ "installation is most likely terribly outdated." ;
+ }
+ .jam-version-check.$(version-tag) = "true" ;
+ if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
+ {
+ .jam-version-check.$(version-tag) = "" ;
+ }
+ }
+ return $(.jam-version-check.$(version-tag)) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local jam-version = [ modules.peek : JAM_VERSION ] ;
+ local future-version = $(jam-version) ;
+ future-version += "1" ;
+
+ assert.true check-jam-version $(jam-version) ;
+ assert.false check-jam-version $(future-version) ;
+
+ assert.true version-less 0 : 1 ;
+ assert.false version-less 0 : 0 ;
+ assert.true version-less 1 : 2 ;
+ assert.false version-less 1 : 1 ;
+ assert.false version-less 2 : 1 ;
+ assert.true version-less 3 1 20 : 3 4 10 ;
+ assert.false version-less 3 1 10 : 3 1 10 ;
+ assert.false version-less 3 4 10 : 3 1 20 ;
+ assert.true version-less 3 1 20 5 1 : 3 4 10 ;
+ assert.false version-less 3 1 10 5 1 : 3 1 10 ;
+ assert.false version-less 3 4 10 5 1 : 3 1 20 ;
+ assert.true version-less 3 1 20 : 3 4 10 5 1 ;
+ assert.true version-less 3 1 10 : 3 1 10 5 1 ;
+ assert.false version-less 3 4 10 : 3 1 20 5 1 ;
+ assert.false version-less 3 1 10 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 0 : 3 1 10 ;
+ assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
+ assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
+ assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
+
+ # TODO: Add tests for invalid input data being sent to version-less.
+}
diff --git a/tools/build/src/build/virtual-target.jam b/tools/build/src/build/virtual-target.jam
new file mode 100644
index 0000000000..3103c30514
--- /dev/null
+++ b/tools/build/src/build/virtual-target.jam
@@ -0,0 +1,1344 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Implements virtual targets, which correspond to actual files created during a
+# build, but are not yet targets in the Jam sense. They are needed, for example,
+# when searching for possible transformation sequences, when it is not yet known
+# whether a particular target should be created at all.
+#
+# +--------------------------+
+# | virtual-target |
+# +==========================+
+# | actualize |
+# +--------------------------+
+# | actualize-action() = 0 |
+# | actualize-location() = 0 |
+# +----------------+---------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# +---------------------+ +-------+--------------+
+# | action | | abstract-file-target |
+# +=====================| * +======================+
+# | action-name | +--+ action |
+# | properties | | +----------------------+
+# +---------------------+--+ | actualize-action() |
+# | actualize() |0..1 +-----------+----------+
+# | path() | |
+# | adjust-properties() | sources |
+# | actualize-sources() | targets |
+# +------+--------------+ ^
+# | / \
+# ^ +-+-+
+# / \ |
+# +-+-+ +-------------+-------------+
+# | | |
+# | +------+---------------+ +--------+-------------+
+# | | file-target | | searched-lib-target |
+# | +======================+ +======================+
+# | | actualize-location() | | actualize-location() |
+# | +----------------------+ +----------------------+
+# |
+# +-+------------------------------+
+# | |
+# +----+----------------+ +---------+-----------+
+# | compile-action | | link-action |
+# +=====================+ +=====================+
+# | adjust-properties() | | adjust-properties() |
+# +---------------------+ | actualize-sources() |
+# +---------------------+
+#
+# The 'compile-action' and 'link-action' classes are not defined here but in
+# builtin.jam modules. They are shown in the diagram to give the big picture.
+
+import "class" : new ;
+import path ;
+import sequence ;
+import set ;
+import type ;
+import utility ;
+
+
+# Models a potential target. It can be converted into a Jam target and used in
+# building, if needed. However, it can also be dropped, which allows us to
+# search for different transformations and select only one.
+#
+class virtual-target
+{
+ import scanner ;
+ import sequence ;
+ import utility ;
+ import virtual-target ;
+
+ rule __init__ (
+ name # Target/project name.
+ : project # Project to which this target belongs.
+ )
+ {
+ self.name = $(name) ;
+ self.project = $(project) ;
+ self.dependencies = ;
+ }
+
+ # Name of this target.
+ #
+ rule name ( )
+ {
+ return $(self.name) ;
+ }
+
+ # Project of this target.
+ #
+ rule project ( )
+ {
+ return $(self.project) ;
+ }
+
+ # Adds additional 'virtual-target' instances this one depends on.
+ #
+ rule depends ( d + )
+ {
+ self.dependencies = [ sequence.merge $(self.dependencies) :
+ [ sequence.insertion-sort $(d) ] ] ;
+ }
+
+ rule dependencies ( )
+ {
+ return $(self.dependencies) ;
+ }
+
+ rule always ( )
+ {
+ .always = 1 ;
+ }
+
+ # Generates all the actual targets and sets up build actions for this
+ # target.
+ #
+ # If 'scanner' is specified, creates an additional target with the same
+ # location as the actual target, which will depend on the actual target and
+ # be associated with a 'scanner'. That additional target is returned. See
+ # the docs (#dependency_scanning) for rationale. Target must correspond to a
+ # file if 'scanner' is specified.
+ #
+ # If scanner is not specified then the actual target is returned.
+ #
+ rule actualize ( scanner ? )
+ {
+ local actual-name = [ actualize-no-scanner ] ;
+
+ if $(.always)
+ {
+ ALWAYS $(actual-name) ;
+ }
+
+ if ! $(scanner)
+ {
+ return $(actual-name) ;
+ }
+ else
+ {
+ # Add the scanner instance to the grist for name.
+ local g = [ sequence.join [ utility.ungrist $(actual-name:G) ]
+ $(scanner) : - ] ;
+ local name = $(actual-name:G=$(g)) ;
+
+ if ! $(self.made.$(scanner))
+ {
+ self.made.$(scanner) = true ;
+ actualize-location $(name) ;
+ scanner.install $(scanner) : $(name) ;
+ }
+ return $(name) ;
+ }
+ }
+
+# private: (overridables)
+
+ # Sets up build actions for 'target'. Should call appropriate rules and set
+ # target variables.
+ #
+ rule actualize-action ( target )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Sets up variables on 'target' which specify its location.
+ #
+ rule actualize-location ( target )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # If the target is a generated one, returns the path where it will be
+ # generated. Otherwise, returns an empty list.
+ #
+ rule path ( )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+ # Returns the actual target name to be used in case when no scanner is
+ # involved.
+ #
+ rule actual-name ( )
+ {
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+
+# implementation
+ rule actualize-no-scanner ( )
+ {
+ # In fact, we just need to merge virtual-target with
+ # abstract-file-target as the latter is the only class derived from the
+ # former. But that has been left for later.
+
+ import errors : error : errors.error ;
+ errors.error "method should be defined in derived classes" ;
+ }
+}
+
+
+# Target corresponding to a file. The exact mapping for file is not yet
+# specified in this class. (TODO: Actually, the class name could be better...)
+#
+# May be a source file (when no action is specified) or a derived file
+# (otherwise).
+#
+# The target's grist is a concatenation of its project's location, action
+# properties (for derived targets) and, optionally, a value identifying the main
+# target.
+#
+class abstract-file-target : virtual-target
+{
+ import project ;
+ import regex ;
+ import sequence ;
+ import path ;
+ import type ;
+ import property-set ;
+ import indirect ;
+
+ rule __init__ (
+ name # Target's name.
+ exact ? # If non-empty, the name is exactly the name the created file
+ # should have. Otherwise, the '__init__' method will add a
+ # suffix obtained from 'type' by calling
+ # 'type.generated-target-suffix'.
+ : type ? # Target's type.
+ : project
+ : action ?
+ )
+ {
+ virtual-target.__init__ $(name) : $(project) ;
+
+ self.type = $(type) ;
+ self.action = $(action) ;
+ if $(action)
+ {
+ $(action).add-targets $(__name__) ;
+
+ if $(self.type) && ! $(exact)
+ {
+ _adjust-name $(name) ;
+ }
+ }
+ }
+
+ rule type ( )
+ {
+ return $(self.type) ;
+ }
+
+ # Sets the path. When generating target name, it will override any path
+ # computation from properties.
+ #
+ rule set-path ( path )
+ {
+ self.path = [ path.native $(path) ] ;
+ }
+
+ # Returns the currently set action.
+ #
+ rule action ( )
+ {
+ return $(self.action) ;
+ }
+
+ # Sets/gets the 'root' flag. Target is root if it directly corresponds to
+ # some variant of a main target.
+ #
+ rule root ( set ? )
+ {
+ if $(set)
+ {
+ self.root = true ;
+ }
+ return $(self.root) ;
+ }
+
+ # Gets or sets the subvariant which created this target. Subvariant is set
+ # when the target is brought into existence and is never changed after that. In
+ # particular, if a target is shared by multiple subvariants, only the first
+ # one is stored.
+ #
+ rule creating-subvariant ( s ? # If specified, specifies the value to set,
+ # which should be a 'subvariant' class
+ # instance.
+ )
+ {
+ if $(s) && ! $(self.creating-subvariant)
+ {
+ self.creating-subvariant = $(s) ;
+ }
+ return $(self.creating-subvariant) ;
+ }
+
+ rule actualize-action ( target )
+ {
+ if $(self.action)
+ {
+ $(self.action).actualize ;
+ }
+ }
+
+ # Return a human-readable representation of this target. If this target has
+ # an action, that is:
+ #
+ # { <action-name>-<self.name>.<self.type> <action-sources>... }
+ #
+ # otherwise, it is:
+ #
+ # { <self.name>.<self.type> }
+ #
+ rule str ( )
+ {
+ local action = [ action ] ;
+ local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
+
+ if $(action)
+ {
+ local sources = [ $(action).sources ] ;
+ local action-name = [ $(action).action-name ] ;
+
+ local ss ;
+ for local s in $(sources)
+ {
+ ss += [ $(s).str ] ;
+ }
+
+ return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
+ }
+ else
+ {
+ return "{" $(name-dot-type) "}" ;
+ }
+ }
+
+ rule less ( a )
+ {
+ if [ str ] < [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+ rule equal ( a )
+ {
+ if [ str ] = [ $(a).str ]
+ {
+ return true ;
+ }
+ }
+
+# private:
+ rule actual-name ( )
+ {
+ if ! $(self.actual-name)
+ {
+ local grist = [ grist ] ;
+ local basename = [ path.native $(self.name) ] ;
+ self.actual-name = <$(grist)>$(basename) ;
+ }
+ return $(self.actual-name) ;
+ }
+
+ # Helper to 'actual-name', above. Computes a unique prefix used to
+ # distinguish this target from other targets with the same name creating
+ # different files.
+ #
+ rule grist ( )
+ {
+ # Depending on target, there may be different approaches to generating
+ # unique prefixes. We generate prefixes in the form:
+ # <one letter approach code> <the actual prefix>
+ local path = [ path ] ;
+ if $(path)
+ {
+ # The target will be generated to a known path. Just use the path
+ # for identification, since path is as unique as it can get.
+ return p$(path) ;
+ }
+ else
+ {
+ # File is either source, which will be searched for, or is not a
+ # file at all. Use the location of project for distinguishing.
+ local project-location = [ $(self.project).get location ] ;
+ local location-grist = [ sequence.join [ regex.split
+ $(project-location) "/" ] : "!" ] ;
+
+ if $(self.action)
+ {
+ local ps = [ $(self.action).properties ] ;
+ local property-grist = [ $(ps).as-path ] ;
+ # 'property-grist' can be empty when 'ps' is an empty property
+ # set.
+ if $(property-grist)
+ {
+ location-grist = $(location-grist)/$(property-grist) ;
+ }
+ }
+
+ return l$(location-grist) ;
+ }
+ }
+
+ # Given the target name specified in constructor, returns the name which
+ # should be really used, by looking at the <tag> properties. Tag properties
+ # need to be specified as <tag>@rule-name. This makes Boost Build call the
+ # specified rule with the target name, type and properties to get the new
+ # name. If no <tag> property is specified or the rule specified by <tag>
+ # returns nothing, returns the result of calling
+ # virtual-target.add-prefix-and-suffix.
+ #
+ rule _adjust-name ( specified-name )
+ {
+ local ps ;
+ if $(self.action)
+ {
+ ps = [ $(self.action).properties ] ;
+ }
+ else
+ {
+ ps = [ property-set.empty ] ;
+ }
+
+ local tag = [ $(ps).get <tag> ] ;
+
+ if $(tag)
+ {
+ local rule-name = [ MATCH ^@(.*) : $(tag) ] ;
+ if $(rule-name)
+ {
+ if $(tag[2])
+ {
+ import errors : error : errors.error ;
+ errors.error <tag>@rulename is present but is not the only
+ <tag> feature. ;
+ }
+
+ self.name = [ indirect.call $(rule-name) $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ else
+ {
+ import errors : error : errors.error ;
+ errors.error <tag> property value must be '@rule-name'. ;
+ }
+ }
+
+ # If there is no tag or the tag rule returned nothing.
+ if ! $(tag) || ! $(self.name)
+ {
+ self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
+ : $(self.type) : $(ps) ] ;
+ }
+ }
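+
+ # A <tag>@rule-name rule referenced from here receives the target name, type
+ # and property set. A hypothetical tag rule, sketching the expected
+ # signature (it would also need 'import virtual-target ;' in its module):
+ #
+ #     rule my-tag ( name : type ? : property-set )
+ #     {
+ #         if [ $(property-set).get <variant> ] = debug
+ #         {
+ #             name = $(name)-d ;
+ #         }
+ #         return [ virtual-target.add-prefix-and-suffix $(name) : $(type) :
+ #             $(property-set) ] ;
+ #     }
+ #
+ # Returning nothing falls back to virtual-target.add-prefix-and-suffix, as
+ # described above.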
+
+ rule actualize-no-scanner ( )
+ {
+ local name = [ actual-name ] ;
+
+ # Do anything only on the first invocation.
+ if ! $(self.made-no-scanner)
+ {
+ self.made-no-scanner = true ;
+
+ if $(self.action)
+ {
+ # For a non-derived target, we do not care if there are several
+ # virtual targets that refer to the same name. One case when
+ # this is unavoidable is when the file name is main.cpp and two
+ # targets have types CPP (for compiling) and MOCCABLE_CPP (for
+ # conversion to H via Qt tools).
+ virtual-target.register-actual-name $(name) : $(__name__) ;
+ }
+
+ for local i in $(self.dependencies)
+ {
+ DEPENDS $(name) : [ $(i).actualize ] ;
+ }
+
+ actualize-location $(name) ;
+ actualize-action $(name) ;
+ }
+ return $(name) ;
+ }
+}
+
+
+# Adds the prefix and appends the suffix appropriate to the 'type'/'property-set'
+# combination to the specified name and returns the result.
+#
+rule add-prefix-and-suffix ( specified-name : type ? : property-set )
+{
+ local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
+
+ # Handle suffixes for which no leading dot is desired. Those are specified
+ # by enclosing them in <...>. Needed by python so it can create "_d.so"
+ # extensions, for example.
+ if $(suffix:G)
+ {
+ suffix = [ utility.ungrist $(suffix) ] ;
+ }
+ else
+ {
+ suffix = .$(suffix) ;
+ }
+
+ local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
+
+ if [ MATCH ^($(prefix)) : $(specified-name) ]
+ {
+ prefix = ;
+ }
+ return $(prefix:E="")$(specified-name)$(suffix:E="") ;
+}
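+
+# For instance, with a type whose generated-target prefix is "lib" and suffix
+# is "a" for the given property set, add-prefix-and-suffix on "hello" yields
+# "libhello.a"; if the specified name already starts with "lib", the prefix is
+# not added again.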
+
+
+# File targets with explicitly known location.
+#
+# The file path is determined as
+# * Value passed to the 'set-path' method, if any.
+# * For derived files, project's build dir, joined with components that
+# describe action properties. If free properties are not equal to the
+# project's reference properties an element with the name of the main
+# target is added.
+# * For source files, project's source dir.
+#
+# The file suffix is determined as:
+# * The value passed to the 'suffix' method, if any.
+# * The suffix corresponding to the target's type.
+#
+class file-target : abstract-file-target
+{
+ import "class" : new ;
+ import common ;
+
+ rule __init__ (
+ name exact ?
+ : type ? # Optional type for this target.
+ : project
+ : action ?
+ : path ?
+ )
+ {
+ abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) :
+ $(action) ;
+
+ self.path = $(path) ;
+ }
+
+ rule clone-with-different-type ( new-type )
+ {
+ return [ new file-target $(self.name) exact : $(new-type) :
+ $(self.project) : $(self.action) : $(self.path) ] ;
+ }
+
+ rule actualize-location ( target )
+ {
+ # Scanner targets are always bound to already existing files in already
+ # existing folder. They need to be marked as depending on their base
+ # target (i.e. the target being scanned) but, unlike regular
+ # dependencies set up by the DEPENDS rule, they must not depend on any
+ # targets already marked as included by the base target. Otherwise such
+ # an included file being newer than the file being scanned would cause
+ # the scanner target to be updated, further causing any target depending
+ # on that scanner target to be rebuilt. This is the exact relationship
+ # as set up by Boost Jam's SEARCH binding method (needed to support
+ # searching for generated targets) so we want to bind scanner targets
+ # using this method instead of explicitly specifying their location
+ # using LOCATE.
+ #
+ # FIXME: We recognize scanner targets by their given name being
+ # different from this target's actual name. This is a hack and should be
+ # cleaned up by reorganizing who knows about scanners in the
+ # virtual-target/abstract-file-target/file-target/notfile-target/
+ # searched-lib-target/... class hierarchy.
+ local is-scanner-target ;
+ if $(target) != [ actual-name ]
+ {
+ is-scanner-target = true ;
+ }
+
+ if $(self.action) && ! $(is-scanner-target)
+ {
+ # This is a derived file.
+ local path = [ path ] ;
+ LOCATE on $(target) = $(path) ;
+
+ # Make sure the path exists.
+ DEPENDS $(target) : $(path) ;
+ common.MkDir $(path) ;
+
+ # It is possible that the target name includes a directory too, for
+ # example when installing headers. Create that directory.
+ if $(target:D)
+ {
+ local d = $(target:D) ;
+ d = $(d:R=$(path)) ;
+ DEPENDS $(target) : $(d) ;
+ common.MkDir $(d) ;
+ }
+
+ # For a real file target, we create a fake target depending on the
+ # real target. This allows us to run
+ #
+ # b2 hello.o
+ #
+ # without trying to guess the name of the real target. Note that the
+ # target has no directory name and uses a special <e> grist.
+ #
+ # First, that means that "b2 hello.o" will build all known hello.o
+ # targets. Second, the <e> grist makes sure this target will not be
+ # confused with other targets, for example, if we have subdir 'test'
+ # with target 'test' in it that includes a 'test.o' file, then the
+ # target for directory will be just 'test' the target for test.o
+ # will be <ptest/bin/gcc/debug>test.o and the target we create below
+ # will be <e>test.o
+ DEPENDS $(target:G=e) : $(target) ;
+ # Allow b2 <path-to-file>/<file> to work. This will not catch all
+ # possible ways to refer to the path (relative/absolute, extra ".",
+ # various "..", but should help in obvious cases.
+ DEPENDS $(target:G=e:R=$(path)) : $(target) ;
+ }
+ else
+ {
+ SEARCH on $(target) = [ path.native $(self.path) ] ;
+ }
+ }
+
+ # Returns the directory for this target.
+ #
+ rule path ( )
+ {
+ if ! $(self.path)
+ {
+ if $(self.action)
+ {
+ local p = [ $(self.action).properties ] ;
+ local path,relative-to-build-dir = [ $(p).target-path ] ;
+ local path = $(path,relative-to-build-dir[1]) ;
+ local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
+
+ if $(relative-to-build-dir)
+ {
+ path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
+ }
+
+ self.path = [ path.native $(path) ] ;
+ }
+ }
+ return $(self.path) ;
+ }
+}
+
+
+class notfile-target : abstract-file-target
+{
+ rule __init__ ( name : project : action ? )
+ {
+ abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
+ }
+
+ # Returns nothing to indicate that the target's path is not known.
+ #
+ rule path ( )
+ {
+ return ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ ALWAYS $(target) ;
+ # TEMPORARY $(target) ;
+ NOUPDATE $(target) ;
+ }
+}
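+# Targets of this kind back, for example, main targets declared with the
+# builtin 'notfile' rule, i.e. actions that produce no file and should run on
+# every build. An illustrative Jamfile fragment (action name is hypothetical):
+#
+#   actions say-hello { echo hello }
+#   notfile hello : @say-hello ;
+#
+# The NOTFILE/ALWAYS/NOUPDATE flags set above are what make such an action run
+# unconditionally instead of being bound to a file on disk.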
+
+
+# Class representing an action. Both 'targets' and 'sources' should list
+# instances of 'virtual-target'. Action name should name a rule with this
+# prototype:
+# rule action-name ( targets + : sources * : properties * )
+# Targets and sources are passed as actual Jam targets. The rule may not
+# establish additional dependency relationships.
+#
+class action
+{
+ import "class" ;
+ import indirect ;
+ import path ;
+ import property-set ;
+ import set : difference ;
+ import toolset ;
+ import type ;
+
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ self.sources = $(sources) ;
+
+ self.action-name = [ indirect.make-qualified $(action-name) ] ;
+
+ if ! $(property-set)
+ {
+ property-set = [ property-set.empty ] ;
+ }
+
+ if ! [ class.is-instance $(property-set) ]
+ {
+ import errors : error : errors.error ;
+ errors.error "Property set instance required" ;
+ }
+
+ self.properties = $(property-set) ;
+ }
+
+ rule add-targets ( targets * )
+ {
+ self.targets += $(targets) ;
+ }
+
+ rule replace-targets ( old-targets * : new-targets * )
+ {
+ self.targets = [ set.difference $(self.targets) : $(old-targets) ] ;
+ self.targets += $(new-targets) ;
+ }
+
+ rule targets ( )
+ {
+ return $(self.targets) ;
+ }
+
+ rule sources ( )
+ {
+ return $(self.sources) ;
+ }
+
+ rule action-name ( )
+ {
+ return $(self.action-name) ;
+ }
+
+ rule properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ # Generates actual build instructions.
+ #
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+
+ local ps = [ properties ] ;
+ local properties = [ adjust-properties $(ps) ] ;
+
+ local actual-targets ;
+ for local i in [ targets ]
+ {
+ actual-targets += [ $(i).actualize ] ;
+ }
+
+ actualize-sources [ sources ] : $(properties) ;
+
+ DEPENDS $(actual-targets) : $(self.actual-sources)
+ $(self.dependency-only-sources) ;
+
+ # Action name can include additional rule arguments, which should
+ # not be passed to 'set-target-variables'.
+ toolset.set-target-variables
+ [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
+ : $(properties) ;
+
+ # Reflect ourselves in a variable for the target. This allows
+ # looking up additional info for the action given the raw target.
+ # For example to debug or output action information from action
+ # rules.
+ .action on $(actual-targets) = $(__name__) ;
+
+ indirect.call $(self.action-name) $(actual-targets)
+ : $(self.actual-sources) : [ $(properties).raw ] ;
+
+ # Since we set up the creating action here, we set up the action for
+ # cleaning up as well.
+ common.Clean clean-all : $(actual-targets) ;
+ }
+ }
+
+ # Helper for 'actualize-sources'. For each passed source, actualizes it with
+ # the appropriate scanner. Returns the actualized virtual targets.
+ #
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result = ;
+ for local i in $(sources)
+ {
+ local scanner ;
+ if [ $(i).type ]
+ {
+ scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
+ }
+ result += [ $(i).actualize $(scanner) ] ;
+ }
+ return $(result) ;
+ }
+
+ # Creates actual Jam targets for sources. Initializes the following member
+ # variables:
+ # 'self.actual-sources' -- sources passed to the updating action.
+ # 'self.dependency-only-sources' -- sources marked as dependencies, but
+ # are not used otherwise.
+ #
+ # New values will be *appended* to the variables. They may be non-empty if
+ # caller wants it.
+ #
+ rule actualize-sources ( sources * : property-set )
+ {
+ local dependencies = [ $(self.properties).get <dependency> ] ;
+
+ self.dependency-only-sources +=
+ [ actualize-source-type $(dependencies) : $(property-set) ] ;
+ self.actual-sources +=
+ [ actualize-source-type $(sources) : $(property-set) ] ;
+
+ # This is used to help b2 find dependencies in generated headers and
+ # other main targets, e.g. in:
+ #
+ # make a.h : ....... ;
+ # exe hello : hello.cpp : <implicit-dependency>a.h ;
+ #
+ # For b2 to find the dependency the generated target must be
+ # actualized (i.e. have its Jam target constructed). In the above case,
+ # if we are building just hello ("b2 hello"), 'a.h' will not be
+ # actualized unless we do it here.
+ local implicit = [ $(self.properties).get <implicit-dependency> ] ;
+ for local i in $(implicit)
+ {
+ $(i:G=).actualize ;
+ }
+ }
+
+ # Determines real properties when trying to build with 'properties'. This is
+ # the last chance to fix properties, for example to adjust includes to get
+ # generated headers correctly. Default implementation simply returns its
+ # argument.
+ #
+ rule adjust-properties ( property-set )
+ {
+ return $(property-set) ;
+ }
+}
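+# For reference, a rule named by 'action-name' above follows the documented
+# prototype. A minimal illustrative pair (not an actual Boost.Build rule):
+#
+#   rule example.touch ( targets + : sources * : properties * )
+#   {
+#       # May inspect $(properties) and set variables on $(targets) here.
+#   }
+#   actions example.touch
+#   {
+#       touch "$(<)"
+#   }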
+
+
+# Action class which does nothing --- it produces the targets with specific
+# properties out of nowhere. It is needed to distinguish virtual targets with
+# different properties that are known to exist and have no actions which create
+# them.
+#
+class null-action : action
+{
+ rule __init__ ( property-set ? )
+ {
+ action.__init__ : .no-action : $(property-set) ;
+ }
+
+ rule actualize ( )
+ {
+ if ! $(self.actualized)
+ {
+ self.actualized = true ;
+ for local i in [ targets ]
+ {
+ $(i).actualize ;
+ }
+ }
+ }
+}
+
+
+# Class which acts exactly like 'action', except that its sources are not
+# scanned for dependencies.
+#
+class non-scanning-action : action
+{
+ rule __init__ ( sources * : action-name + : property-set ? )
+ {
+ action.__init__ $(sources) : $(action-name) : $(property-set) ;
+ }
+
+ rule actualize-source-type ( sources * : property-set )
+ {
+ local result ;
+ for local i in $(sources)
+ {
+ result += [ $(i).actualize ] ;
+ }
+ return $(result) ;
+ }
+}
+
+
+# Creates a virtual target with an appropriate name and type from 'file'. If a
+# target with that name in that project already exists, returns that already
+# created target.
+#
+# FIXME: a more correct way would be to compute the path to the file, based on
+# name and source location for the project, and use that path to determine if
+# the target has already been created. This logic should be shared with how we
+# usually find targets identified by a specific target id. It should also be
+# updated to work correctly when the file is specified using both relative and
+# absolute paths.
+#
+# TODO: passing a project with all virtual targets is starting to be annoying.
+#
+rule from-file ( file : file-loc : project )
+{
+ import type ; # Had to do this here to break a circular dependency.
+
+ # Check whether we already created a target corresponding to this file.
+ local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
+
+ if $(.files.$(path))
+ {
+ return $(.files.$(path)) ;
+ }
+ else
+ {
+ local name = [ path.make $(file) ] ;
+ local type = [ type.type $(file) ] ;
+ local result ;
+
+ result = [ new file-target $(file) : $(type) : $(project) : :
+ $(file-loc) ] ;
+
+ .files.$(path) = $(result) ;
+ return $(result) ;
+ }
+}
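+# Illustrative use (names hypothetical): requesting the same file relative to
+# the same location twice yields the same instance, since the cache key is the
+# rooted path:
+#
+#   local t1 = [ virtual-target.from-file hello.cpp : src : $(project) ] ;
+#   local t2 = [ virtual-target.from-file hello.cpp : src : $(project) ] ;
+#   # t1 and t2 now refer to the same file-target instance.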
+
+
+# Registers a new virtual target. Checks if there is already a registered target
+# with the same name, type, project and subvariant properties as well as the
+# same sources and an equal action. If such a target is found it is returned
+# and the new 'target' is not registered. Otherwise, 'target' is registered and
+# returned.
+#
+rule register ( target )
+{
+ local signature = [ sequence.join [ $(target).path ] [ $(target).name ] : -
+ ] ;
+
+ local result ;
+ for local t in $(.cache.$(signature))
+ {
+ local a1 = [ $(t).action ] ;
+ local a2 = [ $(target).action ] ;
+
+ if ! $(result)
+ {
+ if ! $(a1) && ! $(a2)
+ {
+ result = $(t) ;
+ }
+ else if $(a1) && $(a2) &&
+ ( [ $(a1).action-name ] = [ $(a2).action-name ] ) &&
+ ( [ $(a1).sources ] = [ $(a2).sources ] )
+ {
+ local ps1 = [ $(a1).properties ] ;
+ local ps2 = [ $(a2).properties ] ;
+ local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference
+ [ $(ps1).dependency ] : [ $(ps1).incidental ] ] ;
+ local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference
+ [ $(ps2).dependency ] : [ $(ps2).incidental ] ] ;
+ if $(p1) = $(p2)
+ {
+ result = $(t) ;
+ }
+ }
+ }
+ }
+
+ if ! $(result)
+ {
+ .cache.$(signature) += $(target) ;
+ result = $(target) ;
+ }
+
+ .recent-targets += $(result) ;
+ .all-targets += $(result) ;
+
+ return $(result) ;
+}
+
+
+# Each target returned by 'register' is added to the .recent-targets list,
+# returned by this function. This allows us to find all virtual targets created
+# when building a specific main target, even those constructed only as
+# intermediate targets.
+#
+rule recent-targets ( )
+{
+ return $(.recent-targets) ;
+}
+
+
+rule clear-recent-targets ( )
+{
+ .recent-targets = ;
+}
+
+
+# Returns all virtual targets ever created.
+#
+rule all-targets ( )
+{
+ return $(.all-targets) ;
+}
+
+
+# Returns all targets from 'targets' with types equal to 'type' or derived from
+# it.
+#
+rule select-by-type ( type : targets * )
+{
+ local result ;
+ for local t in $(targets)
+ {
+ if [ type.is-subtype [ $(t).type ] $(type) ]
+ {
+ result += $(t) ;
+ }
+ }
+ return $(result) ;
+}
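+# For example (illustrative), [ virtual-target.select-by-type LIB : $(all) ]
+# would pick both static and shared libraries out of $(all), since STATIC_LIB
+# and SHARED_LIB both derive from LIB.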
+
+
+rule register-actual-name ( actual-name : virtual-target )
+{
+ if $(.actual.$(actual-name))
+ {
+ local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ;
+ local cmt1-name ;
+ if $(cs1)-is-defined
+ {
+ local cmt1 = [ $(cs1).main-target ] ;
+ cmt1-name = [ $(cmt1).full-name ] ;
+ }
+ local cs2 = [ $(virtual-target).creating-subvariant ] ;
+ local cmt2-name ;
+ if $(cs2)-is-defined
+ {
+ local cmt2 = [ $(cs2).main-target ] ;
+ cmt2-name = [ $(cmt2).full-name ] ;
+ }
+ local extra-error-information ;
+ if ! $(cs1)-is-defined || ! $(cs2)-is-defined
+ {
+ extra-error-information = Encountered a virtual-target without a
+ creating subvariant. It could be the virtual target has not been
+ registered via the virtual-target.register rule. ;
+ }
+
+ local action1 = [ $(.actual.$(actual-name)).action ] ;
+ local action2 = [ $(virtual-target).action ] ;
+ local properties-added ;
+ local properties-removed ;
+ if $(action1) && $(action2)
+ {
+ local p1 = [ $(action1).properties ] ;
+ p1 = [ $(p1).raw ] ;
+ local p2 = [ $(action2).properties ] ;
+ p2 = [ $(p2).raw ] ;
+ properties-removed = [ set.difference $(p1) : $(p2) ] ;
+ properties-removed ?= "none" ;
+ properties-added = [ set.difference $(p2) : $(p1) ] ;
+ properties-added ?= "none" ;
+ }
+ import errors : user-error : errors.user-error ;
+ errors.user-error "Name clash for '$(actual-name)'"
+ : ""
+ : "Tried to build the target twice, with property sets having "
+ : "these incompatible properties:"
+ : ""
+ : " - " $(properties-removed)
+ : " - " $(properties-added)
+ : ""
+ : "Please make sure to have consistent requirements for these "
+ : "properties everywhere in your project, especially for install"
+ : "targets."
+ ;
+ }
+ else
+ {
+ .actual.$(actual-name) = $(virtual-target) ;
+ }
+}
+
+
+# Traverses the dependency graph of 'target' and returns all targets that will be
+# created before this one is created. If the root of some dependency graph is
+# found during traversal, it is either included or not, depending on the
+# 'include-roots' value. In either case traversal stops at root targets, i.e.
+# root target sources are not traversed.
+#
+rule traverse ( target : include-roots ? : include-sources ? )
+{
+ local result ;
+ if [ $(target).action ]
+ {
+ local action = [ $(target).action ] ;
+ # This includes the 'target' as well.
+ result += [ $(action).targets ] ;
+
+ for local t in [ $(action).sources ]
+ {
+ if ! [ $(t).root ]
+ {
+ result += [ traverse $(t) : $(include-roots) :
+ $(include-sources) ] ;
+ }
+ else if $(include-roots)
+ {
+ result += $(t) ;
+ }
+ }
+ }
+ else if $(include-sources)
+ {
+ result = $(target) ;
+ }
+ return $(result) ;
+}
+
+
+# Takes an 'action' instance and creates a new instance of it and all targets
+# produced by the action. The rule-name and properties are set to
+# 'new-rule-name' and 'new-properties', if those are specified. Returns the
+# cloned action.
+#
+rule clone-action ( action : new-project : new-action-name ? : new-properties ?
+ )
+{
+ if ! $(new-action-name)
+ {
+ new-action-name = [ $(action).action-name ] ;
+ }
+ if ! $(new-properties)
+ {
+ new-properties = [ $(action).properties ] ;
+ }
+
+ local action-class = [ modules.peek $(action) : __class__ ] ;
+ local cloned-action = [ class.new $(action-class)
+ [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
+
+ local cloned-targets ;
+ for local target in [ $(action).targets ]
+ {
+ local n = [ $(target).name ] ;
+ # Do not modify produced target names.
+ local cloned-target = [ class.new file-target $(n) exact :
+ [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
+ local d = [ $(target).dependencies ] ;
+ if $(d)
+ {
+ $(cloned-target).depends $(d) ;
+ }
+ $(cloned-target).root [ $(target).root ] ;
+ $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
+
+ cloned-targets += $(cloned-target) ;
+ }
+
+ return $(cloned-action) ;
+}
+
+
+class subvariant
+{
+ import sequence ;
+ import type ;
+
+ rule __init__ ( main-target # The instance of main-target class.
+ : property-set # Properties requested for this target.
+ : sources *
+ : build-properties # Actually used properties.
+ : sources-usage-requirements # Properties propagated from sources.
+ : created-targets * ) # Top-level created targets.
+ {
+ self.main-target = $(main-target) ;
+ self.properties = $(property-set) ;
+ self.sources = $(sources) ;
+ self.build-properties = $(build-properties) ;
+ self.sources-usage-requirements = $(sources-usage-requirements) ;
+ self.created-targets = $(created-targets) ;
+
+ # Pre-compose a list of other dependency graphs this one depends on.
+ local deps = [ $(build-properties).get <implicit-dependency> ] ;
+ for local d in $(deps)
+ {
+ self.other-dg += [ $(d:G=).creating-subvariant ] ;
+ }
+
+ self.other-dg = [ sequence.unique $(self.other-dg) ] ;
+ }
+
+ rule main-target ( )
+ {
+ return $(self.main-target) ;
+ }
+
+ rule created-targets ( )
+ {
+ return $(self.created-targets) ;
+ }
+
+ rule requested-properties ( )
+ {
+ return $(self.properties) ;
+ }
+
+ rule build-properties ( )
+ {
+ return $(self.build-properties) ;
+ }
+
+ rule sources-usage-requirements ( )
+ {
+ return $(self.sources-usage-requirements) ;
+ }
+
+ rule set-usage-requirements ( usage-requirements )
+ {
+ self.usage-requirements = $(usage-requirements) ;
+ }
+
+ rule usage-requirements ( )
+ {
+ return $(self.usage-requirements) ;
+ }
+
+ # Returns all targets referenced by this subvariant, either directly or
+ # indirectly, and either as sources, or as dependency properties. Targets
+ # referred to using the dependency property are returned as properties, not
+ # targets.
+ #
+ rule all-referenced-targets ( theset )
+ {
+ # Find directly referenced targets.
+ local deps = [ $(self.build-properties).dependency ] ;
+ local all-targets = $(self.sources) $(deps) ;
+
+ # Find other subvariants.
+ local r ;
+ for local t in $(all-targets)
+ {
+ if ! [ $(theset).contains $(t) ]
+ {
+ $(theset).add $(t) ;
+ r += [ $(t:G=).creating-subvariant ] ;
+ }
+ }
+ r = [ sequence.unique $(r) ] ;
+ for local s in $(r)
+ {
+ if $(s) != $(__name__)
+ {
+ $(s).all-referenced-targets $(theset) ;
+ }
+ }
+ }
+
+ # Returns the properties specifying implicit include paths to generated
+ # headers. This traverses all targets in this subvariant and subvariants
+ # referred to by <implicit-dependency> properties. For all targets of type
+ # 'target-type' (or for all targets, if 'target-type' is not specified), the
+ # result will contain <$(feature)>path-to-that-target.
+ #
+ rule implicit-includes ( feature : target-type ? )
+ {
+ local key = ii$(feature)-$(target-type:E="") ;
+ if ! $($(key))-is-not-empty
+ {
+ local target-paths = [ all-target-directories $(target-type) ] ;
+ target-paths = [ sequence.unique $(target-paths) ] ;
+ local result = $(target-paths:G=$(feature)) ;
+ if ! $(result)
+ {
+ result = "" ;
+ }
+ $(key) = $(result) ;
+ }
+ if $($(key)) = ""
+ {
+ return ;
+ }
+ else
+ {
+ return $($(key)) ;
+ }
+ }
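+ # For example (illustrative), [ $(subvariant).implicit-includes include : H ]
+ # could return <include>bin/gcc/debug when a generated header of type H was
+ # created in that directory by this dependency graph.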
+
+ rule all-target-directories ( target-type ? )
+ {
+ if ! $(self.target-directories)
+ {
+ compute-target-directories $(target-type) ;
+ }
+ return $(self.target-directories) ;
+ }
+
+ rule compute-target-directories ( target-type ? )
+ {
+ local result ;
+ for local t in $(self.created-targets)
+ {
+ # Skip targets of the wrong type.
+ local type = [ $(t).type ] ;
+ if ! $(target-type) ||
+ ( $(type) && [ type.is-derived $(type) $(target-type) ] )
+ {
+ result = [ sequence.merge $(result) : [ $(t).path ] ] ;
+ }
+ }
+ for local d in $(self.other-dg)
+ {
+ result += [ $(d).all-target-directories $(target-type) ] ;
+ }
+ self.target-directories = $(result) ;
+ }
+}
diff --git a/tools/build/src/build/virtual_target.py b/tools/build/src/build/virtual_target.py
new file mode 100644
index 0000000000..ac6703056b
--- /dev/null
+++ b/tools/build/src/build/virtual_target.py
@@ -0,0 +1,1107 @@
+# Status: ported.
+# Base revision: 64488.
+#
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# Implements virtual targets, which correspond to actual files created during
+# the build, but are not yet targets in the Jam sense. They are needed, for
+# example, when searching for possible transformation sequences, when it is not
+# known whether a particular target should be created at all.
+#
+#
+# +--------------------------+
+# | VirtualTarget |
+# +==========================+
+# | actualize |
+# +--------------------------+
+# | actualize_action() = 0 |
+# | actualize_location() = 0 |
+# +----------------+---------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# +---------------------+ +-------+--------------+
+# | Action | | AbstractFileTarget |
+# +=====================| * +======================+
+# | action_name | +--+ action |
+# | properties | | +----------------------+
+# +---------------------+--+ | actualize_action() |
+# | actualize() |0..1 +-----------+----------+
+# | path() | |
+# | adjust_properties() | sources |
+# | actualize_sources() | targets |
+# +------+--------------+ ^
+# | / \
+# ^ +-+-+
+# / \ |
+# +-+-+ +-------------+-------------+
+# | | |
+# | +------+---------------+ +--------+-------------+
+# | | FileTarget | | SearchedLibTarget |
+# | +======================+ +======================+
+# | | actualize-location() | | actualize-location() |
+# | +----------------------+ +----------------------+
+# |
+# +-+------------------------------+
+# | |
+# +----+----------------+ +---------+-----------+
+# | CompileAction | | LinkAction |
+# +=====================+ +=====================+
+# | adjust_properties() | | adjust_properties() |
+# +---------------------+ | actualize_sources() |
+# +---------------------+
+#
+# The 'CompileAction' and 'LinkAction' classes are not defined here but in
+# the builtin.jam modules. They are shown in the diagram to give
+# the big picture.
+
+import bjam
+
+import re
+import os.path
+import string
+import types
+
+from b2.util import path, utility, set
+from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
+from b2.util.sequence import unique
+from b2.tools import common
+from b2.exceptions import *
+import b2.build.type
+import b2.build.property_set as property_set
+
+import b2.build.property as property
+
+from b2.manager import get_manager
+from b2.util import bjam_signature
+
+__re_starts_with_at = re.compile ('^@(.*)')
+
+class VirtualTargetRegistry:
+ def __init__ (self, manager):
+ self.manager_ = manager
+
+ # A cache for FileTargets
+ self.files_ = {}
+
+ # A cache for targets.
+ self.cache_ = {}
+
+ # A map of actual names to virtual targets.
+ # Used to make sure we do not associate the same
+ # actual target with two virtual targets.
+ self.actual_ = {}
+
+ self.recent_targets_ = []
+
+ # All targets ever registered
+ self.all_targets_ = []
+
+ self.next_id_ = 0
+
+ def register (self, target):
+ """ Registers a new virtual target. Checks if there is already a registered target with the same
+ name, type, project and subvariant properties, and also with the same sources
+ and an equal action. If such a target is found it is returned and 'target' is not registered.
+ Otherwise, 'target' is registered and returned.
+ """
+ if target.path():
+ signature = target.path() + "-" + target.name()
+ else:
+ signature = "-" + target.name()
+
+ result = None
+ if not self.cache_.has_key (signature):
+ self.cache_ [signature] = []
+
+ for t in self.cache_ [signature]:
+ a1 = t.action ()
+ a2 = target.action ()
+
+ # TODO: why are we checking for not result?
+ if not result:
+ if not a1 and not a2:
+ result = t
+ else:
+ if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources ():
+ ps1 = a1.properties ()
+ ps2 = a2.properties ()
+ p1 = ps1.base () + ps1.free () +\
+ b2.util.set.difference(ps1.dependency(), ps1.incidental())
+ p2 = ps2.base () + ps2.free () +\
+ b2.util.set.difference(ps2.dependency(), ps2.incidental())
+ if p1 == p2:
+ result = t
+
+ if not result:
+ self.cache_ [signature].append (target)
+ result = target
+
+ # TODO: Don't append if we found pre-existing target?
+ self.recent_targets_.append(result)
+ self.all_targets_.append(result)
+
+ return result
+
+ def from_file (self, file, file_location, project):
+ """ Creates a virtual target with an appropriate name and type from 'file'.
+ If a target with that name in that project was already created, returns that already
+ created target.
+ TODO: a more correct way would be to compute the path to the file, based on name and source location
+ for the project, and use that path to determine if the target was already created.
+ TODO: passing a project with all virtual targets is starting to be annoying.
+ """
+ # Check if we've created a target corresponding to this file.
+ path = os.path.join(os.getcwd(), file_location, file)
+ path = os.path.normpath(path)
+
+ if self.files_.has_key (path):
+ return self.files_ [path]
+
+ file_type = b2.build.type.type (file)
+
+ result = FileTarget (file, file_type, project,
+ None, file_location)
+ self.files_ [path] = result
+
+ return result
+
+ def recent_targets(self):
+ """Each target returned by 'register' is added to the list of
+ 'recent-targets' returned by this function. This allows us to find all
+ virtual targets created when building a given main target, even those
+ used only as intermediate targets."""
+
+ return self.recent_targets_
+
+ def clear_recent_targets(self):
+ self.recent_targets_ = []
+
+ def all_targets(self):
+ # Returns all virtual targets ever created
+ return self.all_targets_
+
+ # Returns all targets from 'targets' with types
+ # equal to 'type' or derived from it.
+ def select_by_type(self, type, targets):
+ return [t for t in targets if b2.build.type.is_derived(t.type(), type)]
+
+ def register_actual_name (self, actual_name, virtual_target):
+ if self.actual_.has_key (actual_name):
+ cs1 = self.actual_ [actual_name].creating_subvariant ()
+ cs2 = virtual_target.creating_subvariant ()
+ cmt1 = cs1.main_target ()
+ cmt2 = cs2.main_target ()
+
+ action1 = self.actual_ [actual_name].action ()
+ action2 = virtual_target.action ()
+
+ properties_added = []
+ properties_removed = []
+ if action1 and action2:
+ p1 = action1.properties ()
+ p1 = p1.raw ()
+ p2 = action2.properties ()
+ p2 = p2.raw ()
+
+ properties_removed = set.difference (p1, p2)
+ if not properties_removed: properties_removed = "none"
+
+ properties_added = set.difference (p2, p1)
+ if not properties_added: properties_added = "none"
+
+ # FIXME: Revive printing of real location.
+ get_manager().errors()(
+ "Duplicate name of actual target: '%s'\n"
+ "previous virtual target '%s'\n"
+ "created from '%s'\n"
+ "another virtual target '%s'\n"
+ "created from '%s'\n"
+ "added properties: '%s'\n"
+ "removed properties: '%s'\n"
+ % (actual_name,
+ self.actual_ [actual_name], "loc", #cmt1.location (),
+ virtual_target,
+ "loc", #cmt2.location (),
+ properties_added, properties_removed))
+
+ else:
+ self.actual_ [actual_name] = virtual_target
+
+
+ def add_suffix (self, specified_name, file_type, prop_set):
+ """ Appends the suffix appropriate to 'type/property_set' combination
+ to the specified name and returns the result.
+ """
+ suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
+
+ if suffix:
+ return specified_name + '.' + suffix
+
+ else:
+ return specified_name
+
+class VirtualTarget:
+ """ Potential target. It can be converted into a jam target and used in
+ building, if needed. However, it can also be dropped, which allows
+ searching for different transformations and selecting only one.
+ name: name of this target.
+ project: project to which this target belongs.
+ """
+ def __init__ (self, name, project):
+ self.name_ = name
+ self.project_ = project
+ self.dependencies_ = []
+ self.always_ = False
+
+ # Caches whether dependencies for scanners have already been set.
+ self.made_ = {}
+
+ def manager(self):
+ return self.project_.manager()
+
+ def virtual_targets(self):
+ return self.manager().virtual_targets()
+
+ def name (self):
+ """ Name of this target.
+ """
+ return self.name_
+
+ def project (self):
+ """ Project of this target.
+ """
+ return self.project_
+
+ def depends (self, d):
+ """ Adds additional instances of 'VirtualTarget' that this
+ one depends on.
+ """
+ self.dependencies_ = sorted(unique(self.dependencies_ + d))
+
+ def dependencies (self):
+ return self.dependencies_
+
+ def always(self):
+ self.always_ = True
+
+ def actualize (self, scanner = None):
+ """ Generates all the actual targets and sets up build actions for
+ this target.
+
+ If 'scanner' is specified, creates an additional target
+ with the same location as actual target, which will depend on the
+ actual target and be associated with 'scanner'. That additional
+ target is returned. See the docs (#dependency_scanning) for rationale.
+ Target must correspond to a file if 'scanner' is specified.
+
+ If scanner is not specified, then actual target is returned.
+ """
+ actual_name = self.actualize_no_scanner ()
+
+ if self.always_:
+ bjam.call("ALWAYS", actual_name)
+
+ if not scanner:
+ return actual_name
+
+ else:
+ # Add the scanner instance to the grist for name.
+ g = '-'.join ([ungrist(get_grist(actual_name)), str(id(scanner))])
+
+ name = replace_grist (actual_name, '<' + g + '>')
+
+ if not self.made_.has_key (name):
+ self.made_ [name] = True
+
+ self.project_.manager ().engine ().add_dependency (name, actual_name)
+
+ self.actualize_location (name)
+
+ self.project_.manager ().scanners ().install (scanner, name, str (self))
+
+ return name
+
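+ # A minimal usage sketch (variable names are hypothetical): generators
+ # usually call actualize() without a scanner and get the plain jam target
+ # name back, while Action.actualize_source_type() may pass a scanner
+ # obtained from b2.build.type.get_scanner(), producing an extra
+ # scanner-bound target:
+ #
+ #   actual = vt.actualize()
+ #   scanned = vt.actualize(c_scanner)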
+# private: (overridables)
+
+ def actualize_action (self, target):
+ """ Sets up build actions for 'target'. Should call appropriate rules
+ and set target variables.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def actualize_location (self, target):
+ """ Sets up variables on 'target' which specify its location.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def path (self):
+ """ If the target is a generated one, returns the path where it will be
+ generated. Otherwise, returns an empty list.
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+ def actual_name (self):
+ """ Returns the actual target name that should be used
+ (for the case when no scanner is involved).
+ """
+ raise BaseException ("method should be defined in derived classes")
+
+
+class AbstractFileTarget (VirtualTarget):
+ """ Target which corresponds to a file. The exact mapping to a file
+ is not yet specified in this class. (TODO: Actually, the class name
+ could be better...)
+
+ May be a source file (when no action is specified), or a
+ derived file (otherwise).
+
+ The target's grist is a concatenation of the project's location,
+ the action's properties (for derived files) and, optionally,
+ a value identifying the main target.
+
+ exact: If non-empty, the name is exactly the name the
+ created file should have. Otherwise, the '__init__'
+ method will add a suffix obtained from 'type' by
+ calling 'type.generated-target-suffix'.
+
+ type: optional type of this target.
+ """
+ def __init__ (self, name, type, project, action = None, exact=False):
+ VirtualTarget.__init__ (self, name, project)
+
+ self.type_ = type
+
+ self.action_ = action
+ self.exact_ = exact
+
+ if action:
+ action.add_targets ([self])
+
+ if self.type_ and not exact:
+ self.__adjust_name (name)
+
+
+ self.actual_name_ = None
+ self.path_ = None
+ self.intermediate_ = False
+ self.creating_subvariant_ = None
+
+ # True if this is a root target.
+ self.root_ = False
+
+ def type (self):
+ return self.type_
+
+ def set_path (self, path):
+ """ Sets the path. When generating target name, it will override any path
+ computation from properties.
+ """
+ self.path_ = os.path.normpath(path)
+
+ def action (self):
+ """ Returns the action.
+ """
+ return self.action_
+
+ def root (self, set = None):
+ """ Sets/gets the 'root' flag. A target is a root if it directly corresponds to some
+ variant of a main target.
+ """
+ if set:
+ self.root_ = True
+ return self.root_
+
+ def creating_subvariant (self, s = None):
+ """ Gets or sets the subvariant which created this target. The subvariant
+ is set when the target is brought into existence and is never changed
+ after that. In particular, if a target is shared between subvariants, only
+ the first one is stored.
+ s: If specified, specifies the value to set,
+ which should be an instance of the 'Subvariant' class.
+ """
+ if s and not self.creating_subvariant ():
+ if self.creating_subvariant ():
+ raise BaseException ("Attempt to change 'dg'")
+
+ else:
+ self.creating_subvariant_ = s
+
+ return self.creating_subvariant_
+
+ def actualize_action (self, target):
+ if self.action_:
+ self.action_.actualize ()
+
+ # Return a human-readable representation of this target
+ #
+ # If this target has an action, that's:
+ #
+ # { <action-name>-<self.name>.<self.type> <action-sources>... }
+ #
+ # otherwise, it's:
+ #
+ # { <self.name>.<self.type> }
+ #
+ def str(self):
+ a = self.action()
+
+ name_dot_type = self.name_ + "." + self.type_
+
+ if a:
+ action_name = a.action_name()
+ ss = [ s.str() for s in a.sources()]
+
+ return "{ %s-%s %s}" % (action_name, name_dot_type, str(ss))
+ else:
+ return "{ " + name_dot_type + " }"
+
+# private:
+
+ def actual_name (self):
+ if not self.actual_name_:
+ self.actual_name_ = '<' + self.grist() + '>' + os.path.normpath(self.name_)
+
+ return self.actual_name_
+
+ def grist (self):
+ """Helper to 'actual_name', above. Computes a unique prefix used to distinguish
+ this target from other targets with the same name which create a different
+ file.
+ """
+ # Depending on target, there may be different approaches to generating
+ # unique prefixes. We'll generate prefixes in the form
+ # <one letter approach code> <the actual prefix>
+ path = self.path ()
+
+ if path:
+ # The target will be generated to a known path. Just use the path
+ # for identification, since path is as unique as it can get.
+ return 'p' + path
+
+ else:
+ # The file is either a source file, which will be searched for, or not a
+ # file at all. Use the location of the project for distinguishing.
+ project_location = self.project_.get ('location')
+ path_components = b2.util.path.split(project_location)
+ location_grist = '!'.join (path_components)
+
+ if self.action_:
+ ps = self.action_.properties ()
+ property_grist = ps.as_path ()
+ # 'property_grist' can be empty when 'ps' is an empty
+ # property set.
+ if property_grist:
+ location_grist = location_grist + '/' + property_grist
+
+ return 'l' + location_grist
+
+ def __adjust_name(self, specified_name):
+ """Given the target name specified in the constructor, returns the
+ name which should really be used, by looking at the <tag> properties.
+ The tag properties come in two flavours:
+ - <tag>value,
+ - <tag>@rule-name
+ In the first case, the value is just added to the name.
+ In the second case, the specified rule is called with the specified name,
+ target type and properties and should return the new name.
+ If no <tag> property is specified, or the rule specified by
+ <tag> returns nothing, returns the result of calling
+ virtual-target.add-suffix."""
+
+ if self.action_:
+ ps = self.action_.properties()
+ else:
+ ps = property_set.empty()
+
+ # FIXME: I'm not sure how this is used, need to check with
+ # Rene to figure out how to implement
+ #~ We add ourselves to the properties so that any tag rule can get
+ #~ more direct information about the target than just that available
+ #~ through the properties. This is useful in implementing
+ #~ name changes based on the sources of the target. For example to
+ #~ make unique names of object files based on the source file.
+ #~ --grafik
+ #ps = property_set.create(ps.raw() + ["<target>%s" % "XXXX"])
+ #ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
+
+ tag = ps.get("<tag>")
+
+ if tag:
+
+ if len(tag) > 1:
+ get_manager().errors()(
+ """<tag>@rulename is present but is not the only <tag> feature""")
+
+ tag = tag[0]
+ if callable(tag):
+ self.name_ = tag(specified_name, self.type_, ps)
+ else:
+ if not tag[0] == '@':
+ get_manager().errors()("""The value of the <tag> feature must be '@rule-name'""")
+
+ exported_ps = b2.util.value_to_jam(ps, methods=True)
+ self.name_ = b2.util.call_jam_function(
+ tag[1:], specified_name, self.type_, exported_ps)
+ if self.name_:
+ self.name_ = self.name_[0]
+
+ # If there's no tag or the tag rule returned nothing.
+ if not tag or not self.name_:
+ self.name_ = add_prefix_and_suffix(specified_name, self.type_, ps)
+
+ def actualize_no_scanner(self):
+ name = self.actual_name()
+
+ # Do anything only on the first invocation
+ if not self.made_:
+ self.made_[name] = True
+
+ if self.action_:
+ # For a non-derived target, we do not care if there
+ # are several virtual targets that refer to the same name.
+ # One case when this is unavoidable is when the file name is
+ # main.cpp and two targets have types CPP (for compiling)
+ # and MOCCABLE_CPP (for conversion to H via Qt tools).
+ self.virtual_targets().register_actual_name(name, self)
+
+ for i in self.dependencies_:
+ self.manager().engine().add_dependency(name, i.actualize())
+
+ self.actualize_location(name)
+ self.actualize_action(name)
+
+ return name
+
+@bjam_signature((["specified_name"], ["type"], ["property_set"]))
+def add_prefix_and_suffix(specified_name, type, property_set):
+ """Prepends the prefix and appends the suffix appropriate to the
+ 'type/property-set' combination to the specified name and returns the result."""
+
+ property_set = b2.util.jam_to_value_maybe(property_set)
+
+ suffix = ""
+ if type:
+ suffix = b2.build.type.generated_target_suffix(type, property_set)
+
+ # Handle suffixes for which no leading dot is desired. Those are
+ # specified by enclosing them in <...>. Needed by python so it
+ # can create "_d.so" extensions, for example.
+ if get_grist(suffix):
+ suffix = ungrist(suffix)
+ elif suffix:
+ suffix = "." + suffix
+
+ prefix = ""
+ if type:
+ prefix = b2.build.type.generated_target_prefix(type, property_set)
+
+ if specified_name.startswith(prefix):
+ prefix = ""
+
+ if not prefix:
+ prefix = ""
+ if not suffix:
+ suffix = ""
+ return prefix + specified_name + suffix
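+# Illustrative results, assuming the stock type definitions on a typical
+# gcc/Linux setup (actual prefixes and suffixes depend on the registered types
+# and the property set):
+#
+#   add_prefix_and_suffix("hello", "EXE", ps)         # -> "hello"
+#   add_prefix_and_suffix("hello", "STATIC_LIB", ps)  # -> "libhello.a"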
+
+
+class FileTarget (AbstractFileTarget):
+ """ File target with explicitly known location.
+
+ The file path is determined as
+ - the value passed to the 'set_path' method, if any
+ - for derived files, the project's build dir, joined with components
+ that describe the action's properties. If the free properties
+ are not equal to the project's reference properties
+ an element with the name of the main target is added.
+ - for source files, the project's source dir
+
+ The file suffix is
+ - the value passed to the 'suffix' method, if any, or
+ - the suffix which corresponds to the target's type.
+ """
+ def __init__ (self, name, type, project, action = None, path=None, exact=False):
+ AbstractFileTarget.__init__ (self, name, type, project, action, exact)
+
+ self.path_ = path
+
+ def __str__(self):
+ if self.type_:
+ return self.name_ + "." + self.type_
+ else:
+ return self.name_
+
+ def clone_with_different_type(self, new_type):
+ return FileTarget(self.name_, new_type, self.project_,
+ self.action_, self.path_, exact=True)
+
+ def actualize_location (self, target):
+ engine = self.project_.manager_.engine ()
+
+ if self.action_:
+ # This is a derived file.
+ path = self.path ()
+ engine.set_target_variable (target, 'LOCATE', path)
+
+ # Make sure the path exists.
+ engine.add_dependency (target, path)
+ common.mkdir(engine, path)
+
+ # It's possible that the target name includes a directory
+ # too, for example when installing headers. Create that
+ # directory.
+ d = os.path.dirname(get_value(target))
+ if d:
+ d = os.path.join(path, d)
+ engine.add_dependency(target, d)
+ common.mkdir(engine, d)
+
+ # For a real file target, we create a fake target that
+ # depends on the real target. This allows us to run
+ #
+ # bjam hello.o
+ #
+ # without trying to guess the name of the real target.
+ # Note that the target has no directory name, and a special
+ # grist <e>.
+ #
+ # First, that means that "bjam hello.o" will build all
+ # known hello.o targets.
+ # Second, the <e> grist makes sure this target will not be confused
+ # with other targets. For example, if we have a subdir 'test'
+ # with a target 'test' in it that includes a 'test.o' file,
+ # then the target for the directory will be just 'test', the target
+ # for test.o will be <ptest/bin/gcc/debug>test.o, and the target
+ # we create below will be <e>test.o.
+ engine.add_dependency("<e>%s" % get_value(target), target)
+
+ # Allow bjam <path-to-file>/<file> to work. This won't catch all
+ # possible ways to refer to the path (relative/absolute, extra ".",
+ # various ".."), but should help in obvious cases.
+ engine.add_dependency("<e>%s" % (os.path.join(path, get_value(target))), target)
+
+ else:
+ # This is a source file.
+ engine.set_target_variable (target, 'SEARCH', self.project_.get ('source-location'))
+
+
+ def path (self):
+ """ Returns the directory for this target.
+ """
+ if not self.path_:
+ if self.action_:
+ p = self.action_.properties ()
+ (target_path, relative_to_build_dir) = p.target_path ()
+
+ if relative_to_build_dir:
+ # Indicates that the path is relative to
+ # build dir.
+ target_path = os.path.join (self.project_.build_dir (), target_path)
+
+ # Store the computed path, so that it's not recomputed
+ # any more
+ self.path_ = target_path
+
+ return os.path.normpath(self.path_)
+
+
+class NotFileTarget(AbstractFileTarget):
+
+ def __init__(self, name, project, action):
+ AbstractFileTarget.__init__(self, name, None, project, action)
+
+ def path(self):
+ """Returns nothing, to indicate that target path is not known."""
+ return None
+
+ def actualize_location(self, target):
+ bjam.call("NOTFILE", target)
+ bjam.call("ALWAYS", target)
+ bjam.call("NOUPDATE", target)
+
+
+class Action:
+ """ Class which represents an action.
+ Both 'targets' and 'sources' should list instances of 'VirtualTarget'.
+ Action name should name a rule with this prototype
+ rule action_name ( targets + : sources * : properties * )
+ Targets and sources are passed as actual jam targets. The rule must
+ not establish additional dependency relationships, but should do everything else.
+ """
+ def __init__ (self, manager, sources, action_name, prop_set):
+ assert(isinstance(prop_set, property_set.PropertySet))
+ assert type(sources) == types.ListType
+ self.sources_ = sources
+ self.action_name_ = action_name
+ if not prop_set:
+ prop_set = property_set.empty()
+ self.properties_ = prop_set
+ if not all(isinstance(v, VirtualTarget) for v in prop_set.get('implicit-dependency')):
+ import pdb
+ pdb.set_trace()
+
+ self.manager_ = manager
+ self.engine_ = self.manager_.engine ()
+ self.targets_ = []
+
+ # Indicates whether this has been actualized or not.
+ self.actualized_ = False
+
+ self.dependency_only_sources_ = []
+ self.actual_sources_ = []
+
+
+ def add_targets (self, targets):
+ self.targets_ += targets
+
+
+ def replace_targets (self, old_targets, new_targets):
+ self.targets_ = [t for t in self.targets_ if not t in old_targets] + new_targets
+
+ def targets (self):
+ return self.targets_
+
+ def sources (self):
+ return self.sources_
+
+ def action_name (self):
+ return self.action_name_
+
+ def properties (self):
+ return self.properties_
+
+ def actualize (self):
+ """ Generates actual build instructions.
+ """
+ if self.actualized_:
+ return
+
+ self.actualized_ = True
+
+ ps = self.properties ()
+ properties = self.adjust_properties (ps)
+
+
+ actual_targets = []
+
+ for i in self.targets ():
+ actual_targets.append (i.actualize ())
+
+ self.actualize_sources (self.sources (), properties)
+
+ self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_)
+
+ # FIXME: check the comment below. Was self.action_name_ [1]
+ # Action name can include additional rule arguments, which should not
+ # be passed to 'set-target-variables'.
+ # FIXME: breaking circular dependency
+ import toolset
+ toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties)
+
+ engine = self.manager_.engine ()
+
+ # FIXME: this is supposed to help --out-xml option, but we don't
+ # implement that now, and anyway, we should handle it in Python,
+ # not by putting variables on bjam-level targets.
+ bjam.call("set-target-variable", actual_targets, ".action", repr(self))
+
+ self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_,
+ properties)
+
+ # Since we set up the creating action here, we also set up
+ # the action for cleaning up.
+ self.manager_.engine ().set_update_action ('common.Clean', 'clean-all',
+ actual_targets)
+
+ return actual_targets
+
+ def actualize_source_type (self, sources, prop_set):
+ """ Helper for 'actualize_sources'.
+ For each passed source, actualizes it with the appropriate scanner.
+ Returns the actualized virtual targets.
+ """
+ result = []
+ for i in sources:
+ scanner = None
+
+# FIXME: what's this?
+# if isinstance (i, str):
+# i = self.manager_.get_object (i)
+
+ if i.type ():
+ scanner = b2.build.type.get_scanner (i.type (), prop_set)
+
+ r = i.actualize (scanner)
+ result.append (r)
+
+ return result
+
+ def actualize_sources (self, sources, prop_set):
+ """ Creates actual jam targets for sources. Initializes two member
+ variables:
+ 'self.actual_sources_' -- sources which are passed to the updating action
+ 'self.dependency_only_sources_' -- sources which are made dependencies, but
+ are not used otherwise.
+
+ New values will be *appended* to the variables. They may already be
+ non-empty, if the caller wants it.
+ """
+ dependencies = self.properties_.get ('<dependency>')
+
+ self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
+ self.actual_sources_ += self.actualize_source_type (sources, prop_set)
+
+ # This is used to help bjam find dependencies in generated headers and
+ # other main targets.
+ # Say:
+ #
+ # make a.h : ....... ;
+ # exe hello : hello.cpp : <implicit-dependency>a.h ;
+ #
+ # However, for bjam to find the dependency the generated target must
+ # be actualized (i.e. have the jam target). In the above case,
+ # if we're building just hello ("bjam hello"), 'a.h' won't be
+ # actualized unless we do it here.
+ implicit = self.properties_.get("<implicit-dependency>")
+
+ for i in implicit:
+ i.actualize()
+
+ def adjust_properties (self, prop_set):
+ """ Determines real properties when trying to build with 'properties'.
+ This is the last chance to fix properties, for example to adjust includes
+ to get generated headers correctly. The default implementation returns
+ its argument.
+ """
+ return prop_set
+
+
+class NullAction (Action):
+ """ Action class which does nothing --- it produces the targets with
+ specific properties out of nowhere. It's needed to distinguish virtual
+ targets with different properties that are known to exist, and have no
+ actions which create them.
+ """
+ def __init__ (self, manager, prop_set):
+ Action.__init__ (self, manager, [], None, prop_set)
+
+ def actualize (self):
+ if not self.actualized_:
+ self.actualized_ = True
+
+ for i in self.targets ():
+ i.actualize ()
+
+class NonScanningAction(Action):
+ """Class which acts exactly like 'action', except that the sources
+ are not scanned for dependencies."""
+
+ def __init__(self, sources, action_name, property_set):
+ #FIXME: should the manager parameter of Action.__init__
+ #be removed? -- Steven Watanabe
+ Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
+
+ def actualize_source_type(self, sources, property_set):
+
+ result = []
+ for s in sources:
+ result.append(s.actualize())
+ return result
+
+def traverse (target, include_roots = False, include_sources = False):
+ """ Traverses the dependency graph of 'target' and returns all targets that will
+ be created before this one is created. If the root of some dependency graph is
+ found during traversal, it is either included or not, depending on the
+ value of 'include_roots'. In either case, the sources of a root are not traversed.
+ """
+ result = []
+
+ if target.action ():
+ action = target.action ()
+
+ # This includes 'target' as well
+ result += action.targets ()
+
+ for t in action.sources ():
+
+ # FIXME:
+ # TODO: see comment in Manager.register_object ()
+ #if not isinstance (t, VirtualTarget):
+ # t = target.project_.manager_.get_object (t)
+
+ if not t.root ():
+ result += traverse (t, include_roots, include_sources)
+
+ elif include_roots:
+ result.append (t)
+
+ elif include_sources:
+ result.append (target)
+
+ return result
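+# A usage sketch (targets are hypothetical): for an executable linked against a
+# library built as its own main target,
+#
+#   traverse(exe_target, include_roots=True)
+#
+# returns the executable's virtual targets plus its intermediate object files,
+# stopping at, but including, the library target (a root), whose own sources
+# are not visited.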
+
+def clone_action (action, new_project, new_action_name, new_properties):
+ """Takes an 'action' instance and creates a new instance of it
+ and of all produced targets. The rule-name and properties are set
+ to 'new-rule-name' and 'new-properties', if those are specified.
+ Returns the cloned action."""
+
+ if not new_action_name:
+ new_action_name = action.action_name()
+
+ if not new_properties:
+ new_properties = action.properties()
+
+ cloned_action = action.__class__(action.manager_, action.sources(), new_action_name,
+ new_properties)
+
+ cloned_targets = []
+ for target in action.targets():
+
+ n = target.name()
+ # Do not modify the names of the produced targets.
+ cloned_target = FileTarget(n, target.type(), new_project,
+ cloned_action, exact=True)
+
+ d = target.dependencies()
+ if d:
+ cloned_target.depends(d)
+ cloned_target.root(target.root())
+ cloned_target.creating_subvariant(target.creating_subvariant())
+
+ cloned_targets.append(cloned_target)
+
+ return cloned_action
+
+class Subvariant:
+
+ def __init__ (self, main_target, prop_set, sources, build_properties, sources_usage_requirements, created_targets):
+ """
+ main_target: The instance of MainTarget class
+ prop_set: Properties requested for this target
+ sources:
+ build_properties: Actually used properties
+ sources_usage_requirements: Properties propagated from sources
+ created_targets: Top-level created targets
+ """
+ self.main_target_ = main_target
+ self.properties_ = prop_set
+ self.sources_ = sources
+ self.build_properties_ = build_properties
+ self.sources_usage_requirements_ = sources_usage_requirements
+ self.created_targets_ = created_targets
+
+ self.usage_requirements_ = None
+
+ # Pre-compose the list of other dependency graphs, on which this one
+ # depends
+ deps = build_properties.get('<implicit-dependency>')
+
+ self.other_dg_ = []
+ for d in deps:
+ self.other_dg_.append(d.creating_subvariant ())
+
+ self.other_dg_ = unique (self.other_dg_)
+
+ self.implicit_includes_cache_ = {}
+ self.target_directories_ = None
+
+ def main_target (self):
+ return self.main_target_
+
+ def created_targets (self):
+ return self.created_targets_
+
+ def requested_properties (self):
+ return self.properties_
+
+ def build_properties (self):
+ return self.build_properties_
+
+ def sources_usage_requirements (self):
+ return self.sources_usage_requirements_
+
+ def set_usage_requirements (self, usage_requirements):
+ self.usage_requirements_ = usage_requirements
+
+ def usage_requirements (self):
+ return self.usage_requirements_
+
+ def all_referenced_targets(self, result):
+ """Returns all targets referenced by this subvariant,
+ either directly or indirectly, and either as sources,
+ or as dependency properties. Targets referred to via a
+ dependency property are returned as properties, not targets."""
+
+ # Find directly referenced targets.
+ deps = self.build_properties().dependency()
+ all_targets = self.sources_ + deps
+
+ # Find other subvariants.
+ r = []
+ for e in all_targets:
+ if not e in result:
+ result.add(e)
+ if isinstance(e, property.Property):
+ t = e.value()
+ else:
+ t = e
+
+ # FIXME: how can this be?
+ cs = t.creating_subvariant()
+ if cs:
+ r.append(cs)
+ r = unique(r)
+ for s in r:
+ if s != self:
+ s.all_referenced_targets(result)
+
+
+ def implicit_includes (self, feature, target_type):
+ """ Returns the properties which specify implicit include paths to
+ generated headers. This traverses all targets in this subvariant,
+ and subvariants referred by <implcit-dependecy>properties.
+ For all targets which are of type 'target-type' (or for all targets,
+ if 'target_type' is not specified), the result will contain
+ <$(feature)>path-to-that-target.
+ """
+
+ if not target_type:
+ key = feature
+ else:
+ key = feature + "-" + target_type
+
+
+ result = self.implicit_includes_cache_.get(key)
+ if not result:
+ target_paths = self.all_target_directories(target_type)
+ target_paths = unique(target_paths)
+ result = ["<%s>%s" % (feature, p) for p in target_paths]
+ self.implicit_includes_cache_[key] = result
+
+ return result
+
+ def all_target_directories(self, target_type = None):
+ # TODO: does not appear to use target_type in deciding
+ # if we've computed this already.
+ if not self.target_directories_:
+ self.target_directories_ = self.compute_target_directories(target_type)
+ return self.target_directories_
+
+ def compute_target_directories(self, target_type=None):
+ result = []
+ for t in self.created_targets():
+ if not target_type or b2.build.type.is_derived(t.type(), target_type):
+ result.append(t.path())
+
+ for d in self.other_dg_:
+ result.extend(d.all_target_directories(target_type))
+
+ result = unique(result)
+ return result
diff --git a/tools/build/src/build_system.py b/tools/build/src/build_system.py
new file mode 100644
index 0000000000..c5757c8173
--- /dev/null
+++ b/tools/build/src/build_system.py
@@ -0,0 +1,860 @@
+# Status: mostly ported. Missing is --out-xml support, 'configure' integration
+# and some FIXME.
+# Base revision: 64351
+
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2005, 2006, 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+
+from b2.build.engine import Engine
+from b2.manager import Manager
+from b2.util.path import glob
+from b2.build import feature, property_set
+import b2.build.virtual_target
+from b2.build.targets import ProjectTarget
+from b2.util.sequence import unique
+import b2.build.build_request
+from b2.build.errors import ExceptionWithUserContext
+import b2.tools.common
+from b2.build.toolset import using
+
+import b2.build.project as project
+import b2.build.virtual_target as virtual_target
+import b2.build.build_request as build_request
+
+import b2.util.regex
+
+from b2.manager import get_manager
+from b2.util import cached
+from b2.util import option
+
+
+import bjam
+
+import os
+import sys
+import re
+
+################################################################################
+#
+# Module global data.
+#
+################################################################################
+
+# Flag indicating we should display additional debugging information related to
+# locating and loading Boost Build configuration files.
+debug_config = False
+
+# Cleaning is tricky. Say, if the user runs 'bjam --clean foo' where 'foo' is a
+# directory, then we want to clean targets which are in 'foo' as well as those
+# in any child Jamfiles under 'foo', but not in any unrelated Jamfiles. To
+# achieve this we collect a list of projects under which cleaning is allowed.
+project_targets = []
+
+# Virtual targets obtained when building the main targets referenced on the
+# command line. When running 'bjam --clean main_target' we want to clean only
+# files belonging to that main target, so we need to record which targets are
+# produced for it.
+
+# Was an XML dump requested?
+out_xml = False
+
+# Default toolset & version to be used in case no other toolset has been used
+# explicitly by either the loaded configuration files, the loaded project build
+# scripts or an explicit toolset request on the command line. If not specified,
+# an arbitrary default will be used based on the current host OS. This value,
+# while not strictly necessary, has been added to allow testing Boost-Build's
+# default toolset usage functionality.
+default_toolset = None
+default_toolset_version = None
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Returns the property set with the free features from the currently processed
+# build request.
+#
+def command_line_free_features():
+ return command_line_free_features
+
+# Sets the default toolset & version to be used in case no other toolset has
+# been used explicitly by either the loaded configuration files, the loaded
+# project build scripts or an explicit toolset request on the command line. For
+# more detailed information see the comment related to used global variables.
+#
+def set_default_toolset(toolset, version=None):
+ global default_toolset, default_toolset_version
+ default_toolset = toolset
+ default_toolset_version = version
+
+
+pre_build_hook = []
+
+def add_pre_build_hook(callable):
+ pre_build_hook.append(callable)
+
+post_build_hook = None
+
+def set_post_build_hook(callable):
+ global post_build_hook
+ post_build_hook = callable
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Returns actual Jam targets to be used for executing a clean request.
+#
+def actual_clean_targets(targets):
+
+ # Construct a list of projects explicitly detected as targets on this build
+ # system run. These are the projects under which cleaning is allowed.
+ for t in targets:
+ if isinstance(t, b2.build.targets.ProjectTarget):
+ project_targets.append(t.project_module())
+
+ # Construct a list of targets explicitly detected on this build system run
+ # as a result of building main targets.
+ targets_to_clean = set()
+ for t in results_of_main_targets:
+ # Do not include roots or sources.
+ targets_to_clean.update(virtual_target.traverse(t))
+
+ to_clean = []
+ for t in get_manager().virtual_targets().all_targets():
+
+ # Remove only derived targets.
+ if t.action():
+ p = t.project()
+ if t in targets_to_clean or should_clean_project(p.project_module()):
+ to_clean.append(t)
+
+ return [t.actualize() for t in to_clean]
+
+_target_id_split = re.compile("(.*)//(.*)")
+
+# Given a target id, try to find and return the corresponding target. This is
+# only invoked when there is no Jamfile in ".". This code somewhat duplicates
+# code in project-target.find but we can not reuse that code without a
+# project-targets instance.
+#
+def find_target(target_id):
+
+ projects = get_manager().projects()
+ m = _target_id_split.match(target_id)
+ if m:
+ pm = projects.find(m.group(1), ".")
+ else:
+ pm = projects.find(target_id, ".")
+
+ if pm:
+ result = projects.target(pm)
+
+ if m:
+ result = result.find(m.group(2))
+
+ return result
+
+def initialize_config_module(module_name, location=None):
+
+ get_manager().projects().initialize(module_name, location)
+
+# Helper rule used to load configuration files. Loads the first configuration
+# file with the given 'filename' at 'path' into module with name 'module-name'.
+# Not finding the requested file may or may not be treated as an error depending
+# on the must-find parameter. Returns a normalized path to the loaded
+# configuration file or nothing if no file was loaded.
+#
+def load_config(module_name, filename, paths, must_find=False):
+
+ if debug_config:
+ print "notice: Searching '%s' for '%s' configuration file '%s." \
+ % (paths, module_name, filename)
+
+ where = None
+ for path in paths:
+ t = os.path.join(path, filename)
+ if os.path.exists(t):
+ where = t
+ break
+
+ if where:
+ where = os.path.realpath(where)
+
+ if debug_config:
+ print "notice: Loading '%s' configuration file '%s' from '%s'." \
+ % (module_name, filename, where)
+
+ # Set the source location so that path-constant in config files
+ # with relative paths works. This matters most for
+ # project-config.jam, but may be used in other config files as
+ # well.
+ attributes = get_manager().projects().attributes(module_name)
+ attributes.set('source-location', os.path.dirname(where), True)
+ get_manager().projects().load_standalone(module_name, where)
+
+ else:
+ msg = "Configuration file '%s' not found in '%s'." % (filename, path)
+ if must_find:
+ get_manager().errors()(msg)
+
+ elif debug_config:
+ print msg
+
+ return where
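+
+# Illustrative only (not part of the original sources): a hypothetical call such
+# as the following would search the user's home directory and the Boost Build
+# path for user-config.jam and load the first match found:
+#
+#   load_config("user-config", "user-config.jam",
+#               [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH"))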
+
+# Loads all the configuration files used by Boost Build in the following order:
+#
+# -- test-config --
+# Loaded only if specified on the command-line using the --test-config
+# command-line parameter. It is ok for this file not to exist even if
+# specified. If this configuration file is loaded, regular site and user
+# configuration files will not be. If a relative path is specified, the file is
+# searched for relative to the current folder.
+#
+# -- site-config --
+# Always named site-config.jam. Will only be found if located on the system
+# root path (Windows), /etc (non-Windows), user's home folder or the Boost
+# Build path, in that order. Not loaded in case the test-config configuration
+# file is loaded or the --ignore-site-config command-line option is specified.
+#
+# -- user-config --
+# Named user-config.jam by default or may be named explicitly using the
+# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
+# variable. If named explicitly, the file is looked up relative to the current
+# working directory; if the default name is used, it is searched for in the
+# user's home directory and the Boost Build path, in that order. Not loaded in
+# case either the test-config configuration file is loaded or an empty file
+# name is explicitly specified. If the file name has been given explicitly then
+# the file must exist.
+#
+# Test configurations have been added primarily for use by Boost Build's
+# internal unit testing system but may be used freely in other places as well.
+#
+def load_configuration_files():
+
+ # Flag indicating that site configuration should not be loaded.
+ ignore_site_config = "--ignore-site-config" in sys.argv
+
+ initialize_config_module("test-config")
+ test_config = None
+ for a in sys.argv:
+ m = re.match("--test-config=(.*)$", a)
+ if m:
+ test_config = b2.util.unquote(m.group(1))
+ break
+
+ if test_config:
+ where = load_config("test-config", os.path.basename(test_config), [os.path.dirname(test_config)])
+ if where:
+ if debug_config:
+ print "notice: Regular site and user configuration files will"
+ print "notice: be ignored due to the test configuration being loaded."
+
+ user_path = [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH")
+ site_path = ["/etc"] + user_path
+ if os.name in ["nt"]:
+ site_path = [os.getenv("SystemRoot")] + user_path
+
+ if debug_config and not test_config and ignore_site_config:
+ print "notice: Site configuration files will be ignored due to the"
+ print "notice: --ignore-site-config command-line option."
+
+ initialize_config_module("site-config")
+ if not test_config and not ignore_site_config:
+ load_config('site-config', 'site-config.jam', site_path)
+
+ initialize_config_module('user-config')
+ if not test_config:
+
+ # Here, user_config has value of None if nothing is explicitly
+ # specified, and value of '' if user explicitly does not want
+ # to load any user config.
+ user_config = None
+ for a in sys.argv:
+ m = re.match("--user-config=(.*)$", a)
+ if m:
+ user_config = m.group(1)
+ break
+
+ if user_config is None:
+ user_config = os.getenv("BOOST_BUILD_USER_CONFIG")
+
+ # Special handling for the case when the OS does not strip the quotes
+ # around the file name, as is the case when using Cygwin bash.
+ user_config = b2.util.unquote(user_config)
+ explicitly_requested = user_config
+
+ if user_config is None:
+ user_config = "user-config.jam"
+
+ if user_config:
+ if explicitly_requested:
+
+ user_config = os.path.abspath(user_config)
+
+ if debug_config:
+ print "notice: Loading explicitly specified user configuration file:"
+ print " " + user_config
+
+ load_config('user-config', os.path.basename(user_config), [os.path.dirname(user_config)], True)
+ else:
+ load_config('user-config', os.path.basename(user_config), user_path)
+ else:
+ if debug_config:
+ print "notice: User configuration file loading explicitly disabled."
+
+ # We look for project-config.jam from "." upward. I am not sure this is
+ # 100% the right decision; we might as well check for it only alongside the
+ # Jamroot file. However:
+ # - We need to load project-config.jam before Jamroot.
+ # - We probably need to load project-config.jam even if there is no
+ #   Jamroot, e.g. to implement automake-style out-of-tree builds.
+ if os.path.exists("project-config.jam"):
+ file = ["project-config.jam"]
+ else:
+ file = b2.util.path.glob_in_parents(".", ["project-config.jam"])
+
+ if file:
+ initialize_config_module('project-config', os.path.dirname(file[0]))
+ load_config('project-config', "project-config.jam", [os.path.dirname(file[0])], True)
+
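+# Illustrative only (not part of the original sources): with a bjam-style
+# command line, the rules above select configuration files roughly like this:
+#
+#   b2 --test-config=local-test.jam   # load only the test configuration
+#   b2 --ignore-site-config           # skip site-config.jam
+#   b2 --user-config=my-config.jam    # explicit user config; the file must exist
+#   b2 --user-config=                 # disable user configuration loading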
+
+# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
+# toolset=xx,yy,...zz in the command line. May return additional properties to
+# be processed as if they had been specified by the user.
+#
+def process_explicit_toolset_requests():
+
+ extra_properties = []
+
+ option_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^--toolset=(.*)$")
+ for e in option.split(',')]
+ feature_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^toolset=(.*)$")
+ for e in option.split(',')]
+
+ for t in option_toolsets + feature_toolsets:
+
+ # Parse toolset-version/properties.
+ (toolset_version, toolset, version) = re.match("(([^-/]+)-?([^/]+)?)/?.*", t).groups()
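+ # For illustration (not part of the original sources): a request such as
+ # 'gcc-4.8/<cxxflags>-O2' parses to toolset_version='gcc-4.8', toolset='gcc'
+ # and version='4.8', while a plain 'msvc' parses to toolset_version='msvc',
+ # toolset='msvc' and version=None.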
+
+ if debug_config:
+ print "notice: [cmdline-cfg] Detected command-line request for '%s': toolset= %s version=%s" \
+ % (toolset_version, toolset, version)
+
+ # If the toolset is not known, configure it now.
+ known = False
+ if toolset in feature.values("toolset"):
+ known = True
+
+ if known and version and not feature.is_subvalue("toolset", toolset, "version", version):
+ known = False
+ # TODO: we should do 'using $(toolset)' in case no version has been
+ # specified and there are no versions defined for the given toolset to
+ # allow the toolset to configure its default version. For this we need
+ # to know how to detect whether a given toolset has any versions
+ # defined. An alternative would be to do this whenever version is not
+ # specified but that would require that toolsets correctly handle the
+ # case when their default version is configured multiple times which
+ # should be checked for all existing toolsets first.
+
+ if not known:
+
+ if debug_config:
+ print "notice: [cmdline-cfg] toolset '%s' not previously configured; attempting to auto-configure now" % toolset_version
+ if version is not None:
+ using(toolset, version)
+ else:
+ using(toolset)
+
+ else:
+
+ if debug_config:
+
+ print "notice: [cmdline-cfg] toolset '%s' already configured" % toolset_version
+
+ # Make sure we get an appropriate property into the build request in
+ # case toolset has been specified using the "--toolset=..." command-line
+ # option form.
+ if t not in sys.argv and t not in feature_toolsets:
+
+ if debug_config:
+ print "notice: [cmdline-cfg] adding toolset=%s to the build request." % t
+ extra_properties.append("toolset=%s" % t)
+
+ return extra_properties
+
+
+
+# Returns True if the given 'project' is equal to, or is a (possibly indirect)
+# child of, any of the projects requested to be cleaned in this build system
+# run. Returns False otherwise. Expects the project_targets list to have
+# already been constructed.
+#
+@cached
+def should_clean_project(project):
+
+ if project in project_targets:
+ return True
+ else:
+
+ parent = get_manager().projects().attribute(project, "parent-module")
+ if parent and parent != "user-config":
+ return should_clean_project(parent)
+ else:
+ return False
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+def main():
+
+ sys.argv = bjam.variable("ARGV")
+
+ # FIXME: document this option.
+ if "--profiling" in sys.argv:
+ import cProfile
+ r = cProfile.runctx('main_real()', globals(), locals(), "stones.prof")
+
+ import pstats
+ stats = pstats.Stats("stones.prof")
+ stats.strip_dirs()
+ stats.sort_stats('time', 'calls')
+ stats.print_callers(20)
+ return r
+ else:
+ try:
+ return main_real()
+ except ExceptionWithUserContext, e:
+ e.report()
+
+def main_real():
+
+ global debug_config, out_xml
+
+ debug_config = "--debug-configuration" in sys.argv
+ out_xml = any(re.match("^--out-xml=(.*)$", a) for a in sys.argv)
+
+ engine = Engine()
+
+ global_build_dir = option.get("build-dir")
+ manager = Manager(engine, global_build_dir)
+
+ import b2.build.configure as configure
+
+ if "--version" in sys.argv:
+
+ version.report()
+ return
+
+ # This module defines types, generators and the like, and depends on the
+ # manager's existence.
+ import b2.tools.builtin
+
+ b2.tools.common.init(manager)
+
+ load_configuration_files()
+
+ # Load explicitly specified toolset modules.
+ extra_properties = process_explicit_toolset_requests()
+
+ # Load the actual project build script modules. We always load the project
+ # in the current folder so that 'use-project' directives have a chance of
+ # being seen. Otherwise, we would not be able to refer to subprojects using
+ # target ids.
+ current_project = None
+ projects = get_manager().projects()
+ if projects.find(".", "."):
+ current_project = projects.target(projects.load("."))
+
+ # Load the default toolset module if no other has already been specified.
+ if not feature.values("toolset"):
+
+ dt = default_toolset
+ dtv = None
+ if default_toolset:
+ dtv = default_toolset_version
+ else:
+ dt = "gcc"
+ if os.name == 'nt':
+ dt = "msvc"
+ # FIXME:
+ #else if [ os.name ] = MACOSX
+ #{
+ # default-toolset = darwin ;
+ #}
+
+ print "warning: No toolsets are configured."
+ print "warning: Configuring default toolset '%s'." % dt
+ print "warning: If the default is wrong, your build may not work correctly."
+ print "warning: Use the \"toolset=xxxxx\" option to override our guess."
+ print "warning: For more configuration options, please consult"
+ print "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
+
+ using(dt, dtv)
+
+ # Parse command line for targets and properties. Note that this requires
+ # that all project files already be loaded.
+ (target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
+
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ if properties:
+ expanded = build_request.expand_no_defaults(properties)
+ else:
+ expanded = [property_set.empty()]
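+
+ # For illustration (not part of the original sources): a command line such as
+ # 'toolset=gcc,msvc variant=debug,release' expands into four property sets
+ # (gcc/debug, gcc/release, msvc/debug and msvc/release), and the requested
+ # targets are built once with each of them.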
+
+ # Check that we actually found something to build.
+ if not current_project and not target_ids:
+ get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
+ # FIXME:
+ # EXIT
+
+ # Flags indicating that this build system run has been started in order to
+ # clean existing targets rather than create new ones. Note that these are not the
+ # final flag values as they may get changed later on due to some special
+ # targets being specified on the command line.
+ clean = "--clean" in sys.argv
+ cleanall = "--clean-all" in sys.argv
+
+ # List of explicitly requested files to build. Any target references read
+ # from the command line that are not recognized as one of the targets
+ # defined in the loaded Jamfiles will be interpreted as an explicitly
+ # requested file to build. If any such files are explicitly requested then
+ # only those files and the targets they depend on will be built and they
+ # will be searched for among targets that would have been built had there
+ # been no explicitly requested files.
+ explicitly_requested_files = []
+
+ # List of Boost Build meta-targets, virtual-targets and actual Jam targets
+ # constructed in this build system run.
+ targets = []
+ virtual_targets = []
+ actual_targets = []
+
+ explicitly_requested_files = []
+
+ # Process each target specified on the command-line and convert it into
+ # internal Boost Build target objects. Detect the special 'clean' target. If
+ # no main Boost Build targets were explicitly requested, use the current
+ # project as the target.
+ for id in target_ids:
+ if id == "clean":
+ clean = 1
+ else:
+ t = None
+ if current_project:
+ t = current_project.find(id, no_error=1)
+ else:
+ t = find_target(id)
+
+ if not t:
+ print "notice: could not find main target '%s'" % id
+ print "notice: assuming it's a name of file to create " ;
+ explicitly_requested_files.append(id)
+ else:
+ targets.append(t)
+
+ if not targets:
+ targets = [projects.target(projects.module_name("."))]
+
+ # FIXME: put this BACK.
+
+ ## if [ option.get dump-generators : : true ]
+ ## {
+ ## generators.dump ;
+ ## }
+
+
+ # We wish to put config.log in the build directory corresponding
+ # to Jamroot, so that the location does not differ depending on the
+ # directory from which we build. The amount of indirection necessary
+ # here is scary.
+ first_project = targets[0].project()
+ first_project_root_location = first_project.get('project-root')
+ first_project_root_module = manager.projects().load(first_project_root_location)
+ first_project_root = manager.projects().target(first_project_root_module)
+ first_build_build_dir = first_project_root.build_dir()
+ configure.set_log_file(os.path.join(first_build_build_dir, "config.log"))
+
+ virtual_targets = []
+
+ global results_of_main_targets
+
+ # Now that we have a set of targets to build and a set of property sets to
+ # build the targets with, we can start the main build process by using each
+ # property set to generate virtual targets from all of our listed targets
+ # and any of their dependants.
+ for p in expanded:
+ manager.set_command_line_free_features(property_set.create(p.free()))
+
+ for t in targets:
+ try:
+ g = t.generate(p)
+ if not isinstance(t, ProjectTarget):
+ results_of_main_targets.extend(g.targets())
+ virtual_targets.extend(g.targets())
+ except ExceptionWithUserContext, e:
+ e.report()
+ except Exception:
+ raise
+
+ # Convert collected virtual targets into actual raw Jam targets.
+ for t in virtual_targets:
+ actual_targets.append(t.actualize())
+
+
+ # FIXME: restore
+## # If XML data output has been requested prepare additional rules and targets
+## # so we can hook into Jam to collect build data while its building and have
+## # it trigger the final XML report generation after all the planned targets
+## # have been built.
+## if $(.out-xml)
+## {
+## # Get a qualified virtual target name.
+## rule full-target-name ( target )
+## {
+## local name = [ $(target).name ] ;
+## local project = [ $(target).project ] ;
+## local project-path = [ $(project).get location ] ;
+## return $(project-path)//$(name) ;
+## }
+
+## # Generate an XML file containing build statistics for each constituent.
+## #
+## rule out-xml ( xml-file : constituents * )
+## {
+## # Prepare valid XML header and footer with some basic info.
+## local nl = "
+## " ;
+## local jam = [ version.jam ] ;
+## local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
+## local timestamp = [ modules.peek : JAMDATE ] ;
+## local cwd = [ PWD ] ;
+## local command = $(.sys.argv) ;
+## local bb-version = [ version.boost-build ] ;
+## .header on $(xml-file) =
+## "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
+## "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
+## "$(nl) <jam version=\"$(jam:J=.)\" />"
+## "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
+## "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
+## "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
+## "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
+## ;
+## .footer on $(xml-file) =
+## "$(nl)</build>" ;
+
+## # Generate the target dependency graph.
+## .contents on $(xml-file) +=
+## "$(nl) <targets>" ;
+## for local t in [ virtual-target.all-targets ]
+## {
+## local action = [ $(t).action ] ;
+## if $(action)
+## # If a target has no action, it has no dependencies.
+## {
+## local name = [ full-target-name $(t) ] ;
+## local sources = [ $(action).sources ] ;
+## local dependencies ;
+## for local s in $(sources)
+## {
+## dependencies += [ full-target-name $(s) ] ;
+## }
+
+## local path = [ $(t).path ] ;
+## local jam-target = [ $(t).actual-name ] ;
+
+## .contents on $(xml-file) +=
+## "$(nl) <target>"
+## "$(nl) <name><![CDATA[$(name)]]></name>"
+## "$(nl) <dependencies>"
+## "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
+## "$(nl) </dependencies>"
+## "$(nl) <path><![CDATA[$(path)]]></path>"
+## "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
+## "$(nl) </target>"
+## ;
+## }
+## }
+## .contents on $(xml-file) +=
+## "$(nl) </targets>" ;
+
+## # Build $(xml-file) after $(constituents). Do so even if a
+## # constituent action fails and regenerate the xml on every bjam run.
+## INCLUDES $(xml-file) : $(constituents) ;
+## ALWAYS $(xml-file) ;
+## __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
+## out-xml.generate $(xml-file) ;
+## }
+
+## # The actual build actions are here; if we did this work in the actions
+## # clause we would have to form a valid command line containing the
+## # result of @(...) below (the name of the XML file).
+## #
+## rule out-xml.generate-action ( args * : xml-file
+## : command status start end user system : output ? )
+## {
+## local contents =
+## [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
+## local f = @($(xml-file):E=$(contents)) ;
+## }
+
+## # Nothing to do here; the *real* actions happen in
+## # out-xml.generate-action.
+## actions quietly out-xml.generate { }
+
+## # Define the out-xml file target, which depends on all the targets so
+## # that it runs the collection after the targets have run.
+## out-xml $(.out-xml) : $(actual-targets) ;
+
+## # Set up a global __ACTION_RULE__ that records all the available
+## # statistics about each actual target in a variable "on" the --out-xml
+## # target.
+## #
+## rule out-xml.collect ( xml-file : target : command status start end user
+## system : output ? )
+## {
+## local nl = "
+## " ;
+## # Open the action with some basic info.
+## .contents on $(xml-file) +=
+## "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
+
+## # If we have an action object we can print out more detailed info.
+## local action = [ on $(target) return $(.action) ] ;
+## if $(action)
+## {
+## local action-name = [ $(action).action-name ] ;
+## local action-sources = [ $(action).sources ] ;
+## local action-props = [ $(action).properties ] ;
+
+## # The qualified name of the action which we created the target.
+## .contents on $(xml-file) +=
+## "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
+
+## # The sources that made up the target.
+## .contents on $(xml-file) +=
+## "$(nl) <sources>" ;
+## for local source in $(action-sources)
+## {
+## local source-actual = [ $(source).actual-name ] ;
+## .contents on $(xml-file) +=
+## "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
+## }
+## .contents on $(xml-file) +=
+## "$(nl) </sources>" ;
+
+## # The properties that define the conditions under which the
+## # target was built.
+## .contents on $(xml-file) +=
+## "$(nl) <properties>" ;
+## for local prop in [ $(action-props).raw ]
+## {
+## local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
+## .contents on $(xml-file) +=
+## "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
+## }
+## .contents on $(xml-file) +=
+## "$(nl) </properties>" ;
+## }
+
+## local locate = [ on $(target) return $(LOCATE) ] ;
+## locate ?= "" ;
+## .contents on $(xml-file) +=
+## "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
+## "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
+## "$(nl) <command><![CDATA[$(command)]]></command>"
+## "$(nl) <output><![CDATA[$(output)]]></output>" ;
+## .contents on $(xml-file) +=
+## "$(nl) </action>" ;
+## }
+
+## # When no __ACTION_RULE__ is set "on" a target, the search falls back to
+## # the global module.
+## module
+## {
+## __ACTION_RULE__ = build-system.out-xml.collect
+## [ modules.peek build-system : .out-xml ] ;
+## }
+
+## IMPORT
+## build-system :
+## out-xml.collect
+## out-xml.generate-action
+## : :
+## build-system.out-xml.collect
+## build-system.out-xml.generate-action
+## ;
+## }
+
+ j = option.get("jobs")
+ if j:
+ bjam.call("set-variable", PARALLELISM, j)
+
+ k = option.get("keep-going", "true", "true")
+ if k in ["on", "yes", "true"]:
+ bjam.call("set-variable", "KEEP_GOING", "1")
+ elif k in ["off", "no", "false"]:
+ bjam.call("set-variable", "KEEP_GOING", "0")
+ else:
+ print "error: Invalid value for the --keep-going option"
+ sys.exit()
+
+ # The 'all' pseudo target is not strictly needed except in the case when we
+ # use it below, but people often assume this target is always available and
+ # do not declare it themselves before use, which may cause build failures
+ # with an error message about not being able to build the 'all' target.
+ bjam.call("NOTFILE", "all")
+
+ # Now that all the actual raw Jam targets and all the dependencies between
+ # them have been prepared, all that is left is to tell Jam to update those
+ # targets.
+ if explicitly_requested_files:
+ # Note that this case cannot be joined with the regular one when only
+ # exact Boost Build targets are requested, as here we do not build those
+ # requested targets but only use them to construct the dependency tree
+ # needed to build the explicitly requested files.
+ # FIXME: add $(.out-xml)
+ bjam.call("UPDATE", ["<e>%s" % x for x in explicitly_requested_files])
+ elif cleanall:
+ bjam.call("UPDATE", "clean-all")
+ elif clean:
+ manager.engine().set_update_action("common.Clean", "clean",
+ actual_clean_targets(targets))
+ bjam.call("UPDATE", "clean")
+ else:
+ # FIXME:
+ #configure.print-configure-checks-summary ;
+
+ if pre_build_hook:
+ for h in pre_build_hook:
+ h()
+
+ bjam.call("DEPENDS", "all", actual_targets)
+ ok = bjam.call("UPDATE_NOW", "all") # FIXME: add out-xml
+ if post_build_hook:
+ post_build_hook(ok)
+ # Prevent automatic update of the 'all' target, now that
+ # we have explicitly updated what we wanted.
+ bjam.call("UPDATE")
+
+ if manager.errors().count() == 0:
+ return ["ok"]
+ else:
+ return []
diff --git a/tools/build/v2/tools/__init__.py b/tools/build/src/contrib/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/tools/__init__.py
+++ b/tools/build/src/contrib/__init__.py
diff --git a/tools/build/src/contrib/boost.jam b/tools/build/src/contrib/boost.jam
new file mode 100644
index 0000000000..7daefd0c7a
--- /dev/null
+++ b/tools/build/src/contrib/boost.jam
@@ -0,0 +1,304 @@
+# Copyright 2008 - 2013 Roland Schwarz
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Boost library support module.
+#
+# This module allows using the Boost library from boost-build projects. The
+# location of a boost source tree or the path to a pre-built version of the
+# library can be configured from either site-config.jam or user-config.jam. If
+# no location is configured the module looks for a BOOST_ROOT environment
+# variable, which should point to a boost source tree. As a last resort it tries
+# to use pre-built libraries from the standard search path of the compiler.
+#
+# If the location to a source tree is known, the module can be configured from
+# the *-config.jam files:
+#
+# using boost : 1.35 : <root>/path-to-boost-root ;
+#
+# If the location to a pre-built version is known:
+#
+# using boost : 1.34
+# : <include>/usr/local/include/boost_1_34
+# <library>/usr/local/lib
+# ;
+#
+# It is legal to configure more than one boost library version in the config
+# files. The version identifier is used to disambiguate between them. The first
+# configured version becomes the default.
+#
+# To use a boost library you need to put a 'use' statement into your Jamfile:
+#
+# import boost ;
+#
+# boost.use-project 1.35 ;
+#
+# If you do not care about a specific version you can just omit the version
+# part, in which case the default is picked up:
+#
+# boost.use-project ;
+#
+# The library can be referenced with the project identifier '/boost'. To
+# reference the program_options library you would specify:
+#
+# exe myexe : mysrc.cpp : <library>/boost//program_options ;
+#
+# Note that the requirements are automatically transformed into suitable tags to
+# find the correct pre-built library.
+#
+
+import common ;
+import modules ;
+import numbers ;
+import project ;
+import property-set ;
+import regex ;
+import toolset ;
+
+.boost.auto_config = [ property-set.create <layout>system ] ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Configuration of the boost library to use.
+#
+# This can either be a boost source tree or pre-built libraries. The 'version'
+# parameter must be a valid boost version number, e.g. 1.35, if specifying a
+# pre-built version with versioned layout. It may be a symbolic name, e.g.
+# 'trunk' if specifying a source tree. The options are specified as named
+# parameters (like properties). The following parameters are available:
+#
+# <root>/path-to-boost-root : Specify a source tree.
+# <include>/path-to-include : The include directory to search.
+# <library>/path-to-library : The library directory to search.
+# <layout>system or <layout>versioned : Built library layout.
+# <build-id>my_build_id : The custom build id to use.
+#
+rule init
+(
+ version # Version identifier.
+ : options * # Set the option properties.
+)
+{
+ if $(.boost.$(version))
+ {
+ import errors ;
+ errors.user-error Boost $(version) already configured. ;
+ }
+ else
+ {
+ if $(.debug-configuration)
+ {
+ if ! $(.boost_default)
+ {
+ echo notice: configuring default boost library $(version) ;
+ }
+ echo notice: configuring boost library $(version) ;
+ }
+ .boost_default ?= $(version) ; # the first configured is default
+ .boost.$(version) = [ property-set.create $(options) ] ;
+ }
+}
+
+# Use a certain version of the library.
+#
+# The use-project rule causes the module to define a boost project of searchable
+# pre-built boost libraries, or references a source tree of the boost library.
+# If the 'version' parameter is omitted either the configured default (first in
+# config files) is used or an auto configuration will be attempted.
+#
+rule use-project
+(
+ version ? # The version of the library to use.
+)
+{
+ project.push-current [ project.current ] ;
+ version ?= $(.boost_default) ;
+ version ?= auto_config ;
+
+ if $(.initialized)
+ {
+ if $(.initialized) != $(version)
+ {
+ import errors ;
+ errors.user-error Attempt to use $(__name__) with different
+ parameters. ;
+ }
+ }
+ else
+ {
+ if $(.boost.$(version))
+ {
+ local opt = $(.boost.$(version)) ;
+ local root = [ $(opt).get <root> ] ;
+ local inc = [ $(opt).get <include> ] ;
+ local lib = [ $(opt).get <library> ] ;
+
+ if $(.debug-configuration)
+ {
+ echo notice: using boost library $(version) [ $(opt).raw ] ;
+ }
+
+ .layout = [ $(opt).get <layout> ] ;
+ .layout ?= versioned ;
+ .build_id = [ $(opt).get <build-id> ] ;
+ .version_tag = [ regex.replace $(version) "[*\\/:.\"\' ]" "_" ] ;
+ .initialized = $(version) ;
+
+ if ( $(root) && $(inc) )
+ || ( $(root) && $(lib) )
+ || ( $(lib) && ! $(inc) )
+ || ( ! $(lib) && $(inc) )
+ {
+ import errors ;
+ errors.user-error Ambiguous parameters, use either <root> or
+ <include> with <library>. ;
+ }
+ else if ! $(root) && ! $(inc)
+ {
+ root = [ modules.peek : BOOST_ROOT ] ;
+ }
+
+ local prj = [ project.current ] ;
+ local mod = [ $(prj).project-module ] ;
+
+ if $(root)
+ {
+ modules.call-in $(mod) : use-project boost : $(root) ;
+ }
+ else
+ {
+ project.initialize $(__name__) ;
+ # It is possible to override the setup of the searched libraries
+ # per version. The (unlikely) 0.0.1 tag is meant as an example
+ # template only.
+ switch $(version)
+ {
+ case 0.0.1 : boost_0_0_1 $(inc) $(lib) ;
+ case * : boost_std $(inc) $(lib) ;
+ }
+ }
+ }
+ else
+ {
+ import errors ;
+ errors.user-error Reference to unconfigured boost version. ;
+ }
+ }
+ project.pop-current ;
+}
+
+local rule boost_lib_std ( id : shared-lib-define )
+{
+ lib $(id) : : : : <link>shared:<define>$(shared-lib-define) ;
+}
+
+rule boost_std ( inc ? lib ? )
+{
+# The default definitions for pre-built libraries.
+
+ project boost
+ : usage-requirements <include>$(inc) <define>BOOST_ALL_NO_LIB
+ : requirements <tag>@tag_std <search>$(lib)
+ ;
+
+ alias headers ;
+ boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ;
+ boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ;
+ boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ;
+ boost_lib_std graph : BOOST_GRAPH_DYN_LINK ;
+ boost_lib_std graph_parallel : BOOST_GRAPH_DYN_LINK ;
+ boost_lib_std iostreams : BOOST_IOSTREAMS_DYN_LINK ;
+ boost_lib_std locale : BOOST_LOCALE_DYN_LINK ;
+ boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_c99 : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_c99f : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_c99l : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std mpi : BOOST_MPI_DYN_LINK ;
+ boost_lib_std program_options : BOOST_PROGRAM_OPTIONS_DYN_LINK ;
+ boost_lib_std python : BOOST_PYTHON_DYN_LINK ;
+ boost_lib_std python3 : BOOST_PYTHON_DYN_LINK ;
+ boost_lib_std random : BOOST_RANDOM_DYN_LINK ;
+ boost_lib_std regex : BOOST_REGEX_DYN_LINK ;
+ boost_lib_std serialization : BOOST_SERIALIZATION_DYN_LINK ;
+ boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ;
+ boost_lib_std signals : BOOST_SIGNALS_DYN_LINK ;
+ boost_lib_std system : BOOST_SYSTEM_DYN_LINK ;
+ boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ;
+ boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ;
+ boost_lib_std test_exec_monitor : BOOST_TEST_DYN_LINK ;
+ boost_lib_std thread : BOOST_THREAD_DYN_DLL ;
+ boost_lib_std wave : BOOST_WAVE_DYN_LINK ;
+}
+
+# Example placeholder for rules defining Boost library project & library targets
+# for a specific Boost library version. Copy under a different name and model
+# after the boost_std rule. Please note that it is also possible to have a
+# per-version tagging rule in case the tagging algorithm changes between
+# versions.
+#
+rule boost_0_0_1 ( inc ? lib ? )
+{
+ echo "You are trying to use an example placeholder for boost libs." ;
+}
+
+rule tag_std ( name : type ? : property-set )
+{
+ name = boost_$(name) ;
+ if ( [ $(property-set).get <link> ] in static ) &&
+ ( [ $(property-set).get <target-os> ] in windows )
+ {
+ name = lib$(name) ;
+ }
+
+ local result ;
+ if $(.layout) = system
+ {
+ local version = [ MATCH ^([0-9]+)_([0-9]+) : $(.version_tag) ] ;
+ if $(version[1]) = "1" && [ numbers.less $(version[2]) 39 ]
+ {
+ result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
+ }
+ else
+ {
+ result = [ tag_system $(name) : $(type) : $(property-set) ] ;
+ }
+ }
+ else if $(.layout) = tagged
+ {
+ result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
+ }
+ else if $(.layout) = versioned
+ {
+ result = [ tag_versioned $(name) : $(type) : $(property-set) ] ;
+ }
+ else
+ {
+ import errors ;
+ errors.error Missing layout. ;
+ }
+
+ return $(result) ;
+}
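+
+# Illustrative only (not part of the original sources): with the default
+# versioned layout and, say, a gcc 4.8 multi-threaded release build of Boost
+# 1.57, a base name of "boost_regex" would come back tagged along the lines of
+# "boost_regex-gcc48-mt-1_57", while the system layout would return plain
+# "boost_regex".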
+
+rule tag_system ( name : type ? : property-set )
+{
+ return [ common.format-name <base> -$(.build_id) : $(name) : $(type) :
+ $(property-set) ] ;
+}
+
+rule tag_tagged ( name : type ? : property-set )
+{
+ return [ common.format-name <base> <threading> <runtime> -$(.build_id) :
+ $(name) : $(type) : $(property-set) ] ;
+}
+
+rule tag_versioned ( name : type ? : property-set )
+{
+ return [ common.format-name <base> <toolset> <threading> <runtime>
+ -$(.version_tag) -$(.build_id) : $(name) : $(type) : $(property-set) ] ;
+}
diff --git a/tools/build/src/contrib/boost.py b/tools/build/src/contrib/boost.py
new file mode 100644
index 0000000000..e0bbcf34de
--- /dev/null
+++ b/tools/build/src/contrib/boost.py
@@ -0,0 +1,279 @@
+# $Id: boost.jam 62249 2010-05-26 19:05:19Z steven_watanabe $
+# Copyright 2008 Roland Schwarz
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Boost library support module.
+#
+# This module allows using the boost library from boost-build projects.
+# The location of a boost source tree or the path to a pre-built
+# version of the library can be configured from either site-config.jam
+# or user-config.jam. If no location is configured the module looks for
+# a BOOST_ROOT environment variable, which should point to a boost source
+# tree. As a last resort it tries to use pre-built libraries from the standard
+# search path of the compiler.
+#
+# If the location to a source tree is known, the module can be configured
+# from the *-config.jam files:
+#
+# using boost : 1.35 : <root>/path-to-boost-root ;
+#
+# If the location to a pre-built version is known:
+#
+# using boost : 1.34
+# : <include>/usr/local/include/boost_1_34
+# <library>/usr/local/lib
+# ;
+#
+# It is legal to configure more than one boost library version in the config
+# files. The version identifier is used to disambiguate between them.
+# The first configured version becomes the default.
+#
+# To use a boost library you need to put a 'use' statement into your
+# Jamfile:
+#
+# import boost ;
+#
+# boost.use-project 1.35 ;
+#
+# If you don't care about a specific version you can just omit the version
+# part, in which case the default is picked up:
+#
+# boost.use-project ;
+#
+# The library can be referenced with the project identifier '/boost'. To
+# reference the program_options you would specify:
+#
+# exe myexe : mysrc.cpp : <library>/boost//program_options ;
+#
+# Note that the requirements are automatically transformed into suitable
+# tags to find the correct pre-built library.
+#
+
+import re
+
+import bjam
+
+from b2.build import alias, property, property_set, feature
+from b2.manager import get_manager
+from b2.tools import builtin, common
+from b2.util import bjam_signature, regex
+
+
+# TODO: This is currently necessary in Python Port, but was not in Jam.
+feature.feature('layout', ['system', 'versioned', 'tag'], ['optional'])
+feature.feature('root', [], ['optional', 'free'])
+feature.feature('build-id', [], ['optional', 'free'])
+
+__initialized = None
+__boost_auto_config = property_set.create([property.Property('layout', 'system')])
+__boost_configured = {}
+__boost_default = None
+__build_id = None
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+
+# Configuration of the boost library to use.
+#
+# This can either be a boost source tree or
+# pre-built libraries. The 'version' parameter must be a valid boost
+# version number, e.g. 1.35, if specifying a pre-built version with
+# versioned layout. It may be a symbolic name, e.g. 'trunk' if specifying
+# a source tree. The options are specified as named parameters (like
+# properties). The following parameters are available:
+#
+# <root>/path-to-boost-root: Specify a source tree.
+#
+# <include>/path-to-include: The include directory to search.
+#
+# <library>/path-to-library: The library directory to search.
+#
+# <layout>system or <layout>versioned.
+#
+# <build-id>my_build_id: The custom build id to use.
+#
+def init(version, options = None):
+ assert(isinstance(version,list))
+ assert(len(version)==1)
+ version = version[0]
+ if version in __boost_configured:
+ get_manager().errors()("Boost {} already configured.".format(version))
+ else:
+ global __boost_default
+ if debug():
+ if not __boost_default:
+ print "notice: configuring default boost library {}".format(version)
+ print "notice: configuring boost library {}".format(version)
+
+ if not __boost_default:
+ __boost_default = version
+ properties = []
+ for option in options or []:
+ properties.append(property.create_from_string(option))
+ __boost_configured[ version ] = property_set.PropertySet(properties)
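+
+# Illustrative only (not part of the original sources): a user-config.jam line
+# such as
+#
+#   using boost : 1.57 : <root>/opt/boost_1_57_0 ;
+#
+# is expected to reach the init() rule above as roughly
+# init(['1.57'], ['<root>/opt/boost_1_57_0']).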
+
+projects = get_manager().projects()
+rules = projects.project_rules()
+
+
+# Use a certain version of the library.
+#
+# The use-project rule causes the module to define a boost project of
+# searchable pre-built boost libraries, or references a source tree
+# of the boost library. If the 'version' parameter is omitted either
+# the configured default (first in config files) is used or an auto
+# configuration will be attempted.
+#
+@bjam_signature(([ "version", "?" ], ))
+def use_project(version = None):
+ projects.push_current( projects.current() )
+ if not version:
+ version = __boost_default
+ if not version:
+ version = "auto_config"
+
+ global __initialized
+ if __initialized:
+ if __initialized != version:
+ get_manager().errors()('Attempt to use {} with different parameters'.format('boost'))
+ else:
+ if version in __boost_configured:
+ opts = __boost_configured[ version ]
+ root = opts.get('<root>' )
+ inc = opts.get('<include>')
+ lib = opts.get('<library>')
+
+ if debug():
+ print "notice: using boost library {} {}".format( version, opt.raw() )
+
+ global __layout
+ global __version_tag
+ global __build_id
+ __layout = opts.get('<layout>')
+ if not __layout:
+ __layout = 'versioned'
+ __build_id = opts.get('<build-id>')
+ __version_tag = re.sub("[*\\/:.\"\' ]", "_", version)
+ __initialized = version
+
+ if ( root and inc ) or \
+ ( root and lib ) or \
+ ( lib and not inc ) or \
+ ( not lib and inc ):
+ get_manager().errors()("Ambiguous parameters, use either <root> or <include> with <library>.")
+ elif not root and not inc:
+ root = bjam.variable("BOOST_ROOT")
+
+ module = projects.current().project_module()
+
+ if root:
+ bjam.call('call-in-module', module, 'use-project', ['boost', root])
+ else:
+ projects.initialize(__name__)
+ if version == '0.0.1':
+ boost_0_0_1( inc, lib )
+ else:
+ boost_std( inc, lib )
+ else:
+ get_manager().errors()("Reference to unconfigured boost version.")
+ projects.pop_current()
+
+
+rules.add_rule( 'boost.use-project', use_project )
+
+def boost_std(inc = None, lib = None):
+ # The default definitions for pre-built libraries.
+ rules.project(
+ ['boost'],
+ ['usage-requirements'] + ['<include>{}'.format(i) for i in inc] + ['<define>BOOST_ALL_NO_LIB'],
+ ['requirements'] + ['<search>{}'.format(l) for l in lib])
+
+ # TODO: There should be a better way to add a Python function into a
+ # project requirements property set.
+ tag_prop_set = property_set.create([property.Property('<tag>', tag_std)])
+ attributes = projects.attributes(projects.current().project_module())
+ attributes.requirements = attributes.requirements.refine(tag_prop_set)
+
+ alias('headers')
+
+ def boost_lib(lib_name, dyn_link_macro):
+ if (isinstance(lib_name,str)):
+ lib_name = [lib_name]
+ builtin.lib(lib_name, usage_requirements=['<link>shared:<define>{}'.format(dyn_link_macro)])
+
+ boost_lib('date_time' , 'BOOST_DATE_TIME_DYN_LINK' )
+ boost_lib('filesystem' , 'BOOST_FILE_SYSTEM_DYN_LINK' )
+ boost_lib('graph' , 'BOOST_GRAPH_DYN_LINK' )
+ boost_lib('graph_parallel' , 'BOOST_GRAPH_DYN_LINK' )
+ boost_lib('iostreams' , 'BOOST_IOSTREAMS_DYN_LINK' )
+ boost_lib('locale'             , 'BOOST_LOCALE_DYN_LINK'        )
+ boost_lib('log' , 'BOOST_LOG_DYN_LINK' )
+ boost_lib('log_setup' , 'BOOST_LOG_DYN_LINK' )
+ boost_lib('math_tr1' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_tr1f' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_tr1l' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_c99' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_c99f' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('math_c99l' , 'BOOST_MATH_TR1_DYN_LINK' )
+ boost_lib('mpi' , 'BOOST_MPI_DYN_LINK' )
+ boost_lib('program_options' , 'BOOST_PROGRAM_OPTIONS_DYN_LINK')
+ boost_lib('python' , 'BOOST_PYTHON_DYN_LINK' )
+ boost_lib('python3' , 'BOOST_PYTHON_DYN_LINK' )
+ boost_lib('random' , 'BOOST_RANDOM_DYN_LINK' )
+ boost_lib('regex' , 'BOOST_REGEX_DYN_LINK' )
+ boost_lib('serialization' , 'BOOST_SERIALIZATION_DYN_LINK' )
+ boost_lib('wserialization' , 'BOOST_SERIALIZATION_DYN_LINK' )
+ boost_lib('signals' , 'BOOST_SIGNALS_DYN_LINK' )
+ boost_lib('system' , 'BOOST_SYSTEM_DYN_LINK' )
+ boost_lib('unit_test_framework' , 'BOOST_TEST_DYN_LINK' )
+ boost_lib('prg_exec_monitor' , 'BOOST_TEST_DYN_LINK' )
+ boost_lib('test_exec_monitor' , 'BOOST_TEST_DYN_LINK' )
+ boost_lib('thread' , 'BOOST_THREAD_DYN_DLL' )
+ boost_lib('wave' , 'BOOST_WAVE_DYN_LINK' )
+
+def boost_0_0_1( inc, lib ):
+ print "You are trying to use an example placeholder for boost libs." ;
+ # Copy this template to another place (in the file boost.jam)
+ # and define a project and libraries modelled after the
+ # boost_std rule. Please note that it is also possible to have
+ # a per-version tagging rule in case they are different between
+ # versions.
+
+def tag_std(name, type, prop_set):
+ name = 'boost_' + name
+ if 'static' in prop_set.get('<link>') and 'windows' in prop_set.get('<target-os>'):
+ name = 'lib' + name
+ result = None
+
+ if __layout == 'system':
+ versionRe = re.search('^([0-9]+)_([0-9]+)', __version_tag)
+ if versionRe and versionRe.group(1) == '1' and int(versionRe.group(2)) < 39:
+ result = tag_tagged(name, type, prop_set)
+ else:
+ result = tag_system(name, type, prop_set)
+ elif __layout == 'tagged':
+ result = tag_tagged(name, type, prop_set)
+ elif __layout == 'versioned':
+ result = tag_versioned(name, type, prop_set)
+ else:
+ get_manager().errors()("Missing layout")
+ return result
+
+def tag_maybe(param):
+ return ['-{}'.format(param)] if param else []
+
+def tag_system(name, type, prop_set):
+ return common.format_name(['<base>'] + tag_maybe(__build_id), name, type, prop_set)
+
+def tag_tagged(name, type, prop_set):
+ return common.format_name(['<base>', '<threading>', '<runtime>'] + tag_maybe(__build_id), name, type, prop_set)
+
+def tag_versioned(name, type, prop_set):
+ return common.format_name(['<base>', '<toolset>', '<threading>', '<runtime>'] + tag_maybe(__version_tag) + tag_maybe(__build_id),
+ name, type, prop_set)
diff --git a/tools/build/v2/contrib/tntnet.jam b/tools/build/src/contrib/tntnet.jam
index 0bd0ae5590..0bd0ae5590 100644
--- a/tools/build/v2/contrib/tntnet.jam
+++ b/tools/build/src/contrib/tntnet.jam
diff --git a/tools/build/v2/contrib/wxFormBuilder.jam b/tools/build/src/contrib/wxFormBuilder.jam
index c9ee2de729..c9ee2de729 100644
--- a/tools/build/v2/contrib/wxFormBuilder.jam
+++ b/tools/build/src/contrib/wxFormBuilder.jam
diff --git a/tools/build/v2/engine/Jambase b/tools/build/src/engine/Jambase
index 94f8fbde5c..94f8fbde5c 100644
--- a/tools/build/v2/engine/Jambase
+++ b/tools/build/src/engine/Jambase
diff --git a/tools/build/v2/engine/boehm_gc/AmigaOS.c b/tools/build/src/engine/boehm_gc/AmigaOS.c
index d171503940..d171503940 100644
--- a/tools/build/v2/engine/boehm_gc/AmigaOS.c
+++ b/tools/build/src/engine/boehm_gc/AmigaOS.c
diff --git a/tools/build/v2/engine/boehm_gc/BCC_MAKEFILE b/tools/build/src/engine/boehm_gc/BCC_MAKEFILE
index b825ec4df7..b825ec4df7 100644
--- a/tools/build/v2/engine/boehm_gc/BCC_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/BCC_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/ChangeLog b/tools/build/src/engine/boehm_gc/ChangeLog
index 96bf3171e9..96bf3171e9 100644
--- a/tools/build/v2/engine/boehm_gc/ChangeLog
+++ b/tools/build/src/engine/boehm_gc/ChangeLog
diff --git a/tools/build/v2/engine/boehm_gc/EMX_MAKEFILE b/tools/build/src/engine/boehm_gc/EMX_MAKEFILE
index c7e5bb806e..c7e5bb806e 100644
--- a/tools/build/v2/engine/boehm_gc/EMX_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/EMX_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/MacOS.c b/tools/build/src/engine/boehm_gc/MacOS.c
index b56bea782b..b56bea782b 100644
--- a/tools/build/v2/engine/boehm_gc/MacOS.c
+++ b/tools/build/src/engine/boehm_gc/MacOS.c
diff --git a/tools/build/v2/engine/boehm_gc/MacProjects.sit.hqx b/tools/build/src/engine/boehm_gc/MacProjects.sit.hqx
index 99dff88b4c..99dff88b4c 100644
--- a/tools/build/v2/engine/boehm_gc/MacProjects.sit.hqx
+++ b/tools/build/src/engine/boehm_gc/MacProjects.sit.hqx
diff --git a/tools/build/v2/engine/boehm_gc/Mac_files/MacOS_Test_config.h b/tools/build/src/engine/boehm_gc/Mac_files/MacOS_Test_config.h
index 4e5d252778..4e5d252778 100644
--- a/tools/build/v2/engine/boehm_gc/Mac_files/MacOS_Test_config.h
+++ b/tools/build/src/engine/boehm_gc/Mac_files/MacOS_Test_config.h
diff --git a/tools/build/v2/engine/boehm_gc/Mac_files/MacOS_config.h b/tools/build/src/engine/boehm_gc/Mac_files/MacOS_config.h
index 407bdf154a..407bdf154a 100644
--- a/tools/build/v2/engine/boehm_gc/Mac_files/MacOS_config.h
+++ b/tools/build/src/engine/boehm_gc/Mac_files/MacOS_config.h
diff --git a/tools/build/v2/engine/boehm_gc/Mac_files/dataend.c b/tools/build/src/engine/boehm_gc/Mac_files/dataend.c
index a3e3fe8446..a3e3fe8446 100644
--- a/tools/build/v2/engine/boehm_gc/Mac_files/dataend.c
+++ b/tools/build/src/engine/boehm_gc/Mac_files/dataend.c
diff --git a/tools/build/v2/engine/boehm_gc/Mac_files/datastart.c b/tools/build/src/engine/boehm_gc/Mac_files/datastart.c
index a9e0dd5941..a9e0dd5941 100644
--- a/tools/build/v2/engine/boehm_gc/Mac_files/datastart.c
+++ b/tools/build/src/engine/boehm_gc/Mac_files/datastart.c
diff --git a/tools/build/v2/engine/boehm_gc/Makefile.DLLs b/tools/build/src/engine/boehm_gc/Makefile.DLLs
index 011f49d3bc..011f49d3bc 100644
--- a/tools/build/v2/engine/boehm_gc/Makefile.DLLs
+++ b/tools/build/src/engine/boehm_gc/Makefile.DLLs
diff --git a/tools/build/v2/engine/boehm_gc/Makefile.am b/tools/build/src/engine/boehm_gc/Makefile.am
index e632864df2..e632864df2 100644
--- a/tools/build/v2/engine/boehm_gc/Makefile.am
+++ b/tools/build/src/engine/boehm_gc/Makefile.am
diff --git a/tools/build/v2/engine/boehm_gc/Makefile.direct b/tools/build/src/engine/boehm_gc/Makefile.direct
index 40195a5625..40195a5625 100644
--- a/tools/build/v2/engine/boehm_gc/Makefile.direct
+++ b/tools/build/src/engine/boehm_gc/Makefile.direct
diff --git a/tools/build/v2/engine/boehm_gc/Makefile.dj b/tools/build/src/engine/boehm_gc/Makefile.dj
index 4618eb8456..4618eb8456 100644
--- a/tools/build/v2/engine/boehm_gc/Makefile.dj
+++ b/tools/build/src/engine/boehm_gc/Makefile.dj
diff --git a/tools/build/v2/engine/boehm_gc/Makefile.in b/tools/build/src/engine/boehm_gc/Makefile.in
index f4dbe12d67..f4dbe12d67 100644
--- a/tools/build/v2/engine/boehm_gc/Makefile.in
+++ b/tools/build/src/engine/boehm_gc/Makefile.in
diff --git a/tools/build/v2/engine/boehm_gc/NT_MAKEFILE b/tools/build/src/engine/boehm_gc/NT_MAKEFILE
index c8739effc6..c8739effc6 100755
--- a/tools/build/v2/engine/boehm_gc/NT_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/NT_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/NT_STATIC_THREADS_MAKEFILE b/tools/build/src/engine/boehm_gc/NT_STATIC_THREADS_MAKEFILE
index d0f912751e..d0f912751e 100644
--- a/tools/build/v2/engine/boehm_gc/NT_STATIC_THREADS_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/NT_STATIC_THREADS_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/NT_THREADS_MAKEFILE b/tools/build/src/engine/boehm_gc/NT_THREADS_MAKEFILE
index 5c02c9023c..5c02c9023c 100644
--- a/tools/build/v2/engine/boehm_gc/NT_THREADS_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/NT_THREADS_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/NT_X64_STATIC_THREADS_MAKEFILE b/tools/build/src/engine/boehm_gc/NT_X64_STATIC_THREADS_MAKEFILE
index 91a0f60efc..91a0f60efc 100644
--- a/tools/build/v2/engine/boehm_gc/NT_X64_STATIC_THREADS_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/NT_X64_STATIC_THREADS_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/OS2_MAKEFILE b/tools/build/src/engine/boehm_gc/OS2_MAKEFILE
index c6bad7abcb..c6bad7abcb 100644
--- a/tools/build/v2/engine/boehm_gc/OS2_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/OS2_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/PCR-Makefile b/tools/build/src/engine/boehm_gc/PCR-Makefile
index db4c9f0181..db4c9f0181 100644
--- a/tools/build/v2/engine/boehm_gc/PCR-Makefile
+++ b/tools/build/src/engine/boehm_gc/PCR-Makefile
diff --git a/tools/build/v2/engine/boehm_gc/README.QUICK b/tools/build/src/engine/boehm_gc/README.QUICK
index 5ffa505c9e..5ffa505c9e 100644
--- a/tools/build/v2/engine/boehm_gc/README.QUICK
+++ b/tools/build/src/engine/boehm_gc/README.QUICK
diff --git a/tools/build/v2/engine/boehm_gc/SMakefile.amiga b/tools/build/src/engine/boehm_gc/SMakefile.amiga
index 94fb723865..94fb723865 100644
--- a/tools/build/v2/engine/boehm_gc/SMakefile.amiga
+++ b/tools/build/src/engine/boehm_gc/SMakefile.amiga
diff --git a/tools/build/v2/engine/boehm_gc/WCC_MAKEFILE b/tools/build/src/engine/boehm_gc/WCC_MAKEFILE
index 6b1d78baec..6b1d78baec 100644
--- a/tools/build/v2/engine/boehm_gc/WCC_MAKEFILE
+++ b/tools/build/src/engine/boehm_gc/WCC_MAKEFILE
diff --git a/tools/build/v2/engine/boehm_gc/acinclude.m4 b/tools/build/src/engine/boehm_gc/acinclude.m4
index 72602a0040..72602a0040 100644
--- a/tools/build/v2/engine/boehm_gc/acinclude.m4
+++ b/tools/build/src/engine/boehm_gc/acinclude.m4
diff --git a/tools/build/v2/engine/boehm_gc/aclocal.m4 b/tools/build/src/engine/boehm_gc/aclocal.m4
index 8d195b5182..8d195b5182 100644
--- a/tools/build/v2/engine/boehm_gc/aclocal.m4
+++ b/tools/build/src/engine/boehm_gc/aclocal.m4
diff --git a/tools/build/v2/engine/boehm_gc/add_gc_prefix.c b/tools/build/src/engine/boehm_gc/add_gc_prefix.c
index 59515c7866..59515c7866 100644
--- a/tools/build/v2/engine/boehm_gc/add_gc_prefix.c
+++ b/tools/build/src/engine/boehm_gc/add_gc_prefix.c
diff --git a/tools/build/v2/engine/boehm_gc/allchblk.c b/tools/build/src/engine/boehm_gc/allchblk.c
index 0cec29d60e..0cec29d60e 100644
--- a/tools/build/v2/engine/boehm_gc/allchblk.c
+++ b/tools/build/src/engine/boehm_gc/allchblk.c
diff --git a/tools/build/v2/engine/boehm_gc/alloc.c b/tools/build/src/engine/boehm_gc/alloc.c
index 94a968e0af..94a968e0af 100644
--- a/tools/build/v2/engine/boehm_gc/alloc.c
+++ b/tools/build/src/engine/boehm_gc/alloc.c
diff --git a/tools/build/v2/engine/boehm_gc/alpha_mach_dep.S b/tools/build/src/engine/boehm_gc/alpha_mach_dep.S
index d4def2405f..d4def2405f 100644
--- a/tools/build/v2/engine/boehm_gc/alpha_mach_dep.S
+++ b/tools/build/src/engine/boehm_gc/alpha_mach_dep.S
diff --git a/tools/build/v2/engine/boehm_gc/backgraph.c b/tools/build/src/engine/boehm_gc/backgraph.c
index 59c330f467..59c330f467 100644
--- a/tools/build/v2/engine/boehm_gc/backgraph.c
+++ b/tools/build/src/engine/boehm_gc/backgraph.c
diff --git a/tools/build/v2/engine/boehm_gc/bdw-gc.pc b/tools/build/src/engine/boehm_gc/bdw-gc.pc
index 55fc3346f8..55fc3346f8 100644
--- a/tools/build/v2/engine/boehm_gc/bdw-gc.pc
+++ b/tools/build/src/engine/boehm_gc/bdw-gc.pc
diff --git a/tools/build/v2/engine/boehm_gc/bdw-gc.pc.in b/tools/build/src/engine/boehm_gc/bdw-gc.pc.in
index ef4c23410d..ef4c23410d 100644
--- a/tools/build/v2/engine/boehm_gc/bdw-gc.pc.in
+++ b/tools/build/src/engine/boehm_gc/bdw-gc.pc.in
diff --git a/tools/build/v2/engine/boehm_gc/blacklst.c b/tools/build/src/engine/boehm_gc/blacklst.c
index afcad9c216..afcad9c216 100644
--- a/tools/build/v2/engine/boehm_gc/blacklst.c
+++ b/tools/build/src/engine/boehm_gc/blacklst.c
diff --git a/tools/build/v2/engine/boehm_gc/callprocs b/tools/build/src/engine/boehm_gc/callprocs
index a8793f0b72..a8793f0b72 100755
--- a/tools/build/v2/engine/boehm_gc/callprocs
+++ b/tools/build/src/engine/boehm_gc/callprocs
diff --git a/tools/build/v2/engine/boehm_gc/checksums.c b/tools/build/src/engine/boehm_gc/checksums.c
index 0942acb48b..0942acb48b 100644
--- a/tools/build/v2/engine/boehm_gc/checksums.c
+++ b/tools/build/src/engine/boehm_gc/checksums.c
diff --git a/tools/build/v2/engine/boehm_gc/compile b/tools/build/src/engine/boehm_gc/compile
index 3d2170320e..3d2170320e 100755
--- a/tools/build/v2/engine/boehm_gc/compile
+++ b/tools/build/src/engine/boehm_gc/compile
diff --git a/tools/build/v2/engine/boehm_gc/config.guess b/tools/build/src/engine/boehm_gc/config.guess
index 7924ac077d..7924ac077d 100755
--- a/tools/build/v2/engine/boehm_gc/config.guess
+++ b/tools/build/src/engine/boehm_gc/config.guess
diff --git a/tools/build/v2/engine/boehm_gc/config.sub b/tools/build/src/engine/boehm_gc/config.sub
index 70584b007e..70584b007e 100644
--- a/tools/build/v2/engine/boehm_gc/config.sub
+++ b/tools/build/src/engine/boehm_gc/config.sub
diff --git a/tools/build/v2/engine/boehm_gc/configure b/tools/build/src/engine/boehm_gc/configure
index e2092d1cf1..e2092d1cf1 100755
--- a/tools/build/v2/engine/boehm_gc/configure
+++ b/tools/build/src/engine/boehm_gc/configure
diff --git a/tools/build/v2/engine/boehm_gc/configure.ac b/tools/build/src/engine/boehm_gc/configure.ac
index 7922b13fc4..7922b13fc4 100644
--- a/tools/build/v2/engine/boehm_gc/configure.ac
+++ b/tools/build/src/engine/boehm_gc/configure.ac
diff --git a/tools/build/v2/engine/boehm_gc/configure.host b/tools/build/src/engine/boehm_gc/configure.host
index a98a0a7cb3..a98a0a7cb3 100644
--- a/tools/build/v2/engine/boehm_gc/configure.host
+++ b/tools/build/src/engine/boehm_gc/configure.host
diff --git a/tools/build/v2/engine/boehm_gc/configure_atomic_ops.sh b/tools/build/src/engine/boehm_gc/configure_atomic_ops.sh
index 6a0e31a710..6a0e31a710 100755
--- a/tools/build/v2/engine/boehm_gc/configure_atomic_ops.sh
+++ b/tools/build/src/engine/boehm_gc/configure_atomic_ops.sh
diff --git a/tools/build/v2/engine/boehm_gc/cord/cord.am b/tools/build/src/engine/boehm_gc/cord/cord.am
index fc5e8cc203..fc5e8cc203 100644
--- a/tools/build/v2/engine/boehm_gc/cord/cord.am
+++ b/tools/build/src/engine/boehm_gc/cord/cord.am
diff --git a/tools/build/v2/engine/boehm_gc/cord/cordbscs.c b/tools/build/src/engine/boehm_gc/cord/cordbscs.c
index d83f4067de..d83f4067de 100644
--- a/tools/build/v2/engine/boehm_gc/cord/cordbscs.c
+++ b/tools/build/src/engine/boehm_gc/cord/cordbscs.c
diff --git a/tools/build/v2/engine/boehm_gc/cord/cordprnt.c b/tools/build/src/engine/boehm_gc/cord/cordprnt.c
index 6d278feda6..6d278feda6 100644
--- a/tools/build/v2/engine/boehm_gc/cord/cordprnt.c
+++ b/tools/build/src/engine/boehm_gc/cord/cordprnt.c
diff --git a/tools/build/v2/engine/boehm_gc/cord/cordtest.c b/tools/build/src/engine/boehm_gc/cord/cordtest.c
index 08333ca043..08333ca043 100644
--- a/tools/build/v2/engine/boehm_gc/cord/cordtest.c
+++ b/tools/build/src/engine/boehm_gc/cord/cordtest.c
diff --git a/tools/build/v2/engine/boehm_gc/cord/cordxtra.c b/tools/build/src/engine/boehm_gc/cord/cordxtra.c
index b0a746226c..b0a746226c 100644
--- a/tools/build/v2/engine/boehm_gc/cord/cordxtra.c
+++ b/tools/build/src/engine/boehm_gc/cord/cordxtra.c
diff --git a/tools/build/v2/engine/boehm_gc/cord/de.c b/tools/build/src/engine/boehm_gc/cord/de.c
index 989e19a888..989e19a888 100644
--- a/tools/build/v2/engine/boehm_gc/cord/de.c
+++ b/tools/build/src/engine/boehm_gc/cord/de.c
diff --git a/tools/build/v2/engine/boehm_gc/cord/de_cmds.h b/tools/build/src/engine/boehm_gc/cord/de_cmds.h
index f42ddcf2da..f42ddcf2da 100644
--- a/tools/build/v2/engine/boehm_gc/cord/de_cmds.h
+++ b/tools/build/src/engine/boehm_gc/cord/de_cmds.h
diff --git a/tools/build/v2/engine/boehm_gc/cord/de_win.ICO b/tools/build/src/engine/boehm_gc/cord/de_win.ICO
index b20ac3ee16..b20ac3ee16 100644
--- a/tools/build/v2/engine/boehm_gc/cord/de_win.ICO
+++ b/tools/build/src/engine/boehm_gc/cord/de_win.ICO
Binary files differ
diff --git a/tools/build/v2/engine/boehm_gc/cord/de_win.RC b/tools/build/src/engine/boehm_gc/cord/de_win.RC
index 554a300438..554a300438 100644
--- a/tools/build/v2/engine/boehm_gc/cord/de_win.RC
+++ b/tools/build/src/engine/boehm_gc/cord/de_win.RC
diff --git a/tools/build/v2/engine/boehm_gc/cord/de_win.c b/tools/build/src/engine/boehm_gc/cord/de_win.c
index 1871736e39..1871736e39 100644
--- a/tools/build/v2/engine/boehm_gc/cord/de_win.c
+++ b/tools/build/src/engine/boehm_gc/cord/de_win.c
diff --git a/tools/build/v2/engine/boehm_gc/cord/de_win.h b/tools/build/src/engine/boehm_gc/cord/de_win.h
index 57a47b45c4..57a47b45c4 100644
--- a/tools/build/v2/engine/boehm_gc/cord/de_win.h
+++ b/tools/build/src/engine/boehm_gc/cord/de_win.h
diff --git a/tools/build/v2/engine/boehm_gc/darwin_stop_world.c b/tools/build/src/engine/boehm_gc/darwin_stop_world.c
index a2f0926453..a2f0926453 100644
--- a/tools/build/v2/engine/boehm_gc/darwin_stop_world.c
+++ b/tools/build/src/engine/boehm_gc/darwin_stop_world.c
diff --git a/tools/build/v2/engine/boehm_gc/dbg_mlc.c b/tools/build/src/engine/boehm_gc/dbg_mlc.c
index 4b72e390ac..4b72e390ac 100644
--- a/tools/build/v2/engine/boehm_gc/dbg_mlc.c
+++ b/tools/build/src/engine/boehm_gc/dbg_mlc.c
diff --git a/tools/build/v2/engine/boehm_gc/depcomp b/tools/build/src/engine/boehm_gc/depcomp
index 3480ce4e96..3480ce4e96 100755
--- a/tools/build/v2/engine/boehm_gc/depcomp
+++ b/tools/build/src/engine/boehm_gc/depcomp
diff --git a/tools/build/v2/engine/boehm_gc/digimars.mak b/tools/build/src/engine/boehm_gc/digimars.mak
index 2080611826..2080611826 100644
--- a/tools/build/v2/engine/boehm_gc/digimars.mak
+++ b/tools/build/src/engine/boehm_gc/digimars.mak
diff --git a/tools/build/v2/engine/boehm_gc/doc/README b/tools/build/src/engine/boehm_gc/doc/README
index 33a6740bcc..33a6740bcc 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README
+++ b/tools/build/src/engine/boehm_gc/doc/README
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.DGUX386 b/tools/build/src/engine/boehm_gc/doc/README.DGUX386
index 9d6d84788e..9d6d84788e 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.DGUX386
+++ b/tools/build/src/engine/boehm_gc/doc/README.DGUX386
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.Mac b/tools/build/src/engine/boehm_gc/doc/README.Mac
index ae9c1d5e59..ae9c1d5e59 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.Mac
+++ b/tools/build/src/engine/boehm_gc/doc/README.Mac
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.MacOSX b/tools/build/src/engine/boehm_gc/doc/README.MacOSX
index f5333d51ad..f5333d51ad 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.MacOSX
+++ b/tools/build/src/engine/boehm_gc/doc/README.MacOSX
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.OS2 b/tools/build/src/engine/boehm_gc/doc/README.OS2
index 5345bbd0f6..5345bbd0f6 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.OS2
+++ b/tools/build/src/engine/boehm_gc/doc/README.OS2
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.amiga b/tools/build/src/engine/boehm_gc/doc/README.amiga
index 730dce3fe9..730dce3fe9 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.amiga
+++ b/tools/build/src/engine/boehm_gc/doc/README.amiga
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.arm.cross b/tools/build/src/engine/boehm_gc/doc/README.arm.cross
index 96744edaf6..96744edaf6 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.arm.cross
+++ b/tools/build/src/engine/boehm_gc/doc/README.arm.cross
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.autoconf b/tools/build/src/engine/boehm_gc/doc/README.autoconf
index 53fcf5a50b..53fcf5a50b 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.autoconf
+++ b/tools/build/src/engine/boehm_gc/doc/README.autoconf
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.changes b/tools/build/src/engine/boehm_gc/doc/README.changes
index 27bf33eaaa..27bf33eaaa 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.changes
+++ b/tools/build/src/engine/boehm_gc/doc/README.changes
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.contributors b/tools/build/src/engine/boehm_gc/doc/README.contributors
index fd5c95f229..fd5c95f229 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.contributors
+++ b/tools/build/src/engine/boehm_gc/doc/README.contributors
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.cords b/tools/build/src/engine/boehm_gc/doc/README.cords
index 3485e0145a..3485e0145a 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.cords
+++ b/tools/build/src/engine/boehm_gc/doc/README.cords
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.darwin b/tools/build/src/engine/boehm_gc/doc/README.darwin
index b413ff3385..b413ff3385 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.darwin
+++ b/tools/build/src/engine/boehm_gc/doc/README.darwin
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.dj b/tools/build/src/engine/boehm_gc/doc/README.dj
index 613bc423cb..613bc423cb 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.dj
+++ b/tools/build/src/engine/boehm_gc/doc/README.dj
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.environment b/tools/build/src/engine/boehm_gc/doc/README.environment
index d50d37094c..d50d37094c 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.environment
+++ b/tools/build/src/engine/boehm_gc/doc/README.environment
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.ews4800 b/tools/build/src/engine/boehm_gc/doc/README.ews4800
index 80bca2b3d9..80bca2b3d9 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.ews4800
+++ b/tools/build/src/engine/boehm_gc/doc/README.ews4800
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.hp b/tools/build/src/engine/boehm_gc/doc/README.hp
index caa8bdd19f..caa8bdd19f 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.hp
+++ b/tools/build/src/engine/boehm_gc/doc/README.hp
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.linux b/tools/build/src/engine/boehm_gc/doc/README.linux
index 3c50181487..3c50181487 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.linux
+++ b/tools/build/src/engine/boehm_gc/doc/README.linux
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.macros b/tools/build/src/engine/boehm_gc/doc/README.macros
index 6a9a1fdcb3..6a9a1fdcb3 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.macros
+++ b/tools/build/src/engine/boehm_gc/doc/README.macros
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.rs6000 b/tools/build/src/engine/boehm_gc/doc/README.rs6000
index f5630b20a3..f5630b20a3 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.rs6000
+++ b/tools/build/src/engine/boehm_gc/doc/README.rs6000
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.sgi b/tools/build/src/engine/boehm_gc/doc/README.sgi
index 7bdb50a4ef..7bdb50a4ef 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.sgi
+++ b/tools/build/src/engine/boehm_gc/doc/README.sgi
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.solaris2 b/tools/build/src/engine/boehm_gc/doc/README.solaris2
index 73620342f3..73620342f3 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.solaris2
+++ b/tools/build/src/engine/boehm_gc/doc/README.solaris2
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.uts b/tools/build/src/engine/boehm_gc/doc/README.uts
index 6be49667d7..6be49667d7 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.uts
+++ b/tools/build/src/engine/boehm_gc/doc/README.uts
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.win32 b/tools/build/src/engine/boehm_gc/doc/README.win32
index 1dce2b9e78..1dce2b9e78 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.win32
+++ b/tools/build/src/engine/boehm_gc/doc/README.win32
diff --git a/tools/build/v2/engine/boehm_gc/doc/README.win64 b/tools/build/src/engine/boehm_gc/doc/README.win64
index 9db0e78c61..9db0e78c61 100644
--- a/tools/build/v2/engine/boehm_gc/doc/README.win64
+++ b/tools/build/src/engine/boehm_gc/doc/README.win64
diff --git a/tools/build/v2/engine/boehm_gc/doc/barrett_diagram b/tools/build/src/engine/boehm_gc/doc/barrett_diagram
index 27e80dc15c..27e80dc15c 100644
--- a/tools/build/v2/engine/boehm_gc/doc/barrett_diagram
+++ b/tools/build/src/engine/boehm_gc/doc/barrett_diagram
diff --git a/tools/build/v2/engine/boehm_gc/doc/debugging.html b/tools/build/src/engine/boehm_gc/doc/debugging.html
index 7c65f2bb40..7c65f2bb40 100644
--- a/tools/build/v2/engine/boehm_gc/doc/debugging.html
+++ b/tools/build/src/engine/boehm_gc/doc/debugging.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/doc.am b/tools/build/src/engine/boehm_gc/doc/doc.am
index 8d5f67d333..8d5f67d333 100644
--- a/tools/build/v2/engine/boehm_gc/doc/doc.am
+++ b/tools/build/src/engine/boehm_gc/doc/doc.am
diff --git a/tools/build/v2/engine/boehm_gc/doc/gc.man b/tools/build/src/engine/boehm_gc/doc/gc.man
index 2a550c7124..2a550c7124 100644
--- a/tools/build/v2/engine/boehm_gc/doc/gc.man
+++ b/tools/build/src/engine/boehm_gc/doc/gc.man
diff --git a/tools/build/v2/engine/boehm_gc/doc/gcdescr.html b/tools/build/src/engine/boehm_gc/doc/gcdescr.html
index dc08470e40..dc08470e40 100644
--- a/tools/build/v2/engine/boehm_gc/doc/gcdescr.html
+++ b/tools/build/src/engine/boehm_gc/doc/gcdescr.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/gcinterface.html b/tools/build/src/engine/boehm_gc/doc/gcinterface.html
index 74230aa6ca..74230aa6ca 100644
--- a/tools/build/v2/engine/boehm_gc/doc/gcinterface.html
+++ b/tools/build/src/engine/boehm_gc/doc/gcinterface.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/leak.html b/tools/build/src/engine/boehm_gc/doc/leak.html
index 8f460c9f68..8f460c9f68 100644
--- a/tools/build/v2/engine/boehm_gc/doc/leak.html
+++ b/tools/build/src/engine/boehm_gc/doc/leak.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/overview.html b/tools/build/src/engine/boehm_gc/doc/overview.html
index d31f937005..d31f937005 100644
--- a/tools/build/v2/engine/boehm_gc/doc/overview.html
+++ b/tools/build/src/engine/boehm_gc/doc/overview.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/porting.html b/tools/build/src/engine/boehm_gc/doc/porting.html
index 5a06c228e5..5a06c228e5 100644
--- a/tools/build/v2/engine/boehm_gc/doc/porting.html
+++ b/tools/build/src/engine/boehm_gc/doc/porting.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/scale.html b/tools/build/src/engine/boehm_gc/doc/scale.html
index 2e70148dfb..2e70148dfb 100644
--- a/tools/build/v2/engine/boehm_gc/doc/scale.html
+++ b/tools/build/src/engine/boehm_gc/doc/scale.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/simple_example.html b/tools/build/src/engine/boehm_gc/doc/simple_example.html
index 0bc0953ef0..0bc0953ef0 100644
--- a/tools/build/v2/engine/boehm_gc/doc/simple_example.html
+++ b/tools/build/src/engine/boehm_gc/doc/simple_example.html
diff --git a/tools/build/v2/engine/boehm_gc/doc/tree.html b/tools/build/src/engine/boehm_gc/doc/tree.html
index c46a281cc6..c46a281cc6 100644
--- a/tools/build/v2/engine/boehm_gc/doc/tree.html
+++ b/tools/build/src/engine/boehm_gc/doc/tree.html
diff --git a/tools/build/v2/engine/boehm_gc/dyn_load.c b/tools/build/src/engine/boehm_gc/dyn_load.c
index 36968ba5d7..36968ba5d7 100644
--- a/tools/build/v2/engine/boehm_gc/dyn_load.c
+++ b/tools/build/src/engine/boehm_gc/dyn_load.c
diff --git a/tools/build/v2/engine/boehm_gc/finalize.c b/tools/build/src/engine/boehm_gc/finalize.c
index 6ae36c2ddc..6ae36c2ddc 100644
--- a/tools/build/v2/engine/boehm_gc/finalize.c
+++ b/tools/build/src/engine/boehm_gc/finalize.c
diff --git a/tools/build/v2/engine/boehm_gc/gc.mak b/tools/build/src/engine/boehm_gc/gc.mak
index 5c02c9023c..5c02c9023c 100644
--- a/tools/build/v2/engine/boehm_gc/gc.mak
+++ b/tools/build/src/engine/boehm_gc/gc.mak
diff --git a/tools/build/v2/engine/boehm_gc/gc_cpp.cc b/tools/build/src/engine/boehm_gc/gc_cpp.cc
index c4dc4cd256..c4dc4cd256 100644
--- a/tools/build/v2/engine/boehm_gc/gc_cpp.cc
+++ b/tools/build/src/engine/boehm_gc/gc_cpp.cc
diff --git a/tools/build/v2/engine/boehm_gc/gc_cpp.cpp b/tools/build/src/engine/boehm_gc/gc_cpp.cpp
index f6bd95e59a..f6bd95e59a 100644
--- a/tools/build/v2/engine/boehm_gc/gc_cpp.cpp
+++ b/tools/build/src/engine/boehm_gc/gc_cpp.cpp
diff --git a/tools/build/v2/engine/boehm_gc/gc_dlopen.c b/tools/build/src/engine/boehm_gc/gc_dlopen.c
index 51659d1e82..51659d1e82 100644
--- a/tools/build/v2/engine/boehm_gc/gc_dlopen.c
+++ b/tools/build/src/engine/boehm_gc/gc_dlopen.c
diff --git a/tools/build/v2/engine/boehm_gc/gcj_mlc.c b/tools/build/src/engine/boehm_gc/gcj_mlc.c
index 7e5beb1886..7e5beb1886 100644
--- a/tools/build/v2/engine/boehm_gc/gcj_mlc.c
+++ b/tools/build/src/engine/boehm_gc/gcj_mlc.c
diff --git a/tools/build/v2/engine/boehm_gc/gcname.c b/tools/build/src/engine/boehm_gc/gcname.c
index e2119d7996..e2119d7996 100644
--- a/tools/build/v2/engine/boehm_gc/gcname.c
+++ b/tools/build/src/engine/boehm_gc/gcname.c
diff --git a/tools/build/v2/engine/boehm_gc/headers.c b/tools/build/src/engine/boehm_gc/headers.c
index 6b4eb84922..6b4eb84922 100644
--- a/tools/build/v2/engine/boehm_gc/headers.c
+++ b/tools/build/src/engine/boehm_gc/headers.c
diff --git a/tools/build/v2/engine/boehm_gc/hpux_test_and_clear.s b/tools/build/src/engine/boehm_gc/hpux_test_and_clear.s
index f09b211404..f09b211404 100644
--- a/tools/build/v2/engine/boehm_gc/hpux_test_and_clear.s
+++ b/tools/build/src/engine/boehm_gc/hpux_test_and_clear.s
diff --git a/tools/build/v2/engine/boehm_gc/ia64_save_regs_in_stack.s b/tools/build/src/engine/boehm_gc/ia64_save_regs_in_stack.s
index 3b18c0841d..3b18c0841d 100644
--- a/tools/build/v2/engine/boehm_gc/ia64_save_regs_in_stack.s
+++ b/tools/build/src/engine/boehm_gc/ia64_save_regs_in_stack.s
diff --git a/tools/build/v2/engine/boehm_gc/if_mach.c b/tools/build/src/engine/boehm_gc/if_mach.c
index d6e0a70d74..d6e0a70d74 100644
--- a/tools/build/v2/engine/boehm_gc/if_mach.c
+++ b/tools/build/src/engine/boehm_gc/if_mach.c
diff --git a/tools/build/v2/engine/boehm_gc/if_not_there.c b/tools/build/src/engine/boehm_gc/if_not_there.c
index 7af6fba4e0..7af6fba4e0 100644
--- a/tools/build/v2/engine/boehm_gc/if_not_there.c
+++ b/tools/build/src/engine/boehm_gc/if_not_there.c
diff --git a/tools/build/v2/engine/boehm_gc/include/cord.h b/tools/build/src/engine/boehm_gc/include/cord.h
index 926089e86f..926089e86f 100644
--- a/tools/build/v2/engine/boehm_gc/include/cord.h
+++ b/tools/build/src/engine/boehm_gc/include/cord.h
diff --git a/tools/build/v2/engine/boehm_gc/include/ec.h b/tools/build/src/engine/boehm_gc/include/ec.h
index c829b83ad1..c829b83ad1 100644
--- a/tools/build/v2/engine/boehm_gc/include/ec.h
+++ b/tools/build/src/engine/boehm_gc/include/ec.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc.h b/tools/build/src/engine/boehm_gc/include/gc.h
index cc950888f0..cc950888f0 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc.h
+++ b/tools/build/src/engine/boehm_gc/include/gc.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_allocator.h b/tools/build/src/engine/boehm_gc/include/gc_allocator.h
index 4f3117b3bd..4f3117b3bd 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_allocator.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_allocator.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_amiga_redirects.h b/tools/build/src/engine/boehm_gc/include/gc_amiga_redirects.h
index 9e975c8c83..9e975c8c83 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_amiga_redirects.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_amiga_redirects.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_backptr.h b/tools/build/src/engine/boehm_gc/include/gc_backptr.h
index 5899496e0f..5899496e0f 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_backptr.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_backptr.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_config_macros.h b/tools/build/src/engine/boehm_gc/include/gc_config_macros.h
index 66abf0b1e3..66abf0b1e3 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_config_macros.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_config_macros.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_cpp.h b/tools/build/src/engine/boehm_gc/include/gc_cpp.h
index d3df211210..d3df211210 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_cpp.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_cpp.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_gcj.h b/tools/build/src/engine/boehm_gc/include/gc_gcj.h
index 699ddf5d48..699ddf5d48 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_gcj.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_gcj.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_inline.h b/tools/build/src/engine/boehm_gc/include/gc_inline.h
index da7e2e91f0..da7e2e91f0 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_inline.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_inline.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_mark.h b/tools/build/src/engine/boehm_gc/include/gc_mark.h
index 8ee50b5d4d..8ee50b5d4d 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_mark.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_mark.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_pthread_redirects.h b/tools/build/src/engine/boehm_gc/include/gc_pthread_redirects.h
index b567f63e14..b567f63e14 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_pthread_redirects.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_pthread_redirects.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_tiny_fl.h b/tools/build/src/engine/boehm_gc/include/gc_tiny_fl.h
index 52b6864b6b..52b6864b6b 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_tiny_fl.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_tiny_fl.h
diff --git a/tools/build/v2/engine/boehm_gc/include/gc_typed.h b/tools/build/src/engine/boehm_gc/include/gc_typed.h
index 1086acdd12..1086acdd12 100644
--- a/tools/build/v2/engine/boehm_gc/include/gc_typed.h
+++ b/tools/build/src/engine/boehm_gc/include/gc_typed.h
diff --git a/tools/build/v2/engine/boehm_gc/include/include.am b/tools/build/src/engine/boehm_gc/include/include.am
index 78c57c346c..78c57c346c 100644
--- a/tools/build/v2/engine/boehm_gc/include/include.am
+++ b/tools/build/src/engine/boehm_gc/include/include.am
diff --git a/tools/build/v2/engine/boehm_gc/include/javaxfc.h b/tools/build/src/engine/boehm_gc/include/javaxfc.h
index 23e01005af..23e01005af 100644
--- a/tools/build/v2/engine/boehm_gc/include/javaxfc.h
+++ b/tools/build/src/engine/boehm_gc/include/javaxfc.h
diff --git a/tools/build/v2/engine/boehm_gc/include/leak_detector.h b/tools/build/src/engine/boehm_gc/include/leak_detector.h
index 1d02f40076..1d02f40076 100644
--- a/tools/build/v2/engine/boehm_gc/include/leak_detector.h
+++ b/tools/build/src/engine/boehm_gc/include/leak_detector.h
diff --git a/tools/build/v2/engine/boehm_gc/include/new_gc_alloc.h b/tools/build/src/engine/boehm_gc/include/new_gc_alloc.h
index b4906af548..b4906af548 100644
--- a/tools/build/v2/engine/boehm_gc/include/new_gc_alloc.h
+++ b/tools/build/src/engine/boehm_gc/include/new_gc_alloc.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/cord_pos.h b/tools/build/src/engine/boehm_gc/include/private/cord_pos.h
index d2b24bb8ab..d2b24bb8ab 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/cord_pos.h
+++ b/tools/build/src/engine/boehm_gc/include/private/cord_pos.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/darwin_semaphore.h b/tools/build/src/engine/boehm_gc/include/private/darwin_semaphore.h
index 0f43982d5c..0f43982d5c 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/darwin_semaphore.h
+++ b/tools/build/src/engine/boehm_gc/include/private/darwin_semaphore.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/darwin_stop_world.h b/tools/build/src/engine/boehm_gc/include/private/darwin_stop_world.h
index f6f5314ee3..f6f5314ee3 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/darwin_stop_world.h
+++ b/tools/build/src/engine/boehm_gc/include/private/darwin_stop_world.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/dbg_mlc.h b/tools/build/src/engine/boehm_gc/include/private/dbg_mlc.h
index fcd027c4fe..fcd027c4fe 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/dbg_mlc.h
+++ b/tools/build/src/engine/boehm_gc/include/private/dbg_mlc.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/gc_hdrs.h b/tools/build/src/engine/boehm_gc/include/private/gc_hdrs.h
index 559556ca78..559556ca78 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/gc_hdrs.h
+++ b/tools/build/src/engine/boehm_gc/include/private/gc_hdrs.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/gc_locks.h b/tools/build/src/engine/boehm_gc/include/private/gc_locks.h
index d7c83b07b9..d7c83b07b9 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/gc_locks.h
+++ b/tools/build/src/engine/boehm_gc/include/private/gc_locks.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/gc_pmark.h b/tools/build/src/engine/boehm_gc/include/private/gc_pmark.h
index 36083970ac..36083970ac 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/gc_pmark.h
+++ b/tools/build/src/engine/boehm_gc/include/private/gc_pmark.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/gc_priv.h b/tools/build/src/engine/boehm_gc/include/private/gc_priv.h
index ec93ffea9c..ec93ffea9c 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/gc_priv.h
+++ b/tools/build/src/engine/boehm_gc/include/private/gc_priv.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/gcconfig.h b/tools/build/src/engine/boehm_gc/include/private/gcconfig.h
index 20f35bc3a1..20f35bc3a1 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/gcconfig.h
+++ b/tools/build/src/engine/boehm_gc/include/private/gcconfig.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/msvc_dbg.h b/tools/build/src/engine/boehm_gc/include/private/msvc_dbg.h
index 1d3030aaab..1d3030aaab 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/msvc_dbg.h
+++ b/tools/build/src/engine/boehm_gc/include/private/msvc_dbg.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/pthread_stop_world.h b/tools/build/src/engine/boehm_gc/include/private/pthread_stop_world.h
index 6f9197a1f6..6f9197a1f6 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/pthread_stop_world.h
+++ b/tools/build/src/engine/boehm_gc/include/private/pthread_stop_world.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/pthread_support.h b/tools/build/src/engine/boehm_gc/include/private/pthread_support.h
index 77f1ad1a90..77f1ad1a90 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/pthread_support.h
+++ b/tools/build/src/engine/boehm_gc/include/private/pthread_support.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/specific.h b/tools/build/src/engine/boehm_gc/include/private/specific.h
index fc2e8f9e66..fc2e8f9e66 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/specific.h
+++ b/tools/build/src/engine/boehm_gc/include/private/specific.h
diff --git a/tools/build/v2/engine/boehm_gc/include/private/thread_local_alloc.h b/tools/build/src/engine/boehm_gc/include/private/thread_local_alloc.h
index 4c2c5362fb..4c2c5362fb 100644
--- a/tools/build/v2/engine/boehm_gc/include/private/thread_local_alloc.h
+++ b/tools/build/src/engine/boehm_gc/include/private/thread_local_alloc.h
diff --git a/tools/build/v2/engine/boehm_gc/include/weakpointer.h b/tools/build/src/engine/boehm_gc/include/weakpointer.h
index 84906b00a6..84906b00a6 100644
--- a/tools/build/v2/engine/boehm_gc/include/weakpointer.h
+++ b/tools/build/src/engine/boehm_gc/include/weakpointer.h
diff --git a/tools/build/v2/engine/boehm_gc/install-sh b/tools/build/src/engine/boehm_gc/install-sh
index 398a88e142..398a88e142 100755
--- a/tools/build/v2/engine/boehm_gc/install-sh
+++ b/tools/build/src/engine/boehm_gc/install-sh
diff --git a/tools/build/v2/engine/boehm_gc/libtool.m4 b/tools/build/src/engine/boehm_gc/libtool.m4
index 0f53cb592e..0f53cb592e 100644
--- a/tools/build/v2/engine/boehm_gc/libtool.m4
+++ b/tools/build/src/engine/boehm_gc/libtool.m4
diff --git a/tools/build/v2/engine/boehm_gc/ltmain.sh b/tools/build/src/engine/boehm_gc/ltmain.sh
index 06823e057a..06823e057a 100755
--- a/tools/build/v2/engine/boehm_gc/ltmain.sh
+++ b/tools/build/src/engine/boehm_gc/ltmain.sh
diff --git a/tools/build/v2/engine/boehm_gc/mach_dep.c b/tools/build/src/engine/boehm_gc/mach_dep.c
index 4f05843c63..4f05843c63 100644
--- a/tools/build/v2/engine/boehm_gc/mach_dep.c
+++ b/tools/build/src/engine/boehm_gc/mach_dep.c
diff --git a/tools/build/v2/engine/boehm_gc/malloc.c b/tools/build/src/engine/boehm_gc/malloc.c
index 94032640e6..94032640e6 100644
--- a/tools/build/v2/engine/boehm_gc/malloc.c
+++ b/tools/build/src/engine/boehm_gc/malloc.c
diff --git a/tools/build/v2/engine/boehm_gc/mallocx.c b/tools/build/src/engine/boehm_gc/mallocx.c
index 4649b73ac9..4649b73ac9 100644
--- a/tools/build/v2/engine/boehm_gc/mallocx.c
+++ b/tools/build/src/engine/boehm_gc/mallocx.c
diff --git a/tools/build/v2/engine/boehm_gc/mark.c b/tools/build/src/engine/boehm_gc/mark.c
index 3e5c46d1b0..3e5c46d1b0 100644
--- a/tools/build/v2/engine/boehm_gc/mark.c
+++ b/tools/build/src/engine/boehm_gc/mark.c
diff --git a/tools/build/v2/engine/boehm_gc/mark_rts.c b/tools/build/src/engine/boehm_gc/mark_rts.c
index 05ece1c2bf..05ece1c2bf 100644
--- a/tools/build/v2/engine/boehm_gc/mark_rts.c
+++ b/tools/build/src/engine/boehm_gc/mark_rts.c
diff --git a/tools/build/v2/engine/boehm_gc/mips_sgi_mach_dep.s b/tools/build/src/engine/boehm_gc/mips_sgi_mach_dep.s
index 56390280a7..56390280a7 100644
--- a/tools/build/v2/engine/boehm_gc/mips_sgi_mach_dep.s
+++ b/tools/build/src/engine/boehm_gc/mips_sgi_mach_dep.s
diff --git a/tools/build/v2/engine/boehm_gc/mips_ultrix_mach_dep.s b/tools/build/src/engine/boehm_gc/mips_ultrix_mach_dep.s
index 178224e31c..178224e31c 100644
--- a/tools/build/v2/engine/boehm_gc/mips_ultrix_mach_dep.s
+++ b/tools/build/src/engine/boehm_gc/mips_ultrix_mach_dep.s
diff --git a/tools/build/v2/engine/boehm_gc/misc.c b/tools/build/src/engine/boehm_gc/misc.c
index 37e509336a..37e509336a 100644
--- a/tools/build/v2/engine/boehm_gc/misc.c
+++ b/tools/build/src/engine/boehm_gc/misc.c
diff --git a/tools/build/v2/engine/boehm_gc/missing b/tools/build/src/engine/boehm_gc/missing
index dd583709f5..dd583709f5 100755
--- a/tools/build/v2/engine/boehm_gc/missing
+++ b/tools/build/src/engine/boehm_gc/missing
diff --git a/tools/build/v2/engine/boehm_gc/mkinstalldirs b/tools/build/src/engine/boehm_gc/mkinstalldirs
index 82a561f435..82a561f435 100755
--- a/tools/build/v2/engine/boehm_gc/mkinstalldirs
+++ b/tools/build/src/engine/boehm_gc/mkinstalldirs
diff --git a/tools/build/v2/engine/boehm_gc/msvc_dbg.c b/tools/build/src/engine/boehm_gc/msvc_dbg.c
index b172025ba5..b172025ba5 100644
--- a/tools/build/v2/engine/boehm_gc/msvc_dbg.c
+++ b/tools/build/src/engine/boehm_gc/msvc_dbg.c
diff --git a/tools/build/v2/engine/boehm_gc/new_hblk.c b/tools/build/src/engine/boehm_gc/new_hblk.c
index 5d5a56f2af..5d5a56f2af 100644
--- a/tools/build/v2/engine/boehm_gc/new_hblk.c
+++ b/tools/build/src/engine/boehm_gc/new_hblk.c
diff --git a/tools/build/v2/engine/boehm_gc/obj_map.c b/tools/build/src/engine/boehm_gc/obj_map.c
index c295c4033b..c295c4033b 100644
--- a/tools/build/v2/engine/boehm_gc/obj_map.c
+++ b/tools/build/src/engine/boehm_gc/obj_map.c
diff --git a/tools/build/v2/engine/boehm_gc/os_dep.c b/tools/build/src/engine/boehm_gc/os_dep.c
index bb8fa08f61..bb8fa08f61 100644
--- a/tools/build/v2/engine/boehm_gc/os_dep.c
+++ b/tools/build/src/engine/boehm_gc/os_dep.c
diff --git a/tools/build/v2/engine/boehm_gc/pcr_interface.c b/tools/build/src/engine/boehm_gc/pcr_interface.c
index 77bddf80bd..77bddf80bd 100644
--- a/tools/build/v2/engine/boehm_gc/pcr_interface.c
+++ b/tools/build/src/engine/boehm_gc/pcr_interface.c
diff --git a/tools/build/v2/engine/boehm_gc/pthread_stop_world.c b/tools/build/src/engine/boehm_gc/pthread_stop_world.c
index 3a1524b96b..3a1524b96b 100644
--- a/tools/build/v2/engine/boehm_gc/pthread_stop_world.c
+++ b/tools/build/src/engine/boehm_gc/pthread_stop_world.c
diff --git a/tools/build/v2/engine/boehm_gc/pthread_support.c b/tools/build/src/engine/boehm_gc/pthread_support.c
index 955eea2daa..955eea2daa 100644
--- a/tools/build/v2/engine/boehm_gc/pthread_support.c
+++ b/tools/build/src/engine/boehm_gc/pthread_support.c
diff --git a/tools/build/v2/engine/boehm_gc/ptr_chck.c b/tools/build/src/engine/boehm_gc/ptr_chck.c
index d04d2daf52..d04d2daf52 100644
--- a/tools/build/v2/engine/boehm_gc/ptr_chck.c
+++ b/tools/build/src/engine/boehm_gc/ptr_chck.c
diff --git a/tools/build/v2/engine/boehm_gc/real_malloc.c b/tools/build/src/engine/boehm_gc/real_malloc.c
index 85befdcbaa..85befdcbaa 100644
--- a/tools/build/v2/engine/boehm_gc/real_malloc.c
+++ b/tools/build/src/engine/boehm_gc/real_malloc.c
diff --git a/tools/build/v2/engine/boehm_gc/reclaim.c b/tools/build/src/engine/boehm_gc/reclaim.c
index cd50fa4811..cd50fa4811 100644
--- a/tools/build/v2/engine/boehm_gc/reclaim.c
+++ b/tools/build/src/engine/boehm_gc/reclaim.c
diff --git a/tools/build/v2/engine/boehm_gc/rs6000_mach_dep.s b/tools/build/src/engine/boehm_gc/rs6000_mach_dep.s
index 12bf9a84d5..12bf9a84d5 100644
--- a/tools/build/v2/engine/boehm_gc/rs6000_mach_dep.s
+++ b/tools/build/src/engine/boehm_gc/rs6000_mach_dep.s
diff --git a/tools/build/v2/engine/boehm_gc/setjmp_t.c b/tools/build/src/engine/boehm_gc/setjmp_t.c
index 5a171df0cb..5a171df0cb 100644
--- a/tools/build/v2/engine/boehm_gc/setjmp_t.c
+++ b/tools/build/src/engine/boehm_gc/setjmp_t.c
diff --git a/tools/build/v2/engine/boehm_gc/sparc_mach_dep.S b/tools/build/src/engine/boehm_gc/sparc_mach_dep.S
index e8ad6556f9..e8ad6556f9 100644
--- a/tools/build/v2/engine/boehm_gc/sparc_mach_dep.S
+++ b/tools/build/src/engine/boehm_gc/sparc_mach_dep.S
diff --git a/tools/build/v2/engine/boehm_gc/sparc_netbsd_mach_dep.s b/tools/build/src/engine/boehm_gc/sparc_netbsd_mach_dep.s
index bc3f160373..bc3f160373 100644
--- a/tools/build/v2/engine/boehm_gc/sparc_netbsd_mach_dep.s
+++ b/tools/build/src/engine/boehm_gc/sparc_netbsd_mach_dep.s
diff --git a/tools/build/v2/engine/boehm_gc/sparc_sunos4_mach_dep.s b/tools/build/src/engine/boehm_gc/sparc_sunos4_mach_dep.s
index 41858073ef..41858073ef 100644
--- a/tools/build/v2/engine/boehm_gc/sparc_sunos4_mach_dep.s
+++ b/tools/build/src/engine/boehm_gc/sparc_sunos4_mach_dep.s
diff --git a/tools/build/v2/engine/boehm_gc/specific.c b/tools/build/src/engine/boehm_gc/specific.c
index b8ea49dd32..b8ea49dd32 100644
--- a/tools/build/v2/engine/boehm_gc/specific.c
+++ b/tools/build/src/engine/boehm_gc/specific.c
diff --git a/tools/build/v2/engine/boehm_gc/stubborn.c b/tools/build/src/engine/boehm_gc/stubborn.c
index f4e0958399..f4e0958399 100644
--- a/tools/build/v2/engine/boehm_gc/stubborn.c
+++ b/tools/build/src/engine/boehm_gc/stubborn.c
diff --git a/tools/build/v2/engine/boehm_gc/tests/leak_test.c b/tools/build/src/engine/boehm_gc/tests/leak_test.c
index d6a60d47a7..d6a60d47a7 100644
--- a/tools/build/v2/engine/boehm_gc/tests/leak_test.c
+++ b/tools/build/src/engine/boehm_gc/tests/leak_test.c
diff --git a/tools/build/v2/engine/boehm_gc/tests/middle.c b/tools/build/src/engine/boehm_gc/tests/middle.c
index ebb348c493..ebb348c493 100644
--- a/tools/build/v2/engine/boehm_gc/tests/middle.c
+++ b/tools/build/src/engine/boehm_gc/tests/middle.c
diff --git a/tools/build/v2/engine/boehm_gc/tests/test.c b/tools/build/src/engine/boehm_gc/tests/test.c
index 47cf99b596..47cf99b596 100644
--- a/tools/build/v2/engine/boehm_gc/tests/test.c
+++ b/tools/build/src/engine/boehm_gc/tests/test.c
diff --git a/tools/build/v2/engine/boehm_gc/tests/test_cpp.cc b/tools/build/src/engine/boehm_gc/tests/test_cpp.cc
index 9bf53de811..9bf53de811 100644
--- a/tools/build/v2/engine/boehm_gc/tests/test_cpp.cc
+++ b/tools/build/src/engine/boehm_gc/tests/test_cpp.cc
diff --git a/tools/build/v2/engine/boehm_gc/tests/tests.am b/tools/build/src/engine/boehm_gc/tests/tests.am
index aedac1fa67..aedac1fa67 100644
--- a/tools/build/v2/engine/boehm_gc/tests/tests.am
+++ b/tools/build/src/engine/boehm_gc/tests/tests.am
diff --git a/tools/build/v2/engine/boehm_gc/tests/thread_leak_test.c b/tools/build/src/engine/boehm_gc/tests/thread_leak_test.c
index f2c15e9671..f2c15e9671 100644
--- a/tools/build/v2/engine/boehm_gc/tests/thread_leak_test.c
+++ b/tools/build/src/engine/boehm_gc/tests/thread_leak_test.c
diff --git a/tools/build/v2/engine/boehm_gc/thread_local_alloc.c b/tools/build/src/engine/boehm_gc/thread_local_alloc.c
index f747b35570..f747b35570 100644
--- a/tools/build/v2/engine/boehm_gc/thread_local_alloc.c
+++ b/tools/build/src/engine/boehm_gc/thread_local_alloc.c
diff --git a/tools/build/v2/engine/boehm_gc/threadlibs.c b/tools/build/src/engine/boehm_gc/threadlibs.c
index f2ab58250f..f2ab58250f 100644
--- a/tools/build/v2/engine/boehm_gc/threadlibs.c
+++ b/tools/build/src/engine/boehm_gc/threadlibs.c
diff --git a/tools/build/v2/engine/boehm_gc/typd_mlc.c b/tools/build/src/engine/boehm_gc/typd_mlc.c
index c46616323d..c46616323d 100644
--- a/tools/build/v2/engine/boehm_gc/typd_mlc.c
+++ b/tools/build/src/engine/boehm_gc/typd_mlc.c
diff --git a/tools/build/v2/engine/boehm_gc/version.h b/tools/build/src/engine/boehm_gc/version.h
index 112dec30ea..112dec30ea 100644
--- a/tools/build/v2/engine/boehm_gc/version.h
+++ b/tools/build/src/engine/boehm_gc/version.h
diff --git a/tools/build/v2/engine/boehm_gc/win32_threads.c b/tools/build/src/engine/boehm_gc/win32_threads.c
index ac57971900..ac57971900 100644
--- a/tools/build/v2/engine/boehm_gc/win32_threads.c
+++ b/tools/build/src/engine/boehm_gc/win32_threads.c
diff --git a/tools/build/v2/engine/boost-jam.spec b/tools/build/src/engine/boost-jam.spec
index bc572fc96e..bc572fc96e 100644
--- a/tools/build/v2/engine/boost-jam.spec
+++ b/tools/build/src/engine/boost-jam.spec
diff --git a/tools/build/v2/engine/boost-no-inspect b/tools/build/src/engine/boost-no-inspect
index 8a06f3a707..8a06f3a707 100644
--- a/tools/build/v2/engine/boost-no-inspect
+++ b/tools/build/src/engine/boost-no-inspect
diff --git a/tools/build/src/engine/build.bat b/tools/build/src/engine/build.bat
new file mode 100644
index 0000000000..c96e508e75
--- /dev/null
+++ b/tools/build/src/engine/build.bat
@@ -0,0 +1,615 @@
+@ECHO OFF
+
+REM ~ Copyright 2002-2007 Rene Rivera.
+REM ~ Distributed under the Boost Software License, Version 1.0.
+REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+setlocal
+goto Start
+
+
+:Set_Error
+color 00
+goto :eof
+
+
+:Clear_Error
+ver >nul
+goto :eof
+
+
+:Error_Print
+REM Output an error message and set the errorlevel to indicate failure.
+setlocal
+ECHO ###
+ECHO ### %1
+ECHO ###
+ECHO ### You can specify the toolset as the argument, i.e.:
+ECHO ### .\build.bat msvc
+ECHO ###
+ECHO ### Toolsets supported by this script are: borland, como, gcc, gcc-nocygwin,
+ECHO ### intel-win32, metrowerks, mingw, msvc, vc7, vc8, vc9, vc10, vc11, vc12, vc14
+ECHO ###
+call :Set_Error
+endlocal
+goto :eof
+
+
+:Test_Path
+REM Tests for the presence of the given executable file in the directories listed
+REM in the PATH environment variable. Additionally sets FOUND_PATH to the path of
+REM the found file.
+call :Clear_Error
+setlocal
+set test=%~$PATH:1
+endlocal
+if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1
+goto :eof
+
+
+:Test_Option
+REM Tests whether the given string is in the form of an option: "--*"
+call :Clear_Error
+setlocal
+set test=%1
+if not defined test (
+ call :Set_Error
+ goto Test_Option_End
+)
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if not "-" == "%test:~1,1%" call :Set_Error
+:Test_Option_End
+endlocal
+goto :eof
+
+
+:Test_Empty
+REM Tests whether the given string is not empty
+call :Clear_Error
+setlocal
+set test=%1
+if not defined test (
+ call :Clear_Error
+ goto Test_Empty_End
+)
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if not "" == "%test%" call :Set_Error
+:Test_Empty_End
+endlocal
+goto :eof
+
+
+:Call_If_Exists
+if EXIST %1 call %*
+goto :eof
+
+
+:Guess_Toolset
+REM Try and guess the toolset to bootstrap the build with...
+REM Sets BOOST_JAM_TOOLSET to the first found toolset.
+REM May also set BOOST_JAM_TOOLSET_ROOT to the
+REM location of the found toolset.
+
+call :Clear_Error
+call :Test_Empty %ProgramFiles%
+if not errorlevel 1 set ProgramFiles=C:\Program Files
+
+call :Clear_Error
+if NOT "_%VS140COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc14"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 14.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc14"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 14.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS120COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc12"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 12.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc12"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 12.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS110COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc11"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 11.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc11"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 11.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS100COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc10"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 10.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc10"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 10.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS90COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc9"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 9.0\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc9"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 9.0\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS80COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc8"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio 8\VC\VCVARSALL.BAT" (
+ set "BOOST_JAM_TOOLSET=vc8"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 8\VC\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VS71COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%\..\..\VC7\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%VCINSTALLDIR%_" == "__" (
+ REM %VCINSTALLDIR% is also set for VC9 (and probably VC8)
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%VCINSTALLDIR%\VC7\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET\VC7\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=vc7"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET\VC7\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%MSVCDir%_" == "__" (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual Studio\VC98\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio\VC98\"
+ goto :eof)
+call :Clear_Error
+if EXIST "%ProgramFiles%\Microsoft Visual C++\VC98\bin\VCVARS32.BAT" (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual C++\VC98\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path cl.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=msvc"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path vcvars32.bat
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=msvc"
+ call "%FOUND_PATH%VCVARS32.BAT"
+ set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
+ goto :eof)
+call :Clear_Error
+if EXIST "C:\Borland\BCC55\Bin\bcc32.exe" (
+ set "BOOST_JAM_TOOLSET=borland"
+ set "BOOST_JAM_TOOLSET_ROOT=C:\Borland\BCC55\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path bcc32.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=borland"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ goto :eof)
+call :Clear_Error
+call :Test_Path icl.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=intel-win32"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ goto :eof)
+call :Clear_Error
+if EXIST "C:\MinGW\bin\gcc.exe" (
+ set "BOOST_JAM_TOOLSET=mingw"
+ set "BOOST_JAM_TOOLSET_ROOT=C:\MinGW\"
+ goto :eof)
+call :Clear_Error
+if NOT "_%CWFolder%_" == "__" (
+ set "BOOST_JAM_TOOLSET=metrowerks"
+ set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
+ goto :eof )
+call :Clear_Error
+call :Test_Path mwcc.exe
+if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET=metrowerks"
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\..\"
+ goto :eof)
+call :Clear_Error
+call :Error_Print "Could not find a suitable toolset."
+goto :eof
+
+
+:Guess_Yacc
+REM Tries to find bison or yacc in common places so we can build the grammar.
+call :Clear_Error
+call :Test_Path yacc.exe
+if not errorlevel 1 (
+ set "YACC=yacc -d"
+ goto :eof)
+call :Clear_Error
+call :Test_Path bison.exe
+if not errorlevel 1 (
+ set "YACC=bison -d --yacc"
+ goto :eof)
+call :Clear_Error
+if EXIST "C:\Program Files\GnuWin32\bin\bison.exe" (
+ set "YACC=C:\Program Files\GnuWin32\bin\bison.exe" -d --yacc
+ goto :eof)
+call :Clear_Error
+call :Error_Print "Could not find Yacc to build the Jam grammar."
+goto :eof
+
+
+:Start
+set BOOST_JAM_TOOLSET=
+set BOOST_JAM_ARGS=
+
+REM If no arguments are given, guess the toolset;
+REM if the first argument is an option, also guess the toolset;
+REM otherwise the first argument is the toolset to use.
+call :Clear_Error
+call :Test_Empty %1
+if not errorlevel 1 (
+ call :Guess_Toolset
+ if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
+)
+
+call :Clear_Error
+call :Test_Option %1
+if not errorlevel 1 (
+ call :Guess_Toolset
+ if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
+)
+
+call :Clear_Error
+set BOOST_JAM_TOOLSET=%1
+shift
+goto Setup_Toolset
+
+
+:Setup_Toolset
+REM Set up the toolset command and options. This bit of code
+REM needs to be flexible enough to handle both the case where
+REM the toolset was guessed at and found, and the case where the toolset
+REM was given explicitly in the command arguments.
+REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way
+REM because in BAT variables are substituted only once during a single
+REM command. A complete "if ... else ..."
+REM is a single command, even though it spans multiple lines here.
+:Setup_Args
+call :Clear_Error
+call :Test_Empty %1
+if not errorlevel 1 goto Config_Toolset
+call :Clear_Error
+call :Test_Option %1
+if errorlevel 1 (
+ set BOOST_JAM_ARGS=%BOOST_JAM_ARGS% %1
+ shift
+ goto Setup_Args
+)
+:Config_Toolset
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_metrowerks_" goto Skip_METROWERKS
+if NOT "_%CWFolder%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
+ )
+set "PATH=%BOOST_JAM_TOOLSET_ROOT%Other Metrowerks Tools\Command Line Tools;%PATH%"
+set "BOOST_JAM_CC=mwcc -runtime ss -cwd include -DNT -lkernel32.lib -ladvapi32.lib -luser32.lib"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_METROWERKS
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_msvc_" goto Skip_MSVC
+if NOT "_%MSVCDir%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
+ )
+call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
+if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ )
+set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_MSVC
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc7_" goto Skip_VC7
+if NOT "_%VS71COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%..\..\VC7\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC7
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc8_" goto Skip_VC8
+if NOT "_%VS80COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC8
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc9_" goto Skip_VC9
+if NOT "_%VS90COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC9
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc10_" goto Skip_VC10
+if NOT "_%VS100COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC10
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc11_" goto Skip_VC11
+if NOT "_%VS110COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC11
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc12_" goto Skip_VC12
+if NOT "_%VS120COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS120COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC12
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc14_" goto Skip_VC14
+if NOT "_%VS140COMNTOOLS%_" == "__" (
+ set "BOOST_JAM_TOOLSET_ROOT=%VS140COMNTOOLS%..\..\VC\"
+ )
+if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
+if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if "_%VCINSTALLDIR%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ ) )
+set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_VC14
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_borland_" goto Skip_BORLAND
+if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ call :Test_Path bcc32.exe )
+if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ if not errorlevel 1 (
+ set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
+ ) )
+if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%Bin;%PATH%"
+ )
+set "BOOST_JAM_CC=bcc32 -WC -w- -q -I%BOOST_JAM_TOOLSET_ROOT%Include -L%BOOST_JAM_TOOLSET_ROOT%Lib /DNT -nbootstrap"
+set "BOOST_JAM_OPT_JAM=-ejam0"
+set "BOOST_JAM_OPT_MKJAMBASE=-emkjambasejam0"
+set "BOOST_JAM_OPT_YYACC=-eyyacc0"
+set "_known_=1"
+:Skip_BORLAND
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_como_" goto Skip_COMO
+set "BOOST_JAM_CC=como -DNT"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_COMO
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc_" goto Skip_GCC
+set "BOOST_JAM_CC=gcc -DNT"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_GCC
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc-nocygwin_" goto Skip_GCC_NOCYGWIN
+set "BOOST_JAM_CC=gcc -DNT -mno-cygwin"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_GCC_NOCYGWIN
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_intel-win32_" goto Skip_INTEL_WIN32
+set "BOOST_JAM_CC=icl -DNT /nologo kernel32.lib advapi32.lib user32.lib"
+set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
+set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
+set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
+set "_known_=1"
+:Skip_INTEL_WIN32
+if NOT "_%BOOST_JAM_TOOLSET%_" == "_mingw_" goto Skip_MINGW
+if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
+ set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
+ )
+set "BOOST_JAM_CC=gcc -DNT"
+set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
+set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
+set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
+set "_known_=1"
+:Skip_MINGW
+call :Clear_Error
+if "_%_known_%_" == "__" (
+ call :Error_Print "Unknown toolset: %BOOST_JAM_TOOLSET%"
+)
+if errorlevel 1 goto Finish
+
+echo ###
+echo ### Using '%BOOST_JAM_TOOLSET%' toolset.
+echo ###
+
+set YYACC_SOURCES=yyacc.c
+set MKJAMBASE_SOURCES=mkjambase.c
+set BJAM_SOURCES=
+set BJAM_SOURCES=%BJAM_SOURCES% command.c compile.c constants.c debug.c
+set BJAM_SOURCES=%BJAM_SOURCES% execcmd.c execnt.c filent.c frames.c function.c
+set BJAM_SOURCES=%BJAM_SOURCES% glob.c hash.c hdrmacro.c headers.c jam.c
+set BJAM_SOURCES=%BJAM_SOURCES% jambase.c jamgram.c lists.c make.c make1.c
+set BJAM_SOURCES=%BJAM_SOURCES% object.c option.c output.c parse.c pathnt.c
+set BJAM_SOURCES=%BJAM_SOURCES% pathsys.c regexp.c rules.c scan.c search.c
+set BJAM_SOURCES=%BJAM_SOURCES% subst.c timestamp.c variable.c modules.c
+set BJAM_SOURCES=%BJAM_SOURCES% strings.c filesys.c builtins.c md5.c class.c
+set BJAM_SOURCES=%BJAM_SOURCES% cwd.c w32_getreg.c native.c modules/set.c
+set BJAM_SOURCES=%BJAM_SOURCES% modules/path.c modules/regex.c
+set BJAM_SOURCES=%BJAM_SOURCES% modules/property-set.c modules/sequence.c
+set BJAM_SOURCES=%BJAM_SOURCES% modules/order.c
+
+set BJAM_UPDATE=
+:Check_Update
+call :Test_Empty %1
+if not errorlevel 1 goto Check_Update_End
+call :Clear_Error
+setlocal
+set test=%1
+set test=###%test%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+if "%test%" == "--update" goto Found_Update
+endlocal & set BOOST_JAM_TOOLSET=%BOOST_JAM_TOOLSET%
+shift
+if not "_%BJAM_UPDATE%_" == "_update_" goto Check_Update
+:Found_Update
+endlocal & set BOOST_JAM_TOOLSET=%BOOST_JAM_TOOLSET%
+set BJAM_UPDATE=update
+:Check_Update_End
+if "_%BJAM_UPDATE%_" == "_update_" (
+ if not exist ".\bootstrap\jam0.exe" (
+ set BJAM_UPDATE=
+ )
+)
+
+@echo ON
+@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Bootstrap
+if exist bootstrap rd /S /Q bootstrap
+md bootstrap
+@if not exist jamgram.y goto Bootstrap_GrammarPrep
+@if not exist jamgramtab.h goto Bootstrap_GrammarPrep
+@goto Skip_GrammarPrep
+:Bootstrap_GrammarPrep
+%BOOST_JAM_CC% %BOOST_JAM_OPT_YYACC% %YYACC_SOURCES%
+@if not exist ".\bootstrap\yyacc0.exe" goto Skip_GrammarPrep
+.\bootstrap\yyacc0 jamgram.y jamgramtab.h jamgram.yy
+:Skip_GrammarPrep
+@if not exist jamgram.c goto Bootstrap_GrammarBuild
+@if not exist jamgram.h goto Bootstrap_GrammarBuild
+@goto Skip_GrammarBuild
+:Bootstrap_GrammarBuild
+@echo OFF
+if "_%YACC%_" == "__" (
+ call :Guess_Yacc
+)
+if errorlevel 1 goto Finish
+@echo ON
+%YACC% jamgram.y
+@if errorlevel 1 goto Finish
+del /f jamgram.c
+rename y.tab.c jamgram.c
+del /f jamgram.h
+rename y.tab.h jamgram.h
+:Skip_GrammarBuild
+@echo ON
+@if exist jambase.c goto Skip_Jambase
+%BOOST_JAM_CC% %BOOST_JAM_OPT_MKJAMBASE% %MKJAMBASE_SOURCES%
+@if not exist ".\bootstrap\mkjambase0.exe" goto Skip_Jambase
+.\bootstrap\mkjambase0 jambase.c Jambase
+:Skip_Jambase
+%BOOST_JAM_CC% %BOOST_JAM_OPT_JAM% %BJAM_SOURCES%
+:Skip_Bootstrap
+@if not exist ".\bootstrap\jam0.exe" goto Skip_Jam
+@set args=%*
+@echo OFF
+:Set_Args
+setlocal
+call :Test_Empty %args%
+if not errorlevel 1 goto Set_Args_End
+set test=###%args:~0,2%###
+set test=%test:"###=%
+set test=%test:###"=%
+set test=%test:###=%
+set test=%test:~0,1%
+if "-" == "%test%" goto Set_Args_End
+endlocal
+set args=%args:~1%
+goto Set_Args
+:Set_Args_End
+@echo ON
+@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Clean
+.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " %args% clean
+:Skip_Clean
+.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " %args%
+:Skip_Jam
+
+:Finish
diff --git a/tools/build/src/engine/build.jam b/tools/build/src/engine/build.jam
new file mode 100644
index 0000000000..0263d4637b
--- /dev/null
+++ b/tools/build/src/engine/build.jam
@@ -0,0 +1,1030 @@
+#~ Copyright 2002-2007 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or copy at
+#~ http://www.boost.org/LICENSE_1_0.txt)
+
+# Clean env vars of any "extra" empty values.
+for local v in ARGV CC CFLAGS LIBS
+{
+ local values ;
+ for local x in $($(v))
+ {
+ if $(x) != ""
+ {
+ values += $(x) ;
+ }
+ }
+ $(v) = $(values) ;
+}
+
+# Platform related specifics.
+if $(OS) = NT { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; }
+else { rule .path { return "$(<:J=/)" ; } }
+
+. = "." ;
+./ ?= "" ;
+
+# Info about what we are building.
+_VERSION_ = 3 1 19 ;
+NAME = boost-jam ;
+VERSION = $(_VERSION_:J=$(.)) ;
+RELEASE = 1 ;
+LICENSE = LICENSE_1_0 ;
+
+# Generate development debug binaries?
+if --debug in $(ARGV)
+{
+ debug = true ;
+}
+
+if --profile in $(ARGV)
+{
+ profile = true ;
+}
+
+# Attempt to generate and/or build the grammar?
+if --grammar in $(ARGV)
+{
+ grammar = true ;
+}
+
+# Do we need to add a default build type argument?
+if ! ( --release in $(ARGV) ) &&
+ ! ( --debug in $(ARGV) ) &&
+ ! ( --profile in $(ARGV) )
+{
+ ARGV += --release ;
+}
+
+# Enable, and configure, Python hooks.
+with-python = ;
+python-location = [ MATCH --with-python=(.*) : $(ARGV) ] ;
+if $(python-location)
+{
+ with-python = true ;
+}
+if $(with-python)
+{
+ if $(OS) = NT
+ {
+ --python-include = [ .path $(python-location) include ] ;
+ --python-lib = ;
+ for local v in 27 26 25 24 23 22
+ {
+ --python-lib ?=
+ [ GLOB [ .path $(python-location) libs ] : "python$(v).lib" ]
+ [ GLOB $(python-location) [ .path $(python-location) libs ]
+ $(Path) $(PATH) $(path) : "python$(v).dll" ]
+ ;
+ if ! $(--python-lib[2])
+ {
+ --python-lib = ;
+ }
+ }
+ --python-lib = $(--python-lib[1]) ;
+ }
+ else if $(OS) = MACOSX
+ {
+ --python-include = [ .path $(python-location) Headers ] ;
+ --python-lib = $(python-location) Python ;
+ }
+ else
+ {
+ --python-include = ;
+ --python-lib = ;
+ for local v in 2.7 2.6 2.5 2.4 2.3 2.2
+ {
+ local inc = [ GLOB [ .path $(python-location) include ] : python$(v) ] ;
+ local lib = [ GLOB [ .path $(python-location) lib ] : libpython$(v)* ] ;
+ if $(inc) && $(lib)
+ {
+ --python-include ?= $(inc) ;
+ --python-lib ?= $(lib[1]:D) python$(v) ;
+ }
+ }
+ }
+}
+
+# Boehm GC?
+if --gc in $(ARGV)
+{
+ --boehm-gc = true ;
+}
+if $(--boehm-gc)
+{
+ --extra-include += [ .path [ PWD ] "boehm_gc" "include" ] ;
+}
+
+# Duma?
+if --duma in $(ARGV)
+{
+ --duma = true ;
+}
+if $(--duma)
+{
+ --extra-include += [ .path [ PWD ] "duma" ] ;
+}
+
+# An explicit root for the toolset? (trim spaces)
+toolset-root = [ MATCH --toolset-root=(.*) : $(ARGV) ] ;
+{
+ local t = [ MATCH "[ ]*(.*)" : $(toolset-root:J=" ") ] ;
+ toolset-root = ;
+ while $(t)
+ {
+ t = [ MATCH "([^ ]+)([ ]*)(.*)" : $(t) ] ;
+ toolset-root += $(t[1]) ;
+ if $(t[3]) { toolset-root += $(t[2]) ; }
+ t = $(t[3]) ;
+ }
+ toolset-root = $(toolset-root:J="") ;
+}
+
+# Configure the implemented toolsets. These are minimal commands and options to
+# compile the full Jam. When adding new toolsets, make sure to add them to the
+# "known" list also.
+
+rule toolset ( name command .type ? : opt.out + : opt.define * : flags * : linklibs * )
+{
+ .type ?= "" ;
+ tool.$(name)$(.type).cc ?= $(command) ;
+ tool.$(name)$(.type).opt.out ?= $(opt.out) ;
+ tool.$(name)$(.type).opt.define ?= $(opt.define) ;
+ tool.$(name)$(.type).flags ?= $(flags) ;
+ tool.$(name)$(.type).linklibs ?= $(linklibs) ;
+ if ! $(name) in $(toolsets) { toolsets += $(name) ; }
+}
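+# Each invocation records the command and options in tool.<name><.type>.*
+# variables (cc, opt.out, opt.define, flags, linklibs) and adds the name to
+# the list of known toolsets. The optional .type suffix is used where a
+# toolset needs a distinct link command (see the metrowerks entries below).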
+
+rule if-os ( os + : yes-opt * : no-opt * )
+ { if $(os) in $(OS) { return $(yes-opt) ; } else { return $(no-opt) ; } }
+
+rule opt ( type : yes-opt * : no-opt * )
+ { if $(type) in $(ARGV) { return $(yes-opt) ; } else { return $(no-opt) ; } }
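+# For example, [ opt --release : -O3 ] expands to -O3 only when --release is
+# present in ARGV (a default of --release is appended above when no build
+# type is given).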
+
+## HP-UX aCC compiler
+toolset acc cc : "-o " : -D
+ : -Ae
+ [ opt --release : -s -O3 ]
+ [ opt --debug : -g -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Borland C++ 5.5.x
+toolset borland bcc32 : -e -n : /D
+ : -WC -w- -q "-I$(toolset-root)Include" "-L$(toolset-root)Lib"
+ [ opt --release : -O2 -vi -w-inl ]
+ [ opt --debug : -v -Od -vi- ]
+ -I$(--python-include) -I$(--extra-include)
+ : $(--python-lib[1]) ;
+## Generic Unix cc
+if ! $(CC) { CC = cc ; }
+toolset cc $(CC) : "-o " : -D
+ : $(CFLAGS)
+ [ opt --release : -s -O ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : $(LIBS) -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Comeau C/C++ 4.x
+toolset como como : "-o " : -D
+ : --c
+ [ opt --release : --inlining ]
+ [ opt --debug : --no_inlining ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Clang Linux 2.8+
+toolset clang clang : "-o " : -D
+ : -Wno-unused -Wno-format
+ [ opt --release : -Os ]
+ [ opt --debug : -g -O0 -fno-inline ]
+ [ opt --profile : -finline-functions -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## MacOSX Darwin, using GCC 2.9.x, 3.x
+toolset darwin cc : "-o " : -D
+ :
+ [ opt --release : -Wl,-x -O3 -finline-functions ]
+ [ opt --debug : -g -O0 -fno-inline -pg ]
+ [ opt --profile : -Wl,-x -O3 -finline-functions -g -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## GCC 2.x, 3.x, 4.x
+toolset gcc gcc : "-o " : -D
+ : -pedantic -fno-strict-aliasing
+ [ opt --release : [ opt --symbols : -g : -s ] -O3 ]
+ [ opt --debug : -g -O0 -fno-inline ]
+ [ opt --profile : -O3 -g -pg ]
+ -I$(--python-include) -I$(--extra-include) -Wno-long-long
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## GCC 2.x, 3.x on CYGWIN but without cygwin1.dll
+toolset gcc-nocygwin gcc : "-o " : -D
+ : -s -O3 -mno-cygwin
+ [ opt --release : -finline-functions ]
+ [ opt --debug : -s -O3 -fno-inline -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Darwin
+toolset intel-darwin icc : "-o " : -D
+ :
+ [ opt --release : -O3 ]
+ [ opt --debug : -g -O0 -p ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Linux
+toolset intel-linux icc : "-o " : -D
+ :
+ [ opt --release : -Xlinker -s -O3 ]
+ [ opt --debug : -g -O0 -p ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Intel C/C++ for Win32
+toolset intel-win32 icl : /Fe : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /GB ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## KCC ?
+toolset kcc KCC : "-o " : -D
+ :
+ [ opt --release : -s +K2 ]
+ [ opt --debug : -g +K0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Borland Kylix
+toolset kylix bc++ : -o : -D
+ : -tC -q
+ [ opt --release : -O2 -vi -w-inl ]
+ [ opt --debug : -v -Od -vi- ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Metrowerks CodeWarrior 8.x
+{
+ # Even though CW can compile all files at once, it crashes if it tries in
+ # the bjam case.
+ local mwcc ; if $(OS) != NT { mwcc = mwc$(OSPLAT:L) ; }
+ mwcc ?= mwcc ;
+ toolset metrowerks $(mwcc) : "-o " : -D
+ : -c -lang c -subsystem console -cwd include
+ [ opt --release : -runtime ss -opt full -inline all ]
+ [ opt --debug : -runtime ssd -opt none -inline off ]
+ -I$(--python-include) -I$(--extra-include) ;
+ toolset metrowerks $(mwcc) .link : "-o " :
+ : -subsystem console -lkernel32.lib -ladvapi32.lib -luser32.lib
+ [ opt --release : -runtime ss ]
+ [ opt --debug : -runtime ssd ]
+ : $(--python-lib[1]) ;
+}
+## MINGW GCC
+toolset mingw gcc : "-o " : -D
+ :
+ [ opt --release : -s -O3 -finline-functions ]
+ [ opt --debug : -g -O0 -fno-inline -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : $(--python-lib[2]) ;
+## MIPS Pro
+toolset mipspro cc : "-o " : -D
+ :
+ [ opt --release : -s -O3 -g0 -INLINE:none ]
+ [ opt --debug : -g -O0 -INLINE ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Microsoft Visual Studio C++ 6.x
+toolset msvc cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
+ [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## QNX 6.x GCC 3.x/2.95.3
+toolset qcc qcc : "-o " : -D
+ : -Wc,-pedantic -Wc,-fno-strict-aliasing
+ [ opt --release : [ opt --symbols : -g ] -O3 -Wc,-finline-functions ]
+ [ opt --debug : -g -O0 -Wc,-fno-inline ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Qlogic Pathscale 2.4
+toolset pathscale pathcc : "-o " : -D
+ :
+ [ opt --release : -s -Ofast -O3 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Portland Group Pgi 6.2
+toolset pgi pgcc : "-o " : -D
+ :
+ [ opt --release : -s -O3 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Sun Workshop 6 C++
+toolset sun cc : "-o " : -D
+ :
+ [ opt --release : -s -xO3 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Sun Workshop 6 C++ (old alias)
+toolset sunpro cc : "-o " : -D
+ :
+ [ opt --release : -s -xO3 ]
+ [ opt --debug : -g ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## Compaq Alpha CXX
+toolset tru64cxx cc : "-o " : -D
+ :
+ [ opt --release : -s -O5 -inline speed ]
+ [ opt --debug : -g -O0 -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
+## IBM VisualAge C++
+toolset vacpp xlc : "-o " : -D
+ :
+ [ opt --release : -s -O3 -qstrict -qinline ]
+ [ opt --debug : -g -qNOOPTimize -qnoinline -pg ]
+ -I$(--python-include) -I$(--extra-include)
+ : -L$(--python-lib[1]) -l$(--python-lib[2]) [ if-os AIX : -bmaxdata:0x40000000 ] ;
+## Microsoft Visual C++ .NET 7.x
+toolset vc7 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
+ [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2005
+toolset vc8 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2008
+toolset vc9 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2010
+toolset vc10 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2012
+toolset vc11 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /GL /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+## Microsoft Visual C++ 2013
+toolset vc12 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /GL /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+toolset vc14 cl : /Fe /Fe /Fd /Fo : -D
+ : /nologo
+ [ opt --release : /GL /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
+ [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
+ -I$(--python-include) -I$(--extra-include)
+ : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
+
+# First set the build commands and options according to the
+# preset toolset.
+toolset = [ MATCH --toolset=(.*) : $(ARGV) ] ;
+if ! $(toolset)
+{
+ # For some reason, the following test does not catch empty toolset.
+ ECHO "###" ;
+ ECHO "###" No toolset specified. Please use --toolset option. ;
+ ECHO "###" ;
+ ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
+ EXIT "###" ;
+}
+if ! $(toolset) in $(toolsets)
+{
+ ECHO "###" ;
+ ECHO "###" Unknown toolset: $(toolset) ;
+ ECHO "###" ;
+ ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
+ EXIT "###" ;
+}
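+# Copy the selected toolset's settings into the generic --cc, --out/--bin,
+# --dir, --def, --flags and --libs variables used by the build actions below.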
+--cc = $(tool.$(toolset).cc) ;
+if $(tool.$(toolset).opt.out[2])
+{
+ if $(tool.$(toolset).opt.out[1]) = $(tool.$(toolset).opt.out[2])
+ {
+ --out = $(tool.$(toolset).opt.out[1]) ;
+ --dir = $(tool.$(toolset).opt.out[3-]) ;
+ }
+ else
+ {
+ --bin = $(tool.$(toolset).opt.out[1]) ;
+ --dir = $(tool.$(toolset).opt.out[2-]) ;
+ }
+}
+else
+{
+ --out = $(tool.$(toolset).opt.out) ;
+}
+--def = $(tool.$(toolset).opt.define) ;
+--flags = $(tool.$(toolset).flags) ;
+--defs = $(tool.$(toolset).defines) ;
+--libs = $(tool.$(toolset).linklibs) ;
+if $(tool.$(toolset).link.cc)
+{
+ --link = $(tool.$(toolset).link.cc) ;
+ if $(tool.$(toolset).link.opt.out[2])
+ {
+ if $(tool.$(toolset).link.opt.out[1]) = $(tool.$(toolset).link.opt.out[2])
+ {
+ --link-out = $(tool.$(toolset).link.opt.out[1]) ;
+ --link-dir = $(tool.$(toolset).link.opt.out[3-]) ;
+ }
+ else
+ {
+ --link-bin = $(tool.$(toolset).link.opt.out[1]) ;
+ --link-dir = $(tool.$(toolset).link.opt.out[2-]) ;
+ }
+ }
+ else
+ {
+ --link-out = $(tool.$(toolset).link.opt.out) ;
+ }
+ --link-def = $(tool.$(toolset).link.opt.define) ;
+ --link-flags = $(tool.$(toolset).link.flags) ;
+ --link-defs = $(tool.$(toolset).link.defines) ;
+ --link-libs = $(tool.$(toolset).link.linklibs) ;
+}
+
+# Put executables in platform-specific subdirectory.
+locate-target = $(LOCATE_TARGET) ;
+if $(OSPLAT)
+{
+ locate-target ?= bin$(.)$(OS:L)$(OSPLAT:L) ;
+ platform = $(OS:L)$(OSPLAT:L) ;
+}
+else
+{
+ locate-target ?= bin$(.)$(OS:L) ;
+ platform = $(OS:L) ;
+}
+if $(debug)
+{
+ locate-target = [ .path $(locate-target)$(.)debug ] ;
+}
+if $(profile)
+{
+ locate-target = [ .path $(locate-target)$(.)profile ] ;
+}
+else
+{
+ locate-target = [ .path $(locate-target) ] ;
+}
+
+if --show-locate-target in $(ARGV)
+{
+ ECHO $(locate-target) ;
+}
+
+# We have some different files for UNIX and NT.
+jam.source =
+ command.c compile.c constants.c debug.c execcmd.c frames.c function.c glob.c
+ hash.c hcache.c headers.c hdrmacro.c jam.c jambase.c jamgram.c lists.c
+ make.c make1.c mem.c object.c option.c output.c parse.c pathsys.c regexp.c
+ rules.c scan.c search.c subst.c w32_getreg.c timestamp.c variable.c
+ modules.c strings.c filesys.c builtins.c class.c cwd.c native.c md5.c
+ [ .path modules set.c ] [ .path modules path.c ] [ .path modules regex.c ]
+ [ .path modules property-set.c ] [ .path modules sequence.c ] [ .path modules order.c ] ;
+if $(OS) = NT
+{
+ jam.source += execnt.c filent.c pathnt.c ;
+}
+else
+{
+ jam.source += execunix.c fileunix.c pathunix.c ;
+}
+
+# Debug assertions, or not.
+if ! $(debug) || --noassert in $(ARGV)
+{
+ --defs += NDEBUG ;
+}
+
+# Enable some optional features.
+--defs += OPT_HEADER_CACHE_EXT ;
+--defs += OPT_GRAPH_DEBUG_EXT ;
+--defs += OPT_SEMAPHORE ;
+--defs += OPT_AT_FILES ;
+--defs += OPT_DEBUG_PROFILE ;
+
+# Bug fixes
+--defs += OPT_FIX_TARGET_VARIABLES_EXT ;
+#~ --defs += OPT_NO_EXTERNAL_VARIABLE_SPLIT ;
+
+# Improvements
+--defs += OPT_IMPROVED_PATIENCE_EXT ;
+
+# Use Boehm GC memory allocator?
+if $(--boehm-gc)
+{
+ --defs += OPT_BOEHM_GC ;
+ if $(debug)
+ {
+ --defs += GC_DEBUG ;
+ }
+}
+
+if $(--duma)
+{
+ --defs += OPT_DUMA ;
+}
+
+if ( $(OS) = NT ) && ! NT in $(--defs)
+{
+ --defs += NT ;
+}
+--defs += YYSTACKSIZE=5000 ;
+
+if $(with-python)
+{
+ --defs += HAVE_PYTHON ;
+}
+
+if $(debug)
+{
+ --defs += BJAM_NEWSTR_NO_ALLOCATE ;
+}
+
+
+# The basic symbolic targets...
+NOTFILE all clean dist ;
+ALWAYS clean ;
+
+# Utility rules and actions...
+rule .clean
+{
+ [DELETE] clean : $(<) ;
+}
+if $(OS) = NT { actions piecemeal together existing [DELETE] {
+ del /F /Q "$(>)"
+} }
+if $(UNIX) = true { actions piecemeal together existing [DELETE] {
+ rm -f "$(>)"
+} }
+if $(OS) = NT {
+ --chmod+w = "attrib -r " ;
+}
+if $(UNIX) = true {
+ --chmod+w = "chmod +w " ;
+}
+
+rule .mkdir
+{
+ NOUPDATE $(<) ;
+ if $(<:P) { DEPENDS $(<) : $(<:P) ; .mkdir $(<:P) ; }
+ if ! $(md<$(<)>) { [MKDIR] $(<) ; md<$(<)> = - ; }
+}
+if $(OS) = NT { actions [MKDIR] {
+ md "$(<)"
+} }
+if $(UNIX) = true { actions [MKDIR] {
+ mkdir "$(<)"
+} }
+
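+# Compile the given sources into an executable placed in $(locate-target).
+# When the toolset defines a separate link command (--link), each source is
+# compiled to an object file and then linked; otherwise a single
+# compile-and-link invocation is used.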
+rule .exe
+{
+ local exe = $(<) ;
+ if $(OS) = NT || ( $(UNIX) = true && $(OS) = CYGWIN ) { exe = $(exe:S=.exe) ; }
+ LOCATE on $(exe) = $(locate-target) ;
+ DEPENDS all : $(exe) ;
+ .mkdir $(locate-target) ;
+ if $(--link)
+ {
+ local objs ;
+ for local s in $(>)
+ {
+ # Translate any subdir elements into a simple file name.
+ local o = [ MATCH "([^/]+)[/]?(.+)" : $(s) ] ;
+ o = $(o:J=_) ;
+ o = $(o:S=.o) ;
+ objs += $(o) ;
+ LOCATE on $(o) = $(locate-target) ;
+ DEPENDS $(exe) : $(o) ;
+ DEPENDS $(o) : $(s) ;
+ DEPENDS $(o) : $(locate-target) ;
+ [COMPILE] $(o) : $(s) ;
+ .clean $(o) ;
+ }
+ DEPENDS $(exe) : $(objs) ;
+ DEPENDS $(exe) : $(locate-target) ;
+ [COMPILE.LINK] $(exe) : $(objs) ;
+ .clean $(exe) ;
+ }
+ else
+ {
+ DEPENDS $(exe) : $(>) ;
+ DEPENDS $(exe) : $(locate-target) ;
+ [COMPILE] $(exe) : $(>) ;
+ .clean $(exe) ;
+ }
+ return $(exe) ;
+}
+if ! $(--def[2]) { actions [COMPILE] {
+ "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def)$(--defs)" "$(--flags)" "$(>)" "$(--libs)"
+} }
+else { actions [COMPILE] {
+ "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def[1])$(--defs:J=$(--def[2]))$(--def[3])" "$(--flags)" "$(>)" "$(--libs)"
+} }
+
+actions [COMPILE.LINK] {
+ "$(--link)" "$(--link-bin)$(<:D=)" "$(--link-dir)$(<:D)$(./)" "$(--link-out)$(<)" "$(--link-def)$(--link-defs)" "$(--link-flags)" "$(>)" "$(--link-libs)"
+}
+
+rule .link
+{
+ DEPENDS all : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ [LINK] $(<) : $(>) ;
+ .clean $(<) ;
+}
+if $(OS) = NT { actions [LINK] {
+ copy "$(>)" "$(<)"
+} }
+if $(UNIX) = true { actions [LINK] {
+ ln -fs "$(>)" "$(<)"
+} }
+
+rule .copy
+{
+ DEPENDS all : $(<) ;
+ DEPENDS $(<) : $(>) ;
+ [COPY] $(<) : $(>) ;
+ .clean $(<) ;
+}
+
+# Will be redefined later.
+actions [COPY]
+{
+}
+
+
+rule .move
+{
+ DEPENDS $(<) : $(>) ;
+ [MOVE] $(<) : $(>) ;
+}
+if $(OS) = NT { actions [MOVE] {
+ del /f "$(<)"
+ rename "$(>)" "$(<)"
+} }
+if $(UNIX) = true { actions [MOVE] {
+ mv -f "$(>)" "$(<)"
+} }
+
+# Generate the grammar tokens table, and the real yacc grammar.
+rule .yyacc
+{
+ local exe = [ .exe yyacc : yyacc.c ] ;
+ NOUPDATE $(exe) ;
+ DEPENDS $(<) : $(exe) $(>) ;
+ LEAVES $(<) ;
+ yyacc.exe on $(<) = $(exe:R=$(locate-target)) ;
+ [YYACC] $(<) : $(>) ;
+}
+actions [YYACC] {
+ $(--chmod+w)$(<[1])
+ $(--chmod+w)$(<[2])
+ "$(yyacc.exe)" "$(<)" "$(>)"
+}
+if $(grammar)
+{
+ .yyacc jamgram.y jamgramtab.h : jamgram.yy ;
+}
+else if $(debug)
+{
+ .exe yyacc : yyacc.c ;
+}
+
+# How to build the grammar.
+if $(OS) = NT
+{
+ SUFEXE = .exe ;
+ # try some other likely spellings...
+ PATH ?= $(Path) ;
+ PATH ?= $(path) ;
+}
+SUFEXE ?= "" ;
+
+yacc ?= [ GLOB $(PATH) : yacc$(SUFEXE) ] ;
+yacc ?= [ GLOB $(PATH) : bison$(SUFEXE) ] ;
+yacc ?= [ GLOB "$(ProgramFiles:J= )\\GnuWin32\\bin"
+ "C:\\Program Files\\GnuWin32\\bin" : bison$(SUFEXE) ] ;
+yacc = $(yacc[1]) ;
+switch $(yacc:D=:S=)
+{
+ case bison : yacc += -d --yacc ;
+ case yacc : yacc += -d ;
+}
+if $(debug) && $(yacc)
+{
+ yacc += -t -v ;
+}
+yacc += $(YACCFLAGS) ;
+
+rule .yacc
+{
+ DEPENDS $(<) : $(>) ;
+ LEAVES $(<) ;
+ [YACC] $(<) : $(>) ;
+}
+if $(OS) = NT { actions [YACC] {
+ "$(yacc)" "$(>)"
+ if not errorlevel 1 (
+ del /f "$(<[1])"
+ rename y.tab$(<[1]:S) "$(<[1])"
+ del /f $(<[2])
+ rename y.tab$(<[2]:S) "$(<[2])"
+ ) else set _error_ =
+} }
+if $(UNIX) = true { actions [YACC] {
+ if ` "$(yacc)" "$(>)" ` ; then
+ mv -f y.tab$(<[1]:S) "$(<[1])"
+ mv -f y.tab$(<[2]:S) "$(<[2])"
+ else
+ exit 1
+ fi
+} }
+if $(grammar) && ! $(yacc)
+{
+ EXIT Could not find the 'yacc' tool, and therefore cannot build the
+ grammar. ;
+}
+if $(grammar) && $(yacc)
+{
+ .yacc jamgram.c jamgram.h : jamgram.y ;
+}
+
+# How to build the compiled-in jambase.
+rule .mkjambase
+{
+ local exe = [ .exe mkjambase : mkjambase.c ] ;
+ DEPENDS $(<) : $(exe) $(>) ;
+ LEAVES $(<) ;
+ mkjambase.exe on $(<) = $(exe:R=$(locate-target)) ;
+ [MKJAMBASE] $(<) : $(>) ;
+}
+actions [MKJAMBASE] {
+ $(--chmod+w)$(<)
+ $(mkjambase.exe) "$(<)" "$(>)"
+}
+if $(debug)
+{
+ .mkjambase jambase.c : Jambase ;
+}
+
+# How to build Jam.
+rule .jam
+{
+ $(>).exe = [ .exe $(>) : $(jam.source) ] ;
+ DEPENDS all : $($(>).exe) ;
+
+ # Make a copy under the old name.
+ $(<).exe = $(<:S=$($(>).exe:S)) ;
+ LOCATE on $($(<).exe) = $(locate-target) ;
+ .copy $($(<).exe) : $($(>).exe) ;
+ DEPENDS all : $($(<).exe) ;
+}
+.jam bjam : b2 ;
+
+
+# Scan sources for header dependencies.
+#
+# In order to keep things simple, we made a slight compromise here - we only
+# detect changes in headers included relative to the current folder as opposed
+# to those included from somewhere on the include path.
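+# Note that only quoted includes, i.e. #include "name.h", are matched by the
+# HDRSCAN pattern below; angle-bracket includes are not scanned.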
+rule .scan ( targets + )
+{
+ HDRRULE on $(targets) = .hdr.scan ;
+ HDRSCAN on $(targets) = "^[ \t]*#[ \t]*include[ \t]*\"([^\"]*)\".*$" ;
+}
+rule .hdr.scan ( target : includes * : binding )
+{
+ local target-path = [ NORMALIZE_PATH $(binding:D) ] ;
+ # Extra grist provides target name uniqueness when referencing same name
+ # header files from different folders.
+ local include-targets = <$(target-path)>$(includes) ;
+ NOCARE $(include-targets) ;
+ INCLUDES $(target) : $(include-targets) ;
+ SEARCH on $(include-targets) = $(target-path) ;
+ ISFILE $(include-targets) ;
+ .scan $(include-targets) ;
+}
+.scan $(jam.source) ;
+
+
+# Distribution making from here on out. Assumes that the docs are already built
+# as HTML at ../doc/html. Otherwise they will not be included in the built
+# distribution archive.
+dist.license =
+ [ GLOB . : $(LICENSE).txt ]
+ ;
+dist.license = $(dist.license:D=)
+ [ GLOB [ .path .. .. .. ] : $(LICENSE).txt ]
+ [ GLOB [ .path .. boost ] : $(LICENSE).txt ] ;
+dist.docs =
+ [ GLOB . : *.png *.css *.html ]
+ ;
+dist.docs = $(dist.docs:D=)
+ [ GLOB [ .path images ] : *.png ]
+ [ GLOB [ .path jam ] : *.html ]
+ ;
+dist.source =
+ [ GLOB . : *.c *.h ]
+ ;
+dist.source = $(dist.source:D=)
+ $(dist.license[1])
+ $(dist.docs)
+ build.jam build.bat build.sh
+ Jambase
+ jamgram.y jamgram.yy
+ [ .path modules set.c ]
+ [ .path modules path.c ]
+ [ .path modules regex.c ]
+ [ .path modules property-set.c ]
+ [ .path modules sequence.c ]
+ [ .path modules order.c ]
+ [ GLOB [ .path boehm_gc ] : * ]
+ [ GLOB [ .path boehm_gc include ] : * ]
+ [ GLOB [ .path boehm_gc include private ] : * ]
+ [ GLOB [ .path boehm_gc cord ] : * ]
+ [ GLOB [ .path boehm_gc Mac_files ] : * ]
+ [ GLOB [ .path boehm_gc tests ] : * ]
+ [ GLOB [ .path boehm_gc doc ] : * ]
+ ;
+dist.bin =
+ bjam
+ ;
+dist.bin =
+ $(dist.license[1])
+ $(dist.bin:S=$(bjam.exe:S))
+ ;
+
+if $(OS) = NT
+{
+ zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7z.exe" ] ;
+ zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7zn.exe" ] ;
+ zip ?= [ GLOB $(PATH) : zip.exe ] ;
+ zip ?= zip ;
+ zip = $(zip[1]) ;
+ switch $(zip:D=:S=)
+ {
+ case 7z* : zip += a -r -tzip -mx=9 ;
+ case zip : zip += -9r ;
+ }
+ actions piecemeal [PACK] {
+ "$(zip)" "$(<)" "$(>)"
+ }
+ actions piecemeal [ZIP] {
+ "$(zip)" "$(<)" "$(>)"
+ }
+ actions piecemeal [COPY] {
+ copy /Y "$(>)" "$(<)" >NUL:
+ }
+}
+if $(UNIX) = true
+{
+ tar ?= [ GLOB $(PATH) : star bsdtar tar ] ;
+ tar = $(tar[1]) ;
+ switch $(tar:D=:S=)
+ {
+ case star : tar += -c artype=pax -D -d -to-stdout ;
+ case * : tar += -c -f - ;
+ }
+ actions [PACK] {
+ "$(tar)" "$(>)" | gzip -c9 > "$(<)"
+ }
+ #~ actions [PACK] {
+ #~ tar cf "$(<:S=.tar)" "$(>)"
+ #~ }
+ actions [ZIP] {
+ gzip -c9 "$(>)" > "$(<)"
+ }
+ actions [COPY] {
+ cp -Rpf "$(>)" "$(<)"
+ }
+}
+
+# The single binary, compressed.
+rule .binary
+{
+ local zip ;
+ if $(OS) = NT { zip = $($(<).exe:S=.zip) ; }
+ if $(UNIX) = true { zip = $($(<).exe:S=.tgz) ; }
+ zip = $(zip:S=)-$(VERSION)-$(RELEASE)-$(platform)$(zip:S) ;
+ DEPENDS $(zip) : $($(<).exe) ;
+ DEPENDS dist : $(zip) ;
+ #~ LOCATE on $(zip) = $(locate-target) ;
+ if $(OS) = NT { [ZIP] $(zip) : $($(<).exe) ; }
+ if $(UNIX) = true { [PACK] $(zip) : $($(<).exe) ; }
+ .clean $(zip) ;
+}
+
+# Package some file.
+rule .package ( dst-dir : src-files + )
+{
+ local dst-files ;
+ local src-files-actual ;
+ for local src-path in $(src-files)
+ {
+ if ! [ GLOB $(src-path:P) : $(src-path:B) ] || [ CHECK_IF_FILE $(src-path) ]
+ {
+ local src-subdir = $(src-path:D) ;
+ local src-file = $(src-path) ;
+ while $(src-subdir:D) { src-subdir = $(src-subdir:D) ; }
+ if $(src-subdir) = ".."
+ {
+ src-file = $(src-file:D=) ;
+ }
+ dst-files += $(src-file:R=$(dst-dir)) ;
+ src-files-actual += $(src-path) ;
+ }
+ }
+
+ local pack ;
+ if $(OS) = NT { pack = $(dst-dir).zip ; }
+ if $(UNIX) = true { pack = $(dst-dir).tgz ; }
+
+ DEPENDS dist : $(pack) ;
+ DEPENDS $(pack) : $(dst-files) ;
+
+ local dst-files-queue = $(dst-files) ;
+ for local src-path in $(src-files-actual)
+ {
+ local dst-file = $(dst-files-queue[1]) ;
+ dst-files-queue = $(dst-files-queue[2-]) ;
+ DEPENDS $(dst-file) : $(src-path) $(dst-file:D) ;
+ .mkdir $(dst-file:D) ;
+
+ [COPY] $(dst-file) : $(src-path) ;
+ .clean $(dst-file) ;
+ }
+
+ [PACK] $(pack) : $(dst-files) ;
+ .clean $(pack) ;
+}
+
+# RPM distro file.
+rpm-tool = [ GLOB $(PATH) : "rpmbuild" ] ;
+rpm-tool ?= [ GLOB $(PATH) : "rpm" ] ;
+rpm-tool = $(rpm-tool[1]) ;
+rule .rpm ( name : source )
+{
+ local rpm-arch ;
+ switch $(OSPLAT)
+ {
+ case X86 : rpm-arch ?= i386 ;
+ case PPC : rpm-arch ?= ppc ;
+ case AXP : rpm-arch ?= alpha ;
+ # no guarantee for these:
+ case IA64 : rpm-arch ?= ia64 ;
+ case ARM : rpm-arch ?= arm ;
+ case SPARC : rpm-arch ?= sparc ;
+ case * : rpm-arch ?= other ;
+ }
+ local target = $(name)-rpm ;
+ NOTFILE $(target) ;
+ DEPENDS dist : $(target) ;
+ DEPENDS $(target) : $(name).$(rpm-arch).rpm $(name).src.rpm ;
+ DEPENDS $(name).$(rpm-arch).rpm : $(source) ;
+ DEPENDS $(name).src.rpm : $(name).$(rpm-arch).rpm ;
+ docs on $(target) = $(dist.docs:J=" ") ;
+ arch on $(target) = $(rpm-arch) ;
+ if $(rpm-arch) = ppc { target-opt on $(target) = --target= ; }
+ else { target-opt on $(target) = "--target " ; }
+ [RPM] $(target) : $(source) ;
+ .clean $(name).$(rpm-arch).rpm $(name).src.rpm ;
+}
+actions [RPM] {
+ set -e
+ export BOOST_JAM_TOOLSET="$(toolset)"
+ $(rpm-tool) -ta $(target-opt)$(arch) $(>) | tee rpm.out
+ cp `grep -e '^Wrote:' rpm.out | sed 's/^Wrote: //'` .
+ rm -f rpm.out
+}
+
+# The distribution targets. Do not bother with them unless this is a
+# distribution build.
+if dist in $(ARGV)
+{
+ #~ .binary bjam ;
+ .package $(NAME)-$(VERSION) : $(dist.source) ;
+ .package $(NAME)-$(VERSION)-$(RELEASE)-$(platform) : $(dist.bin) ;
+ if $(rpm-tool)
+ {
+ #~ .rpm $(NAME)-$(VERSION)-$(RELEASE) : $(NAME)-$(VERSION).tgz ;
+ }
+}
diff --git a/tools/build/src/engine/build.sh b/tools/build/src/engine/build.sh
new file mode 100755
index 0000000000..470ea3c070
--- /dev/null
+++ b/tools/build/src/engine/build.sh
@@ -0,0 +1,303 @@
+#!/bin/sh
+
+#~ Copyright 2002-2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or copy at
+#~ http://www.boost.org/LICENSE_1_0.txt)
+
+# Reset the toolset.
+BOOST_JAM_TOOLSET=
+
+# Run a command, echoing it before doing so. Also checks the exit status and quits
+# if there was an error.
+echo_run ()
+{
+ echo "$@"
+ $@
+ r=$?
+ if test $r -ne 0 ; then
+ exit $r
+ fi
+}
+
+# Print an error message, and exit with a status of 1.
+error_exit ()
+{
+ echo "###"
+ echo "###" "$@"
+ echo "###"
+ echo "### You can specify the toolset as the argument, i.e.:"
+ echo "### ./build.sh gcc"
+ echo "###"
+ echo "### Toolsets supported by this script are:"
+ echo "### acc, como, darwin, gcc, intel-darwin, intel-linux, kcc, kylix,"
+ echo "### mipspro, mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp"
+ echo "###"
+ echo "### A special toolset; cc, is available which is used as a fallback"
+ echo "### when a more specific toolset is not found and the cc command is"
+ echo "### detected. The 'cc' toolset will use the CC, CFLAGS, and LIBS"
+ echo "### environment variables, if present."
+ echo "###"
+ exit 1
+}
+
+# Check that a command is in the PATH.
+test_path ()
+{
+ if `command -v command 1>/dev/null 2>/dev/null`; then
+ command -v $1 1>/dev/null 2>/dev/null
+ else
+ hash $1 1>/dev/null 2>/dev/null
+ fi
+}
+
+# Check that the OS name, as returned by "uname", is as given.
+test_uname ()
+{
+ if test_path uname; then
+ test `uname` = $*
+ fi
+}
+
+# Try and guess the toolset to bootstrap the build with...
+Guess_Toolset ()
+{
+ if test -r /mingw/bin/gcc ; then
+ BOOST_JAM_TOOLSET=mingw
+ BOOST_JAM_TOOLSET_ROOT=/mingw/
+ elif test_uname Darwin ; then BOOST_JAM_TOOLSET=darwin
+ elif test_uname IRIX ; then BOOST_JAM_TOOLSET=mipspro
+ elif test_uname IRIX64 ; then BOOST_JAM_TOOLSET=mipspro
+ elif test_uname OSF1 ; then BOOST_JAM_TOOLSET=tru64cxx
+ elif test_uname QNX && test_path qcc ; then BOOST_JAM_TOOLSET=qcc
+ elif test_path gcc ; then BOOST_JAM_TOOLSET=gcc
+ elif test_path icc ; then BOOST_JAM_TOOLSET=intel-linux
+ elif test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0
+ elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80
+ elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
+ elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
+ elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET=intel-linux
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
+ elif test_path pgcc ; then BOOST_JAM_TOOLSET=pgi
+ elif test_path pathcc ; then BOOST_JAM_TOOLSET=pathscale
+ elif test_path xlc ; then BOOST_JAM_TOOLSET=vacpp
+ elif test_path como ; then BOOST_JAM_TOOLSET=como
+ elif test_path KCC ; then BOOST_JAM_TOOLSET=kcc
+ elif test_path bc++ ; then BOOST_JAM_TOOLSET=kylix
+ elif test_path aCC ; then BOOST_JAM_TOOLSET=acc
+ elif test_uname HP-UX ; then BOOST_JAM_TOOLSET=acc
+ elif test -r /opt/SUNWspro/bin/cc ; then
+ BOOST_JAM_TOOLSET=sunpro
+ BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
+ # Test for "cc" as the default fallback.
+ elif test_path $CC ; then BOOST_JAM_TOOLSET=cc
+ elif test_path cc ; then
+ BOOST_JAM_TOOLSET=cc
+ CC=cc
+ fi
+ if test "$BOOST_JAM_TOOLSET" = "" ; then
+ error_exit "Could not find a suitable toolset."
+ fi
+}
+
+# The one option we support in the invocation
+# is the name of the toolset to force building
+# with.
+case "$1" in
+ --guess-toolset) Guess_Toolset ; echo "$BOOST_JAM_TOOLSET" ; exit 1 ;;
+ -*) Guess_Toolset ;;
+ ?*) BOOST_JAM_TOOLSET=$1 ; shift ;;
+ *) Guess_Toolset ;;
+esac
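+# With no argument, or an argument starting with "-", the toolset is guessed;
+# a bare name such as "./build.sh gcc" forces that toolset, and
+# "--guess-toolset" only prints the guessed name and exits.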
+BOOST_JAM_OPT_JAM="-o bootstrap/jam0"
+BOOST_JAM_OPT_MKJAMBASE="-o bootstrap/mkjambase0"
+BOOST_JAM_OPT_YYACC="-o bootstrap/yyacc0"
+case $BOOST_JAM_TOOLSET in
+ mingw)
+ if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/gcc ; then
+ export PATH=${BOOST_JAM_TOOLSET_ROOT}bin:$PATH
+ fi
+ BOOST_JAM_CC="gcc -DNT"
+ ;;
+
+ gcc)
+ BOOST_JAM_CC=gcc
+ ;;
+
+ darwin)
+ BOOST_JAM_CC=cc
+ ;;
+
+ intel-darwin)
+ BOOST_JAM_CC=icc
+ ;;
+
+ intel-linux)
+ if test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0/
+ elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80/
+ elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
+ elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
+ elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
+ fi
+ if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh ; then
+ # iccvars does not change LD_RUN_PATH. We adjust LD_RUN_PATH here in
+ # order not to have to rely on ld.so.conf knowing the icc library
+ # directory. We do this before running iccvars.sh in order to allow a
+ # user to add modifications to LD_RUN_PATH in iccvars.sh.
+ if test -z "${LD_RUN_PATH}"; then
+ LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib"
+ else
+ LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib:${LD_RUN_PATH}"
+ fi
+ export LD_RUN_PATH
+ . ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh
+ fi
+ BOOST_JAM_CC=icc
+ ;;
+
+ vacpp)
+ BOOST_JAM_CC=xlc
+ ;;
+
+ como)
+ BOOST_JAM_CC="como --c"
+ ;;
+
+ kcc)
+ BOOST_JAM_CC=KCC
+ ;;
+
+ kylix)
+ BOOST_JAM_CC=bc++
+ ;;
+
+ mipspro)
+ BOOST_JAM_CC=cc
+ ;;
+
+ pathscale)
+ BOOST_JAM_CC=pathcc
+ ;;
+
+ pgi)
+ BOOST_JAM_CC=pgcc
+ ;;
+
+ sun*)
+ if test -z "${BOOST_JAM_TOOLSET_ROOT}" -a -r /opt/SUNWspro/bin/cc ; then
+ BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
+ fi
+ if test -r "${BOOST_JAM_TOOLSET_ROOT}bin/cc" ; then
+ PATH=${BOOST_JAM_TOOLSET_ROOT}bin:${PATH}
+ export PATH
+ fi
+ BOOST_JAM_CC=cc
+ ;;
+
+ clang*)
+ BOOST_JAM_CC="clang -Wno-unused -Wno-format"
+ BOOST_JAM_TOOLSET=clang
+ ;;
+
+ tru64cxx)
+ BOOST_JAM_CC=cc
+ ;;
+
+ acc)
+ BOOST_JAM_CC="cc -Ae"
+ ;;
+
+ cc)
+ if test -z "$CC" ; then CC=cc ; fi
+ BOOST_JAM_CC=$CC
+ BOOST_JAM_OPT_JAM="$BOOST_JAM_OPT_JAM $CFLAGS $LIBS"
+ BOOST_JAM_OPT_MKJAMBASE="$BOOST_JAM_OPT_MKJAMBASE $CFLAGS $LIBS"
+ BOOST_JAM_OPT_YYACC="$BOOST_JAM_OPT_YYACC $CFLAGS $LIBS"
+ ;;
+
+ qcc)
+ BOOST_JAM_CC=qcc
+ ;;
+
+ *)
+ error_exit "Unknown toolset: $BOOST_JAM_TOOLSET"
+ ;;
+esac
+
+echo "###"
+echo "### Using '$BOOST_JAM_TOOLSET' toolset."
+echo "###"
+
+YYACC_SOURCES="yyacc.c"
+MKJAMBASE_SOURCES="mkjambase.c"
+BJAM_SOURCES="\
+ command.c compile.c constants.c debug.c execcmd.c frames.c function.c glob.c\
+ hash.c hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c\
+ object.c option.c output.c parse.c pathsys.c regexp.c rules.c\
+ scan.c search.c subst.c timestamp.c variable.c modules.c strings.c filesys.c\
+ builtins.c class.c cwd.c native.c md5.c w32_getreg.c modules/set.c\
+ modules/path.c modules/regex.c modules/property-set.c modules/sequence.c\
+ modules/order.c"
+case $BOOST_JAM_TOOLSET in
+ mingw)
+ BJAM_SOURCES="${BJAM_SOURCES} execnt.c filent.c pathnt.c"
+ ;;
+
+ *)
+ BJAM_SOURCES="${BJAM_SOURCES} execunix.c fileunix.c pathunix.c"
+ ;;
+esac
+
+BJAM_UPDATE=
+if test "$1" = "--update" -o "$2" = "--update" -o "$3" = "--update" -o "$4" = "--update" ; then
+ BJAM_UPDATE="update"
+fi
+if test "${BJAM_UPDATE}" = "update" -a ! -x "./bootstrap/jam0" ; then
+ BJAM_UPDATE=
+fi
+
+if test "${BJAM_UPDATE}" != "update" ; then
+ echo_run rm -rf bootstrap
+ echo_run mkdir bootstrap
+ if test ! -r jamgram.y -o ! -r jamgramtab.h ; then
+ echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_YYACC} ${YYACC_SOURCES}
+ if test -x "./bootstrap/yyacc0" ; then
+ echo_run ./bootstrap/yyacc0 jamgram.y jamgramtab.h jamgram.yy
+ fi
+ fi
+ if test ! -r jamgram.c -o ! -r jamgram.h ; then
+ if test_path yacc ; then YACC="yacc -d"
+ elif test_path bison ; then YACC="bison -y -d --yacc"
+ fi
+ echo_run $YACC jamgram.y
+ mv -f y.tab.c jamgram.c
+ mv -f y.tab.h jamgram.h
+ fi
+ if test ! -r jambase.c ; then
+ echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_MKJAMBASE} ${MKJAMBASE_SOURCES}
+ if test -x "./bootstrap/mkjambase0" ; then
+ echo_run ./bootstrap/mkjambase0 jambase.c Jambase
+ fi
+ fi
+ echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_JAM} ${BJAM_SOURCES}
+fi
+if test -x "./bootstrap/jam0" ; then
+ if test "${BJAM_UPDATE}" != "update" ; then
+ echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" "$@" clean
+ fi
+ echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" "$@"
+fi
diff --git a/tools/build/src/engine/builtins.c b/tools/build/src/engine/builtins.c
new file mode 100644
index 0000000000..daa73db9aa
--- /dev/null
+++ b/tools/build/src/engine/builtins.c
@@ -0,0 +1,2479 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "builtins.h"
+
+#include "compile.h"
+#include "constants.h"
+#include "cwd.h"
+#include "filesys.h"
+#include "frames.h"
+#include "hash.h"
+#include "hdrmacro.h"
+#include "lists.h"
+#include "make.h"
+#include "md5.h"
+#include "native.h"
+#include "object.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "rules.h"
+#include "strings.h"
+#include "subst.h"
+#include "timestamp.h"
+#include "variable.h"
+
+#include <ctype.h>
+
+#ifdef OS_NT
+#include <windows.h>
+#ifndef FSCTL_GET_REPARSE_POINT
+/* MinGW's version of windows.h is missing this, so we need
+ * to include winioctl.h directly
+ */
+#include <winioctl.h>
+#endif
+#endif
+
+#if defined(USE_EXECUNIX)
+# include <sys/types.h>
+# include <sys/wait.h>
+#else
+/*
+ * NT does not have wait() and associated macros and uses the system() return
+ * value instead. Status code groups are documented at:
+ * http://msdn.microsoft.com/en-gb/library/ff565436.aspx
+ */
+# define WIFEXITED(w) (((w) & 0XFFFFFF00) == 0)
+# define WEXITSTATUS(w)(w)
+#endif
+
+/*
+ * builtins.c - builtin jam rules
+ *
+ * External routines:
+ * load_builtins() - define builtin rules
+ * unknown_rule() - reports an unknown rule occurrence to the
+ * user and exits
+ *
+ * Internal routines:
+ * append_if_exists() - if file exists, append it to the list
+ * builtin_calc() - CALC rule
+ * builtin_delete_module() - DELETE_MODULE ( MODULE ? )
+ * builtin_depends() - DEPENDS/INCLUDES rule
+ * builtin_echo() - ECHO rule
+ * builtin_exit() - EXIT rule
+ * builtin_export() - EXPORT ( MODULE ? : RULES * )
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
+ * builtin_glob() - GLOB rule
+ * builtin_glob_recursive() - GLOB-RECURSIVELY rule
+ * builtin_hdrmacro() - HDRMACRO rule
+ * builtin_import() - IMPORT rule
+ * builtin_match() - MATCH rule, regexp matching
+ * builtin_rebuilds() - REBUILDS rule
+ * builtin_rulenames() - RULENAMES ( MODULE ? )
+ * builtin_split_by_characters() - splits the given string into tokens
+ * builtin_varnames() - VARNAMES ( MODULE ? )
+ * get_source_line() - get a frame's file and line number
+ * information
+ */
+
+
+/*
+ * compile_builtin() - define builtin rules
+ */
+
+#define P0 (PARSE *)0
+#define C0 (OBJECT *)0
+
+#if defined( OS_NT ) || defined( OS_CYGWIN )
+ LIST * builtin_system_registry ( FRAME *, int );
+ LIST * builtin_system_registry_names( FRAME *, int );
+#endif
+
+int glob( char const * s, char const * c );
+
+void backtrace ( FRAME * );
+void backtrace_line ( FRAME * );
+void print_source_line( FRAME * );
+
+
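+/*
+ * bind_builtin() - registers the C function f as a builtin rule named name_
+ * in the root module, with the given flags and formal argument list.
+ */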
+RULE * bind_builtin( char const * name_, LIST * (* f)( FRAME *, int flags ),
+ int flags, char const * * args )
+{
+ FUNCTION * func;
+ RULE * result;
+ OBJECT * name = object_new( name_ );
+
+ func = function_builtin( f, flags, args );
+
+ result = new_rule_body( root_module(), name, func, 1 );
+
+ function_free( func );
+
+ object_free( name );
+
+ return result;
+}
+
+
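+/*
+ * duplicate_rule() - imports an existing rule into the root module under an
+ * additional name, e.g. exposing both "DEPENDS" and "Depends".
+ */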
+RULE * duplicate_rule( char const * name_, RULE * other )
+{
+ OBJECT * name = object_new( name_ );
+ RULE * result = import_rule( other, root_module(), name );
+ object_free( name );
+ return result;
+}
+
+
+/*
+ * load_builtins() - define builtin rules
+ */
+
+void load_builtins()
+{
+ duplicate_rule( "Always",
+ bind_builtin( "ALWAYS",
+ builtin_flags, T_FLAG_TOUCHED, 0 ) );
+
+ duplicate_rule( "Depends",
+ bind_builtin( "DEPENDS",
+ builtin_depends, 0, 0 ) );
+
+ duplicate_rule( "echo",
+ duplicate_rule( "Echo",
+ bind_builtin( "ECHO",
+ builtin_echo, 0, 0 ) ) );
+
+ {
+ char const * args[] = { "message", "*", ":", "result-value", "?", 0 };
+ duplicate_rule( "exit",
+ duplicate_rule( "Exit",
+ bind_builtin( "EXIT",
+ builtin_exit, 0, args ) ) );
+ }
+
+ {
+ char const * args[] = { "directories", "*", ":", "patterns", "*", ":",
+ "case-insensitive", "?", 0 };
+ duplicate_rule( "Glob",
+ bind_builtin( "GLOB", builtin_glob, 0, args ) );
+ }
+
+ {
+ char const * args[] = { "patterns", "*", 0 };
+ bind_builtin( "GLOB-RECURSIVELY",
+ builtin_glob_recursive, 0, args );
+ }
+
+ duplicate_rule( "Includes",
+ bind_builtin( "INCLUDES",
+ builtin_depends, 1, 0 ) );
+
+ {
+ char const * args[] = { "targets", "*", ":", "targets-to-rebuild", "*",
+ 0 };
+ bind_builtin( "REBUILDS",
+ builtin_rebuilds, 0, args );
+ }
+
+ duplicate_rule( "Leaves",
+ bind_builtin( "LEAVES",
+ builtin_flags, T_FLAG_LEAVES, 0 ) );
+
+ duplicate_rule( "Match",
+ bind_builtin( "MATCH",
+ builtin_match, 0, 0 ) );
+
+ {
+ char const * args[] = { "string", ":", "delimiters", 0 };
+ bind_builtin( "SPLIT_BY_CHARACTERS",
+ builtin_split_by_characters, 0, args );
+ }
+
+ duplicate_rule( "NoCare",
+ bind_builtin( "NOCARE",
+ builtin_flags, T_FLAG_NOCARE, 0 ) );
+
+ duplicate_rule( "NOTIME",
+ duplicate_rule( "NotFile",
+ bind_builtin( "NOTFILE",
+ builtin_flags, T_FLAG_NOTFILE, 0 ) ) );
+
+ duplicate_rule( "NoUpdate",
+ bind_builtin( "NOUPDATE",
+ builtin_flags, T_FLAG_NOUPDATE, 0 ) );
+
+ duplicate_rule( "Temporary",
+ bind_builtin( "TEMPORARY",
+ builtin_flags, T_FLAG_TEMP, 0 ) );
+
+ bind_builtin( "ISFILE",
+ builtin_flags, T_FLAG_ISFILE, 0 );
+
+ duplicate_rule( "HdrMacro",
+ bind_builtin( "HDRMACRO",
+ builtin_hdrmacro, 0, 0 ) );
+
+ /* FAIL_EXPECTED is used to indicate that the result of a target build
+ * action should be inverted (ok <=> fail). This can be useful when
+ * performing test runs from Jamfiles.
+ */
+ bind_builtin( "FAIL_EXPECTED",
+ builtin_flags, T_FLAG_FAIL_EXPECTED, 0 );
+
+ bind_builtin( "RMOLD",
+ builtin_flags, T_FLAG_RMOLD, 0 );
+
+ {
+ char const * args[] = { "targets", "*", 0 };
+ bind_builtin( "UPDATE",
+ builtin_update, 0, args );
+ }
+
+ {
+ char const * args[] = { "targets", "*",
+ ":", "log", "?",
+ ":", "ignore-minus-n", "?",
+ ":", "ignore-minus-q", "?", 0 };
+ bind_builtin( "UPDATE_NOW",
+ builtin_update_now, 0, args );
+ }
+
+ {
+ char const * args[] = { "string", "pattern", "replacements", "+", 0 };
+ duplicate_rule( "subst",
+ bind_builtin( "SUBST",
+ builtin_subst, 0, args ) );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "RULENAMES",
+ builtin_rulenames, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "VARNAMES",
+ builtin_varnames, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "DELETE_MODULE",
+ builtin_delete_module, 0, args );
+ }
+
+ {
+ char const * args[] = { "source_module", "?",
+ ":", "source_rules", "*",
+ ":", "target_module", "?",
+ ":", "target_rules", "*",
+ ":", "localize", "?", 0 };
+ bind_builtin( "IMPORT",
+ builtin_import, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", ":", "rules", "*", 0 };
+ bind_builtin( "EXPORT",
+ builtin_export, 0, args );
+ }
+
+ {
+ char const * args[] = { "levels", "?", 0 };
+ bind_builtin( "CALLER_MODULE",
+ builtin_caller_module, 0, args );
+ }
+
+ {
+ char const * args[] = { "levels", "?", 0 };
+ bind_builtin( "BACKTRACE",
+ builtin_backtrace, 0, args );
+ }
+
+ {
+ char const * args[] = { 0 };
+ bind_builtin( "PWD",
+ builtin_pwd, 0, args );
+ }
+
+ {
+ char const * args[] = { "modules_to_import", "+",
+ ":", "target_module", "?", 0 };
+ bind_builtin( "IMPORT_MODULE",
+ builtin_import_module, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "?", 0 };
+ bind_builtin( "IMPORTED_MODULES",
+ builtin_imported_modules, 0, args );
+ }
+
+ {
+ char const * args[] = { "instance_module", ":", "class_module", 0 };
+ bind_builtin( "INSTANCE",
+ builtin_instance, 0, args );
+ }
+
+ {
+ char const * args[] = { "sequence", "*", 0 };
+ bind_builtin( "SORT",
+ builtin_sort, 0, args );
+ }
+
+ {
+ char const * args[] = { "path_parts", "*", 0 };
+ bind_builtin( "NORMALIZE_PATH",
+ builtin_normalize_path, 0, args );
+ }
+
+ {
+ char const * args[] = { "args", "*", 0 };
+ bind_builtin( "CALC",
+ builtin_calc, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", ":", "rule", 0 };
+ bind_builtin( "NATIVE_RULE",
+ builtin_native_rule, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", ":", "rule", ":", "version", 0 };
+ bind_builtin( "HAS_NATIVE_RULE",
+ builtin_has_native_rule, 0, args );
+ }
+
+ {
+ char const * args[] = { "module", "*", 0 };
+ bind_builtin( "USER_MODULE",
+ builtin_user_module, 0, args );
+ }
+
+ {
+ char const * args[] = { 0 };
+ bind_builtin( "NEAREST_USER_LOCATION",
+ builtin_nearest_user_location, 0, args );
+ }
+
+ {
+ char const * args[] = { "file", 0 };
+ bind_builtin( "CHECK_IF_FILE",
+ builtin_check_if_file, 0, args );
+ }
+
+#ifdef HAVE_PYTHON
+ {
+ char const * args[] = { "python-module",
+ ":", "function",
+ ":", "jam-module",
+ ":", "rule-name", 0 };
+ bind_builtin( "PYTHON_IMPORT_RULE",
+ builtin_python_import_rule, 0, args );
+ }
+#endif
+
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ {
+ char const * args[] = { "key_path", ":", "data", "?", 0 };
+ bind_builtin( "W32_GETREG",
+ builtin_system_registry, 0, args );
+ }
+
+ {
+ char const * args[] = { "key_path", ":", "result-type", 0 };
+ bind_builtin( "W32_GETREGNAMES",
+ builtin_system_registry_names, 0, args );
+ }
+# endif
+
+ {
+ char const * args[] = { "command", ":", "*", 0 };
+ duplicate_rule( "SHELL",
+ bind_builtin( "COMMAND",
+ builtin_shell, 0, args ) );
+ }
+
+ {
+ char const * args[] = { "string", 0 };
+ bind_builtin( "MD5",
+ builtin_md5, 0, args );
+ }
+
+ {
+ char const * args[] = { "name", ":", "mode", 0 };
+ bind_builtin( "FILE_OPEN",
+ builtin_file_open, 0, args );
+ }
+
+ {
+ char const * args[] = { "string", ":", "width", 0 };
+ bind_builtin( "PAD",
+ builtin_pad, 0, args );
+ }
+
+ {
+ char const * args[] = { "targets", "*", 0 };
+ bind_builtin( "PRECIOUS",
+ builtin_precious, 0, args );
+ }
+
+ {
+ char const * args [] = { 0 };
+ bind_builtin( "SELF_PATH", builtin_self_path, 0, args );
+ }
+
+ {
+ char const * args [] = { "path", 0 };
+ bind_builtin( "MAKEDIR", builtin_makedir, 0, args );
+ }
+
+ {
+ const char * args [] = { "path", 0 };
+ bind_builtin( "READLINK", builtin_readlink, 0, args );
+ }
+
+ /* Initialize builtin modules. */
+ init_set();
+ init_path();
+ init_regex();
+ init_property_set();
+ init_sequence();
+ init_order();
+}
+
+
+/*
+ * builtin_calc() - CALC rule
+ *
+ * Performs simple mathematical operations on two arguments.
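+ * For example, [ CALC 2 + 3 ] yields "5"; only "+" and "-" are supported.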
+ */
+
+LIST * builtin_calc( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+
+ LIST * result = L0;
+ long lhs_value;
+ long rhs_value;
+ long result_value;
+ char buffer[ 16 ];
+ char const * lhs;
+ char const * op;
+ char const * rhs;
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+
+ if ( iter == end ) return L0;
+ lhs = object_str( list_item( iter ) );
+
+ iter = list_next( iter );
+ if ( iter == end ) return L0;
+ op = object_str( list_item( iter ) );
+
+ iter = list_next( iter );
+ if ( iter == end ) return L0;
+ rhs = object_str( list_item( iter ) );
+
+ lhs_value = atoi( lhs );
+ rhs_value = atoi( rhs );
+
+ if ( !strcmp( "+", op ) )
+ result_value = lhs_value + rhs_value;
+ else if ( !strcmp( "-", op ) )
+ result_value = lhs_value - rhs_value;
+ else
+ return L0;
+
+ sprintf( buffer, "%ld", result_value );
+ result = list_push_back( result, object_new( buffer ) );
+ return result;
+}
+
+
+/*
+ * builtin_depends() - DEPENDS/INCLUDES rule
+ *
+ * The DEPENDS/INCLUDES builtin rule appends each of the listed sources on the
+ * dependency/includes list of each of the listed targets. It binds both the
+ * targets and sources as TARGETs.
+ */
+
+LIST * builtin_depends( FRAME * frame, int flags )
+{
+ LIST * const targets = lol_get( frame->args, 0 );
+ LIST * const sources = lol_get( frame->args, 1 );
+
+ LISTITER iter = list_begin( targets );
+ LISTITER end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+
+ if ( flags )
+ target_include_many( t, sources );
+ else
+ t->depends = targetlist( t->depends, sources );
+ }
+
+ /* Enter reverse links */
+ iter = list_begin( sources );
+ end = list_end( sources );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const s = bindtarget( list_item( iter ) );
+ if ( flags )
+ {
+ LISTITER t_iter = list_begin( targets );
+ LISTITER const t_end = list_end( targets );
+ for ( ; t_iter != t_end; t_iter = list_next( t_iter ) )
+ s->dependants = targetentry( s->dependants, bindtarget(
+ list_item( t_iter ) )->includes );
+ }
+ else
+ s->dependants = targetlist( s->dependants, targets );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_rebuilds() - REBUILDS rule
+ *
+ * Appends each of the rebuild-targets listed in its second argument to the
+ * rebuilds list for each of the targets listed in its first argument.
+ */
+
+LIST * builtin_rebuilds( FRAME * frame, int flags )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * rebuilds = lol_get( frame->args, 1 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+ t->rebuilds = targetlist( t->rebuilds, rebuilds );
+ }
+ return L0;
+}
+
+
+/*
+ * builtin_echo() - ECHO rule
+ *
+ * Echoes the targets to the user. No other actions are taken.
+ */
+
+LIST * builtin_echo( FRAME * frame, int flags )
+{
+ list_print( lol_get( frame->args, 0 ) );
+ printf( "\n" );
+ fflush( stdout );
+ return L0;
+}
+
+
+/*
+ * builtin_exit() - EXIT rule
+ *
+ * Echoes the targets to the user and exits the program with a failure status.
+ */
+
+LIST * builtin_exit( FRAME * frame, int flags )
+{
+ LIST * const code = lol_get( frame->args, 1 );
+ list_print( lol_get( frame->args, 0 ) );
+ printf( "\n" );
+ if ( !list_empty( code ) )
+ exit( atoi( object_str( list_front( code ) ) ) );
+ else
+ exit( EXITBAD ); /* yeech */
+ return L0;
+}
+
+
+/*
+ * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
+ *
+ * Marks the target with the appropriate flag, for use by make0(). It binds each
+ * target as a TARGET.
+ */
+
+LIST * builtin_flags( FRAME * frame, int flags )
+{
+ LIST * const targets = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ bindtarget( list_item( iter ) )->flags |= flags;
+ return L0;
+}
+
+
+/*
+ * builtin_glob() - GLOB rule
+ */
+
+struct globbing
+{
+ LIST * patterns;
+ LIST * results;
+ LIST * case_insensitive;
+};
+
+
+static void downcase_inplace( char * p )
+{
+ for ( ; *p; ++p )
+ *p = tolower( *p );
+}
+
+
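+/*
+ * builtin_glob_back() - file_dirscan() callback that appends the scanned file
+ * to the result list if its base name matches one of the glob patterns,
+ * ignoring the "." and ".." entries.
+ */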
+static void builtin_glob_back( void * closure, OBJECT * file, int status,
+ timestamp const * const time )
+{
+ PROFILE_ENTER( BUILTIN_GLOB_BACK );
+
+ struct globbing * const globbing = (struct globbing *)closure;
+ PATHNAME f;
+ string buf[ 1 ];
+ LISTITER iter;
+ LISTITER end;
+
+ /* Null out directory for matching. We wish we had file_dirscan() pass up a
+ * PATHNAME.
+ */
+ path_parse( object_str( file ), &f );
+ f.f_dir.len = 0;
+
+ /* For globbing, we unconditionally ignore current and parent directory
+ * items. Since these items always exist, there is no reason why a caller of
+ * GLOB would want to see them. We could also change file_dirscan(), but
+ * then paths with embedded "." and ".." would not work anywhere.
+ */
+ if ( !strcmp( f.f_base.ptr, "." ) || !strcmp( f.f_base.ptr, ".." ) )
+ {
+ PROFILE_EXIT( BUILTIN_GLOB_BACK );
+ return;
+ }
+
+ string_new( buf );
+ path_build( &f, buf );
+
+ if ( globbing->case_insensitive )
+ downcase_inplace( buf->value );
+
+ iter = list_begin( globbing->patterns );
+ end = list_end( globbing->patterns );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( !glob( object_str( list_item( iter ) ), buf->value ) )
+ {
+ globbing->results = list_push_back( globbing->results, object_copy(
+ file ) );
+ break;
+ }
+ }
+
+ string_free( buf );
+
+ PROFILE_EXIT( BUILTIN_GLOB_BACK );
+}
+
+
+static LIST * downcase_list( LIST * in )
+{
+ LIST * result = L0;
+ LISTITER iter = list_begin( in );
+ LISTITER const end = list_end( in );
+
+ string s[ 1 ];
+ string_new( s );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ string_append( s, object_str( list_item( iter ) ) );
+ downcase_inplace( s->value );
+ result = list_push_back( result, object_new( s->value ) );
+ string_truncate( s, 0 );
+ }
+
+ string_free( s );
+ return result;
+}
+
+
+LIST * builtin_glob( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LIST * const r = lol_get( frame->args, 1 );
+
+ LISTITER iter;
+ LISTITER end;
+ struct globbing globbing;
+
+ globbing.results = L0;
+ globbing.patterns = r;
+
+ globbing.case_insensitive =
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ l; /* Always case-insensitive if any files can be found. */
+# else
+ lol_get( frame->args, 2 );
+# endif
+
+ if ( globbing.case_insensitive )
+ globbing.patterns = downcase_list( r );
+
+ iter = list_begin( l );
+ end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ file_dirscan( list_item( iter ), builtin_glob_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ list_free( globbing.patterns );
+
+ return globbing.results;
+}
+
+
+static int has_wildcards( char const * const str )
+{
+ return str[ strcspn( str, "[]*?" ) ] ? 1 : 0;
+}
+
+
+/*
+ * append_if_exists() - if file exists, append it to the list
+ */
+
+static LIST * append_if_exists( LIST * list, OBJECT * file )
+{
+ return file_query( file )
+ ? list_push_back( list, object_copy( file ) )
+ : list ;
+}
+
+
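+/*
+ * glob1() - globs a single pattern against the contents of one directory.
+ */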
+LIST * glob1( OBJECT * dirname, OBJECT * pattern )
+{
+ LIST * const plist = list_new( object_copy( pattern ) );
+ struct globbing globbing;
+
+ globbing.results = L0;
+ globbing.patterns = plist;
+
+ globbing.case_insensitive
+# if defined( OS_NT ) || defined( OS_CYGWIN )
+ = plist; /* always case-insensitive if any files can be found */
+# else
+ = L0;
+# endif
+
+ if ( globbing.case_insensitive )
+ globbing.patterns = downcase_list( plist );
+
+ file_dirscan( dirname, builtin_glob_back, &globbing );
+
+ if ( globbing.case_insensitive )
+ list_free( globbing.patterns );
+
+ list_free( plist );
+
+ return globbing.results;
+}
+
+
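+/*
+ * glob_recursive() - expands a pattern whose directory components may
+ * themselves contain wildcards by recursively globbing the directory part
+ * and then matching the base name within each resulting directory.
+ */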
+LIST * glob_recursive( char const * pattern )
+{
+ LIST * result = L0;
+
+ /* Check if there are metacharacters in the pattern. */
+ if ( !has_wildcards( pattern ) )
+ {
+ /* No metacharacters. Check if the path exists. */
+ OBJECT * const p = object_new( pattern );
+ result = append_if_exists( result, p );
+ object_free( p );
+ }
+ else
+ {
+ /* Have metacharacters in the pattern. Split into dir/name. */
+ PATHNAME path[ 1 ];
+ path_parse( pattern, path );
+
+ if ( path->f_dir.ptr )
+ {
+ LIST * dirs = L0;
+ string dirname[ 1 ];
+ string basename[ 1 ];
+ string_new( dirname );
+ string_new( basename );
+
+ string_append_range( dirname, path->f_dir.ptr,
+ path->f_dir.ptr + path->f_dir.len );
+
+ path->f_grist.ptr = 0;
+ path->f_grist.len = 0;
+ path->f_dir.ptr = 0;
+ path->f_dir.len = 0;
+ path_build( path, basename );
+
+ dirs = has_wildcards( dirname->value )
+ ? glob_recursive( dirname->value )
+ : list_push_back( dirs, object_new( dirname->value ) );
+
+ if ( has_wildcards( basename->value ) )
+ {
+ OBJECT * const b = object_new( basename->value );
+ LISTITER iter = list_begin( dirs );
+ LISTITER const end = list_end( dirs );
+ for ( ; iter != end; iter = list_next( iter ) )
+ result = list_append( result, glob1( list_item( iter ), b )
+ );
+ object_free( b );
+ }
+ else
+ {
+ LISTITER iter = list_begin( dirs );
+ LISTITER const end = list_end( dirs );
+ string file_string[ 1 ];
+ string_new( file_string );
+
+ /* No wildcard in basename. */
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ OBJECT * p;
+ path->f_dir.ptr = object_str( list_item( iter ) );
+ path->f_dir.len = strlen( object_str( list_item( iter ) ) );
+ path_build( path, file_string );
+
+ p = object_new( file_string->value );
+
+ result = append_if_exists( result, p );
+
+ object_free( p );
+
+ string_truncate( file_string, 0 );
+ }
+
+ string_free( file_string );
+ }
+
+ string_free( dirname );
+ string_free( basename );
+
+ list_free( dirs );
+ }
+ else
+ {
+ /* No directory, just a pattern. */
+ OBJECT * const p = object_new( pattern );
+ result = list_append( result, glob1( constant_dot, p ) );
+ object_free( p );
+ }
+ }
+
+ return result;
+}
+
+
+/*
+ * builtin_glob_recursive() - GLOB-RECURSIVELY rule
+ */
+
+LIST * builtin_glob_recursive( FRAME * frame, int flags )
+{
+ LIST * result = L0;
+ LIST * const l = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ result = list_append( result, glob_recursive( object_str( list_item(
+ iter ) ) ) );
+ return result;
+}
+
+
+/*
+ * builtin_match() - MATCH rule, regexp matching
+ */
+
+LIST * builtin_match( FRAME * frame, int flags )
+{
+ LIST * l;
+ LIST * r;
+ LIST * result = L0;
+ LISTITER l_iter;
+ LISTITER l_end;
+ LISTITER r_iter;
+ LISTITER r_end;
+
+ string buf[ 1 ];
+ string_new( buf );
+
+ /* For each pattern */
+
+ l = lol_get( frame->args, 0 );
+ l_iter = list_begin( l );
+ l_end = list_end( l );
+ for ( ; l_iter != l_end; l_iter = list_next( l_iter ) )
+ {
+ /* Result is cached and intentionally never freed. */
+ regexp * re = regex_compile( list_item( l_iter ) );
+
+ /* For each string to match against. */
+ r = lol_get( frame->args, 1 );
+ r_iter = list_begin( r );
+ r_end = list_end( r );
+ for ( ; r_iter != r_end; r_iter = list_next( r_iter ) )
+ {
+ if ( regexec( re, object_str( list_item( r_iter ) ) ) )
+ {
+ int i;
+ int top;
+
+ /* Find highest parameter */
+
+ for ( top = NSUBEXP; top-- > 1; )
+ if ( re->startp[ top ] )
+ break;
+
+ /* And add all parameters up to highest onto list. */
+ /* Must have parameters to have results! */
+ for ( i = 1; i <= top; ++i )
+ {
+ string_append_range( buf, re->startp[ i ], re->endp[ i ] );
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ }
+
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * builtin_split_by_characters() - splits the given string into tokens
+ */
+
+LIST * builtin_split_by_characters( FRAME * frame, int flags )
+{
+ LIST * l1 = lol_get( frame->args, 0 );
+ LIST * l2 = lol_get( frame->args, 1 );
+
+ LIST * result = L0;
+
+ string buf[ 1 ];
+
+ char const * delimiters = object_str( list_front( l2 ) );
+ char * t;
+
+ string_copy( buf, object_str( list_front( l1 ) ) );
+
+ t = strtok( buf->value, delimiters );
+ while ( t )
+ {
+ result = list_push_back( result, object_new( t ) );
+ t = strtok( NULL, delimiters );
+ }
+
+ string_free( buf );
+
+ return result;
+}
+
+
+/*
+ * builtin_hdrmacro() - scans the given files for header file macro definitions
+ */
+
+LIST * builtin_hdrmacro( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+
+ /* Scan file for header filename macro definitions. */
+ if ( DEBUG_HEADER )
+ printf( "scanning '%s' for header file macro definitions\n",
+ object_str( list_item( iter ) ) );
+
+ macro_headers( t );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_rulenames() - RULENAMES ( MODULE ? )
+ *
+ * Returns a list of the non-local rule names in the given MODULE. If MODULE is
+ * not supplied, returns the list of rule names in the global module.
+ */
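+/*
+ * Example, with a hypothetical module name:
+ *
+ *   local rules = [ RULENAMES my-module ] ;  # exported rules of my-module
+ *   local globals = [ RULENAMES ] ;          # rules of the global module
+ */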
+
+static void add_rule_name( void * r_, void * result_ )
+{
+ RULE * const r = (RULE *)r_;
+ LIST * * const result = (LIST * *)result_;
+ if ( r->exported )
+ *result = list_push_back( *result, object_copy( r->name ) );
+}
+
+
+LIST * builtin_rulenames( FRAME * frame, int flags )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * const source_module = bindmodule( list_empty( arg0 )
+ ? 0
+ : list_front( arg0 ) );
+
+ if ( source_module->rules )
+ hashenumerate( source_module->rules, add_rule_name, &result );
+ return result;
+}
+
+
+/*
+ * builtin_varnames() - VARNAMES ( MODULE ? )
+ *
+ * Returns a list of the variable names in the given MODULE. If MODULE is not
+ * supplied, returns the list of variable names in the global module.
+ */
+
+/* Helper function for builtin_varnames(), below. Used with hashenumerate(), it
+ * appends the key of each element to the result list.
+ */
+static void add_hash_key( void * np, void * result_ )
+{
+ LIST * * result = (LIST * *)result_;
+ *result = list_push_back( *result, object_copy( *(OBJECT * *)np ) );
+}
+
+
+LIST * builtin_varnames( FRAME * frame, int flags )
+{
+ LIST * arg0 = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ module_t * source_module = bindmodule( list_empty( arg0 )
+ ? 0
+ : list_front( arg0 ) );
+
+ struct hash * const vars = source_module->variables;
+ if ( vars )
+ hashenumerate( vars, add_hash_key, &result );
+ return result;
+}
+
+
+/*
+ * builtin_delete_module() - DELETE_MODULE ( MODULE ? )
+ *
+ * Clears all rules and variables from the given module.
+ */
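+/*
+ * Illustrative call (hypothetical module name):
+ *
+ *   DELETE_MODULE my-module ;  # drops all rules and variables of my-module
+ */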
+
+LIST * builtin_delete_module( FRAME * frame, int flags )
+{
+ LIST * const arg0 = lol_get( frame->args, 0 );
+ module_t * const source_module = bindmodule( list_empty( arg0 ) ? 0 :
+ list_front( arg0 ) );
+ delete_module( source_module );
+ return L0;
+}
+
+
+/*
+ * unknown_rule() - reports an unknown rule occurrence to the user and exits
+ */
+
+void unknown_rule( FRAME * frame, char const * key, module_t * module,
+ OBJECT * rule_name )
+{
+ backtrace_line( frame->prev );
+ if ( key )
+ printf("%s error", key);
+ else
+ printf("ERROR");
+ printf( ": rule \"%s\" unknown in ", object_str( rule_name ) );
+ if ( module->name )
+ printf( "module \"%s\".\n", object_str( module->name ) );
+ else
+ printf( "root module.\n" );
+ backtrace( frame->prev );
+ exit( 1 );
+}
+
+
+/*
+ * builtin_import() - IMPORT rule
+ *
+ * IMPORT
+ * (
+ * SOURCE_MODULE ? :
+ * SOURCE_RULES * :
+ * TARGET_MODULE ? :
+ * TARGET_RULES * :
+ * LOCALIZE ?
+ * )
+ *
+ * Imports rules from the SOURCE_MODULE into the TARGET_MODULE as local rules.
+ * If either SOURCE_MODULE or TARGET_MODULE is not supplied, it refers to the
+ * global module. SOURCE_RULES specifies which rules from the SOURCE_MODULE to
+ * import; TARGET_RULES specifies the names to give those rules in
+ * TARGET_MODULE. If SOURCE_RULES contains a name that does not correspond to
+ * a rule in SOURCE_MODULE, or if it contains a different number of items than
+ * TARGET_RULES, an error is issued. If LOCALIZE is specified, the rules will be
+ * executed in TARGET_MODULE, with corresponding access to its module local
+ * variables.
+ */
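+/*
+ * Illustrative invocation (all module and rule names below are hypothetical):
+ *
+ *   IMPORT source-module : helper : target-module : helper-alias : localized ;
+ *
+ * imports 'helper' from source-module into target-module under the name
+ * 'helper-alias' and, because the fifth argument is non-empty, localizes it.
+ */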
+
+LIST * builtin_import( FRAME * frame, int flags )
+{
+ LIST * source_module_list = lol_get( frame->args, 0 );
+ LIST * source_rules = lol_get( frame->args, 1 );
+ LIST * target_module_list = lol_get( frame->args, 2 );
+ LIST * target_rules = lol_get( frame->args, 3 );
+ LIST * localize = lol_get( frame->args, 4 );
+
+ module_t * target_module = bindmodule( list_empty( target_module_list )
+ ? 0
+ : list_front( target_module_list ) );
+ module_t * source_module = bindmodule( list_empty( source_module_list )
+ ? 0
+ : list_front( source_module_list ) );
+
+ LISTITER source_iter = list_begin( source_rules );
+ LISTITER const source_end = list_end( source_rules );
+ LISTITER target_iter = list_begin( target_rules );
+ LISTITER const target_end = list_end( target_rules );
+
+ for ( ;
+ source_iter != source_end && target_iter != target_end;
+ source_iter = list_next( source_iter ),
+ target_iter = list_next( target_iter ) )
+ {
+ RULE * r;
+ RULE * imported;
+
+ if ( !source_module->rules || !(r = (RULE *)hash_find(
+ source_module->rules, list_item( source_iter ) ) ) )
+ unknown_rule( frame, "IMPORT", source_module, list_item( source_iter
+ ) );
+
+ imported = import_rule( r, target_module, list_item( target_iter ) );
+ if ( !list_empty( localize ) )
+ rule_localize( imported, target_module );
+ /* This rule is really part of some other module. Just refer to it here,
+ * but do not let it out.
+ */
+ imported->exported = 0;
+ }
+
+ if ( source_iter != source_end || target_iter != target_end )
+ {
+ backtrace_line( frame->prev );
+ printf( "import error: length of source and target rule name lists "
+ "don't match!\n" );
+ printf( " source: " );
+ list_print( source_rules );
+ printf( "\n target: " );
+ list_print( target_rules );
+ printf( "\n" );
+ backtrace( frame->prev );
+ exit( 1 );
+ }
+
+ return L0;
+}
+
+
+/*
+ * builtin_export() - EXPORT ( MODULE ? : RULES * )
+ *
+ * The EXPORT rule marks RULES from MODULE as non-local (and thus
+ * exportable). If an element of RULES does not name a rule in MODULE, an error
+ * is issued.
+ */
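+/*
+ * Example with hypothetical names:
+ *
+ *   EXPORT my-module : rule-one rule-two ;  # mark both rules as exported
+ */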
+
+LIST * builtin_export( FRAME * frame, int flags )
+{
+ LIST * const module_list = lol_get( frame->args, 0 );
+ LIST * const rules = lol_get( frame->args, 1 );
+ module_t * const m = bindmodule( list_empty( module_list ) ? 0 : list_front(
+ module_list ) );
+
+ LISTITER iter = list_begin( rules );
+ LISTITER const end = list_end( rules );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ RULE * r;
+ if ( !m->rules || !( r = (RULE *)hash_find( m->rules, list_item( iter )
+ ) ) )
+ unknown_rule( frame, "EXPORT", m, list_item( iter ) );
+ r->exported = 1;
+ }
+ return L0;
+}
+
+
+/*
+ * get_source_line() - get a frame's file and line number information
+ *
+ * This is the execution traceback information to be displayed in debug output
+ * or an error backtrace.
+ */
+
+static void get_source_line( FRAME * frame, char const * * file, int * line )
+{
+ if ( frame->file )
+ {
+ char const * f = object_str( frame->file );
+ int l = frame->line;
+ if ( !strcmp( f, "+" ) )
+ {
+ f = "jambase.c";
+ l += 3;
+ }
+ *file = f;
+ *line = l;
+ }
+ else
+ {
+ *file = "(builtin)";
+ *line = -1;
+ }
+}
+
+
+void print_source_line( FRAME * frame )
+{
+ char const * file;
+ int line;
+ get_source_line( frame, &file, &line );
+ if ( line < 0 )
+ printf( "(builtin):" );
+ else
+ printf( "%s:%d:", file, line );
+}
+
+
+/*
+ * backtrace_line() - print a single line of error backtrace for the given
+ * frame.
+ */
+
+void backtrace_line( FRAME * frame )
+{
+ if ( frame == 0 )
+ {
+ printf( "(no frame):" );
+ }
+ else
+ {
+ print_source_line( frame );
+ printf( " in %s\n", frame->rulename );
+ }
+}
+
+
+/*
+ * backtrace() - Print the entire backtrace from the given frame to the Jambase
+ * which invoked it.
+ */
+
+void backtrace( FRAME * frame )
+{
+ if ( !frame ) return;
+ while ( ( frame = frame->prev ) )
+ backtrace_line( frame );
+}
+
+
+/*
+ * builtin_backtrace() - A Jam version of the backtrace function, taking an
+ * optional maximum number of levels to report and returning a list of
+ * quadruples: FILENAME LINE MODULE RULENAME describing each frame. Note that
+ * the module name is always followed by a period.
+ */
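+/*
+ * Usage sketch; each frame contributes four consecutive list elements:
+ *
+ *   local bt = [ BACKTRACE ] ;  # file line module. rulename, per frame
+ */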
+
+LIST * builtin_backtrace( FRAME * frame, int flags )
+{
+ LIST * const levels_arg = lol_get( frame->args, 0 );
+ int levels = list_empty( levels_arg )
+ ? (int)( (unsigned int)(-1) >> 1 )
+ : atoi( object_str( list_front( levels_arg ) ) );
+
+ LIST * result = L0;
+ for ( ; ( frame = frame->prev ) && levels; --levels )
+ {
+ char const * file;
+ int line;
+ char buf[ 32 ];
+ string module_name[ 1 ];
+ get_source_line( frame, &file, &line );
+ sprintf( buf, "%d", line );
+ string_new( module_name );
+ if ( frame->module->name )
+ {
+ string_append( module_name, object_str( frame->module->name ) );
+ string_append( module_name, "." );
+ }
+ result = list_push_back( result, object_new( file ) );
+ result = list_push_back( result, object_new( buf ) );
+ result = list_push_back( result, object_new( module_name->value ) );
+ result = list_push_back( result, object_new( frame->rulename ) );
+ string_free( module_name );
+ }
+ return result;
+}
+
+
+/*
+ * builtin_caller_module() - CALLER_MODULE ( levels ? )
+ *
+ * If levels is not supplied, returns the name of the module of the rule which
+ * called the one calling this one. If levels is supplied, it is interpreted as
+ * an integer specifying a number of additional levels of call stack to traverse
+ * in order to locate the module in question. If no such module exists, returns
+ * the empty list. Also returns the empty list when the module in question is
+ * the global module. This rule is needed for implementing module import
+ * behavior.
+ */
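+/*
+ * Illustrative usage:
+ *
+ *   local m = [ CALLER_MODULE ] ;    # module of our caller's caller
+ *   local n = [ CALLER_MODULE 1 ] ;  # one additional level up the stack
+ */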
+
+LIST * builtin_caller_module( FRAME * frame, int flags )
+{
+ LIST * const levels_arg = lol_get( frame->args, 0 );
+ int const levels = list_empty( levels_arg )
+ ? 0
+ : atoi( object_str( list_front( levels_arg ) ) );
+
+ int i;
+ for ( i = 0; ( i < levels + 2 ) && frame->prev; ++i )
+ frame = frame->prev;
+
+ return frame->module == root_module()
+ ? L0
+ : list_new( object_copy( frame->module->name ) );
+}
+
+
+/*
+ * Return the current working directory.
+ *
+ * Usage: pwd = [ PWD ] ;
+ */
+
+LIST * builtin_pwd( FRAME * frame, int flags )
+{
+ return list_new( object_copy( cwd() ) );
+}
+
+
+/*
+ * Replaces the list of targets that jam will attempt to update with the given
+ * targets and returns the previously set list.
+ */
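+/*
+ * Usage sketch (hypothetical target name):
+ *
+ *   local previous = [ UPDATE my-target ] ;  # returns the earlier list
+ */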
+
+LIST * builtin_update( FRAME * frame, int flags )
+{
+ LIST * result = list_copy( targets_to_update() );
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( arg1 ), end = list_end( arg1 );
+ clear_targets_to_update();
+ for ( ; iter != end; iter = list_next( iter ) )
+ mark_target_for_updating( object_copy( list_item( iter ) ) );
+ return result;
+}
+
+extern int anyhow;
+int last_update_now_status;
+
+/* Takes a list of target names and immediately updates them.
+ *
+ * Parameters:
+ * 1. Target list.
+ *   2. Optional file descriptor (converted to a string) for a log file to
+ *      which all the related build output should be redirected.
+ * 3. If specified, makes the build temporarily disable the -n option, i.e.
+ * forces all needed out-of-date targets to be rebuilt.
+ * 4. If specified, makes the build temporarily disable the -q option, i.e.
+ * forces the build to continue even if one of the targets fails to build.
+ */
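+/*
+ * Illustrative call; 'force' and 'continue' below are arbitrary non-empty
+ * markers, and $(targets) and $(log-fd) are hypothetical variables:
+ *
+ *   local ok = [ UPDATE_NOW $(targets) : $(log-fd) : force : continue ] ;
+ */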
+LIST * builtin_update_now( FRAME * frame, int flags )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LIST * log = lol_get( frame->args, 1 );
+ LIST * force = lol_get( frame->args, 2 );
+ LIST * continue_ = lol_get( frame->args, 3 );
+ int status;
+ int original_stdout = 0;
+ int original_stderr = 0;
+ int original_noexec = 0;
+ int original_quitquick = 0;
+
+ if ( !list_empty( log ) )
+ {
+ /* Temporarily redirect stdout and stderr to the given log file. */
+ int const fd = atoi( object_str( list_front( log ) ) );
+        original_stdout = dup( 1 );
+        original_stderr = dup( 2 );
+        dup2( fd, 1 );
+        dup2( fd, 2 );
+ }
+
+ if ( !list_empty( force ) )
+ {
+ original_noexec = globs.noexec;
+ globs.noexec = 0;
+ }
+
+ if ( !list_empty( continue_ ) )
+ {
+ original_quitquick = globs.quitquick;
+ globs.quitquick = 0;
+ }
+
+ status = make( targets, anyhow );
+
+ if ( !list_empty( force ) )
+ {
+ globs.noexec = original_noexec;
+ }
+
+ if ( !list_empty( continue_ ) )
+ {
+ globs.quitquick = original_quitquick;
+ }
+
+ if ( !list_empty( log ) )
+ {
+        /* Flush whatever stdio might have buffered, while descriptors 1 and 2
+         * still refer to the log file.
+ */
+ fflush( stdout );
+ fflush( stderr );
+        dup2( original_stdout, 1 );
+        dup2( original_stderr, 2 );
+ close( original_stdout );
+ close( original_stderr );
+ }
+
+ last_update_now_status = status;
+
+ return status ? L0 : list_new( object_copy( constant_ok ) );
+}
+
+
+LIST * builtin_import_module( FRAME * frame, int flags )
+{
+ LIST * const arg1 = lol_get( frame->args, 0 );
+ LIST * const arg2 = lol_get( frame->args, 1 );
+ module_t * const m = list_empty( arg2 )
+ ? root_module()
+ : bindmodule( list_front( arg2 ) );
+ import_module( arg1, m );
+ return L0;
+}
+
+
+LIST * builtin_imported_modules( FRAME * frame, int flags )
+{
+ LIST * const arg0 = lol_get( frame->args, 0 );
+ OBJECT * const module = list_empty( arg0 ) ? 0 : list_front( arg0 );
+ return imported_modules( bindmodule( module ) );
+}
+
+
+LIST * builtin_instance( FRAME * frame, int flags )
+{
+ LIST * arg1 = lol_get( frame->args, 0 );
+ LIST * arg2 = lol_get( frame->args, 1 );
+ module_t * const instance = bindmodule( list_front( arg1 ) );
+ module_t * const class_module = bindmodule( list_front( arg2 ) );
+ instance->class_module = class_module;
+ module_set_fixed_variables( instance, class_module->num_fixed_variables );
+ return L0;
+}
+
+
+LIST * builtin_sort( FRAME * frame, int flags )
+{
+ return list_sort( lol_get( frame->args, 0 ) );
+}
+
+
+LIST * builtin_normalize_path( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+
+    /* First, we iterate over all '/'-separated elements, starting from the end
+     * of the string. If we see a '..', we remove a preceding path element. If
+     * we see a '.', we remove it. Removal is done by overwriting data using
+     * '\1' characters. After the whole string has been processed, we do a
+     * second pass, removing all the inserted '\1' characters.
+ */
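+    /* Worked example (illustrative values): the input elements "a" "./b/.."
+     * "c//" normalize to "a/c", while the rooted input "/a/../.." leaves an
+     * unmatched '..' and therefore yields an empty list.
+     */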
+
+ string in[ 1 ];
+ string out[ 1 ];
+ /* Last character of the part of string still to be processed. */
+ char * end;
+ /* Working pointer. */
+ char * current;
+ /* Number of '..' elements seen and not processed yet. */
+ int dotdots = 0;
+ int rooted = 0;
+ OBJECT * result = 0;
+ LISTITER arg_iter = list_begin( arg );
+ LISTITER arg_end = list_end( arg );
+
+ /* Make a copy of input: we should not change it. Prepend a '/' before it as
+ * a guard for the algorithm later on and remember whether it was originally
+ * rooted or not.
+ */
+ string_new( in );
+ string_push_back( in, '/' );
+ for ( ; arg_iter != arg_end; arg_iter = list_next( arg_iter ) )
+ {
+ if ( object_str( list_item( arg_iter ) )[ 0 ] != '\0' )
+ {
+ if ( in->size == 1 )
+ rooted = ( object_str( list_item( arg_iter ) )[ 0 ] == '/' ) ||
+ ( object_str( list_item( arg_iter ) )[ 0 ] == '\\' );
+ else
+ string_append( in, "/" );
+ string_append( in, object_str( list_item( arg_iter ) ) );
+ }
+ }
+
+ /* Convert \ into /. On Windows, paths using / and \ are equivalent, and we
+     * want this function to produce a canonical representation.
+ */
+ for ( current = in->value, end = in->value + in->size;
+ current < end; ++current )
+ if ( *current == '\\' )
+ *current = '/';
+
+ /* Now we remove any extra path elements by overwriting them with '\1'
+     * characters and count how many unused '..' path elements remain. Note
+     * that each remaining path element always starts with a '/' character.
+ */
+ for ( end = in->value + in->size - 1; end >= in->value; )
+ {
+        /* Set 'current' to the next occurrence of '/', which always exists. */
+ for ( current = end; *current != '/'; --current );
+
+ if ( current == end )
+ {
+ /* Found a trailing or duplicate '/'. Remove it. */
+ *current = '\1';
+ }
+ else if ( ( end - current == 1 ) && ( *( current + 1 ) == '.' ) )
+ {
+ /* Found '/.'. Remove them all. */
+ *current = '\1';
+ *(current + 1) = '\1';
+ }
+ else if ( ( end - current == 2 ) && ( *( current + 1 ) == '.' ) &&
+ ( *( current + 2 ) == '.' ) )
+ {
+ /* Found '/..'. Remove them all. */
+ *current = '\1';
+ *(current + 1) = '\1';
+ *(current + 2) = '\1';
+ ++dotdots;
+ }
+ else if ( dotdots )
+ {
+ memset( current, '\1', end - current + 1 );
+ --dotdots;
+ }
+ end = current - 1;
+ }
+
+ string_new( out );
+
+ /* Now we know that we need to add exactly dotdots '..' path elements to the
+ * front and that our string is either empty or has a '/' as its first
+     * significant character. If we have any dotdots remaining then the passed
+     * path must not have been rooted; otherwise it is invalid and we return
+     * an empty list.
+ */
+ if ( dotdots )
+ {
+ if ( rooted )
+ {
+ string_free( out );
+ string_free( in );
+ return L0;
+ }
+ do
+ string_append( out, "/.." );
+ while ( --dotdots );
+ }
+
+ /* Now we actually remove all the path characters marked for removal. */
+ for ( current = in->value; *current; ++current )
+ if ( *current != '\1' )
+ string_push_back( out, *current );
+
+ /* Here we know that our string contains no '\1' characters and is either
+ * empty or has a '/' as its initial character. If the original path was not
+ * rooted and we have a non-empty path we need to drop the initial '/'. If
+ * the original path was rooted and we have an empty path we need to add
+ * back the '/'.
+ */
+ result = object_new( out->size
+ ? out->value + !rooted
+ : ( rooted ? "/" : "." ) );
+
+ string_free( out );
+ string_free( in );
+
+ return list_new( result );
+}
+
+
+LIST * builtin_native_rule( FRAME * frame, int flags )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ LIST * rule_name = lol_get( frame->args, 1 );
+
+ module_t * module = bindmodule( list_front( module_name ) );
+
+ native_rule_t * np;
+ if ( module->native_rules && (np = (native_rule_t *)hash_find(
+ module->native_rules, list_front( rule_name ) ) ) )
+ {
+ new_rule_body( module, np->name, np->procedure, 1 );
+ }
+ else
+ {
+ backtrace_line( frame->prev );
+ printf( "error: no native rule \"%s\" defined in module \"%s.\"\n",
+ object_str( list_front( rule_name ) ), object_str( module->name ) );
+ backtrace( frame->prev );
+ exit( 1 );
+ }
+ return L0;
+}
+
+
+LIST * builtin_has_native_rule( FRAME * frame, int flags )
+{
+ LIST * module_name = lol_get( frame->args, 0 );
+ LIST * rule_name = lol_get( frame->args, 1 );
+ LIST * version = lol_get( frame->args, 2 );
+
+ module_t * module = bindmodule( list_front( module_name ) );
+
+ native_rule_t * np;
+ if ( module->native_rules && (np = (native_rule_t *)hash_find(
+ module->native_rules, list_front( rule_name ) ) ) )
+ {
+ int expected_version = atoi( object_str( list_front( version ) ) );
+ if ( np->version == expected_version )
+ return list_new( object_copy( constant_true ) );
+ }
+ return L0;
+}
+
+
+LIST * builtin_user_module( FRAME * frame, int flags )
+{
+ LIST * const module_name = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( module_name );
+ LISTITER const end = list_end( module_name );
+ for ( ; iter != end; iter = list_next( iter ) )
+ bindmodule( list_item( iter ) )->user_module = 1;
+ return L0;
+}
+
+
+LIST * builtin_nearest_user_location( FRAME * frame, int flags )
+{
+ FRAME * const nearest_user_frame = frame->module->user_module
+ ? frame
+ : frame->prev_user;
+ if ( !nearest_user_frame )
+ return L0;
+
+ {
+ LIST * result = L0;
+ char const * file;
+ int line;
+ char buf[ 32 ];
+
+ get_source_line( nearest_user_frame, &file, &line );
+ sprintf( buf, "%d", line );
+ result = list_push_back( result, object_new( file ) );
+ result = list_push_back( result, object_new( buf ) );
+ return result;
+ }
+}
+
+
+LIST * builtin_check_if_file( FRAME * frame, int flags )
+{
+ LIST * const name = lol_get( frame->args, 0 );
+ return file_is_file( list_front( name ) ) == 1
+ ? list_new( object_copy( constant_true ) )
+ : L0;
+}
+
+
+LIST * builtin_md5( FRAME * frame, int flags )
+{
+ LIST * l = lol_get( frame->args, 0 );
+ char const * s = object_str( list_front( l ) );
+
+ md5_state_t state;
+ md5_byte_t digest[ 16 ];
+ char hex_output[ 16 * 2 + 1 ];
+
+ int di;
+
+ md5_init( &state );
+ md5_append( &state, (md5_byte_t const *)s, strlen( s ) );
+ md5_finish( &state, digest );
+
+ for ( di = 0; di < 16; ++di )
+ sprintf( hex_output + di * 2, "%02x", digest[ di ] );
+
+ return list_new( object_new( hex_output ) );
+}
+
+
+LIST * builtin_file_open( FRAME * frame, int flags )
+{
+ char const * name = object_str( list_front( lol_get( frame->args, 0 ) ) );
+ char const * mode = object_str( list_front( lol_get( frame->args, 1 ) ) );
+ int fd;
+ char buffer[ sizeof( "4294967295" ) ];
+
+ if ( strcmp(mode, "w") == 0 )
+ fd = open( name, O_WRONLY|O_CREAT|O_TRUNC, 0666 );
+ else
+ fd = open( name, O_RDONLY );
+
+ if ( fd != -1 )
+ {
+ sprintf( buffer, "%d", fd );
+ return list_new( object_new( buffer ) );
+ }
+ return L0;
+}
+
+
+LIST * builtin_pad( FRAME * frame, int flags )
+{
+ OBJECT * string = list_front( lol_get( frame->args, 0 ) );
+ char const * width_s = object_str( list_front( lol_get( frame->args, 1 ) ) );
+
+ int current = strlen( object_str( string ) );
+ int desired = atoi( width_s );
+ if ( current >= desired )
+ return list_new( object_copy( string ) );
+ else
+ {
+ char * buffer = BJAM_MALLOC( desired + 1 );
+ int i;
+ LIST * result;
+
+ strcpy( buffer, object_str( string ) );
+ for ( i = current; i < desired; ++i )
+ buffer[ i ] = ' ';
+ buffer[ desired ] = '\0';
+ result = list_new( object_new( buffer ) );
+ BJAM_FREE( buffer );
+ return result;
+ }
+}
+
+
+LIST * builtin_precious( FRAME * frame, int flags )
+{
+ LIST * targets = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ bindtarget( list_item( iter ) )->flags |= T_FLAG_PRECIOUS;
+ return L0;
+}
+
+
+LIST * builtin_self_path( FRAME * frame, int flags )
+{
+ extern char const * saved_argv0;
+ char * p = executable_path( saved_argv0 );
+ if ( p )
+ {
+ LIST * const result = list_new( object_new( p ) );
+ free( p );
+ return result;
+ }
+ return L0;
+}
+
+
+LIST * builtin_makedir( FRAME * frame, int flags )
+{
+ LIST * const path = lol_get( frame->args, 0 );
+ return file_mkdir( object_str( list_front( path ) ) )
+ ? L0
+ : list_new( object_copy( list_front( path ) ) );
+}
+
+LIST *builtin_readlink( FRAME * frame, int flags )
+{
+ const char * path = object_str( list_front( lol_get( frame->args, 0 ) ) );
+#ifdef OS_NT
+
+ /* This struct is declared in ntifs.h which is
+ * part of the Windows Driver Kit.
+ */
+ typedef struct _REPARSE_DATA_BUFFER {
+ ULONG ReparseTag;
+ USHORT ReparseDataLength;
+ USHORT Reserved;
+ union {
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ ULONG Flags;
+ WCHAR PathBuffer[ 1 ];
+ } SymbolicLinkReparseBuffer;
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ WCHAR PathBuffer[ 1 ];
+ } MountPointReparseBuffer;
+ struct {
+ UCHAR DataBuffer[ 1 ];
+ } GenericReparseBuffer;
+ };
+ } REPARSE_DATA_BUFFER;
+
+ HANDLE hLink = CreateFileA( path, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT, NULL );
+ DWORD n;
+ union {
+ REPARSE_DATA_BUFFER reparse;
+ char data[MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ } buf;
+ int okay = DeviceIoControl(hLink, FSCTL_GET_REPARSE_POINT, NULL, 0, &buf, sizeof(buf), &n, NULL);
+
+ CloseHandle( hLink );
+
+ if (okay && buf.reparse.ReparseTag == IO_REPARSE_TAG_SYMLINK )
+ {
+ int index = buf.reparse.SymbolicLinkReparseBuffer.SubstituteNameOffset / 2;
+ int length = buf.reparse.SymbolicLinkReparseBuffer.SubstituteNameLength / 2;
+ char cbuf[MAX_PATH + 1];
+ int numchars = WideCharToMultiByte( CP_ACP, 0, buf.reparse.SymbolicLinkReparseBuffer.PathBuffer + index, length, cbuf, sizeof(cbuf), NULL, NULL );
+ if( numchars >= sizeof(cbuf) )
+ {
+ return 0;
+ }
+ cbuf[numchars] = '\0';
+ return list_new( object_new( cbuf ) );
+ }
+ else if( okay && buf.reparse.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT )
+ {
+ int index = buf.reparse.MountPointReparseBuffer.SubstituteNameOffset / 2;
+ int length = buf.reparse.MountPointReparseBuffer.SubstituteNameLength / 2;
+ char cbuf[MAX_PATH + 1];
+ const char * result;
+ int numchars = WideCharToMultiByte( CP_ACP, 0, buf.reparse.MountPointReparseBuffer.PathBuffer + index, length, cbuf, sizeof(cbuf), NULL, NULL );
+ if( numchars >= sizeof(cbuf) )
+ {
+ return 0;
+ }
+ cbuf[numchars] = '\0';
+ /* strip off the leading "\??\" */
+ result = cbuf;
+ if ( cbuf[ 0 ] == '\\' && cbuf[ 1 ] == '?' &&
+ cbuf[ 2 ] == '?' && cbuf[ 3 ] == '\\' &&
+ cbuf[ 4 ] != '\0' && cbuf[ 5 ] == ':' )
+ {
+ result += 4;
+ }
+ return list_new( object_new( result ) );
+ }
+ return 0;
+#else
+ char static_buf[256];
+ char * buf = static_buf;
+ size_t bufsize = 256;
+ LIST * result = 0;
+ while (1) {
+ ssize_t len = readlink( path, buf, bufsize );
+ if ( len < 0 )
+ {
+ break;
+ }
+ else if ( len < bufsize )
+ {
+ buf[ len ] = '\0';
+ result = list_new( object_new( buf ) );
+ break;
+ }
+ if ( buf != static_buf )
+ BJAM_FREE( buf );
+ bufsize *= 2;
+ buf = BJAM_MALLOC( bufsize );
+ }
+
+ if ( buf != static_buf )
+ BJAM_FREE( buf );
+
+ return result;
+#endif
+}
+
+
+#ifdef HAVE_PYTHON
+
+LIST * builtin_python_import_rule( FRAME * frame, int flags )
+{
+ static int first_time = 1;
+ char const * python_module = object_str( list_front( lol_get( frame->args,
+ 0 ) ) );
+ char const * python_function = object_str( list_front( lol_get( frame->args,
+ 1 ) ) );
+ OBJECT * jam_module = list_front( lol_get( frame->args, 2 ) );
+ OBJECT * jam_rule = list_front( lol_get( frame->args, 3 ) );
+
+ PyObject * pName;
+ PyObject * pModule;
+ PyObject * pDict;
+ PyObject * pFunc;
+
+ if ( first_time )
+ {
+ /* At the first invocation, we add the value of the global
+ * EXTRA_PYTHONPATH to the sys.path Python variable.
+ */
+ LIST * extra = 0;
+ module_t * outer_module = frame->module;
+ LISTITER iter, end;
+
+ first_time = 0;
+
+ extra = var_get( root_module(), constant_extra_pythonpath );
+
+ iter = list_begin( extra ), end = list_end( extra );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append( buf, "import sys\nsys.path.append(\"" );
+ string_append( buf, object_str( list_item( iter ) ) );
+ string_append( buf, "\")\n" );
+ PyRun_SimpleString( buf->value );
+ string_free( buf );
+ }
+ }
+
+ pName = PyString_FromString( python_module );
+ pModule = PyImport_Import( pName );
+ Py_DECREF( pName );
+
+ if ( pModule != NULL )
+ {
+ pDict = PyModule_GetDict( pModule );
+ pFunc = PyDict_GetItemString( pDict, python_function );
+
+ if ( pFunc && PyCallable_Check( pFunc ) )
+ {
+ module_t * m = bindmodule( jam_module );
+ new_rule_body( m, jam_rule, function_python( pFunc, 0 ), 0 );
+ }
+ else
+ {
+ if ( PyErr_Occurred() )
+ PyErr_Print();
+ fprintf( stderr, "Cannot find function \"%s\"\n", python_function );
+ }
+ Py_DECREF( pModule );
+ }
+ else
+ {
+ PyErr_Print();
+ fprintf( stderr, "Failed to load \"%s\"\n", python_module );
+ }
+ return L0;
+
+}
+
+#endif /* #ifdef HAVE_PYTHON */
+
+
+void lol_build( LOL * lol, char const * * elements )
+{
+ LIST * l = L0;
+ lol_init( lol );
+
+ while ( elements && *elements )
+ {
+ if ( !strcmp( *elements, ":" ) )
+ {
+ lol_add( lol, l );
+ l = L0;
+ }
+ else
+ {
+ l = list_push_back( l, object_new( *elements ) );
+ }
+ ++elements;
+ }
+
+ if ( l != L0 )
+ lol_add( lol, l );
+}
+
+
+#ifdef HAVE_PYTHON
+
+/*
+ * Calls the bjam rule specified by name passed in 'args'. The name is looked up
+ * in the context of bjam's 'python_interface' module. Returns the list of
+ * strings returned by the rule.
+ */
+
+PyObject * bjam_call( PyObject * self, PyObject * args )
+{
+ FRAME inner[ 1 ];
+ LIST * result;
+ PARSE * p;
+ OBJECT * rulename;
+
+ /* Build up the list of arg lists. */
+ frame_init( inner );
+ inner->prev = 0;
+ inner->prev_user = 0;
+ inner->module = bindmodule( constant_python_interface );
+
+ /* Extract the rule name and arguments from 'args'. */
+
+ /* PyTuple_GetItem returns borrowed reference. */
+ rulename = object_new( PyString_AsString( PyTuple_GetItem( args, 0 ) ) );
+ {
+ int i = 1;
+ int size = PyTuple_Size( args );
+ for ( ; i < size; ++i )
+ {
+ PyObject * a = PyTuple_GetItem( args, i );
+ if ( PyString_Check( a ) )
+ {
+ lol_add( inner->args, list_new( object_new(
+ PyString_AsString( a ) ) ) );
+ }
+ else if ( PySequence_Check( a ) )
+ {
+ LIST * l = 0;
+ int s = PySequence_Size( a );
+ int i = 0;
+ for ( ; i < s; ++i )
+ {
+ /* PySequence_GetItem returns new reference. */
+ PyObject * e = PySequence_GetItem( a, i );
+ char * s = PyString_AsString( e );
+ if ( !s )
+ {
+ printf( "Invalid parameter type passed from Python\n" );
+ exit( 1 );
+ }
+ l = list_push_back( l, object_new( s ) );
+ Py_DECREF( e );
+ }
+ lol_add( inner->args, l );
+ }
+ }
+ }
+
+ result = evaluate_rule( bindrule( rulename, inner->module), rulename, inner );
+ object_free( rulename );
+
+ frame_free( inner );
+
+ /* Convert the bjam list into a Python list result. */
+ {
+ PyObject * const pyResult = PyList_New( list_length( result ) );
+ int i = 0;
+ LISTITER iter = list_begin( result );
+ LISTITER const end = list_end( result );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ PyList_SetItem( pyResult, i, PyString_FromString( object_str(
+ list_item( iter ) ) ) );
+ i += 1;
+ }
+ list_free( result );
+ return pyResult;
+ }
+}
+
+
+/*
+ * Accepts four arguments:
+ * - module name,
+ * - rule name,
+ * - Python callable,
+ * - (optional) bjam language function signature.
+ * Creates a bjam rule with the specified name in the specified module, which
+ * will invoke the Python callable.
+ */
+
+PyObject * bjam_import_rule( PyObject * self, PyObject * args )
+{
+ char * module;
+ char * rule;
+ PyObject * func;
+ PyObject * bjam_signature = NULL;
+ module_t * m;
+ RULE * r;
+ OBJECT * module_name;
+ OBJECT * rule_name;
+
+ if ( !PyArg_ParseTuple( args, "ssO|O:import_rule",
+ &module, &rule, &func, &bjam_signature ) )
+ return NULL;
+
+ if ( !PyCallable_Check( func ) )
+ {
+ PyErr_SetString( PyExc_RuntimeError, "Non-callable object passed to "
+ "bjam.import_rule" );
+ return NULL;
+ }
+
+ module_name = *module ? object_new( module ) : 0;
+ m = bindmodule( module_name );
+ if ( module_name )
+ object_free( module_name );
+ rule_name = object_new( rule );
+ new_rule_body( m, rule_name, function_python( func, bjam_signature ), 0 );
+ object_free( rule_name );
+
+ Py_INCREF( Py_None );
+ return Py_None;
+}
+
+
+/*
+ * Accepts four arguments:
+ * - an action name
+ * - an action body
+ * - a list of variables that will be bound inside the action
+ * - integer flags.
+ * Defines an action on the bjam side.
+ */
+
+PyObject * bjam_define_action( PyObject * self, PyObject * args )
+{
+ char * name;
+ char * body;
+ module_t * m;
+ PyObject * bindlist_python;
+ int flags;
+ LIST * bindlist = L0;
+ int n;
+ int i;
+ OBJECT * name_str;
+ FUNCTION * body_func;
+
+ if ( !PyArg_ParseTuple( args, "ssO!i:define_action", &name, &body,
+ &PyList_Type, &bindlist_python, &flags ) )
+ return NULL;
+
+ n = PyList_Size( bindlist_python );
+ for ( i = 0; i < n; ++i )
+ {
+ PyObject * next = PyList_GetItem( bindlist_python, i );
+ if ( !PyString_Check( next ) )
+ {
+ PyErr_SetString( PyExc_RuntimeError, "bind list has non-string "
+ "type" );
+ return NULL;
+ }
+ bindlist = list_push_back( bindlist, object_new( PyString_AsString( next
+ ) ) );
+ }
+
+ name_str = object_new( name );
+ body_func = function_compile_actions( body, constant_builtin, -1 );
+ new_rule_actions( root_module(), name_str, body_func, bindlist, flags );
+ function_free( body_func );
+ object_free( name_str );
+
+ Py_INCREF( Py_None );
+ return Py_None;
+}
+
+
+/*
+ * Returns the value of a variable in root Jam module.
+ */
+
+PyObject * bjam_variable( PyObject * self, PyObject * args )
+{
+ char * name;
+ LIST * value;
+ PyObject * result;
+ int i;
+ OBJECT * varname;
+ LISTITER iter;
+ LISTITER end;
+
+ if ( !PyArg_ParseTuple( args, "s", &name ) )
+ return NULL;
+
+ varname = object_new( name );
+ value = var_get( root_module(), varname );
+ object_free( varname );
+ iter = list_begin( value );
+ end = list_end( value );
+
+ result = PyList_New( list_length( value ) );
+ for ( i = 0; iter != end; iter = list_next( iter ), ++i )
+ PyList_SetItem( result, i, PyString_FromString( object_str( list_item(
+ iter ) ) ) );
+
+ return result;
+}
+
+
+PyObject * bjam_backtrace( PyObject * self, PyObject * args )
+{
+ PyObject * result = PyList_New( 0 );
+ struct frame * f = frame_before_python_call;
+
+ for ( ; f = f->prev; )
+ {
+ PyObject * tuple = PyTuple_New( 4 );
+ char const * file;
+ int line;
+ char buf[ 32 ];
+ string module_name[ 1 ];
+
+ get_source_line( f, &file, &line );
+ sprintf( buf, "%d", line );
+ string_new( module_name );
+ if ( f->module->name )
+ {
+ string_append( module_name, object_str( f->module->name ) );
+ string_append( module_name, "." );
+ }
+
+ /* PyTuple_SetItem steals reference. */
+ PyTuple_SetItem( tuple, 0, PyString_FromString( file ) );
+ PyTuple_SetItem( tuple, 1, PyString_FromString( buf ) );
+ PyTuple_SetItem( tuple, 2, PyString_FromString( module_name->value ) );
+ PyTuple_SetItem( tuple, 3, PyString_FromString( f->rulename ) );
+
+ string_free( module_name );
+
+ PyList_Append( result, tuple );
+ Py_DECREF( tuple );
+ }
+ return result;
+}
+
+PyObject * bjam_caller( PyObject * self, PyObject * args )
+{
+ return PyString_FromString( frame_before_python_call->prev->module->name ?
+ object_str( frame_before_python_call->prev->module->name ) : "" );
+}
+
+#endif /* #ifdef HAVE_PYTHON */
+
+
+#ifdef HAVE_POPEN
+
+#if defined(_MSC_VER) || defined(__BORLANDC__)
+ #define popen windows_popen_wrapper
+ #define pclose _pclose
+
+ /*
+ * This wrapper is a workaround for a funny _popen() feature on Windows
+ * where it eats external quotes in some cases. The bug seems to be related
+ * to the quote stripping functionality used by the Windows cmd.exe
+ * interpreter when its /S is not specified.
+ *
+ * Cleaned up quote from the cmd.exe help screen as displayed on Windows XP
+ * SP3:
+ *
+ * 1. If all of the following conditions are met, then quote characters on
+ * the command line are preserved:
+ *
+ * - no /S switch
+ * - exactly two quote characters
+ * - no special characters between the two quote characters, where
+ * special is one of: &<>()@^|
+ * - there are one or more whitespace characters between the two quote
+ * characters
+ * - the string between the two quote characters is the name of an
+ * executable file.
+ *
+ * 2. Otherwise, old behavior is to see if the first character is a quote
+ * character and if so, strip the leading character and remove the last
+ * quote character on the command line, preserving any text after the
+ * last quote character.
+ *
+ * This causes some commands containing quotes not to be executed correctly.
+ * For example:
+ *
+ * "\Long folder name\aaa.exe" --name="Jurko" --no-surname
+ *
+ * would get its outermost quotes stripped and would be executed as:
+ *
+ * \Long folder name\aaa.exe" --name="Jurko --no-surname
+ *
+ * which would report an error about '\Long' not being a valid command.
+ *
+ * cmd.exe help seems to indicate it would be enough to add an extra space
+ * character in front of the command to avoid this but this does not work,
+ * most likely due to the shell first stripping all leading whitespace
+ * characters from the command.
+ *
+     * The solution implemented here is to quote the whole command in case it
+     * contains any quote characters. Note though that this will not work
+     * correctly should Windows ever 'fix' this feature.
+ * (03.06.2008.) (Jurko)
+ */
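+    /* For illustration, the example command above would thus be passed on to
+     * _popen() wrapped in one extra pair of quotes:
+     *
+     *   ""\Long folder name\aaa.exe" --name="Jurko" --no-surname"
+     */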
+ static FILE * windows_popen_wrapper( char const * command,
+ char const * mode )
+ {
+ int const extra_command_quotes_needed = !!strchr( command, '"' );
+ string quoted_command;
+ FILE * result;
+
+ if ( extra_command_quotes_needed )
+ {
+ string_new( &quoted_command );
+ string_append( &quoted_command, "\"" );
+ string_append( &quoted_command, command );
+ string_append( &quoted_command, "\"" );
+ command = quoted_command.value;
+ }
+
+ result = _popen( command, "r" );
+
+ if ( extra_command_quotes_needed )
+ string_free( &quoted_command );
+
+ return result;
+ }
+#endif /* defined(_MSC_VER) || defined(__BORLANDC__) */
+
+
+static char * rtrim( char * const s )
+{
+ char * p = s;
+ while ( *p ) ++p;
+ for ( --p; p >= s && isspace( *p ); *p-- = 0 );
+ return s;
+}
+
+
+LIST * builtin_shell( FRAME * frame, int flags )
+{
+ LIST * command = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ string s;
+ int ret;
+ char buffer[ 1024 ];
+ FILE * p = NULL;
+ int exit_status = -1;
+ int exit_status_opt = 0;
+ int no_output_opt = 0;
+ int strip_eol_opt = 0;
+
+ /* Process the variable args options. */
+ {
+ int a = 1;
+ LIST * arg = lol_get( frame->args, a );
+ for ( ; !list_empty( arg ); arg = lol_get( frame->args, ++a ) )
+ {
+ if ( !strcmp( "exit-status", object_str( list_front( arg ) ) ) )
+ exit_status_opt = 1;
+ else if ( !strcmp( "no-output", object_str( list_front( arg ) ) ) )
+ no_output_opt = 1;
+ else if ( !strcmp("strip-eol", object_str( list_front( arg ) ) ) )
+ strip_eol_opt = 1;
+ }
+ }
+
+ /* The following fflush() call seems to be indicated as a workaround for a
+     * popen() bug on POSIX implementations related to synchronizing input
+ * stream positions for the called and the calling process.
+ */
+ fflush( NULL );
+
+ p = popen( object_str( list_front( command ) ), "r" );
+ if ( p == NULL )
+ return L0;
+
+ string_new( &s );
+
+ while ( ( ret = fread( buffer, sizeof( char ), sizeof( buffer ) - 1, p ) ) >
+ 0 )
+ {
+ buffer[ ret ] = 0;
+ if ( !no_output_opt )
+ {
+ if ( strip_eol_opt )
+ rtrim( buffer );
+ string_append( &s, buffer );
+ }
+ }
+
+ exit_status = pclose( p );
+
+ /* The command output is returned first. */
+ result = list_new( object_new( s.value ) );
+ string_free( &s );
+
+    /* The command exit status is returned next. */
+ if ( exit_status_opt )
+ {
+ if ( WIFEXITED( exit_status ) )
+ exit_status = WEXITSTATUS( exit_status );
+ else
+ exit_status = -1;
+ sprintf( buffer, "%d", exit_status );
+ result = list_push_back( result, object_new( buffer ) );
+ }
+
+ return result;
+}
+
+#else /* #ifdef HAVE_POPEN */
+
+LIST * builtin_shell( FRAME * frame, int flags )
+{
+ return L0;
+}
+
+#endif /* #ifdef HAVE_POPEN */
diff --git a/tools/build/src/engine/builtins.h b/tools/build/src/engine/builtins.h
new file mode 100644
index 0000000000..6d0c87367f
--- /dev/null
+++ b/tools/build/src/engine/builtins.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#ifndef JAM_BUILTINS_H
+# define JAM_BUILTINS_H
+
+# include "frames.h"
+
+/*
+ * builtins.h - declarations for jam's built-in rules
+ */
+
+void load_builtins();
+void init_set();
+void init_path();
+void init_regex();
+void init_property_set();
+void init_sequence();
+void init_order();
+
+void property_set_done();
+
+LIST *builtin_calc( FRAME * frame, int flags );
+LIST *builtin_depends( FRAME * frame, int flags );
+LIST *builtin_rebuilds( FRAME * frame, int flags );
+LIST *builtin_echo( FRAME * frame, int flags );
+LIST *builtin_exit( FRAME * frame, int flags );
+LIST *builtin_flags( FRAME * frame, int flags );
+LIST *builtin_glob( FRAME * frame, int flags );
+LIST *builtin_glob_recursive( FRAME * frame, int flags );
+LIST *builtin_subst( FRAME * frame, int flags );
+LIST *builtin_match( FRAME * frame, int flags );
+LIST *builtin_split_by_characters( FRAME * frame, int flags );
+LIST *builtin_hdrmacro( FRAME * frame, int flags );
+LIST *builtin_rulenames( FRAME * frame, int flags );
+LIST *builtin_varnames( FRAME * frame, int flags );
+LIST *builtin_delete_module( FRAME * frame, int flags );
+LIST *builtin_import( FRAME * frame, int flags );
+LIST *builtin_export( FRAME * frame, int flags );
+LIST *builtin_caller_module( FRAME * frame, int flags );
+LIST *builtin_backtrace( FRAME * frame, int flags );
+LIST *builtin_pwd( FRAME * frame, int flags );
+LIST *builtin_update( FRAME * frame, int flags );
+LIST *builtin_update_now( FRAME * frame, int flags );
+LIST *builtin_import_module( FRAME * frame, int flags );
+LIST *builtin_imported_modules( FRAME * frame, int flags );
+LIST *builtin_instance( FRAME * frame, int flags );
+LIST *builtin_sort( FRAME * frame, int flags );
+LIST *builtin_normalize_path( FRAME * frame, int flags );
+LIST *builtin_native_rule( FRAME * frame, int flags );
+LIST *builtin_has_native_rule( FRAME * frame, int flags );
+LIST *builtin_user_module( FRAME * frame, int flags );
+LIST *builtin_nearest_user_location( FRAME * frame, int flags );
+LIST *builtin_check_if_file( FRAME * frame, int flags );
+LIST *builtin_python_import_rule( FRAME * frame, int flags );
+LIST *builtin_shell( FRAME * frame, int flags );
+LIST *builtin_md5( FRAME * frame, int flags );
+LIST *builtin_file_open( FRAME * frame, int flags );
+LIST *builtin_pad( FRAME * frame, int flags );
+LIST *builtin_precious( FRAME * frame, int flags );
+LIST *builtin_self_path( FRAME * frame, int flags );
+LIST *builtin_makedir( FRAME * frame, int flags );
+LIST *builtin_readlink( FRAME * frame, int flags );
+
+void backtrace( FRAME *frame );
+extern int last_update_now_status;
+
+#endif
diff --git a/tools/build/src/engine/bump_version.py b/tools/build/src/engine/bump_version.py
new file mode 100644
index 0000000000..1771422884
--- /dev/null
+++ b/tools/build/src/engine/bump_version.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python
+
+# This script is used to bump the bjam version. It takes a single argument, e.g.
+#
+# ./bump_version.py 3.1.9
+#
+# and updates all the necessary files.
+#
+# Copyright 2006 Rene Rivera.
+# Copyright 2005-2006 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+import os
+import os.path
+import re
+import string
+import sys
+
+srcdir = os.path.abspath(os.path.dirname(__file__))
+docdir = os.path.abspath(os.path.join(srcdir, "..", "doc"))
+
+
+def edit(file, *replacements):
+ print(" '%s'..." % file)
+ f = open(file, 'r')
+ text = f.read()
+ f.close()
+ for (source, target) in replacements:
+ text, n = re.compile(source, re.MULTILINE).subn(target, text)
+ assert n > 0
+ f = open(file, 'w')
+ f.write(text)
+ f.close()
+
+
+def make_edits(ver):
+ ver03 = (list(ver) + [0] * 3)[0:3]
+ ver02 = ver03[0:2]
+
+ join = lambda v, s : s.join(str(x) for x in v)
+ dotJoin = lambda v : join(v, ".")
+
+ print("Setting version to %s" % str(ver03))
+
+ edit(os.path.join(srcdir, "boost-jam.spec"),
+ ('^(Version:) .*$', '\\1 %s' % dotJoin(ver03)))
+
+ edit(os.path.join(srcdir, "build.jam"),
+ ('^(_VERSION_ =).* ;$', '\\1 %s ;' % join(ver03, " ")))
+
+ edit(os.path.join(docdir, "bjam.qbk"),
+ ('(\[version).*(\])', '\\1: %s\\2' % dotJoin(ver03)),
+ ('(\[def :version:).*(\])', '\\1 %s\\2' % dotJoin(ver03)))
+
+ edit(os.path.join(srcdir, "patchlevel.h"),
+ ('^(#define VERSION_MAJOR) .*$', '\\1 %s' % ver03[0]),
+ ('^(#define VERSION_MINOR) .*$', '\\1 %s' % ver03[1]),
+ ('^(#define VERSION_PATCH) .*$', '\\1 %s' % ver03[2]),
+ ('^(#define VERSION_MAJOR_SYM) .*$', '\\1 "%02d"' % ver03[0]),
+ ('^(#define VERSION_MINOR_SYM) .*$', '\\1 "%02d"' % ver03[1]),
+ ('^(#define VERSION_PATCH_SYM) .*$', '\\1 "%02d"' % ver03[2]),
+ ('^(#define VERSION) .*$', '\\1 "%s"' % dotJoin(ver)),
+ ('^(#define JAMVERSYM) .*$', '\\1 "JAMVERSION=%s"' % dotJoin(ver02)))
+
+
+def main():
+ if len(sys.argv) < 2:
+ print("Expect new version as argument.")
+ sys.exit(1)
+ if len(sys.argv) > 3:
+ print("Too many arguments.")
+ sys.exit(1)
+
+ version = sys.argv[1].split(".")
+ if len(version) > 3:
+ print("Expect version argument in the format: <MAJOR>.<MINOR>.<PATCH>")
+ sys.exit(1)
+
+ try:
+ version = list(int(x) for x in version)
+ except ValueError:
+ print("Version values must be valid integers.")
+ sys.exit(1)
+
+ while version and version[-1] == 0:
+ version.pop()
+
+ if not version:
+ print("At least one of the version values must be positive.")
+        sys.exit(1)
+
+ make_edits(version)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/build/src/engine/class.c b/tools/build/src/engine/class.c
new file mode 100644
index 0000000000..a4abfaac88
--- /dev/null
+++ b/tools/build/src/engine/class.c
@@ -0,0 +1,191 @@
+/*
+ * Copyright Vladimir Prus 2003.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "class.h"
+
+#include "constants.h"
+#include "frames.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "rules.h"
+#include "strings.h"
+#include "variable.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+
+static struct hash * classes = 0;
+
+
+static void check_defined( LIST * class_names )
+{
+ LISTITER iter = list_begin( class_names );
+ LISTITER const end = list_end( class_names );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( !hash_find( classes, list_item( iter ) ) )
+ {
+ printf( "Class %s is not defined\n", object_str( list_item( iter ) )
+ );
+ abort();
+ }
+ }
+}
+
+
+static OBJECT * class_module_name( OBJECT * declared_name )
+{
+ string name[ 1 ];
+ OBJECT * result;
+
+ string_new( name );
+ string_append( name, "class@" );
+ string_append( name, object_str( declared_name ) );
+
+ result = object_new( name->value );
+ string_free( name );
+
+ return result;
+}
+
+
+struct import_base_data
+{
+ OBJECT * base_name;
+ module_t * base_module;
+ module_t * class_module;
+};
+
+
+static void import_base_rule( void * r_, void * d_ )
+{
+ RULE * r = (RULE *)r_;
+ RULE * ir1;
+ RULE * ir2;
+ struct import_base_data * d = (struct import_base_data *)d_;
+ OBJECT * qname;
+
+ string qualified_name[ 1 ];
+ string_new ( qualified_name );
+ string_append ( qualified_name, object_str( d->base_name ) );
+ string_push_back( qualified_name, '.' );
+ string_append ( qualified_name, object_str( r->name ) );
+ qname = object_new( qualified_name->value );
+ string_free( qualified_name );
+
+ ir1 = import_rule( r, d->class_module, r->name );
+ ir2 = import_rule( r, d->class_module, qname );
+
+ object_free( qname );
+
+ /* Copy 'exported' flag. */
+ ir1->exported = ir2->exported = r->exported;
+
+ /* If we are importing a class method, localize it. */
+ if ( ( r->module == d->base_module ) || ( r->module->class_module &&
+ ( r->module->class_module == d->base_module ) ) )
+ {
+ rule_localize( ir1, d->class_module );
+ rule_localize( ir2, d->class_module );
+ }
+}
+
+
+/*
+ * For each exported rule 'n' declared in the class module for 'base', imports
+ * that rule into 'class_' as 'n' and as 'base.n'. Imported rules are
+ * localized and marked as exported.
+ */
+
+static void import_base_rules( module_t * class_, OBJECT * base )
+{
+ OBJECT * module_name = class_module_name( base );
+ module_t * base_module = bindmodule( module_name );
+ LIST * imported;
+ struct import_base_data d;
+ d.base_name = base;
+ d.base_module = base_module;
+ d.class_module = class_;
+ object_free( module_name );
+
+ if ( base_module->rules )
+ hashenumerate( base_module->rules, import_base_rule, &d );
+
+ imported = imported_modules( base_module );
+ import_module( imported, class_ );
+ list_free( imported );
+}
+
+
+OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame )
+{
+ OBJECT * name = class_module_name( list_front( xname ) );
+ OBJECT * * pp;
+ module_t * class_module = 0;
+ module_t * outer_module = frame->module;
+ int found;
+
+ if ( !classes )
+ classes = hashinit( sizeof( OBJECT * ), "classes" );
+
+ pp = (OBJECT * *)hash_insert( classes, list_front( xname ), &found );
+ if ( !found )
+ {
+ *pp = object_copy( list_front( xname ) );
+ }
+ else
+ {
+ printf( "Class %s already defined\n", object_str( list_front( xname ) )
+ );
+ abort();
+ }
+ check_defined( bases );
+
+ class_module = bindmodule( name );
+
+ {
+        /* Initialize the variables that Boost.Build inserts in every object.
+         * We want to avoid creating the object's hash if it is not needed.
+         */
+ int num = class_module->num_fixed_variables;
+ module_add_fixed_var( class_module, constant_name, &num );
+ module_add_fixed_var( class_module, constant_class, &num );
+ module_set_fixed_variables( class_module, num );
+ }
+
+ var_set( class_module, constant_name, xname, VAR_SET );
+ var_set( class_module, constant_bases, bases, VAR_SET );
+
+ {
+ LISTITER iter = list_begin( bases );
+ LISTITER const end = list_end( bases );
+ for ( ; iter != end; iter = list_next( iter ) )
+ import_base_rules( class_module, list_item( iter ) );
+ }
+
+ return name;
+}
+
+
+static void free_class( void * xclass, void * data )
+{
+ object_free( *(OBJECT * *)xclass );
+}
+
+
+void class_done( void )
+{
+ if ( classes )
+ {
+ hashenumerate( classes, free_class, (void *)0 );
+ hashdone( classes );
+ classes = 0;
+ }
+}
diff --git a/tools/build/v2/engine/class.h b/tools/build/src/engine/class.h
index 256d298b53..256d298b53 100644
--- a/tools/build/v2/engine/class.h
+++ b/tools/build/src/engine/class.h
diff --git a/tools/build/src/engine/command.c b/tools/build/src/engine/command.c
new file mode 100644
index 0000000000..31141fc977
--- /dev/null
+++ b/tools/build/src/engine/command.c
@@ -0,0 +1,121 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * command.c - maintain lists of commands
+ */
+
+#include "jam.h"
+#include "command.h"
+
+#include "lists.h"
+#include "rules.h"
+
+#include <assert.h>
+
+
+/*
+ * cmdlist_append_cmd
+ */
+CMDLIST * cmdlist_append_cmd( CMDLIST * l, CMD * cmd )
+{
+ CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) );
+ result->iscmd = 1;
+ result->next = l;
+ result->impl.cmd = cmd;
+ return result;
+}
+
+CMDLIST * cmdlist_append_target( CMDLIST * l, TARGET * t )
+{
+ CMDLIST * result = (CMDLIST *)BJAM_MALLOC( sizeof( CMDLIST ) );
+ result->iscmd = 0;
+ result->next = l;
+ result->impl.t = t;
+ return result;
+}
+
+void cmdlist_free( CMDLIST * l )
+{
+ while ( l )
+ {
+ CMDLIST * tmp = l->next;
+ BJAM_FREE( l );
+ l = tmp;
+ }
+}
+
+/*
+ * cmd_new() - return a new CMD.
+ */
+
+CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell )
+{
+ CMD * cmd = (CMD *)BJAM_MALLOC( sizeof( CMD ) );
+ FRAME frame[ 1 ];
+
+ assert( cmd );
+ cmd->rule = rule;
+ cmd->shell = shell;
+ cmd->next = 0;
+ cmd->noop = 0;
+ cmd->asynccnt = 1;
+ cmd->status = 0;
+ cmd->lock = NULL;
+ cmd->unlock = NULL;
+
+ lol_init( &cmd->args );
+ lol_add( &cmd->args, targets );
+ lol_add( &cmd->args, sources );
+ string_new( cmd->buf );
+
+ frame_init( frame );
+ frame->module = rule->module;
+ lol_init( frame->args );
+ lol_add( frame->args, list_copy( targets ) );
+ lol_add( frame->args, list_copy( sources ) );
+ function_run_actions( rule->actions->command, frame, stack_global(),
+ cmd->buf );
+ frame_free( frame );
+
+ return cmd;
+}
+
+
+/*
+ * cmd_free() - free a CMD
+ */
+
+void cmd_free( CMD * cmd )
+{
+ cmdlist_free( cmd->next );
+ lol_free( &cmd->args );
+ list_free( cmd->shell );
+ string_free( cmd->buf );
+ freetargets( cmd->unlock );
+ BJAM_FREE( (void *)cmd );
+}
+
+
+/*
+ * cmd_release_targets_and_shell()
+ *
+ * Makes the CMD release its hold on its targets & shell lists and forget
+ * about them. Useful in case caller still has references to those lists and
+ * wants to reuse them after freeing the CMD object.
+ */
+
+void cmd_release_targets_and_shell( CMD * cmd )
+{
+ cmd->args.list[ 0 ] = L0; /* targets */
+ cmd->shell = L0; /* shell */
+}
diff --git a/tools/build/src/engine/command.h b/tools/build/src/engine/command.h
new file mode 100644
index 0000000000..fd59ed11da
--- /dev/null
+++ b/tools/build/src/engine/command.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright 1994 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * command.h - the CMD structure and routines to manipulate them
+ *
+ * Both ACTION and CMD contain a rule, targets, and sources. An
+ * ACTION describes a rule to be applied to the given targets and
+ * sources; a CMD is what actually gets executed by the shell. The
+ * differences are due to:
+ *
+ * ACTIONS must be combined if 'actions together' is given.
+ * ACTIONS must be split if 'actions piecemeal' is given.
+ * ACTIONS must have current sources omitted for 'actions updated'.
+ *
+ * The CMD datatype holds a single command that is to be executed
+ * against a target, and they can chain together to represent the
+ * full collection of commands used to update a target.
+ *
+ * Structures:
+ *
+ * CMD - an action, ready to be formatted into a buffer and executed.
+ *
+ * External routines:
+ *
+ * cmd_new() - return a new CMD or 0 if too many args.
+ * cmd_free() - delete CMD and its parts.
+ * cmd_next() - walk the CMD chain.
+ * cmd_release_targets_and_shell() - CMD forgets about its targets & shell.
+ */
+
+
+/*
+ * CMD - an action, ready to be formatted into a buffer and executed.
+ */
+
+#ifndef COMMAND_SW20111118_H
+#define COMMAND_SW20111118_H
+
+#include "lists.h"
+#include "rules.h"
+#include "strings.h"
+
+
+typedef struct _cmd CMD;
+
+/*
+ * A list whose elements are either TARGETS or CMDS.
+ * CMDLIST is used only by CMD. A TARGET means that
+ * the CMD is the last updating action required to
+ * build the target. A CMD is the next CMD required
+ * to build the same target. (Note that a single action
+ * can update more than one target, so the CMDs form
+ * a DAG, not a straight linear list.)
+ */
+typedef struct _cmdlist {
+ struct _cmdlist * next;
+ union {
+ CMD * cmd;
+ TARGET * t;
+ } impl;
+ char iscmd;
+} CMDLIST;
+
+CMDLIST * cmdlist_append_cmd( CMDLIST *, CMD * );
+CMDLIST * cmdlist_append_target( CMDLIST *, TARGET * );
+void cmdlist_free( CMDLIST * );
+
+struct _cmd
+{
+ CMDLIST * next;
+ RULE * rule; /* rule->actions contains shell script */
+ LIST * shell; /* $(JAMSHELL) value */
+ LOL args; /* LISTs for $(<), $(>) */
+ string buf[ 1 ]; /* actual commands */
+ int noop; /* no-op commands should be faked instead of executed */
+ int asynccnt; /* number of outstanding dependencies */
+ TARGETS * lock; /* semaphores that are required by this cmd. */
+ TARGETS * unlock; /* semaphores that are released when this cmd finishes. */
+ char status; /* the command status */
+};
+
+CMD * cmd_new
+(
+ RULE * rule, /* rule (referenced) */
+ LIST * targets, /* $(<) (ownership transferred) */
+ LIST * sources, /* $(>) (ownership transferred) */
+ LIST * shell /* $(JAMSHELL) (ownership transferred) */
+);
+
+void cmd_release_targets_and_shell( CMD * );
+
+void cmd_free( CMD * );
+
+#define cmd_next( c ) ((c)->next)
+
+#endif
diff --git a/tools/build/src/engine/compile.c b/tools/build/src/engine/compile.c
new file mode 100644
index 0000000000..a690b9fa5e
--- /dev/null
+++ b/tools/build/src/engine/compile.c
@@ -0,0 +1,232 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * compile.c - compile parsed jam statements
+ *
+ * External routines:
+ * evaluate_rule() - execute a rule invocation
+ *
+ * Internal routines:
+ * debug_compile() - printf with indent to show rule expansion
+ */
+
+#include "jam.h"
+#include "compile.h"
+
+#include "builtins.h"
+#include "class.h"
+#include "constants.h"
+#include "hash.h"
+#include "hdrmacro.h"
+#include "make.h"
+#include "modules.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "strings.h"
+#include "variable.h"
+
+#include <assert.h>
+#include <stdarg.h>
+#include <string.h>
+
+
+static void debug_compile( int which, char const * s, FRAME * );
+
+/* Internal functions from builtins.c */
+void backtrace( FRAME * );
+void backtrace_line( FRAME * );
+void print_source_line( FRAME * );
+void unknown_rule( FRAME *, char const * key, module_t *, OBJECT * rule_name );
+
+
+/*
+ * evaluate_rule() - execute a rule invocation
+ */
+
+LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * frame )
+{
+ LIST * result = L0;
+ profile_frame prof[ 1 ];
+ module_t * prev_module = frame->module;
+
+ if ( DEBUG_COMPILE )
+ {
+ /* Try hard to indicate in which module the rule is going to execute. */
+ char buf[ 256 ] = "";
+ if ( rule->module->name )
+ {
+ strncat( buf, object_str( rule->module->name ), sizeof( buf ) -
+ 1 );
+ strncat( buf, ".", sizeof( buf ) - 1 );
+ if ( strncmp( buf, object_str( rule->name ), strlen( buf ) ) == 0 )
+ {
+ buf[ 0 ] = 0;
+ }
+ }
+ strncat( buf, object_str( rule->name ), sizeof( buf ) - 1 );
+ debug_compile( 1, buf, frame );
+
+ lol_print( frame->args );
+ printf( "\n" );
+ }
+
+ if ( rule->procedure && rule->module != prev_module )
+ {
+ /* Propagate current module to nested rule invocations. */
+ frame->module = rule->module;
+ }
+
+ /* Record current rule name in frame. */
+ if ( rule->procedure )
+ {
+ frame->rulename = object_str( rulename );
+ /* And enter record profile info. */
+ if ( DEBUG_PROFILE )
+ profile_enter( function_rulename( rule->procedure ), prof );
+ }
+
+    /* Error out if the rule is unknown, i.e. it has neither updating actions
+     * nor a procedure.
+     */
+ if ( !rule->actions && !rule->procedure )
+ unknown_rule( frame, NULL, frame->module, rule->name );
+
+ /* If this rule will be executed for updating the targets then construct the
+ * action for make().
+ */
+ if ( rule->actions )
+ {
+ TARGETS * t;
+
+ /* The action is associated with this instance of this rule. */
+ ACTION * const action = (ACTION *)BJAM_MALLOC( sizeof( ACTION ) );
+ memset( (char *)action, '\0', sizeof( *action ) );
+
+ action->rule = rule;
+ action->targets = targetlist( (TARGETS *)0, lol_get( frame->args, 0 ) );
+ action->sources = targetlist( (TARGETS *)0, lol_get( frame->args, 1 ) );
+ action->refs = 1;
+
+ /* If we have a group of targets all being built using the same action
+         * and any of these targets is updated, then we have to consider them
+         * all to be out of date. We do this by adding REBUILDS links in both
+         * directions between the first target and all the other targets.
+ */
+ if ( action->targets )
+ {
+ TARGET * const t0 = action->targets->target;
+ for ( t = action->targets->next; t; t = t->next )
+ {
+ t->target->rebuilds = targetentry( t->target->rebuilds, t0 );
+ t0->rebuilds = targetentry( t0->rebuilds, t->target );
+ }
+ }
+
+ /* Append this action to the actions of each target. */
+ for ( t = action->targets; t; t = t->next )
+ t->target->actions = actionlist( t->target->actions, action );
+
+ action_free( action );
+ }
+
+ /* Now recursively compile any parse tree associated with this rule.
+ * function_refer()/function_free() call pair added to ensure the rule does
+ * not get freed while in use.
+ */
+ if ( rule->procedure )
+ {
+ FUNCTION * const function = rule->procedure;
+ function_refer( function );
+ result = function_run( function, frame, stack_global() );
+ function_free( function );
+ }
+
+ if ( DEBUG_PROFILE && rule->procedure )
+ profile_exit( prof );
+
+ if ( DEBUG_COMPILE )
+ debug_compile( -1, 0, frame );
+
+ return result;
+}
+
+
+/*
+ * Call the given rule with the specified parameters. The parameters should be
+ * of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule'
+ * in that frame for the called rule is prepared inside 'call_rule'.
+ *
+ * This function is useful when a builtin rule (in C) wants to call another rule
+ * which might be implemented in Jam.
+ */
+
+LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... )
+{
+ va_list va;
+ LIST * result;
+
+ FRAME inner[ 1 ];
+ frame_init( inner );
+ inner->prev = caller_frame;
+ inner->prev_user = caller_frame->module->user_module
+ ? caller_frame
+ : caller_frame->prev_user;
+ inner->module = caller_frame->module;
+
+ va_start( va, caller_frame );
+ for ( ; ; )
+ {
+ LIST * const l = va_arg( va, LIST * );
+ if ( !l )
+ break;
+ lol_add( inner->args, l );
+ }
+ va_end( va );
+
+ result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner );
+
+ frame_free( inner );
+
+ return result;
+}
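+
+/* Illustrative sketch only - the rule name, FRAME pointer and argument lists
+ * below are hypothetical. A builtin implemented in C could invoke a Jam-level
+ * rule roughly as follows, passing each argument list in order and terminating
+ * the variadic list with a NULL pointer:
+ *
+ *     OBJECT * name = object_new( "do-something" );
+ *     LIST * result = call_rule( name, frame,
+ *         list_new( object_new( "first-arg" ) ),
+ *         list_new( object_new( "second-arg" ) ),
+ *         NULL );
+ *     list_free( result );
+ *     object_free( name );
+ */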
+
+
+/*
+ * debug_compile() - printf with indent to show rule expansion
+ */
+
+static void debug_compile( int which, char const * s, FRAME * frame )
+{
+ static int level = 0;
+ static char indent[ 36 ] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|";
+
+ if ( which >= 0 )
+ {
+ int i;
+
+ print_source_line( frame );
+
+ i = ( level + 1 ) * 2;
+ while ( i > 35 )
+ {
+ fputs( indent, stdout );
+ i -= 35;
+ }
+
+ printf( "%*.*s ", i, i, indent );
+ }
+
+ if ( s )
+ printf( "%s ", s );
+
+ level += which;
+}
diff --git a/tools/build/src/engine/compile.h b/tools/build/src/engine/compile.h
new file mode 100644
index 0000000000..c70f98b9ee
--- /dev/null
+++ b/tools/build/src/engine/compile.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * compile.h - compile parsed jam statements
+ */
+
+#ifndef COMPILE_DWA20011022_H
+#define COMPILE_DWA20011022_H
+
+#include "frames.h"
+#include "lists.h"
+#include "object.h"
+#include "rules.h"
+
+void compile_builtins();
+
+LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * );
+LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... );
+
+/* Flags for compile_set(), etc */
+
+#define ASSIGN_SET 0x00 /* = assign variable */
+#define ASSIGN_APPEND 0x01 /* += append variable */
+#define ASSIGN_DEFAULT 0x02 /* set only if unset */
+
+/* Flags for compile_setexec() */
+
+#define EXEC_UPDATED 0x01 /* executes updated */
+#define EXEC_TOGETHER 0x02 /* executes together */
+#define EXEC_IGNORE 0x04 /* executes ignore */
+#define EXEC_QUIETLY 0x08 /* executes quietly */
+#define EXEC_PIECEMEAL 0x10 /* executes piecemeal */
+#define EXEC_EXISTING 0x20 /* executes existing */
+
+/* Conditions for compile_if() */
+
+#define EXPR_NOT 0 /* ! cond */
+#define EXPR_AND 1 /* cond && cond */
+#define EXPR_OR 2 /* cond || cond */
+#define EXPR_EXISTS 3 /* arg */
+#define EXPR_EQUALS 4 /* arg = arg */
+#define EXPR_NOTEQ 5 /* arg != arg */
+#define EXPR_LESS 6 /* arg < arg */
+#define EXPR_LESSEQ 7 /* arg <= arg */
+#define EXPR_MORE 8 /* arg > arg */
+#define EXPR_MOREEQ 9 /* arg >= arg */
+#define EXPR_IN 10 /* arg in arg */
+
+#endif
diff --git a/tools/build/src/engine/constants.c b/tools/build/src/engine/constants.c
new file mode 100644
index 0000000000..891d32294e
--- /dev/null
+++ b/tools/build/src/engine/constants.c
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * constants.c - constant objects
+ *
+ * External functions:
+ *
+ * constants_init() - initialize constants
+ * constants_done() - free constants
+ *
+ */
+
+#include "constants.h"
+
+
+void constants_init( void )
+{
+ constant_empty = object_new( "" );
+ constant_dot = object_new( "." );
+ constant_plus = object_new( "+" );
+ constant_star = object_new( "*" );
+ constant_question_mark = object_new( "?" );
+ constant_ok = object_new( "ok" );
+ constant_true = object_new( "true" );
+ constant_name = object_new( "__name__" );
+ constant_bases = object_new( "__bases__" );
+ constant_class = object_new( "__class__" );
+ constant_typecheck = object_new( ".typecheck" );
+ constant_builtin = object_new( "(builtin)" );
+ constant_HCACHEFILE = object_new( "HCACHEFILE" );
+ constant_HCACHEMAXAGE = object_new( "HCACHEMAXAGE" );
+ constant_HDRSCAN = object_new( "HDRSCAN" );
+ constant_HDRRULE = object_new( "HDRRULE" );
+ constant_BINDRULE = object_new( "BINDRULE" );
+ constant_LOCATE = object_new( "LOCATE" );
+ constant_SEARCH = object_new( "SEARCH" );
+ constant_JAM_SEMAPHORE = object_new( "JAM_SEMAPHORE" );
+ constant_TIMING_RULE = object_new( "__TIMING_RULE__" );
+ constant_ACTION_RULE = object_new( "__ACTION_RULE__" );
+ constant_JAMSHELL = object_new( "JAMSHELL" );
+ constant_TMPDIR = object_new( "TMPDIR" );
+ constant_TMPNAME = object_new( "TMPNAME" );
+ constant_TMPFILE = object_new( "TMPFILE" );
+ constant_STDOUT = object_new( "STDOUT" );
+ constant_STDERR = object_new( "STDERR" );
+ constant_JAMDATE = object_new( "JAMDATE" );
+ constant_JAM_TIMESTAMP_RESOLUTION = object_new( "JAM_TIMESTAMP_RESOLUTION" );
+ constant_JAM_VERSION = object_new( "JAM_VERSION" );
+ constant_JAMUNAME = object_new( "JAMUNAME" );
+ constant_ENVIRON = object_new( ".ENVIRON" );
+ constant_ARGV = object_new( "ARGV" );
+ constant_all = object_new( "all" );
+ constant_PARALLELISM = object_new( "PARALLELISM" );
+ constant_KEEP_GOING = object_new( "KEEP_GOING" );
+ constant_other = object_new( "[OTHER]" );
+ constant_total = object_new( "[TOTAL]" );
+ constant_FILE_DIRSCAN = object_new( "FILE_DIRSCAN" );
+ constant_MAIN = object_new( "MAIN" );
+ constant_MAIN_MAKE = object_new( "MAIN_MAKE" );
+ constant_MAKE_MAKE0 = object_new( "MAKE_MAKE0" );
+ constant_MAKE_MAKE1 = object_new( "MAKE_MAKE1" );
+ constant_MAKE_MAKE0SORT = object_new( "MAKE_MAKE0SORT" );
+ constant_BINDMODULE = object_new( "BINDMODULE" );
+ constant_IMPORT_MODULE = object_new( "IMPORT_MODULE" );
+ constant_BUILTIN_GLOB_BACK = object_new( "BUILTIN_GLOB_BACK" );
+ constant_timestamp = object_new( "timestamp" );
+ constant_python = object_new("__python__");
+ constant_python_interface = object_new( "python_interface" );
+ constant_extra_pythonpath = object_new( "EXTRA_PYTHONPATH" );
+ constant_MAIN_PYTHON = object_new( "MAIN_PYTHON" );
+}
+
+void constants_done( void )
+{
+ object_free( constant_empty );
+ object_free( constant_dot );
+ object_free( constant_plus );
+ object_free( constant_star );
+ object_free( constant_question_mark );
+ object_free( constant_ok );
+ object_free( constant_true );
+ object_free( constant_name );
+ object_free( constant_bases );
+ object_free( constant_class );
+ object_free( constant_typecheck );
+ object_free( constant_builtin );
+ object_free( constant_HCACHEFILE );
+ object_free( constant_HCACHEMAXAGE );
+ object_free( constant_HDRSCAN );
+ object_free( constant_HDRRULE );
+ object_free( constant_BINDRULE );
+ object_free( constant_LOCATE );
+ object_free( constant_SEARCH );
+ object_free( constant_JAM_SEMAPHORE );
+ object_free( constant_TIMING_RULE );
+ object_free( constant_ACTION_RULE );
+ object_free( constant_JAMSHELL );
+ object_free( constant_TMPDIR );
+ object_free( constant_TMPNAME );
+ object_free( constant_TMPFILE );
+ object_free( constant_STDOUT );
+ object_free( constant_STDERR );
+ object_free( constant_JAMDATE );
+ object_free( constant_JAM_TIMESTAMP_RESOLUTION );
+ object_free( constant_JAM_VERSION );
+ object_free( constant_JAMUNAME );
+ object_free( constant_ENVIRON );
+ object_free( constant_ARGV );
+ object_free( constant_all );
+ object_free( constant_PARALLELISM );
+ object_free( constant_KEEP_GOING );
+ object_free( constant_other );
+ object_free( constant_total );
+ object_free( constant_FILE_DIRSCAN );
+ object_free( constant_MAIN );
+ object_free( constant_MAIN_MAKE );
+ object_free( constant_MAKE_MAKE0 );
+ object_free( constant_MAKE_MAKE1 );
+ object_free( constant_MAKE_MAKE0SORT );
+ object_free( constant_BINDMODULE );
+ object_free( constant_IMPORT_MODULE );
+ object_free( constant_BUILTIN_GLOB_BACK );
+ object_free( constant_timestamp );
+ object_free( constant_python );
+ object_free( constant_python_interface );
+ object_free( constant_extra_pythonpath );
+ object_free( constant_MAIN_PYTHON );
+}
+
+OBJECT * constant_empty;
+OBJECT * constant_dot;
+OBJECT * constant_plus;
+OBJECT * constant_star;
+OBJECT * constant_question_mark;
+OBJECT * constant_ok;
+OBJECT * constant_true;
+OBJECT * constant_name;
+OBJECT * constant_bases;
+OBJECT * constant_class;
+OBJECT * constant_typecheck;
+OBJECT * constant_builtin;
+OBJECT * constant_HCACHEFILE;
+OBJECT * constant_HCACHEMAXAGE;
+OBJECT * constant_HDRSCAN;
+OBJECT * constant_HDRRULE;
+OBJECT * constant_BINDRULE;
+OBJECT * constant_LOCATE;
+OBJECT * constant_SEARCH;
+OBJECT * constant_JAM_SEMAPHORE;
+OBJECT * constant_TIMING_RULE;
+OBJECT * constant_ACTION_RULE;
+OBJECT * constant_JAMSHELL;
+OBJECT * constant_TMPDIR;
+OBJECT * constant_TMPNAME;
+OBJECT * constant_TMPFILE;
+OBJECT * constant_STDOUT;
+OBJECT * constant_STDERR;
+OBJECT * constant_JAMDATE;
+OBJECT * constant_JAM_VERSION;
+OBJECT * constant_JAMUNAME;
+OBJECT * constant_ENVIRON;
+OBJECT * constant_ARGV;
+OBJECT * constant_all;
+OBJECT * constant_PARALLELISM;
+OBJECT * constant_KEEP_GOING;
+OBJECT * constant_other;
+OBJECT * constant_total;
+OBJECT * constant_FILE_DIRSCAN;
+OBJECT * constant_MAIN;
+OBJECT * constant_MAIN_MAKE;
+OBJECT * constant_MAKE_MAKE0;
+OBJECT * constant_MAKE_MAKE1;
+OBJECT * constant_MAKE_MAKE0SORT;
+OBJECT * constant_BINDMODULE;
+OBJECT * constant_IMPORT_MODULE;
+OBJECT * constant_BUILTIN_GLOB_BACK;
+OBJECT * constant_timestamp;
+OBJECT * constant_JAM_TIMESTAMP_RESOLUTION;
+OBJECT * constant_python;
+OBJECT * constant_python_interface;
+OBJECT * constant_extra_pythonpath;
+OBJECT * constant_MAIN_PYTHON;
diff --git a/tools/build/src/engine/constants.h b/tools/build/src/engine/constants.h
new file mode 100644
index 0000000000..60d7073b90
--- /dev/null
+++ b/tools/build/src/engine/constants.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * constants.h - constant objects
+ */
+
+#ifndef BOOST_JAM_CONSTANTS_H
+#define BOOST_JAM_CONSTANTS_H
+
+#include "object.h"
+
+void constants_init( void );
+void constants_done( void );
+
+extern OBJECT * constant_empty; /* "" */
+extern OBJECT * constant_dot; /* "." */
+extern OBJECT * constant_plus; /* "+" */
+extern OBJECT * constant_star; /* "*" */
+extern OBJECT * constant_question_mark; /* "?" */
+extern OBJECT * constant_ok; /* "ok" */
+extern OBJECT * constant_true; /* "true" */
+extern OBJECT * constant_name; /* "__name__" */
+extern OBJECT * constant_bases; /* "__bases__" */
+extern OBJECT * constant_class; /* "__class__" */
+extern OBJECT * constant_typecheck; /* ".typecheck" */
+extern OBJECT * constant_builtin; /* "(builtin)" */
+extern OBJECT * constant_HCACHEFILE; /* "HCACHEFILE" */
+extern OBJECT * constant_HCACHEMAXAGE; /* "HCACHEMAXAGE" */
+extern OBJECT * constant_HDRSCAN; /* "HDRSCAN" */
+extern OBJECT * constant_HDRRULE; /* "HDRRULE" */
+extern OBJECT * constant_BINDRULE; /* "BINDRULE" */
+extern OBJECT * constant_LOCATE; /* "LOCATE" */
+extern OBJECT * constant_SEARCH; /* "SEARCH" */
+extern OBJECT * constant_JAM_SEMAPHORE; /* "JAM_SEMAPHORE" */
+extern OBJECT * constant_TIMING_RULE; /* "__TIMING_RULE__" */
+extern OBJECT * constant_ACTION_RULE; /* "__ACTION_RULE__" */
+extern OBJECT * constant_JAMSHELL; /* "JAMSHELL" */
+extern OBJECT * constant_TMPDIR; /* "TMPDIR" */
+extern OBJECT * constant_TMPNAME; /* "TMPNAME" */
+extern OBJECT * constant_TMPFILE; /* "TMPFILE" */
+extern OBJECT * constant_STDOUT; /* "STDOUT" */
+extern OBJECT * constant_STDERR; /* "STDERR" */
+extern OBJECT * constant_JAMDATE; /* "JAMDATE" */
+extern OBJECT * constant_JAM_TIMESTAMP_RESOLUTION; /* "JAM_TIMESTAMP_RESOLUTION" */
+extern OBJECT * constant_JAM_VERSION; /* "JAM_VERSION" */
+extern OBJECT * constant_JAMUNAME; /* "JAMUNAME" */
+extern OBJECT * constant_ENVIRON; /* ".ENVIRON" */
+extern OBJECT * constant_ARGV; /* "ARGV" */
+extern OBJECT * constant_all; /* "all" */
+extern OBJECT * constant_PARALLELISM; /* "PARALLELISM" */
+extern OBJECT * constant_KEEP_GOING; /* "KEEP_GOING" */
+extern OBJECT * constant_other; /* "[OTHER]" */
+extern OBJECT * constant_total; /* "[TOTAL]" */
+extern OBJECT * constant_FILE_DIRSCAN; /* "FILE_DIRSCAN" */
+extern OBJECT * constant_MAIN; /* "MAIN" */
+extern OBJECT * constant_MAIN_MAKE; /* "MAIN_MAKE" */
+extern OBJECT * constant_MAKE_MAKE0; /* "MAKE_MAKE0" */
+extern OBJECT * constant_MAKE_MAKE1; /* "MAKE_MAKE1" */
+extern OBJECT * constant_MAKE_MAKE0SORT; /* "MAKE_MAKE0SORT" */
+extern OBJECT * constant_BINDMODULE; /* "BINDMODULE" */
+extern OBJECT * constant_IMPORT_MODULE; /* "IMPORT_MODULE" */
+extern OBJECT * constant_BUILTIN_GLOB_BACK; /* "BUILTIN_GLOB_BACK" */
+extern OBJECT * constant_timestamp; /* "timestamp" */
+extern OBJECT * constant_python; /* "__python__" */
+extern OBJECT * constant_python_interface; /* "python_interface" */
+extern OBJECT * constant_extra_pythonpath; /* "EXTRA_PYTHONPATH" */
+extern OBJECT * constant_MAIN_PYTHON; /* "MAIN_PYTHON" */
+
+#endif
diff --git a/tools/build/src/engine/cwd.c b/tools/build/src/engine/cwd.c
new file mode 100644
index 0000000000..7ebe97045f
--- /dev/null
+++ b/tools/build/src/engine/cwd.c
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2002. Vladimir Prus
+ * Copyright 2005. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "cwd.h"
+
+#include "jam.h"
+#include "mem.h"
+#include "pathsys.h"
+
+#include <assert.h>
+#include <errno.h>
+#include <limits.h>
+
+/* MinGW on Windows declares PATH_MAX in limits.h */
+#if defined( NT ) && !defined( __GNUC__ )
+# include <direct.h>
+# define PATH_MAX _MAX_PATH
+#else
+# include <unistd.h>
+# if defined( __COMO__ )
+# include <linux/limits.h>
+# endif
+#endif
+
+#ifndef PATH_MAX
+# define PATH_MAX 1024
+#endif
+
+
+static OBJECT * cwd_;
+
+
+void cwd_init( void )
+{
+ int buffer_size = PATH_MAX;
+ char * cwd_buffer = 0;
+ int error;
+
+ assert( !cwd_ );
+
+ do
+ {
+ char * const buffer = BJAM_MALLOC_RAW( buffer_size );
+ cwd_buffer = getcwd( buffer, buffer_size );
+ error = errno;
+ if ( cwd_buffer )
+ {
+ /* We store the path using its canonical/long/key format. */
+ OBJECT * const cwd = object_new( cwd_buffer );
+ cwd_ = path_as_key( cwd );
+ object_free( cwd );
+ }
+ buffer_size *= 2;
+ BJAM_FREE_RAW( buffer );
+ }
+ while ( !cwd_ && error == ERANGE );
+
+ if ( !cwd_ )
+ {
+ perror( "can not get current working directory" );
+ exit( EXITBAD );
+ }
+}
+
+
+OBJECT * cwd( void )
+{
+ assert( cwd_ );
+ return cwd_;
+}
+
+
+void cwd_done( void )
+{
+ assert( cwd_ );
+ object_free( cwd_ );
+ cwd_ = NULL;
+}
diff --git a/tools/build/src/engine/cwd.h b/tools/build/src/engine/cwd.h
new file mode 100644
index 0000000000..886714a8fd
--- /dev/null
+++ b/tools/build/src/engine/cwd.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2002. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * cwd.h - manages the current working folder information
+ */
+
+#ifndef CWD_H
+#define CWD_H
+
+#include "object.h"
+
+
+/* cwd() - returns the current working folder */
+OBJECT * cwd( void );
+
+/* cwd_init() - initialize the cwd module functionality
+ *
+ * The current working folder can not change in Boost Jam so this function
+ * gets the current working folder information from the OS and stores it
+ * internally.
+ *
+ * Expected to be called at program startup before the program's current
+ * working folder has been changed
+ */
+void cwd_init( void );
+
+/* cwd_done() - cleans up the cwd module functionality */
+void cwd_done( void );
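+
+/* Illustrative lifetime sketch: cwd_init() once at startup, cwd() whenever the
+ * value is needed (the returned OBJECT stays owned by this module and must not
+ * be freed by the caller), and cwd_done() once at shutdown.
+ *
+ *     cwd_init();
+ *     ...
+ *     OBJECT * const dir = cwd();
+ *     ...
+ *     cwd_done();
+ */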
+
+#endif
diff --git a/tools/build/v2/engine/debian/changelog b/tools/build/src/engine/debian/changelog
index 29084289cc..29084289cc 100644
--- a/tools/build/v2/engine/debian/changelog
+++ b/tools/build/src/engine/debian/changelog
diff --git a/tools/build/v2/engine/debian/control b/tools/build/src/engine/debian/control
index c7f151932e..c7f151932e 100644
--- a/tools/build/v2/engine/debian/control
+++ b/tools/build/src/engine/debian/control
diff --git a/tools/build/v2/engine/debian/copyright b/tools/build/src/engine/debian/copyright
index f72e4e3a96..f72e4e3a96 100644
--- a/tools/build/v2/engine/debian/copyright
+++ b/tools/build/src/engine/debian/copyright
diff --git a/tools/build/v2/engine/debian/jam.man.sgml b/tools/build/src/engine/debian/jam.man.sgml
index ee21d4d830..ee21d4d830 100644
--- a/tools/build/v2/engine/debian/jam.man.sgml
+++ b/tools/build/src/engine/debian/jam.man.sgml
diff --git a/tools/build/v2/engine/debian/rules b/tools/build/src/engine/debian/rules
index 756052a3b3..756052a3b3 100755
--- a/tools/build/v2/engine/debian/rules
+++ b/tools/build/src/engine/debian/rules
diff --git a/tools/build/src/engine/debug.c b/tools/build/src/engine/debug.c
new file mode 100644
index 0000000000..2a656551bf
--- /dev/null
+++ b/tools/build/src/engine/debug.c
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2005. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "debug.h"
+
+#include "hash.h"
+
+
+static profile_frame * profile_stack = 0;
+static struct hash * profile_hash = 0;
+static profile_info profile_other = { 0 };
+static profile_info profile_total = { 0 };
+
+
+profile_frame * profile_init( OBJECT * rulename, profile_frame * frame )
+{
+ if ( DEBUG_PROFILE ) profile_enter( rulename, frame );
+ return frame;
+}
+
+
+void profile_enter( OBJECT * rulename, profile_frame * frame )
+{
+ if ( DEBUG_PROFILE )
+ {
+ clock_t start = clock();
+ profile_info * p;
+
+ if ( !profile_hash && rulename )
+ profile_hash = hashinit( sizeof( profile_info ), "profile" );
+
+ if ( rulename )
+ {
+ int found;
+ p = (profile_info *)hash_insert( profile_hash, rulename, &found );
+ if ( !found )
+ {
+ p->name = rulename;
+ p->cumulative = 0;
+ p->net = 0;
+ p->num_entries = 0;
+ p->stack_count = 0;
+ p->memory = 0;
+ }
+ }
+ else
+ {
+ p = &profile_other;
+ }
+
+ ++p->num_entries;
+ ++p->stack_count;
+
+ frame->info = p;
+
+ frame->caller = profile_stack;
+ profile_stack = frame;
+
+ frame->entry_time = clock();
+ frame->overhead = 0;
+ frame->subrules = 0;
+
+ /* caller pays for the time it takes to play with the hash table */
+ if ( frame->caller )
+ frame->caller->overhead += frame->entry_time - start;
+ }
+}
+
+
+void profile_memory( long mem )
+{
+ if ( DEBUG_PROFILE )
+ if ( profile_stack && profile_stack->info )
+ profile_stack->info->memory += mem;
+}
+
+
+void profile_exit( profile_frame * frame )
+{
+ if ( DEBUG_PROFILE )
+ {
+ /* Cumulative time for this call. */
+ clock_t const t = clock() - frame->entry_time - frame->overhead;
+ /* If this rule is already present on the stack, do not add the time for
+ * this instance.
+ */
+ if ( frame->info->stack_count == 1 )
+ frame->info->cumulative += t;
+        /* Net time does not depend on the presence of the same rule in the
+         * call stack.
+         */
+ frame->info->net += t - frame->subrules;
+
+ if ( frame->caller )
+ {
+ /* Caller's cumulative time must account for this overhead. */
+ frame->caller->overhead += frame->overhead;
+ frame->caller->subrules += t;
+ }
+ /* Pop this stack frame. */
+ --frame->info->stack_count;
+ profile_stack = frame->caller;
+ }
+}
+
+
+static void dump_profile_entry( void * p_, void * ignored )
+{
+ profile_info * p = (profile_info *)p_;
+ unsigned long mem_each = ( p->memory / ( p->num_entries ? p->num_entries : 1
+ ) );
+ double cumulative = p->cumulative;
+ double net = p->net;
+ double q = p->net;
+ q /= ( p->num_entries ? p->num_entries : 1 );
+ cumulative /= CLOCKS_PER_SEC;
+ net /= CLOCKS_PER_SEC;
+ q /= CLOCKS_PER_SEC;
+ if ( !ignored )
+ {
+ profile_total.cumulative += p->net;
+ profile_total.memory += p->memory;
+ }
+ printf( "%10ld %12.6f %12.6f %12.8f %10ld %10ld %s\n", p->num_entries,
+ cumulative, net, q, p->memory, mem_each, object_str( p->name ) );
+}
+
+
+void profile_dump()
+{
+ if ( profile_hash )
+ {
+ printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--",
+ "--net--", "--each--", "--mem--", "--each--", "--name--" );
+ hashenumerate( profile_hash, dump_profile_entry, 0 );
+ profile_other.name = constant_other;
+ dump_profile_entry( &profile_other, 0 );
+ profile_total.name = constant_total;
+ dump_profile_entry( &profile_total, (void *)1 );
+ }
+}
diff --git a/tools/build/src/engine/debug.h b/tools/build/src/engine/debug.h
new file mode 100644
index 0000000000..4151d27fab
--- /dev/null
+++ b/tools/build/src/engine/debug.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2005. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef BJAM_DEBUG_H
+#define BJAM_DEBUG_H
+
+#include "constants.h"
+#include "object.h"
+#include <time.h>
+
+
+typedef struct profile_info
+{
+ /* name of rule being called */
+ OBJECT * name;
+ /* cumulative time spent in rule */
+ clock_t cumulative;
+ /* time spent in rule proper */
+ clock_t net;
+    /* number of times the rule was entered */
+ unsigned long num_entries;
+    /* number of times this function is present in the call stack */
+ unsigned long stack_count;
+ /* bytes of memory allocated by the call */
+ unsigned long memory;
+} profile_info;
+
+typedef struct profile_frame
+{
+ /* permanent storage where data accumulates */
+ profile_info * info;
+ /* overhead for profiling in this call */
+ clock_t overhead;
+ /* time of last entry to rule */
+ clock_t entry_time;
+ /* stack frame of caller */
+ struct profile_frame * caller;
+ /* time spent in subrules */
+ clock_t subrules;
+} profile_frame;
+
+profile_frame * profile_init( OBJECT * rulename, profile_frame * );
+void profile_enter( OBJECT * rulename, profile_frame * );
+void profile_memory( long mem );
+void profile_exit( profile_frame * );
+void profile_dump();
+
+#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( constant_ ## scope, &PROF_ ## scope )
+#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p )
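+
+/* Illustrative usage sketch: the scope name must match one of the constant_*
+ * objects from constants.h, e.g. constant_MAIN_MAKE.
+ *
+ *     PROFILE_ENTER( MAIN_MAKE );
+ *     ... work being profiled ...
+ *     PROFILE_EXIT( MAIN_MAKE );
+ */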
+
+#endif
diff --git a/tools/build/src/engine/execcmd.c b/tools/build/src/engine/execcmd.c
new file mode 100644
index 0000000000..f751cbff33
--- /dev/null
+++ b/tools/build/src/engine/execcmd.c
@@ -0,0 +1,121 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2007 Noel Belcourt.
+ *
+ * Utility functions shared between different exec*.c platform specific
+ * implementation modules.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "execcmd.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+
+/* Internal interrupt counter. */
+static int intr;
+
+
+/* Constructs a list of command-line elements using the format specified by the
+ * given shell list.
+ *
+ * Given argv array should have at least MAXARGC + 1 elements.
+ * Slot numbers may be between 0 and 998 (inclusive).
+ *
+ * Constructed argv list will be zero terminated. Character arrays referenced by
+ * the argv structure elements will be either elements from the given shell
+ * list, internal static buffers or the given command string. They should thus
+ * not be considered owned by or released via the argv structure and should be
+ * considered invalidated by the next argv_from_shell() call.
+ *
+ * Shell list elements:
+ * - Starting with '%' - represent the command string.
+ * - Starting with '!' - represent the slot number (increased by one).
+ * - Anything else - used as a literal.
+ * - If no '%' element is found, the command string is appended as an extra.
+ */
+
+void argv_from_shell( char const * * argv, LIST * shell, char const * command,
+ int const slot )
+{
+ static char jobno[ 4 ];
+
+ int i;
+ int gotpercent = 0;
+ LISTITER iter = list_begin( shell );
+ LISTITER end = list_end( shell );
+
+ assert( 0 <= slot );
+ assert( slot < 999 );
+ sprintf( jobno, "%d", slot + 1 );
+
+ for ( i = 0; iter != end && i < MAXARGC; ++i, iter = list_next( iter ) )
+ {
+ switch ( object_str( list_item( iter ) )[ 0 ] )
+ {
+ case '%': argv[ i ] = command; ++gotpercent; break;
+ case '!': argv[ i ] = jobno; break;
+ default : argv[ i ] = object_str( list_item( iter ) );
+ }
+ }
+
+ if ( !gotpercent )
+ argv[ i++ ] = command;
+
+ argv[ i ] = NULL;
+}
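+
+/* Illustrative expansion sketch (shell list contents and command are
+ * hypothetical): with a shell list of "/bin/sh", "-c", "%", a command of
+ * "echo hi" and slot 0, the resulting argv is
+ * { "/bin/sh", "-c", "echo hi", NULL }; a "!" element would instead have
+ * expanded to the string "1" (slot + 1).
+ */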
+
+
+/* Returns EXEC_CHECK_LINE_TOO_LONG (filling in the error details) if the given
+ * command string contains a line longer than the given maximum, or
+ * EXEC_CHECK_OK otherwise.
+ */
+int check_cmd_for_too_long_lines( char const * command, int const max,
+ int * const error_length, int * const error_max_length )
+{
+ while ( *command )
+ {
+ size_t const l = strcspn( command, "\n" );
+ if ( l > max )
+ {
+ *error_length = l;
+ *error_max_length = max;
+ return EXEC_CHECK_LINE_TOO_LONG;
+ }
+ command += l;
+ if ( *command )
+ ++command;
+ }
+ return EXEC_CHECK_OK;
+}
+
+
+/* Checks whether the given shell list is actually a request to execute raw
+ * commands without an external shell.
+ */
+int is_raw_command_request( LIST * shell )
+{
+ return !list_empty( shell ) &&
+ !strcmp( object_str( list_front( shell ) ), "%" ) &&
+ list_next( list_begin( shell ) ) == list_end( shell );
+}
+
+
+/* Returns whether an interrupt has been detected so far. */
+
+int interrupted( void )
+{
+ return intr != 0;
+}
+
+
+/* Internal interrupt handler. */
+
+void onintr( int disp )
+{
+ ++intr;
+ printf( "...interrupted\n" );
+}
diff --git a/tools/build/src/engine/execcmd.h b/tools/build/src/engine/execcmd.h
new file mode 100644
index 0000000000..ab145aa1d4
--- /dev/null
+++ b/tools/build/src/engine/execcmd.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * execcmd.h - execute a shell script.
+ *
+ * Defines the interface to be implemented in platform specific implementation
+ * modules as well as different shared utility functions prepared in the
+ * execcmd.c module.
+ */
+
+#ifndef EXECCMD_H
+#define EXECCMD_H
+
+#include "lists.h"
+#include "strings.h"
+#include "timestamp.h"
+
+
+typedef struct timing_info
+{
+ double system;
+ double user;
+ timestamp start;
+ timestamp end;
+} timing_info;
+
+typedef void (* ExecCmdCallback)
+(
+ void * const closure,
+ int const status,
+ timing_info const * const,
+ char const * const cmd_stdout,
+ char const * const cmd_stderr,
+ int const cmd_exit_reason
+);
+
+/* Status codes passed to ExecCmdCallback routines. */
+#define EXEC_CMD_OK 0
+#define EXEC_CMD_FAIL 1
+#define EXEC_CMD_INTR 2
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+);
+
+/* exec_check() return codes. */
+#define EXEC_CHECK_OK 101
+#define EXEC_CHECK_NOOP 102
+#define EXEC_CHECK_LINE_TOO_LONG 103
+#define EXEC_CHECK_TOO_LONG 104
+
+void exec_cmd
+(
+ string const * command,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+);
+
+void exec_wait();
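+
+/* Illustrative sketch only - a completion callback matching the
+ * ExecCmdCallback signature (the function and variable names below are
+ * hypothetical):
+ *
+ *     static void on_cmd_done( void * const closure, int const status,
+ *         timing_info const * const time, char const * const cmd_stdout,
+ *         char const * const cmd_stderr, int const cmd_exit_reason )
+ *     {
+ *         if ( status != EXEC_CMD_OK )
+ *             { ... handle the failed command ... }
+ *     }
+ *
+ * Such a callback is passed to exec_cmd() together with its opaque closure and
+ * is later invoked from exec_wait() when the command completes.
+ */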
+
+
+/******************************************************************************
+ * *
+ * Utility functions defined in the execcmd.c module. *
+ * *
+ ******************************************************************************/
+
+/* Constructs a list of command-line elements using the format specified by the
+ * given shell list.
+ */
+void argv_from_shell( char const * * argv, LIST * shell, char const * command,
+ int const slot );
+
+/* Interrupt routine bumping the internal interrupt counter. Needs to be
+ * registered by platform specific exec*.c modules.
+ */
+void onintr( int disp );
+
+/* Returns whether an interrupt has been detected so far. */
+int interrupted( void );
+
+/* Checks whether the given shell list is actually a request to execute raw
+ * commands without an external shell.
+ */
+int is_raw_command_request( LIST * shell );
+
+/* Utility worker for exec_check() checking whether all the given command lines
+ * are under the specified length limit.
+ */
+int check_cmd_for_too_long_lines( char const * command, int const max,
+ int * const error_length, int * const error_max_length );
+
+#endif
diff --git a/tools/build/src/engine/execnt.c b/tools/build/src/engine/execnt.c
new file mode 100644
index 0000000000..d75aab0aef
--- /dev/null
+++ b/tools/build/src/engine/execnt.c
@@ -0,0 +1,1400 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2007 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * execnt.c - execute a shell command on Windows NT
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate the actual command. The
+ * default is: cmd.exe /Q/C
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to the slot
+ * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
+ * not include a %, it is tacked on as the last argument.
+ *
+ * Each $(JAMSHELL) placeholder must be specified as a separate individual
+ * element in a jam variable value.
+ *
+ * Do not just set JAMSHELL to cmd.exe - it will not work!
+ *
+ * External routines:
+ * exec_check() - preprocess and validate the command
+ * exec_cmd() - launch an async command execution
+ * exec_wait() - wait for any of the async command processes to terminate
+ *
+ * Internal routines:
+ * filetime_to_seconds() - Windows FILETIME --> number of seconds conversion
+ */
+
+#include "jam.h"
+#ifdef USE_EXECNT
+#include "execcmd.h"
+
+#include "lists.h"
+#include "output.h"
+#include "pathsys.h"
+#include "string.h"
+
+#include <assert.h>
+#include <ctype.h>
+#include <errno.h>
+#include <time.h>
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <process.h>
+#include <tlhelp32.h>
+
+
+/* get the maximum shell command line length according to the OS */
+static int maxline();
+/* valid raw command string length */
+static long raw_command_length( char const * command );
+/* add two 64-bit unsigned numbers, h1l1 and h2l2 */
+static FILETIME add_64(
+ unsigned long h1, unsigned long l1,
+ unsigned long h2, unsigned long l2 );
+/* add two FILETIME values */
+static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 );
+/* negate a FILETIME value (two's complement) */
+static FILETIME negate_FILETIME( FILETIME t );
+/* record the timing info for the process */
+static void record_times( HANDLE const, timing_info * const );
+/* calc the current running time of an *active* process */
+static double running_time( HANDLE const );
+/* terminate the given process, after terminating all its children first */
+static void kill_process_tree( DWORD const procesdId, HANDLE const );
+/* waits for a command to complete or time out */
+static int try_wait( int const timeoutMillis );
+/* reads any pending output for running commands */
+static void read_output();
+/* checks if a command ran out of time, and kills it */
+static int try_kill_one();
+/* is the first process a parent (direct or indirect) to the second one */
+static int is_parent_child( DWORD const parent, DWORD const child );
+/* close an alert dialog left around by the given process, if any */
+static void close_alert( PROCESS_INFORMATION const * const );
+/* close any alerts hanging around */
+static void close_alerts();
+/* prepare a command file to be executed using an external shell */
+static char const * prepare_command_file( string const * command, int slot );
+/* invoke the actual external process using the given command line */
+static void invoke_cmd( char const * const command, int const slot );
+/* find a free slot in the running commands table */
+static int get_free_cmdtab_slot();
+/* put together the final command string we are to run */
+static void string_new_from_argv( string * result, char const * const * argv );
+/* frees and renews the given string */
+static void string_renew( string * const );
+/* reports the last failed Windows API related error message */
+static void reportWindowsError( char const * const apiName, int slot );
+/* closes a Windows HANDLE and resets its variable to 0. */
+static void closeWinHandle( HANDLE * const handle );
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+/* CreateProcessA() Windows API places a limit of 32768 characters (bytes) on
+ * the allowed command-line length, including a trailing Unicode (2-byte)
+ * nul-terminator character.
+ */
+#define MAX_RAW_COMMAND_LENGTH 32766
+
+/* We hold handles for pipes used to communicate with child processes in two
+ * element arrays indexed as follows.
+ */
+#define EXECCMD_PIPE_READ 0
+#define EXECCMD_PIPE_WRITE 1
+
+static int intr_installed;
+
+
+/* The list of commands we run. */
+static struct
+{
+ /* Temporary command file used to execute the action when needed. */
+ string command_file[ 1 ];
+
+ /* Pipes for communicating with the child process. Parent reads from (0),
+ * child writes to (1).
+ */
+ HANDLE pipe_out[ 2 ];
+ HANDLE pipe_err[ 2 ];
+
+ string buffer_out[ 1 ]; /* buffer to hold stdout, if any */
+ string buffer_err[ 1 ]; /* buffer to hold stderr, if any */
+
+ PROCESS_INFORMATION pi; /* running process information */
+
+ /* Function called when the command completes. */
+ ExecCmdCallback func;
+
+ /* Opaque data passed back to the 'func' callback. */
+ void * closure;
+}
+cmdtab[ MAXJOBS ] = { { 0 } };
+
+
+/*
+ * Execution unit tests.
+ */
+
+void execnt_unit_test()
+{
+#if !defined( NDEBUG )
+ /* vc6 preprocessor is broken, so assert with these strings gets confused.
+ * Use a table instead.
+ */
+ {
+ typedef struct test { char * command; int result; } test;
+ test tests[] = {
+ { "", 0 },
+ { " ", 0 },
+ { "x", 1 },
+ { "\nx", 1 },
+ { "x\n", 1 },
+ { "\nx\n", 1 },
+ { "\nx \n", 2 },
+ { "\nx \n ", 2 },
+ { " \n\t\t\v\r\r\n \t x \v \t\t\r\n\n\n \n\n\v\t", 8 },
+ { "x\ny", -1 },
+ { "x\n\n y", -1 },
+ { "echo x > foo.bar", -1 },
+ { "echo x < foo.bar", -1 },
+ { "echo x | foo.bar", -1 },
+ { "echo x \">\" foo.bar", 18 },
+ { "echo x '<' foo.bar", 18 },
+ { "echo x \"|\" foo.bar", 18 },
+ { "echo x \\\">\\\" foo.bar", -1 },
+ { "echo x \\\"<\\\" foo.bar", -1 },
+ { "echo x \\\"|\\\" foo.bar", -1 },
+ { "\"echo x > foo.bar\"", 18 },
+ { "echo x \"'\"<' foo.bar", -1 },
+ { "echo x \\\\\"<\\\\\" foo.bar", 22 },
+ { "echo x \\x\\\"<\\\\\" foo.bar", -1 },
+ { 0 } };
+ test const * t;
+ for ( t = tests; t->command; ++t )
+ assert( raw_command_length( t->command ) == t->result );
+ }
+
+ {
+ int const length = maxline() + 9;
+ char * const cmd = (char *)BJAM_MALLOC_ATOMIC( length + 1 );
+ memset( cmd, 'x', length );
+ cmd[ length ] = 0;
+ assert( raw_command_length( cmd ) == length );
+ BJAM_FREE( cmd );
+ }
+#endif
+}
+
+
+/*
+ * exec_check() - preprocess and validate the command
+ */
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+)
+{
+ /* Default shell does nothing when triggered with an empty or a
+     * whitespace-only command, so we simply skip running it in that case. We
+     * still pass such commands on to non-default shells as we do not really
+     * know what they are going to do with them.
+ */
+ if ( list_empty( *pShell ) )
+ {
+ char const * s = command->value;
+ while ( isspace( *s ) ) ++s;
+ if ( !*s )
+ return EXEC_CHECK_NOOP;
+ }
+
+ /* Check prerequisites for executing raw commands. */
+ if ( is_raw_command_request( *pShell ) )
+ {
+ int const raw_cmd_length = raw_command_length( command->value );
+ if ( raw_cmd_length < 0 )
+ {
+ /* Invalid characters detected - fallback to default shell. */
+ list_free( *pShell );
+ *pShell = L0;
+ }
+ else if ( raw_cmd_length > MAX_RAW_COMMAND_LENGTH )
+ {
+ *error_length = raw_cmd_length;
+ *error_max_length = MAX_RAW_COMMAND_LENGTH;
+ return EXEC_CHECK_TOO_LONG;
+ }
+ else
+ return raw_cmd_length ? EXEC_CHECK_OK : EXEC_CHECK_NOOP;
+ }
+
+ /* Now we know we are using an external shell. Note that there is no need to
+ * check for too long command strings when using an external shell since we
+ * use a command file and assume no one is going to set up a JAMSHELL format
+ * string longer than a few hundred bytes at most which should be well under
+ * the total command string limit. Should someone actually construct such a
+ * JAMSHELL value it will get reported as an 'invalid parameter'
+ * CreateProcessA() Windows API failure which seems like a good enough
+ * result for such intentional mischief.
+ */
+
+ /* Check for too long command lines. */
+ return check_cmd_for_too_long_lines( command->value, maxline(),
+ error_length, error_max_length );
+}
+
+
+/*
+ * exec_cmd() - launch an async command execution
+ *
+ * We assume exec_check() already verified that the given command can have its
+ * command string constructed as requested.
+ */
+
+void exec_cmd
+(
+ string const * cmd_orig,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+)
+{
+ int const slot = get_free_cmdtab_slot();
+ int const is_raw_cmd = is_raw_command_request( shell );
+ string cmd_local[ 1 ];
+
+ /* Initialize default shell - anything more than /Q/C is non-portable. */
+ static LIST * default_shell;
+ if ( !default_shell )
+ default_shell = list_new( object_new( "cmd.exe /Q/C" ) );
+
+ /* Specifying no shell means requesting the default shell. */
+ if ( list_empty( shell ) )
+ shell = default_shell;
+
+ if ( DEBUG_EXECCMD )
+ if ( is_raw_cmd )
+ printf( "Executing raw command directly\n" );
+ else
+ {
+ printf( "Executing using a command file and the shell: " );
+ list_print( shell );
+ printf( "\n" );
+ }
+
+    /* If we are running a raw command directly, trim its leading whitespace
+     * as well as any trailing all-whitespace lines, but keep any trailing
+     * whitespace in the final/only line containing something other than
+     * whitespace.
+ */
+ if ( is_raw_cmd )
+ {
+ char const * start = cmd_orig->value;
+ char const * p = cmd_orig->value + cmd_orig->size;
+ char const * end = p;
+ while ( isspace( *start ) ) ++start;
+ while ( p > start && isspace( p[ -1 ] ) )
+ if ( *--p == '\n' )
+ end = p;
+ string_new( cmd_local );
+ string_append_range( cmd_local, start, end );
+ assert( cmd_local->size == raw_command_length( cmd_orig->value ) );
+ }
+ /* If we are not running a raw command directly, prepare a command file to
+ * be executed using an external shell and the actual command string using
+ * that command file.
+ */
+ else
+ {
+ char const * const cmd_file = prepare_command_file( cmd_orig, slot );
+ char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */
+ argv_from_shell( argv, shell, cmd_file, slot );
+ string_new_from_argv( cmd_local, argv );
+ }
+
+ /* Catch interrupts whenever commands are running. */
+ if ( !intr_installed )
+ {
+ intr_installed = 1;
+ signal( SIGINT, onintr );
+ }
+
+ /* Save input data into the selected running commands table slot. */
+ cmdtab[ slot ].func = func;
+ cmdtab[ slot ].closure = closure;
+
+ /* Invoke the actual external process using the constructed command line. */
+ invoke_cmd( cmd_local->value, slot );
+
+ /* Free our local command string copy. */
+ string_free( cmd_local );
+}
+
+
+/*
+ * exec_wait() - wait for any of the async command processes to terminate
+ *
+ * Wait and drive at most one execution completion, while processing the I/O for
+ * all ongoing commands.
+ */
+
+void exec_wait()
+{
+ int i = -1;
+ int exit_reason; /* reason why a command completed */
+
+ /* Wait for a command to complete, while snarfing up any output. */
+ while ( 1 )
+ {
+ /* Check for a complete command, briefly. */
+ i = try_wait( 500 );
+ /* Read in the output of all running commands. */
+ read_output();
+ /* Close out pending debug style dialogs. */
+ close_alerts();
+ /* Process the completed command we found. */
+ if ( i >= 0 ) { exit_reason = EXIT_OK; break; }
+ /* Check if a command ran out of time. */
+ i = try_kill_one();
+ if ( i >= 0 ) { exit_reason = EXIT_TIMEOUT; break; }
+ }
+
+ /* We have a command... process it. */
+ {
+ DWORD exit_code;
+ timing_info time;
+ int rstat;
+
+ /* The time data for the command. */
+ record_times( cmdtab[ i ].pi.hProcess, &time );
+
+        /* Remove the used temporary command file. */
+ if ( cmdtab[ i ].command_file->size )
+ unlink( cmdtab[ i ].command_file->value );
+
+ /* Find out the process exit code. */
+ GetExitCodeProcess( cmdtab[ i ].pi.hProcess, &exit_code );
+
+        /* The disposition of the command. */
+ if ( interrupted() )
+ rstat = EXEC_CMD_INTR;
+ else if ( exit_code )
+ rstat = EXEC_CMD_FAIL;
+ else
+ rstat = EXEC_CMD_OK;
+
+ /* Call the callback, may call back to jam rule land. */
+ (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time,
+ cmdtab[ i ].buffer_out->value, cmdtab[ i ].buffer_err->value,
+ exit_reason );
+
+ /* Clean up our child process tracking data. No need to clear the
+ * temporary command file name as it gets reused.
+ */
+ closeWinHandle( &cmdtab[ i ].pi.hProcess );
+ closeWinHandle( &cmdtab[ i ].pi.hThread );
+ closeWinHandle( &cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ i ].pipe_out[ EXECCMD_PIPE_WRITE ] );
+ closeWinHandle( &cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ i ].pipe_err[ EXECCMD_PIPE_WRITE ] );
+ string_renew( cmdtab[ i ].buffer_out );
+ string_renew( cmdtab[ i ].buffer_err );
+ }
+}
+
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+/*
+ * Invoke the actual external process using the given command line. Track the
+ * process in our running commands table.
+ */
+
+static void invoke_cmd( char const * const command, int const slot )
+{
+ SECURITY_ATTRIBUTES sa = { sizeof( SECURITY_ATTRIBUTES ), 0, 0 };
+ SECURITY_DESCRIPTOR sd;
+ STARTUPINFO si = { sizeof( STARTUPINFO ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0 };
+
+ /* Init the security data. */
+ InitializeSecurityDescriptor( &sd, SECURITY_DESCRIPTOR_REVISION );
+ SetSecurityDescriptorDacl( &sd, TRUE, NULL, FALSE );
+ sa.lpSecurityDescriptor = &sd;
+ sa.bInheritHandle = TRUE;
+
+ /* Create output buffers. */
+ string_new( cmdtab[ slot ].buffer_out );
+ string_new( cmdtab[ slot ].buffer_err );
+
+ /* Create pipes for communicating with the child process. */
+ if ( !CreatePipe( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ],
+ &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ], &sa, 0 ) )
+ {
+ reportWindowsError( "CreatePipe", slot );
+ return;
+ }
+ if ( globs.pipe_action && !CreatePipe( &cmdtab[ slot ].pipe_err[
+ EXECCMD_PIPE_READ ], &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ],
+ &sa, 0 ) )
+ {
+ reportWindowsError( "CreatePipe", slot );
+ return;
+ }
+
+ /* Set handle inheritance off for the pipe ends the parent reads from. */
+ SetHandleInformation( cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ],
+ HANDLE_FLAG_INHERIT, 0 );
+ if ( globs.pipe_action )
+ SetHandleInformation( cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_READ ],
+ HANDLE_FLAG_INHERIT, 0 );
+
+ /* Hide the child window, if any. */
+ si.dwFlags |= STARTF_USESHOWWINDOW;
+ si.wShowWindow = SW_HIDE;
+
+ /* Redirect the child's output streams to our pipes. */
+ si.dwFlags |= STARTF_USESTDHANDLES;
+ si.hStdOutput = cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ];
+ si.hStdError = globs.pipe_action
+ ? cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ]
+ : cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ];
+
+ /* Let the child inherit stdin, as some commands assume it is available. */
+ si.hStdInput = GetStdHandle( STD_INPUT_HANDLE );
+
+ if ( DEBUG_EXECCMD )
+ printf( "Command string for CreateProcessA(): '%s'\n", command );
+
+ /* Run the command by creating a sub-process for it. */
+ if ( !CreateProcessA(
+ NULL , /* application name */
+ (char *)command , /* command line */
+ NULL , /* process attributes */
+ NULL , /* thread attributes */
+ TRUE , /* inherit handles */
+ CREATE_NEW_PROCESS_GROUP, /* create flags */
+ NULL , /* env vars, null inherits env */
+ NULL , /* current dir, null is our current dir */
+ &si , /* startup info */
+ &cmdtab[ slot ].pi ) ) /* child process info, if created */
+ {
+ reportWindowsError( "CreateProcessA", slot );
+ return;
+ }
+}
+
+
+/*
+ * For more details on Windows cmd.exe shell command-line length limitations see
+ * the following MSDN article:
+ * http://support.microsoft.com/default.aspx?scid=kb;en-us;830473
+ */
+
+static int raw_maxline()
+{
+ OSVERSIONINFO os_info;
+ os_info.dwOSVersionInfoSize = sizeof( os_info );
+ GetVersionEx( &os_info );
+
+ if ( os_info.dwMajorVersion >= 5 ) return 8191; /* XP */
+ if ( os_info.dwMajorVersion == 4 ) return 2047; /* NT 4.x */
+ return 996; /* NT 3.5.1 */
+}
+
+static int maxline()
+{
+    static int result;
+ if ( !result ) result = raw_maxline();
+ return result;
+}
+
+
+/*
+ * Closes a Windows HANDLE and resets its variable to 0.
+ */
+
+static void closeWinHandle( HANDLE * const handle )
+{
+ if ( *handle )
+ {
+ CloseHandle( *handle );
+ *handle = 0;
+ }
+}
+
+
+/*
+ * Frees and renews the given string.
+ */
+
+static void string_renew( string * const s )
+{
+ string_free( s );
+ string_new( s );
+}
+
+
+/*
+ * raw_command_length() - valid raw command string length
+ *
+ * Checks whether the given command may be executed as a raw command. If yes,
+ * returns the corresponding command string length. If not, returns -1.
+ *
+ * Rules for constructing raw command strings:
+ * - Command may not contain unquoted shell I/O redirection characters.
+ * - May have at most one command line with non-whitespace content.
+ * - Leading whitespace trimmed.
+ * - Trailing all-whitespace lines trimmed.
+ * - Trailing whitespace on the sole command line kept (may theoretically
+ * affect the executed command).
+ */
+
+static long raw_command_length( char const * command )
+{
+ char const * p;
+ char const * escape = 0;
+ char inquote = 0;
+ char const * newline = 0;
+
+ /* Skip leading whitespace. */
+ while ( isspace( *command ) )
+ ++command;
+
+ p = command;
+
+ /* Look for newlines and unquoted I/O redirection. */
+ do
+ {
+ p += strcspn( p, "\n\"'<>|\\" );
+ switch ( *p )
+ {
+ case '\n':
+ /* If our command contains non-whitespace content split over
+ * multiple lines we can not execute it directly.
+ */
+ newline = p;
+ while ( isspace( *++p ) );
+ if ( *p ) return -1;
+ break;
+
+ case '\\':
+ escape = escape && escape == p - 1 ? 0 : p;
+ ++p;
+ break;
+
+ case '"':
+ case '\'':
+ if ( escape && escape == p - 1 )
+ escape = 0;
+ else if ( inquote == *p )
+ inquote = 0;
+ else if ( !inquote )
+ inquote = *p;
+ ++p;
+ break;
+
+ case '<':
+ case '>':
+ case '|':
+ if ( !inquote )
+ return -1;
+ ++p;
+ break;
+ }
+ }
+ while ( *p );
+
+ /* Return the number of characters the command will occupy. */
+ return ( newline ? newline : p ) - command;
+}
+
+
+/* 64-bit arithmetic helpers. */
+
+/* Compute the carry bit from the addition of two 32-bit unsigned numbers. */
+#define add_carry_bit( a, b ) ((((a) | (b)) >> 31) & (~((a) + (b)) >> 31) & 0x1)
+
+/* Compute the high 32 bits of the addition of two 64-bit unsigned numbers, h1l1
+ * and h2l2.
+ */
+#define add_64_hi( h1, l1, h2, l2 ) ((h1) + (h2) + add_carry_bit(l1, l2))
+
+
+/*
+ * Add two 64-bit unsigned numbers, h1l1 and h2l2.
+ */
+
+static FILETIME add_64
+(
+ unsigned long h1, unsigned long l1,
+ unsigned long h2, unsigned long l2
+)
+{
+ FILETIME result;
+ result.dwLowDateTime = l1 + l2;
+ result.dwHighDateTime = add_64_hi( h1, l1, h2, l2 );
+ return result;
+}
+
+
+static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 )
+{
+ return add_64( t1.dwHighDateTime, t1.dwLowDateTime, t2.dwHighDateTime,
+ t2.dwLowDateTime );
+}
+
+
+static FILETIME negate_FILETIME( FILETIME t )
+{
+ /* 2s complement negation */
+ return add_64( ~t.dwHighDateTime, ~t.dwLowDateTime, 0, 1 );
+}
+
+
+/*
+ * filetime_to_seconds() - Windows FILETIME --> number of seconds conversion
+ */
+
+static double filetime_to_seconds( FILETIME const ft )
+{
+ return ft.dwHighDateTime * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) +
+ ft.dwLowDateTime * 1.0e-7;
+}
+
+
+static void record_times( HANDLE const process, timing_info * const time )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
+ {
+ time->system = filetime_to_seconds( kernel );
+ time->user = filetime_to_seconds( user );
+ timestamp_from_filetime( &time->start, &creation );
+ timestamp_from_filetime( &time->end, &exit );
+ }
+}
+
+
+#define IO_BUFFER_SIZE ( 16 * 1024 )
+
+static char ioBuffer[ IO_BUFFER_SIZE + 1 ];
+
+
+static void read_pipe
+(
+ HANDLE in, /* the pipe to read from */
+ string * out
+)
+{
+ DWORD bytesInBuffer = 0;
+ DWORD bytesAvailable = 0;
+
+ do
+ {
+ /* check if we have any data to read */
+ if ( !PeekNamedPipe( in, ioBuffer, IO_BUFFER_SIZE, &bytesInBuffer,
+ &bytesAvailable, NULL ) )
+ bytesAvailable = 0;
+
+ /* read in the available data */
+ if ( bytesAvailable > 0 )
+ {
+ /* we only read in the available bytes, to avoid blocking */
+ if ( ReadFile( in, ioBuffer, bytesAvailable <= IO_BUFFER_SIZE ?
+ bytesAvailable : IO_BUFFER_SIZE, &bytesInBuffer, NULL ) )
+ {
+ if ( bytesInBuffer > 0 )
+ {
+ /* Clean up some illegal chars. */
+ int i;
+ for ( i = 0; i < bytesInBuffer; ++i )
+ {
+ if ( ( (unsigned char)ioBuffer[ i ] < 1 ) )
+ ioBuffer[ i ] = '?';
+ }
+                    /* Null-terminate. */
+ ioBuffer[ bytesInBuffer ] = '\0';
+ /* Append to the output. */
+ string_append( out, ioBuffer );
+ /* Subtract what we read in. */
+ bytesAvailable -= bytesInBuffer;
+ }
+ else
+ {
+                    /* Likely a read error, bail out. */
+ bytesAvailable = 0;
+ }
+ }
+ else
+ {
+                /* Definitely a read error, bail out. */
+ bytesAvailable = 0;
+ }
+ }
+ }
+ while ( bytesAvailable > 0 );
+}
+
+
+static void read_output()
+{
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ {
+ /* Read stdout data. */
+ if ( cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ] )
+ read_pipe( cmdtab[ i ].pipe_out[ EXECCMD_PIPE_READ ],
+ cmdtab[ i ].buffer_out );
+ /* Read stderr data. */
+ if ( cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ] )
+ read_pipe( cmdtab[ i ].pipe_err[ EXECCMD_PIPE_READ ],
+ cmdtab[ i ].buffer_err );
+ }
+}
+
+
+/*
+ * Waits for a single child process command to complete, or the timeout,
+ * whichever comes first. Returns the index of the completed command in the
+ * cmdtab array, or -1.
+ */
+
+typedef struct _twh_params
+{
+ int * active_procs;
+ HANDLE * active_handles;
+ DWORD num_active;
+ DWORD timeoutMillis;
+} twh_params;
+
+static int try_wait_helper( twh_params * );
+
+static int try_wait( int const timeoutMillis )
+{
+#define MAX_THREADS MAXJOBS/(MAXIMUM_WAIT_OBJECTS - 1) + 1
+ int i;
+ int num_active;
+ int wait_api_result;
+ HANDLE active_handles[ MAXJOBS + MAX_THREADS ];
+ int active_procs[ MAXJOBS + MAX_THREADS ];
+ unsigned int num_threads;
+ unsigned int num_handles;
+ unsigned int last_chunk_size;
+ unsigned int last_chunk_offset;
+ HANDLE completed_event = INVALID_HANDLE_VALUE;
+ HANDLE thread_handles[MAXIMUM_WAIT_OBJECTS];
+ twh_params thread_params[MAX_THREADS];
+ int result = -1;
+ BOOL success;
+
+ /* Prepare a list of all active processes to wait for. */
+ for ( num_active = 0, i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ {
+ if ( num_active == MAXIMUM_WAIT_OBJECTS )
+ {
+ /*
+ * We surpassed MAXIMUM_WAIT_OBJECTS, so we need to use threads
+ * to wait for this set. Create an event object which will
+ * notify threads to stop waiting. Every handle set chunk should
+ * have this event as its last element.
+ */
+ assert( completed_event == INVALID_HANDLE_VALUE );
+ completed_event = CreateEvent(NULL, FALSE, FALSE, NULL);
+ active_handles[ num_active ] = active_handles[ num_active - 1 ];
+ active_procs[ num_active ] = active_procs[ num_active - 1 ];
+ active_handles[ num_active - 1 ] = completed_event;
+ active_procs[ num_active - 1 ] = -1;
+ ++num_active;
+ }
+ else if ( ( completed_event != INVALID_HANDLE_VALUE ) &&
+ !((num_active + 1) % MAXIMUM_WAIT_OBJECTS) )
+ {
+ active_handles[ num_active ] = completed_event;
+ active_procs[ num_active ] = -1;
+ ++num_active;
+ }
+ active_handles[ num_active ] = cmdtab[ i ].pi.hProcess;
+ active_procs[ num_active ] = i;
+ ++num_active;
+ }
+
+ assert( (num_active <= MAXIMUM_WAIT_OBJECTS) ==
+ (completed_event == INVALID_HANDLE_VALUE) );
+ if ( num_active <= MAXIMUM_WAIT_OBJECTS )
+ {
+ twh_params twh;
+ twh.active_procs = active_procs;
+ twh.active_handles = active_handles;
+ twh.num_active = num_active;
+ twh.timeoutMillis = timeoutMillis;
+ return try_wait_helper( &twh );
+ }
+
+ num_threads = num_active / MAXIMUM_WAIT_OBJECTS;
+ last_chunk_size = num_active % MAXIMUM_WAIT_OBJECTS;
+ num_handles = num_threads;
+ if ( last_chunk_size )
+ {
+ /* Can we fit the last chunk in the outer WFMO call? */
+ if ( last_chunk_size <= MAXIMUM_WAIT_OBJECTS - num_threads )
+ {
+ last_chunk_offset = num_threads * MAXIMUM_WAIT_OBJECTS;
+ for ( i = 0; i < last_chunk_size; ++i )
+ thread_handles[ i + num_threads ] =
+ active_handles[ i + last_chunk_offset ];
+ num_handles = num_threads + last_chunk_size;
+ }
+ else
+ {
+ /* We need another thread for the remainder. */
+ /* Add completed_event handle to the last chunk. */
+ active_handles[ num_active ] = completed_event;
+ active_procs[ num_active ] = -1;
+ ++last_chunk_size;
+ ++num_active;
+ ++num_threads;
+ num_handles = num_threads;
+ }
+ }
+
+ assert( num_threads <= MAX_THREADS );
+
+ for ( i = 0; i < num_threads; ++i )
+ {
+ thread_params[i].active_procs = active_procs +
+ i * MAXIMUM_WAIT_OBJECTS;
+ thread_params[i].active_handles = active_handles +
+ i * MAXIMUM_WAIT_OBJECTS;
+ thread_params[i].timeoutMillis = INFINITE;
+ thread_params[i].num_active = MAXIMUM_WAIT_OBJECTS;
+ if ( ( i == num_threads - 1 ) && last_chunk_size &&
+ ( num_handles == num_threads ) )
+ thread_params[i].num_active = last_chunk_size;
+ thread_handles[i] = CreateThread(NULL, 4 * 1024,
+ (LPTHREAD_START_ROUTINE)&try_wait_helper, &thread_params[i],
+ 0, NULL);
+ }
+ wait_api_result = WaitForMultipleObjects(num_handles, thread_handles,
+ FALSE, timeoutMillis);
+ if ( ( WAIT_OBJECT_0 <= wait_api_result ) &&
+ ( wait_api_result < WAIT_OBJECT_0 + num_threads ) )
+ {
+ HANDLE thread_handle = thread_handles[wait_api_result - WAIT_OBJECT_0];
+ success = GetExitCodeThread(thread_handle, (DWORD *)&result);
+ assert( success );
+ }
+ else if ( ( WAIT_OBJECT_0 + num_threads <= wait_api_result ) &&
+ ( wait_api_result < WAIT_OBJECT_0 + num_handles ) )
+ {
+ unsigned int offset = wait_api_result - num_threads - WAIT_OBJECT_0;
+ result = active_procs[ last_chunk_offset + offset ];
+ }
+ SetEvent(completed_event);
+ /* Should complete instantly. */
+ WaitForMultipleObjects(num_threads, thread_handles, TRUE, INFINITE);
+ CloseHandle(completed_event);
+ for ( i = 0; i < num_threads; ++i )
+ CloseHandle(thread_handles[i]);
+ return result;
+#undef MAX_THREADS
+}
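+
+/* Illustrative walk-through of the chunking above (the numbers are made up):
+ * with MAXIMUM_WAIT_OBJECTS == 64 and 130 running child processes, try_wait()
+ * places the shared completed_event as the last element of each full 64-handle
+ * chunk and spawns one try_wait_helper() thread per full chunk. The outer
+ * WaitForMultipleObjects() call then waits on the helper thread handles plus
+ * any leftover process handles that still fit, and completed_event is signaled
+ * afterwards so the helper threads return promptly.
+ */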
+
+static int try_wait_helper( twh_params * params )
+{
+ int wait_api_result;
+
+ assert( params->num_active <= MAXIMUM_WAIT_OBJECTS );
+
+ /* Wait for a child to complete, or for our timeout window to expire. */
+ wait_api_result = WaitForMultipleObjects( params->num_active,
+ params->active_handles, FALSE, params->timeoutMillis );
+ if ( ( WAIT_OBJECT_0 <= wait_api_result ) &&
+ ( wait_api_result < WAIT_OBJECT_0 + params->num_active ) )
+ {
+ /* Terminated process detected - return its index. */
+ return params->active_procs[ wait_api_result - WAIT_OBJECT_0 ];
+ }
+
+ /* Timeout. */
+ return -1;
+}
+
+
+static int try_kill_one()
+{
+ /* Only need to check if a timeout was specified with the -l option. */
+ if ( globs.timeout > 0 )
+ {
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ {
+ double const t = running_time( cmdtab[ i ].pi.hProcess );
+ if ( t > (double)globs.timeout )
+ {
+ /* The job may have left an alert dialog around; try to get
+ * rid of it before killing the job itself.
+ */
+ close_alert( &cmdtab[ i ].pi );
+ /* We have a "runaway" job, kill it. */
+ kill_process_tree( cmdtab[ i ].pi.dwProcessId,
+ cmdtab[ i ].pi.hProcess );
+ /* And return its running commands table slot. */
+ return i;
+ }
+ }
+ }
+ return -1;
+}
+
+
+static void close_alerts()
+{
+ /* We only attempt this every 5 seconds or so, because it is not a cheap
+ * operation, and we will catch the alerts eventually. This check uses
+ * floats as some compilers define CLOCKS_PER_SEC as a float or double.
+ */
+ if ( ( (float)clock() / (float)( CLOCKS_PER_SEC * 5 ) ) < ( 1.0 / 5.0 ) )
+ {
+ int i;
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pi.hProcess )
+ close_alert( &cmdtab[ i ].pi );
+ }
+}
+
+
+/*
+ * Calculate the current running time of an *active* process.
+ */
+
+static double running_time( HANDLE const process )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
+ {
+ /* Compute the elapsed time. */
+ FILETIME current;
+ GetSystemTimeAsFileTime( &current );
+ return filetime_to_seconds( add_FILETIME( current,
+ negate_FILETIME( creation ) ) );
+ }
+ return 0.0;
+}
+
+
+/*
+ * Not really optimal, or efficient, but it is easier this way, and it is not
+ * like we are going to be killing thousands, or even tens of processes.
+ */
+
+static void kill_process_tree( DWORD const pid, HANDLE const process )
+{
+ HANDLE const process_snapshot_h = CreateToolhelp32Snapshot(
+ TH32CS_SNAPPROCESS, 0 );
+ if ( INVALID_HANDLE_VALUE != process_snapshot_h )
+ {
+ BOOL ok = TRUE;
+ PROCESSENTRY32 pinfo;
+ pinfo.dwSize = sizeof( PROCESSENTRY32 );
+ for (
+ ok = Process32First( process_snapshot_h, &pinfo );
+ ok == TRUE;
+ ok = Process32Next( process_snapshot_h, &pinfo ) )
+ {
+ if ( pinfo.th32ParentProcessID == pid )
+ {
+ /* Found a child, recurse to kill it and anything else below it.
+ */
+ HANDLE const ph = OpenProcess( PROCESS_ALL_ACCESS, FALSE,
+ pinfo.th32ProcessID );
+ if ( ph )
+ {
+ kill_process_tree( pinfo.th32ProcessID, ph );
+ CloseHandle( ph );
+ }
+ }
+ }
+ CloseHandle( process_snapshot_h );
+ }
+ /* Now that the children are all dead, kill the root. */
+ TerminateProcess( process, -2 );
+}
+
+
+static double creation_time( HANDLE const process )
+{
+ FILETIME creation;
+ FILETIME exit;
+ FILETIME kernel;
+ FILETIME user;
+ return GetProcessTimes( process, &creation, &exit, &kernel, &user )
+ ? filetime_to_seconds( creation )
+ : 0.0;
+}
+
+
+/*
+ * Recursively checks whether the first process is a parent (directly or
+ * indirectly) of the second one. Both processes are passed as process ids, not
+ * handles. The special return value 2 means that the second process is
+ * smss.exe and its parent process is System (the first argument is ignored).
+ */
+
+static int is_parent_child( DWORD const parent, DWORD const child )
+{
+ HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
+
+ if ( !child )
+ return 0;
+ if ( parent == child )
+ return 1;
+
+ process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
+ if ( INVALID_HANDLE_VALUE != process_snapshot_h )
+ {
+ BOOL ok = TRUE;
+ PROCESSENTRY32 pinfo;
+ pinfo.dwSize = sizeof( PROCESSENTRY32 );
+ for (
+ ok = Process32First( process_snapshot_h, &pinfo );
+ ok == TRUE;
+ ok = Process32Next( process_snapshot_h, &pinfo ) )
+ {
+ if ( pinfo.th32ProcessID == child )
+ {
+ /* Unfortunately, process ids are not really unique. There may
+ * be a spurious "parent and child" relationship match between
+ * two unrelated processes if the real parent of a given process
+ * has exited (while the child kept running as an "orphan") and
+ * the parent's process id has been reused by the operating
+ * system when creating another process.
+ *
+ * Thus an additional check is needed - process creation time.
+ * This check may fail (i.e. return 0) for system processes due
+ * to insufficient privileges, and that is OK.
+ */
+ double tchild = 0.0;
+ double tparent = 0.0;
+ HANDLE const hchild = OpenProcess( PROCESS_QUERY_INFORMATION,
+ FALSE, pinfo.th32ProcessID );
+ CloseHandle( process_snapshot_h );
+
+ /* csrss.exe may display a message box like the following:
+ * xyz.exe - Unable To Locate Component
+ * This application has failed to start because
+ * boost_foo-bar.dll was not found. Re-installing the
+ * application may fix the problem
+ * This actually happens when starting a test process that
+ * depends on a dynamic library which failed to build. We want
+ * to automatically close these message boxes even though
+ * csrss.exe is not our child process. We may depend on the fact
+ * that (in all current versions of Windows) csrss.exe is a
+ * direct child of the smss.exe process, which in turn is a
+ * direct child of the System process, which always has process
+ * id == 4. This check must be performed before comparing
+ * process creation times.
+ */
+ if ( !stricmp( pinfo.szExeFile, "csrss.exe" ) &&
+ is_parent_child( parent, pinfo.th32ParentProcessID ) == 2 )
+ return 1;
+ if ( !stricmp( pinfo.szExeFile, "smss.exe" ) &&
+ ( pinfo.th32ParentProcessID == 4 ) )
+ return 2;
+
+ if ( hchild )
+ {
+ HANDLE hparent = OpenProcess( PROCESS_QUERY_INFORMATION,
+ FALSE, pinfo.th32ParentProcessID );
+ if ( hparent )
+ {
+ tchild = creation_time( hchild );
+ tparent = creation_time( hparent );
+ CloseHandle( hparent );
+ }
+ CloseHandle( hchild );
+ }
+
+ /* Return 0 if one of the following is true:
+ * 1. we failed to read process creation time
+ * 2. child was created before alleged parent
+ */
+ if ( ( tchild == 0.0 ) || ( tparent == 0.0 ) ||
+ ( tchild < tparent ) )
+ return 0;
+
+ return is_parent_child( parent, pinfo.th32ParentProcessID ) & 1;
+ }
+ }
+
+ CloseHandle( process_snapshot_h );
+ }
+
+ return 0;
+}
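+
+/* Illustrative example of the return values above (the process ids are
+ * hypothetical): if our build job runs as process id 1000 and spawns cl.exe as
+ * process id 1200, is_parent_child( 1000, 1200 ) yields 1, while the id of an
+ * unrelated process yields 0. The value 2 is only produced for the
+ * smss.exe/System special case and is consumed by the recursive csrss.exe
+ * check above.
+ */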
+
+
+/*
+ * Called by the OS for each topmost window.
+ */
+
+BOOL CALLBACK close_alert_window_enum( HWND hwnd, LPARAM lParam )
+{
+ char buf[ 7 ] = { 0 };
+ PROCESS_INFORMATION const * const pi = (PROCESS_INFORMATION *)lParam;
+ DWORD pid;
+ DWORD tid;
+
+ /* We want to find and close any window that:
+ * 1. is visible and
+ * 2. is a dialog and
+ * 3. is displayed by any of our child processes
+ */
+ if (
+ /* We assume hidden windows do not require user interaction. */
+ !IsWindowVisible( hwnd )
+ /* Failed to read class name; presume it is not a dialog. */
+ || !GetClassNameA( hwnd, buf, sizeof( buf ) )
+ /* All Windows system dialogs use the same Window class name. */
+ || strcmp( buf, "#32770" ) )
+ return TRUE;
+
+ /* GetWindowThreadProcessId() returns 0 on error, otherwise thread id of
+ * the window's message pump thread.
+ */
+ tid = GetWindowThreadProcessId( hwnd, &pid );
+ if ( !tid || !is_parent_child( pi->dwProcessId, pid ) )
+ return TRUE;
+
+ /* Ask real nice. */
+ PostMessageA( hwnd, WM_CLOSE, 0, 0 );
+
+ /* Wait and see if it worked. If not, insist. */
+ if ( WaitForSingleObject( pi->hProcess, 200 ) == WAIT_TIMEOUT )
+ {
+ PostThreadMessageA( tid, WM_QUIT, 0, 0 );
+ WaitForSingleObject( pi->hProcess, 300 );
+ }
+
+ /* Done, we do not want to check any other windows now. */
+ return FALSE;
+}
+
+
+static void close_alert( PROCESS_INFORMATION const * const pi )
+{
+ EnumWindows( &close_alert_window_enum, (LPARAM)pi );
+}
+
+
+/*
+ * Open a command file in which to store the command for execution by an
+ * external shell. Returns a pointer to a FILE open for writing, or 0 if such a
+ * file could not be opened. The file name used is stored back in the
+ * corresponding running commands table slot.
+ *
+ * Expects the running commands table slot's command_file attribute to contain
+ * either a zeroed out string object or one prepared previously by this same
+ * function.
+ */
+
+static FILE * open_command_file( int const slot )
+{
+ string * const command_file = cmdtab[ slot ].command_file;
+
+ /* If the temporary command file name has not already been prepared for this
+ * slot number, prepare a new one containing a '##' placeholder that will
+ * be changed later and needs to be located at a fixed distance from the
+ * end.
+ */
+ if ( !command_file->value )
+ {
+ DWORD const procID = GetCurrentProcessId();
+ string const * const tmpdir = path_tmpdir();
+ string_new( command_file );
+ string_reserve( command_file, tmpdir->size + 64 );
+ command_file->size = sprintf( command_file->value,
+ "%s\\jam%d-%02d-##.bat", tmpdir->value, procID, slot );
+ }
+
+ /* For some reason opening a command file can fail intermittently, but doing
+ * some retries works. Most likely this is due to a previously existing file
+ * of the same name that happens to still be held open by an active virus
+ * scanner. Originally pointed out and fixed by Bronek Kozicki.
+ *
+ * We first try to open several differently named files to avoid having to
+ * wait idly if not absolutely necessary. Our temporary command file names
+ * contain a fixed position place holder we use for generating different
+ * file names.
+ */
+ {
+ char * const index1 = command_file->value + command_file->size - 6;
+ char * const index2 = index1 + 1;
+ int waits_remaining;
+ assert( command_file->value < index1 );
+ assert( index2 + 1 < command_file->value + command_file->size );
+ assert( index2[ 1 ] == '.' );
+ for ( waits_remaining = 3; ; --waits_remaining )
+ {
+ int index;
+ for ( index = 0; index != 20; ++index )
+ {
+ FILE * f;
+ *index1 = '0' + index / 10;
+ *index2 = '0' + index % 10;
+ f = fopen( command_file->value, "w" );
+ if ( f ) return f;
+ }
+ if ( !waits_remaining ) break;
+ Sleep( 250 );
+ }
+ }
+
+ return 0;
+}
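+
+/* Concrete illustration of the naming scheme above (the values are made up):
+ * with a temporary directory of C:\Temp, process id 1234 and slot 7, the
+ * function tries C:\Temp\jam1234-07-00.bat through C:\Temp\jam1234-07-19.bat,
+ * retrying the whole sequence up to three more times with a 250 ms pause in
+ * between before giving up.
+ */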
+
+
+/*
+ * Prepare a command file to be executed using an external shell.
+ */
+
+static char const * prepare_command_file( string const * command, int slot )
+{
+ FILE * const f = open_command_file( slot );
+ if ( !f )
+ {
+ printf( "failed to write command file!\n" );
+ exit( EXITBAD );
+ }
+ fputs( command->value, f );
+ fclose( f );
+ return cmdtab[ slot ].command_file->value;
+}
+
+
+/*
+ * Find a free slot in the running commands table.
+ */
+
+static int get_free_cmdtab_slot()
+{
+ int slot;
+ for ( slot = 0; slot < MAXJOBS; ++slot )
+ if ( !cmdtab[ slot ].pi.hProcess )
+ return slot;
+ printf( "no slots for child!\n" );
+ exit( EXITBAD );
+}
+
+
+/*
+ * Put together the final command string we are to run.
+ */
+
+static void string_new_from_argv( string * result, char const * const * argv )
+{
+ assert( argv );
+ assert( argv[ 0 ] );
+ string_copy( result, *(argv++) );
+ while ( *argv )
+ {
+ string_push_back( result, ' ' );
+ string_append( result, *(argv++) );
+ }
+}
+
+
+/*
+ * Reports the error message for the last failed Windows API call.
+ */
+
+static void reportWindowsError( char const * const apiName, int slot )
+{
+ char * errorMessage;
+ char buf[24];
+ string * err_buf;
+ timing_info time;
+ DWORD const errorCode = GetLastError();
+ DWORD apiResult = FormatMessageA(
+ FORMAT_MESSAGE_ALLOCATE_BUFFER | /* __in DWORD dwFlags */
+ FORMAT_MESSAGE_FROM_SYSTEM |
+ FORMAT_MESSAGE_IGNORE_INSERTS,
+ NULL, /* __in_opt LPCVOID lpSource */
+ errorCode, /* __in DWORD dwMessageId */
+ 0, /* __in DWORD dwLanguageId */
+ (LPSTR)&errorMessage, /* __out LPTSTR lpBuffer */
+ 0, /* __in DWORD nSize */
+ 0 ); /* __in_opt va_list * Arguments */
+
+ /* Build a message as if the process had written to stderr. */
+ if ( globs.pipe_action )
+ err_buf = cmdtab[ slot ].buffer_err;
+ else
+ err_buf = cmdtab[ slot ].buffer_out;
+ string_append( err_buf, apiName );
+ string_append( err_buf, "() Windows API failed: " );
+ sprintf( buf, "%d", errorCode );
+ string_append( err_buf, buf );
+
+ if ( !apiResult )
+ string_append( err_buf, ".\n" );
+ else
+ {
+ string_append( err_buf, " - " );
+ string_append( err_buf, errorMessage );
+ /* Make sure that the buffer is terminated with a newline */
+ if( err_buf->value[ err_buf->size - 1 ] != '\n' )
+ string_push_back( err_buf, '\n' );
+ LocalFree( errorMessage );
+ }
+
+ /* Since the process didn't actually start, use a blank timing_info. */
+ time.system = 0;
+ time.user = 0;
+ timestamp_current( &time.start );
+ timestamp_current( &time.end );
+
+ /* Invoke the callback with a failure status. */
+ (*cmdtab[ slot ].func)( cmdtab[ slot ].closure, EXEC_CMD_FAIL, &time,
+ cmdtab[ slot ].buffer_out->value, cmdtab[ slot ].buffer_err->value,
+ EXIT_OK );
+
+ /* Clean up any handles that were opened. */
+ closeWinHandle( &cmdtab[ slot ].pi.hProcess );
+ closeWinHandle( &cmdtab[ slot ].pi.hThread );
+ closeWinHandle( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ slot ].pipe_out[ EXECCMD_PIPE_WRITE ] );
+ closeWinHandle( &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_READ ] );
+ closeWinHandle( &cmdtab[ slot ].pipe_err[ EXECCMD_PIPE_WRITE ] );
+ string_renew( cmdtab[ slot ].buffer_out );
+ string_renew( cmdtab[ slot ].buffer_err );
+}
+
+
+#endif /* USE_EXECNT */
diff --git a/tools/build/src/engine/execunix.c b/tools/build/src/engine/execunix.c
new file mode 100644
index 0000000000..965e580116
--- /dev/null
+++ b/tools/build/src/engine/execunix.c
@@ -0,0 +1,559 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2007 Noel Belcourt.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+#include "jam.h"
+#include "execcmd.h"
+
+#include "lists.h"
+#include "output.h"
+#include "strings.h"
+
+#include <errno.h>
+#include <signal.h>
+#include <stdio.h>
+#include <time.h>
+#include <unistd.h> /* vfork(), _exit(), STDOUT_FILENO and such */
+#include <sys/resource.h>
+#include <sys/times.h>
+#include <sys/wait.h>
+
+#if defined(sun) || defined(__sun)
+ #include <wait.h>
+#endif
+
+#ifdef USE_EXECUNIX
+
+#include <sys/times.h>
+
+#if defined(__APPLE__)
+ #define NO_VFORK
+#endif
+
+#ifdef NO_VFORK
+ #define vfork() fork()
+#endif
+
+
+/*
+ * execunix.c - execute a shell script on UNIX/OS2/AmigaOS
+ *
+ * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp(). The
+ * default is: /bin/sh -c
+ *
+ * In $(JAMSHELL), % expands to the command string and ! expands to the slot
+ * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
+ * not include a %, it is tacked on as the last argument.
+ *
+ * Each word must be an individual element in a jam variable value.
+ *
+ * Do not just set JAMSHELL to /bin/sh - it will not work!
+ *
+ * External routines:
+ * exec_check() - preprocess and validate the command.
+ * exec_cmd() - launch an async command execution.
+ * exec_wait() - wait for any of the async command processes to terminate.
+ */
+
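+/* A minimal illustrative configuration (the value shown is hypothetical, not a
+ * shipped default):
+ *
+ *     JAMSHELL = /bin/bash -c % ;
+ *
+ * would make exec_cmd() build argv as { "/bin/bash", "-c", "<command text>" },
+ * with % replaced by the command string as described above.
+ */
+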
+/* find a free slot in the running commands table */
+static int get_free_cmdtab_slot();
+
+/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
+
+static clock_t tps;
+static int old_time_initialized;
+static struct tms old_time;
+
+/* We hold stdout & stderr child process information in two element arrays
+ * indexed as follows.
+ */
+#define OUT 0
+#define ERR 1
+
+static struct
+{
+ int pid; /* on win32, a real process handle */
+ int fd[ 2 ]; /* file descriptors for stdout and stderr */
+ FILE * stream[ 2 ]; /* child's stdout and stderr file streams */
+ clock_t start_time; /* start time of child process */
+ int exit_reason; /* termination status */
+ char * buffer[ 2 ]; /* buffers to hold stdout and stderr, if any */
+ int buf_size[ 2 ]; /* buffer sizes in bytes */
+ timestamp start_dt; /* start of command timestamp */
+
+ /* Function called when the command completes. */
+ ExecCmdCallback func;
+
+ /* Opaque data passed back to the 'func' callback. */
+ void * closure;
+} cmdtab[ MAXJOBS ] = { { 0 } };
+
+
+/*
+ * exec_check() - preprocess and validate the command.
+ */
+
+int exec_check
+(
+ string const * command,
+ LIST * * pShell,
+ int * error_length,
+ int * error_max_length
+)
+{
+ int const is_raw_cmd = is_raw_command_request( *pShell );
+
+ /* We allow empty commands for non-default shells since we do not really
+ * know what they are going to do with such commands.
+ */
+ if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) )
+ return EXEC_CHECK_NOOP;
+
+ return is_raw_cmd
+ ? EXEC_CHECK_OK
+ : check_cmd_for_too_long_lines( command->value, MAXLINE, error_length,
+ error_max_length );
+}
+
+
+/*
+ * exec_cmd() - launch an async command execution.
+ */
+
+/* We hold file descriptors for pipes used to communicate with child processes
+ * in two element arrays indexed as follows.
+ */
+#define EXECCMD_PIPE_READ 0
+#define EXECCMD_PIPE_WRITE 1
+
+void exec_cmd
+(
+ string const * command,
+ ExecCmdCallback func,
+ void * closure,
+ LIST * shell
+)
+{
+ int const slot = get_free_cmdtab_slot();
+ int out[ 2 ];
+ int err[ 2 ];
+ int len;
+ char const * argv[ MAXARGC + 1 ]; /* +1 for NULL */
+
+ /* Initialize default shell. */
+ static LIST * default_shell;
+ if ( !default_shell )
+ default_shell = list_push_back( list_new(
+ object_new( "/bin/sh" ) ),
+ object_new( "-c" ) );
+
+ if ( list_empty( shell ) )
+ shell = default_shell;
+
+ /* Formulate argv. If a shell was defined, be prepared for % and ! subs.
+ * Otherwise, use stock /bin/sh.
+ */
+ argv_from_shell( argv, shell, command->value, slot );
+
+ if ( DEBUG_EXECCMD )
+ {
+ int i;
+ printf( "Using shell: " );
+ list_print( shell );
+ printf( "\n" );
+ for ( i = 0; argv[ i ]; ++i )
+ printf( " argv[%d] = '%s'\n", i, argv[ i ] );
+ }
+
+ /* Create pipes for collecting child output. */
+ if ( pipe( out ) < 0 || ( globs.pipe_action && pipe( err ) < 0 ) )
+ {
+ perror( "pipe" );
+ exit( EXITBAD );
+ }
+
+ /* Initialize old_time only once. */
+ if ( !old_time_initialized )
+ {
+ times( &old_time );
+ old_time_initialized = 1;
+ }
+
+ /* Start the command */
+
+ timestamp_current( &cmdtab[ slot ].start_dt );
+
+ if ( 0 < globs.timeout )
+ {
+ /* Handle hung processes by manually tracking elapsed time and signaling
+ * the process when the time limit expires.
+ */
+ struct tms buf;
+ cmdtab[ slot ].start_time = times( &buf );
+
+ /* tps is a global; only initialize it once. */
+ if ( !tps ) tps = sysconf( _SC_CLK_TCK );
+ }
+
+ /* Child does not need the read pipe ends used by the parent. */
+ fcntl( out[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC );
+ if ( globs.pipe_action )
+ fcntl( err[ EXECCMD_PIPE_READ ], F_SETFD, FD_CLOEXEC );
+
+ if ( ( cmdtab[ slot ].pid = vfork() ) == -1 )
+ {
+ perror( "vfork" );
+ exit( EXITBAD );
+ }
+
+ if ( cmdtab[ slot ].pid == 0 )
+ {
+ /*****************/
+ /* Child process */
+ /*****************/
+ int const pid = getpid();
+
+ /* Redirect stdout and stderr to pipes inherited from the parent. */
+ dup2( out[ EXECCMD_PIPE_WRITE ], STDOUT_FILENO );
+ dup2( globs.pipe_action ? err[ EXECCMD_PIPE_WRITE ] :
+ out[ EXECCMD_PIPE_WRITE ], STDERR_FILENO );
+ close( out[ EXECCMD_PIPE_WRITE ] );
+ if ( globs.pipe_action )
+ close( err[ EXECCMD_PIPE_WRITE ] );
+
+ /* Make this process a process group leader so that when we kill it, all
+ * child processes of this process are terminated as well. We use
+ * killpg( pid, SIGKILL ) to kill the process group leader and all its
+ * children.
+ */
+ if ( 0 < globs.timeout )
+ {
+ struct rlimit r_limit;
+ r_limit.rlim_cur = globs.timeout;
+ r_limit.rlim_max = globs.timeout;
+ setrlimit( RLIMIT_CPU, &r_limit );
+ }
+ setpgid( pid, pid );
+ execvp( argv[ 0 ], (char * *)argv );
+ perror( "execvp" );
+ _exit( 127 );
+ }
+
+ /******************/
+ /* Parent process */
+ /******************/
+ setpgid( cmdtab[ slot ].pid, cmdtab[ slot ].pid );
+
+ /* The parent does not need the write pipe ends used by the child. */
+ close( out[ EXECCMD_PIPE_WRITE ] );
+ if ( globs.pipe_action )
+ close( err[ EXECCMD_PIPE_WRITE ] );
+
+ /* Set both pipe read file descriptors to non-blocking. */
+ fcntl( out[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK );
+ if ( globs.pipe_action )
+ fcntl( err[ EXECCMD_PIPE_READ ], F_SETFL, O_NONBLOCK );
+
+ /* Parent reads from out[ EXECCMD_PIPE_READ ]. */
+ cmdtab[ slot ].fd[ OUT ] = out[ EXECCMD_PIPE_READ ];
+ cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" );
+ if ( !cmdtab[ slot ].stream[ OUT ] )
+ {
+ perror( "fdopen" );
+ exit( EXITBAD );
+ }
+
+ /* Parent reads from err[ EXECCMD_PIPE_READ ]. */
+ if ( globs.pipe_action )
+ {
+ cmdtab[ slot ].fd[ ERR ] = err[ EXECCMD_PIPE_READ ];
+ cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" );
+ if ( !cmdtab[ slot ].stream[ ERR ] )
+ {
+ perror( "fdopen" );
+ exit( EXITBAD );
+ }
+ }
+
+ /* Save input data into the selected running commands table slot. */
+ cmdtab[ slot ].func = func;
+ cmdtab[ slot ].closure = closure;
+}
+
+#undef EXECCMD_PIPE_READ
+#undef EXECCMD_PIPE_WRITE
+
+
+/* Returns 1 if file descriptor is closed, or 0 if it is still alive.
+ *
+ * i is index into cmdtab
+ *
+ * s (stream) indexes:
+ * - cmdtab[ i ].stream[ s ]
+ * - cmdtab[ i ].buffer[ s ]
+ * - cmdtab[ i ].fd [ s ]
+ */
+
+static int read_descriptor( int i, int s )
+{
+ int ret;
+ char buffer[ BUFSIZ ];
+
+ while ( 0 < ( ret = fread( buffer, sizeof( char ), BUFSIZ - 1,
+ cmdtab[ i ].stream[ s ] ) ) )
+ {
+ buffer[ ret ] = 0;
+ if ( !cmdtab[ i ].buffer[ s ] )
+ {
+ /* Never been allocated. */
+ if ( globs.max_buf && ret > globs.max_buf )
+ {
+ ret = globs.max_buf;
+ buffer[ ret ] = 0;
+ }
+ cmdtab[ i ].buf_size[ s ] = ret + 1;
+ cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 );
+ memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 );
+ }
+ else
+ {
+ /* Previously allocated. */
+ if ( cmdtab[ i ].buf_size[ s ] < globs.max_buf || !globs.max_buf )
+ {
+ char * tmp = cmdtab[ i ].buffer[ s ];
+ int const old_len = cmdtab[ i ].buf_size[ s ] - 1;
+ int const new_len = old_len + ret + 1;
+ cmdtab[ i ].buf_size[ s ] = new_len;
+ cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( new_len );
+ memcpy( cmdtab[ i ].buffer[ s ], tmp, old_len );
+ memcpy( cmdtab[ i ].buffer[ s ] + old_len, buffer, ret + 1 );
+ BJAM_FREE( tmp );
+ }
+ }
+ }
+
+ /* If the buffer is full, ensure the last buffer char is a newline so that the
+ * jam log contains the command status at the beginning of its own line instead
+ * of appended to the end of the previous output.
+ */
+ if ( globs.max_buf && globs.max_buf <= cmdtab[ i ].buf_size[ s ] )
+ cmdtab[ i ].buffer[ s ][ cmdtab[ i ].buf_size[ s ] - 2 ] = '\n';
+
+ return feof( cmdtab[ i ].stream[ s ] );
+}
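+
+/* Illustrative note on the buffering above (the size is made up): with
+ * globs.max_buf == 0 the output buffer grows without limit, reallocating and
+ * copying on every read; with e.g. globs.max_buf == 1024 the buffer stops
+ * growing once buf_size reaches the limit and the last character is forced to
+ * a newline so the truncated output still ends on its own line.
+ */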
+
+
+/*
+ * close_streams() - Close the stream and pipe descriptor.
+ */
+
+static void close_streams( int const i, int const s )
+{
+ fclose( cmdtab[ i ].stream[ s ] );
+ cmdtab[ i ].stream[ s ] = 0;
+
+ close( cmdtab[ i ].fd[ s ] );
+ cmdtab[ i ].fd[ s ] = 0;
+}
+
+
+/*
+ * Populate the file descriptors collection for use in select() and return the
+ * maximal included file descriptor value.
+ */
+
+static int populate_file_descriptors( fd_set * const fds )
+{
+ int i;
+ int fd_max = 0;
+
+ FD_ZERO( fds );
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ int fd;
+ if ( ( fd = cmdtab[ i ].fd[ OUT ] ) > 0 )
+ {
+ if ( fd > fd_max ) fd_max = fd;
+ FD_SET( fd, fds );
+ }
+ if ( globs.pipe_action )
+ {
+ if ( ( fd = cmdtab[ i ].fd[ ERR ] ) > 0 )
+ {
+ if ( fd > fd_max ) fd_max = fd;
+ FD_SET( fd, fds );
+ }
+ }
+ }
+ return fd_max;
+}
+
+
+/*
+ * exec_wait() - wait for any of the async command processes to terminate.
+ *
+ * May register more than one terminated child process but will exit as soon as
+ * at least one has been registered.
+ */
+
+void exec_wait()
+{
+ int finished = 0;
+
+ /* Process children that signaled. */
+ while ( !finished )
+ {
+ int i;
+ struct timeval tv;
+ struct timeval * ptv = NULL;
+ int select_timeout = globs.timeout;
+
+ /* Prepare file descriptor information for use in select(). */
+ fd_set fds;
+ int const fd_max = populate_file_descriptors( &fds );
+
+ /* Check for timeouts:
+ * - kill children that already timed out
+ * - decide how long until the next one times out
+ */
+ if ( globs.timeout > 0 )
+ {
+ struct tms buf;
+ clock_t const current = times( &buf );
+ for ( i = 0; i < globs.jobs; ++i )
+ if ( cmdtab[ i ].pid )
+ {
+ clock_t const consumed =
+ ( current - cmdtab[ i ].start_time ) / tps;
+ if ( consumed >= globs.timeout )
+ {
+ killpg( cmdtab[ i ].pid, SIGKILL );
+ cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
+ }
+ else if ( globs.timeout - consumed < select_timeout )
+ select_timeout = globs.timeout - consumed;
+ }
+
+ /* If nothing else causes our select() call to exit, force it after
+ * however long it takes for the next one of our child processes to
+ * cross its allotted processing time so we can terminate it.
+ */
+ tv.tv_sec = select_timeout;
+ tv.tv_usec = 0;
+ ptv = &tv;
+ }
+
+ /* select() will wait for I/O on a descriptor, a signal, or timeout. */
+ {
+ int ret;
+ while ( ( ret = select( fd_max + 1, &fds, 0, 0, ptv ) ) == -1 )
+ if ( errno != EINTR )
+ break;
+ if ( ret <= 0 )
+ continue;
+ }
+
+ for ( i = 0; i < globs.jobs; ++i )
+ {
+ int out_done = 0;
+ int err_done = 0;
+ if ( FD_ISSET( cmdtab[ i ].fd[ OUT ], &fds ) )
+ out_done = read_descriptor( i, OUT );
+
+ if ( globs.pipe_action && FD_ISSET( cmdtab[ i ].fd[ ERR ], &fds ) )
+ err_done = read_descriptor( i, ERR );
+
+ /* If feof on either descriptor, we are done. */
+ if ( out_done || err_done )
+ {
+ int pid;
+ int status;
+ int rstat;
+ timing_info time_info;
+
+ /* We found a terminated child process - our search is done. */
+ finished = 1;
+
+ /* Close the stream and pipe descriptors. */
+ close_streams( i, OUT );
+ if ( globs.pipe_action )
+ close_streams( i, ERR );
+
+ /* Reap the child and release resources. */
+ while ( ( pid = waitpid( cmdtab[ i ].pid, &status, 0 ) ) == -1 )
+ if ( errno != EINTR )
+ break;
+ if ( pid != cmdtab[ i ].pid )
+ {
+ printf( "unknown pid %d with errno = %d\n", pid, errno );
+ exit( EXITBAD );
+ }
+
+ /* Set reason for exit if not timed out. */
+ if ( WIFEXITED( status ) )
+ cmdtab[ i ].exit_reason = WEXITSTATUS( status )
+ ? EXIT_FAIL
+ : EXIT_OK;
+
+ {
+ struct tms new_time;
+ times( &new_time );
+ time_info.system = (double)( new_time.tms_cstime -
+ old_time.tms_cstime ) / CLOCKS_PER_SEC;
+ time_info.user = (double)( new_time.tms_cutime -
+ old_time.tms_cutime ) / CLOCKS_PER_SEC;
+ timestamp_copy( &time_info.start, &cmdtab[ i ].start_dt );
+ timestamp_current( &time_info.end );
+ old_time = new_time;
+ }
+
+ /* Drive the completion. */
+ if ( interrupted() )
+ rstat = EXEC_CMD_INTR;
+ else if ( status )
+ rstat = EXEC_CMD_FAIL;
+ else
+ rstat = EXEC_CMD_OK;
+
+ /* Call the callback, may call back to jam rule land. */
+ (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat, &time_info,
+ cmdtab[ i ].buffer[ OUT ], cmdtab[ i ].buffer[ ERR ],
+ cmdtab[ i ].exit_reason );
+
+ /* Clean up the command's running commands table slot. */
+ BJAM_FREE( cmdtab[ i ].buffer[ OUT ] );
+ cmdtab[ i ].buffer[ OUT ] = 0;
+ cmdtab[ i ].buf_size[ OUT ] = 0;
+
+ BJAM_FREE( cmdtab[ i ].buffer[ ERR ] );
+ cmdtab[ i ].buffer[ ERR ] = 0;
+ cmdtab[ i ].buf_size[ ERR ] = 0;
+
+ cmdtab[ i ].pid = 0;
+ cmdtab[ i ].func = 0;
+ cmdtab[ i ].closure = 0;
+ cmdtab[ i ].start_time = 0;
+ }
+ }
+ }
+}
+
+
+/*
+ * Find a free slot in the running commands table.
+ */
+
+static int get_free_cmdtab_slot()
+{
+ int slot;
+ for ( slot = 0; slot < MAXJOBS; ++slot )
+ if ( !cmdtab[ slot ].pid )
+ return slot;
+ printf( "no slots for child!\n" );
+ exit( EXITBAD );
+}
+
+# endif /* USE_EXECUNIX */
diff --git a/tools/build/src/engine/filent.c b/tools/build/src/engine/filent.c
new file mode 100644
index 0000000000..00dcc49b36
--- /dev/null
+++ b/tools/build/src/engine/filent.c
@@ -0,0 +1,448 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filent.c - scan directories and archives on NT
+ *
+ * External routines:
+ * file_archscan() - scan an archive for files
+ * file_mkdir() - create a directory
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * External routines called only via routines in filesys.c:
+ * file_collect_dir_content_() - collects directory content information
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ * file_query_() - query information about a path from the OS
+ */
+
+#include "jam.h"
+#ifdef OS_NT
+#include "filesys.h"
+
+#include "object.h"
+#include "pathsys.h"
+#include "strings.h"
+
+#ifdef __BORLANDC__
+# undef FILENAME /* cpp namespace collision */
+#endif
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+
+#include <assert.h>
+#include <ctype.h>
+#include <direct.h>
+#include <io.h>
+
+
+/*
+ * file_collect_dir_content_() - collects directory content information
+ */
+
+int file_collect_dir_content_( file_info_t * const d )
+{
+ PATHNAME f;
+ string pathspec[ 1 ];
+ string pathname[ 1 ];
+ LIST * files = L0;
+ int d_length;
+
+ assert( d );
+ assert( d->is_dir );
+ assert( list_empty( d->files ) );
+
+ d_length = strlen( object_str( d->name ) );
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+ f.f_dir.ptr = object_str( d->name );
+ f.f_dir.len = d_length;
+
+ /* Prepare file search specification for the FindXXX() Windows API. */
+ if ( !d_length )
+ string_copy( pathspec, ".\\*" );
+ else
+ {
+ /* We cannot simply assume the given folder name never includes a trailing
+ * path separator, or we would not support the Windows root folder specified
+ * without its drive letter, i.e. '\'.
+ */
+ char const trailingChar = object_str( d->name )[ d_length - 1 ] ;
+ string_copy( pathspec, object_str( d->name ) );
+ if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) )
+ string_append( pathspec, "\\" );
+ string_append( pathspec, "*" );
+ }
+
+ /* The following code for collecting information about all files in a folder
+ * needs to be kept synchronized with how the file_query() operation is
+ * implemented (collects information about a single file).
+ */
+ {
+ /* FIXME: Avoid duplicate FindXXX Windows API calls here and in the code
+ * determining a normalized path.
+ */
+ WIN32_FIND_DATA finfo;
+ HANDLE const findHandle = FindFirstFileA( pathspec->value, &finfo );
+ if ( findHandle == INVALID_HANDLE_VALUE )
+ {
+ string_free( pathspec );
+ return -1;
+ }
+
+ string_new( pathname );
+ do
+ {
+ OBJECT * pathname_obj;
+
+ f.f_base.ptr = finfo.cFileName;
+ f.f_base.len = strlen( finfo.cFileName );
+
+ string_truncate( pathname, 0 );
+ path_build( &f, pathname );
+
+ pathname_obj = object_new( pathname->value );
+ path_register_key( pathname_obj );
+ files = list_push_back( files, pathname_obj );
+ {
+ int found;
+ file_info_t * const ff = file_info( pathname_obj, &found );
+ ff->is_dir = finfo.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY;
+ ff->is_file = !ff->is_dir;
+ ff->exists = 1;
+ timestamp_from_filetime( &ff->time, &finfo.ftLastWriteTime );
+ /* Use the timestamp of the link target, not the link itself
+ * (i.e. stat instead of lstat).
+ */
+ if ( finfo.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT )
+ {
+ HANDLE hLink = CreateFileA( pathname->value, 0, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL );
+ BY_HANDLE_FILE_INFORMATION target_finfo[ 1 ];
+ if ( hLink != INVALID_HANDLE_VALUE && GetFileInformationByHandle( hLink, target_finfo ) )
+ {
+ ff->is_file = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 0 : 1;
+ ff->is_dir = target_finfo->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY ? 1 : 0;
+ timestamp_from_filetime( &ff->time, &target_finfo->ftLastWriteTime );
+ }
+ }
+ }
+ }
+ while ( FindNextFile( findHandle, &finfo ) );
+
+ FindClose( findHandle );
+ }
+
+ string_free( pathname );
+ string_free( pathspec );
+
+ d->files = files;
+ return 0;
+}
+
+
+/*
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ */
+
+void file_dirscan_( file_info_t * const d, scanback func, void * closure )
+{
+ assert( d );
+ assert( d->is_dir );
+
+ /* Special case \ or d:\ : enter it */
+ {
+ char const * const name = object_str( d->name );
+ if ( name[ 0 ] == '\\' && !name[ 1 ] )
+ {
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+ }
+ else if ( name[ 0 ] && name[ 1 ] == ':' && name[ 2 ] && !name[ 3 ] )
+ {
+ /* We have just entered a 3-letter drive name spelling (with a
+ * trailing slash) into the hash table. Now enter its two-letter
+ * variant, without the trailing slash, so that if we try to check
+ * whether "c:" exists, we hit it.
+ *
+ * Jam core has workarounds for that. Given:
+ * x = c:\whatever\foo ;
+ * p = $(x:D) ;
+ * p2 = $(p:D) ;
+ * There will be no trailing slash in $(p), but there will be one in
+ * $(p2). But, that seems rather fragile.
+ */
+ OBJECT * const dir_no_slash = object_new_range( name, 2 );
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+ (*func)( closure, dir_no_slash, 1 /* stat()'ed */, &d->time );
+ object_free( dir_no_slash );
+ }
+ }
+}
+
+
+/*
+ * file_mkdir() - create a directory
+ */
+
+int file_mkdir( char const * const path )
+{
+ return _mkdir( path );
+}
+
+
+/*
+ * file_query_() - query information about a path from the OS
+ *
+ * The following code for collecting information about a single file needs to be
+ * kept synchronized with how the file_collect_dir_content_() operation is
+ * implemented (collects information about all files in a folder).
+ */
+
+int try_file_query_root( file_info_t * const info )
+{
+ WIN32_FILE_ATTRIBUTE_DATA fileData;
+ char buf[ 4 ];
+ char const * const pathstr = object_str( info->name );
+ if ( !pathstr[ 0 ] )
+ {
+ buf[ 0 ] = '.';
+ buf[ 1 ] = 0;
+ }
+ else if ( pathstr[ 0 ] == '\\' && ! pathstr[ 1 ] )
+ {
+ buf[ 0 ] = '\\';
+ buf[ 1 ] = '\0';
+ }
+ else if ( pathstr[ 1 ] == ':' )
+ {
+ if ( !pathstr[ 2 ] || ( pathstr[ 2 ] == '\\' && !pathstr[ 3 ] ) )
+ {
+ buf[ 0 ] = pathstr[ 0 ];
+ buf[ 1 ] = ':';
+ buf[ 2 ] = '\\';
+ buf[ 3 ] = '\0';
+ }
+ else
+ {
+ return 0;
+ }
+ }
+ else
+ {
+ return 0;
+ }
+
+ /* We have a root path */
+ if ( !GetFileAttributesExA( buf, GetFileExInfoStandard, &fileData ) )
+ {
+ info->is_dir = 0;
+ info->is_file = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ }
+ else
+ {
+ info->is_dir = fileData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY;
+ info->is_file = !info->is_dir;
+ info->exists = 1;
+ timestamp_from_filetime( &info->time, &fileData.ftLastWriteTime );
+ }
+ return 1;
+}
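+
+/* Illustrative inputs for the root-path check above (the paths are made up):
+ * an empty path is queried as ".", "\" is queried as itself, and a drive root
+ * such as "c:\" is queried as "c:\". Non-root paths such as "c:\dir\file" make
+ * the function return 0 so that the regular handling below applies.
+ */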
+
+void file_query_( file_info_t * const info )
+{
+ char const * const pathstr = object_str( info->name );
+ const char * dir;
+ OBJECT * parent;
+ file_info_t * parent_info;
+
+ if ( try_file_query_root( info ) )
+ return;
+
+ if ( ( dir = strrchr( pathstr, '\\' ) ) )
+ {
+ parent = object_new_range( pathstr, dir - pathstr );
+ }
+ else
+ {
+ parent = object_copy( constant_empty );
+ }
+ parent_info = file_query( parent );
+ object_free( parent );
+ if ( !parent_info || !parent_info->is_dir )
+ {
+ info->is_dir = 0;
+ info->is_file = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ }
+ else
+ {
+ info->is_dir = 0;
+ info->is_file = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ if ( list_empty( parent_info->files ) )
+ file_collect_dir_content_( parent_info );
+ }
+}
+
+
+/*
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * Returns the minimum file modification timestamp resolution supported by this
+ * Boost Jam implementation. File modification timestamp changes of less than
+ * the returned value might not be recognized.
+ *
+ * Does not take into consideration any OS or file system related restrictions.
+ *
+ * Return value 0 indicates that any value supported by the OS is also supported
+ * here.
+ */
+
+void file_supported_fmt_resolution( timestamp * const t )
+{
+ /* On Windows we support nano-second file modification timestamp resolution,
+ * just the same as the Windows OS itself.
+ */
+ timestamp_init( t, 0, 0 );
+}
+
+
+/*
+ * file_archscan() - scan an archive for files
+ */
+
+/* Straight from SunOS */
+
+#define ARMAG "!<arch>\n"
+#define SARMAG 8
+
+#define ARFMAG "`\n"
+
+struct ar_hdr
+{
+ char ar_name[ 16 ];
+ char ar_date[ 12 ];
+ char ar_uid[ 6 ];
+ char ar_gid[ 6 ];
+ char ar_mode[ 8 ];
+ char ar_size[ 10 ];
+ char ar_fmag[ 2 ];
+};
+
+#define SARFMAG 2
+#define SARHDR sizeof( struct ar_hdr )
+
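+/* For orientation only - a made-up archive member header in this fixed-width
+ * ASCII format could look roughly like:
+ *
+ *   "hello.obj/      1418291052  0     0     100666  1234      `\n"
+ *    ar_name[16]     ar_date[12] uid[6] gid[6] mode[8] size[10] fmag[2]
+ *
+ * The member name and numbers are illustrative, not read from a real archive.
+ */
+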
+void file_archscan( char const * archive, scanback func, void * closure )
+{
+ struct ar_hdr ar_hdr;
+ char * string_table = 0;
+ char buf[ MAXJPATH ];
+ long offset;
+ int const fd = open( archive, O_RDONLY | O_BINARY, 0 );
+
+ if ( fd < 0 )
+ return;
+
+ if ( read( fd, buf, SARMAG ) != SARMAG || strncmp( ARMAG, buf, SARMAG ) )
+ {
+ close( fd );
+ return;
+ }
+
+ offset = SARMAG;
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
+ !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) )
+ {
+ long lar_date;
+ long lar_size;
+ char * name = 0;
+ char * endname;
+
+ sscanf( ar_hdr.ar_date, "%ld", &lar_date );
+ sscanf( ar_hdr.ar_size, "%ld", &lar_size );
+
+ lar_size = ( lar_size + 1 ) & ~1;
+
+ if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] == '/' )
+ {
+ /* This is the "string table" entry of the symbol table, holding
+ * filename strings longer than 15 characters, i.e. those that do
+ * not fit into ar_name.
+ */
+ string_table = BJAM_MALLOC_ATOMIC( lar_size + 1 );
+ if ( read( fd, string_table, lar_size ) != lar_size )
+ printf( "error reading string table\n" );
+ string_table[ lar_size ] = '\0';
+ offset += SARHDR + lar_size;
+ continue;
+ }
+ else if ( ar_hdr.ar_name[ 0 ] == '/' && ar_hdr.ar_name[ 1 ] != ' ' )
+ {
+ /* Long filenames are recognized by "/nnnn" where nnnn is the
+ * string's offset in the string table represented in ASCII
+ * decimals.
+ */
+ name = string_table + atoi( ar_hdr.ar_name + 1 );
+ for ( endname = name; *endname && *endname != '\n'; ++endname );
+ }
+ else
+ {
+ /* normal name */
+ name = ar_hdr.ar_name;
+ endname = name + sizeof( ar_hdr.ar_name );
+ }
+
+ /* strip trailing white-space, slashes, and backslashes */
+
+ while ( endname-- > name )
+ if ( !isspace( *endname ) && ( *endname != '\\' ) && ( *endname !=
+ '/' ) )
+ break;
+ *++endname = 0;
+
+ /* strip leading directory names, an NT specialty */
+ {
+ char * c;
+ if ( c = strrchr( name, '/' ) )
+ name = c + 1;
+ if ( c = strrchr( name, '\\' ) )
+ name = c + 1;
+ }
+
+ sprintf( buf, "%s(%.*s)", archive, endname - name, name );
+ {
+ OBJECT * const member = object_new( buf );
+ timestamp time;
+ timestamp_init( &time, (time_t)lar_date, 0 );
+ (*func)( closure, member, 1 /* time valid */, &time );
+ object_free( member );
+ }
+
+ offset += SARHDR + lar_size;
+ lseek( fd, offset, 0 );
+ }
+
+ close( fd );
+}
+
+#endif /* OS_NT */
diff --git a/tools/build/src/engine/filesys.c b/tools/build/src/engine/filesys.c
new file mode 100644
index 0000000000..dadaef82ed
--- /dev/null
+++ b/tools/build/src/engine/filesys.c
@@ -0,0 +1,326 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filesys.c - OS independent file system manipulation support
+ *
+ * External routines:
+ * file_build1() - construct a path string based on PATHNAME information
+ * file_dirscan() - scan a directory for files
+ * file_done() - module cleanup called on shutdown
+ * file_info() - return cached information about a path
+ * file_is_file() - return whether a path identifies an existing file
+ * file_query() - get cached information about a path, query the OS if
+ * needed
+ * file_remove_atexit() - schedule a path to be removed on program exit
+ * file_time() - get a file timestamp
+ *
+ * External routines - utilities for OS specific module implementations:
+ * file_query_posix_() - query information about a path using POSIX stat()
+ *
+ * Internal routines:
+ * file_dirscan_impl() - no-profiling worker for file_dirscan()
+ */
+
+
+#include "jam.h"
+#include "filesys.h"
+
+#include "lists.h"
+#include "object.h"
+#include "pathsys.h"
+#include "strings.h"
+
+#include <assert.h>
+#include <sys/stat.h>
+
+
+/* Internal OS specific implementation details - have names ending with an
+ * underscore and are expected to be implemented in an OS specific fileXXX.c
+ * module.
+ */
+void file_dirscan_( file_info_t * const dir, scanback func, void * closure );
+int file_collect_dir_content_( file_info_t * const dir );
+void file_query_( file_info_t * const );
+
+static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure );
+static void free_file_info( void * xfile, void * data );
+static void remove_files_atexit( void );
+
+
+static struct hash * filecache_hash;
+
+
+/*
+ * file_build1() - construct a path string based on PATHNAME information
+ */
+
+void file_build1( PATHNAME * const f, string * file )
+{
+ if ( DEBUG_SEARCH )
+ {
+ printf( "build file: " );
+ if ( f->f_root.len )
+ printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr );
+ if ( f->f_dir.len )
+ printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr );
+ if ( f->f_base.len )
+ printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr );
+ printf( "\n" );
+ }
+
+ /* Start with the grist. If the current grist is not surrounded by <>'s, add
+ * them.
+ */
+ if ( f->f_grist.len )
+ {
+ if ( f->f_grist.ptr[ 0 ] != '<' )
+ string_push_back( file, '<' );
+ string_append_range(
+ file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len );
+ if ( file->value[ file->size - 1 ] != '>' )
+ string_push_back( file, '>' );
+ }
+}
+
+
+/*
+ * file_dirscan() - scan a directory for files
+ */
+
+void file_dirscan( OBJECT * dir, scanback func, void * closure )
+{
+ PROFILE_ENTER( FILE_DIRSCAN );
+ file_dirscan_impl( dir, func, closure );
+ PROFILE_EXIT( FILE_DIRSCAN );
+}
+
+
+/*
+ * file_done() - module cleanup called on shutdown
+ */
+
+void file_done()
+{
+ remove_files_atexit();
+ if ( filecache_hash )
+ {
+ hashenumerate( filecache_hash, free_file_info, (void *)0 );
+ hashdone( filecache_hash );
+ }
+}
+
+
+/*
+ * file_info() - return cached information about a path
+ *
+ * Returns a default initialized structure containing only the path's normalized
+ * name in case this is the first time this file system entity has been
+ * referenced.
+ */
+
+file_info_t * file_info( OBJECT * const path, int * found )
+{
+ OBJECT * const path_key = path_as_key( path );
+ file_info_t * finfo;
+
+ if ( !filecache_hash )
+ filecache_hash = hashinit( sizeof( file_info_t ), "file_info" );
+
+ finfo = (file_info_t *)hash_insert( filecache_hash, path_key, found );
+ if ( !*found )
+ {
+ finfo->name = path_key;
+ finfo->files = L0;
+ }
+ else
+ object_free( path_key );
+
+ return finfo;
+}
+
+
+/*
+ * file_is_file() - return whether a path identifies an existing file
+ */
+
+int file_is_file( OBJECT * const path )
+{
+ file_info_t const * const ff = file_query( path );
+ return ff ? ff->is_file : -1;
+}
+
+
+/*
+ * file_time() - get a file timestamp
+ */
+
+int file_time( OBJECT * const path, timestamp * const time )
+{
+ file_info_t const * const ff = file_query( path );
+ if ( !ff ) return -1;
+ timestamp_copy( time, &ff->time );
+ return 0;
+}
+
+
+/*
+ * file_query() - get cached information about a path, query the OS if needed
+ *
+ * Returns 0 in case querying the OS about the given path fails, e.g. because
+ * the path does not reference an existing file system object.
+ */
+
+file_info_t * file_query( OBJECT * const path )
+{
+ /* FIXME: Add tracking for disappearing files (i.e. those that can not be
+ * detected by stat() even though they had been detected successfully
+ * before) and see how they should be handled in the rest of Boost Jam code.
+ * Possibly allow Jamfiles to specify some files as 'volatile' which would
+ * make Boost Jam avoid caching information about those files and instead
+ * ask the OS about them every time.
+ */
+ int found;
+ file_info_t * const ff = file_info( path, &found );
+ if ( !found )
+ {
+ file_query_( ff );
+ if ( ff->exists )
+ {
+ /* Set the path's timestamp to 1 in case it is 0 or undetected to avoid
+ * confusion with non-existing paths.
+ */
+ if ( timestamp_empty( &ff->time ) )
+ timestamp_init( &ff->time, 1, 0 );
+ }
+ }
+ if ( !ff->exists )
+ {
+ return 0;
+ }
+ return ff;
+}
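+
+/* Minimal usage sketch (the path literal is made up for illustration):
+ *
+ *   OBJECT * path = object_new( "Jamroot" );
+ *   file_info_t const * info = file_query( path );
+ *   if ( info && info->is_file )
+ *       printf( "%s exists\n", object_str( info->name ) );
+ *   object_free( path );
+ *
+ * A 0 result means the OS query failed, e.g. because the path does not exist.
+ */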
+
+
+/*
+ * file_query_posix_() - query information about a path using POSIX stat()
+ *
+ * Fallback file_query_() implementation for OS specific modules.
+ *
+ * Note that the Windows POSIX stat() function implementation suffers from
+ * several issues:
+ * * Does not support file timestamps with resolution finer than 1 second,
+ * meaning it can not be used to detect file timestamp changes of less than
+ * 1 second. One possible consequence is that some fast-paced touch commands
+ * (such as those done by Boost Build's internal testing system if it does
+ * not do some extra waiting) will not be detected correctly by the build
+ * system.
+ * * Returns file modification times automatically adjusted for daylight
+ * savings time even though daylight savings time should have nothing to do
+ * with internal time representation.
+ */
+
+void file_query_posix_( file_info_t * const info )
+{
+ struct stat statbuf;
+ char const * const pathstr = object_str( info->name );
+ char const * const pathspec = *pathstr ? pathstr : ".";
+
+ if ( stat( pathspec, &statbuf ) < 0 )
+ {
+ info->is_file = 0;
+ info->is_dir = 0;
+ info->exists = 0;
+ timestamp_clear( &info->time );
+ }
+ else
+ {
+ info->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
+ info->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
+ info->exists = 1;
+ timestamp_init( &info->time, statbuf.st_mtime, 0 );
+ }
+}
+
+
+/*
+ * file_remove_atexit() - schedule a path to be removed on program exit
+ */
+
+static LIST * files_to_remove = L0;
+
+void file_remove_atexit( OBJECT * const path )
+{
+ files_to_remove = list_push_back( files_to_remove, object_copy( path ) );
+}
+
+
+/*
+ * file_dirscan_impl() - no-profiling worker for file_dirscan()
+ */
+
+static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure )
+{
+ file_info_t * const d = file_query( dir );
+ if ( !d || !d->is_dir )
+ return;
+
+ /* Lazy collect the directory content information. */
+ if ( list_empty( d->files ) )
+ {
+ if ( DEBUG_BINDSCAN )
+ printf( "scan directory %s\n", object_str( d->name ) );
+ if ( file_collect_dir_content_( d ) < 0 )
+ return;
+ }
+
+ /* OS specific part of the file_dirscan operation. */
+ file_dirscan_( d, func, closure );
+
+ /* Report the collected directory content. */
+ {
+ LISTITER iter = list_begin( d->files );
+ LISTITER const end = list_end( d->files );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ OBJECT * const path = list_item( iter );
+ file_info_t const * const ffq = file_query( path );
+ /* Using a file name read from a file_info_t structure allows OS
+ * specific implementations to store some kind of a normalized file
+ * name there. Using such a normalized file name then allows us to
+ * correctly recognize different file paths actually identifying the
+ * same file. For instance, an implementation may:
+ * - convert all file names internally to lower case on a case
+ * insensitive file system
+ * - convert NTFS paths to their long path variants, as on that
+ * file system each file system entity may have both a long and
+ * a short path variant, allowing many different path strings to
+ * identify the same file.
+ */
+ (*func)( closure, ffq->name, 1 /* stat()'ed */, &ffq->time );
+ }
+ }
+}
+
+
+static void free_file_info( void * xfile, void * data )
+{
+ file_info_t * const file = (file_info_t *)xfile;
+ object_free( file->name );
+ list_free( file->files );
+}
+
+
+static void remove_files_atexit( void )
+{
+ LISTITER iter = list_begin( files_to_remove );
+ LISTITER const end = list_end( files_to_remove );
+ for ( ; iter != end; iter = list_next( iter ) )
+ remove( object_str( list_item( iter ) ) );
+ list_free( files_to_remove );
+ files_to_remove = L0;
+}
diff --git a/tools/build/src/engine/filesys.h b/tools/build/src/engine/filesys.h
new file mode 100644
index 0000000000..74fa3958f3
--- /dev/null
+++ b/tools/build/src/engine/filesys.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * filesys.h - OS specific file routines
+ */
+
+#ifndef FILESYS_DWA20011025_H
+#define FILESYS_DWA20011025_H
+
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "pathsys.h"
+#include "timestamp.h"
+
+
+typedef struct file_info_t
+{
+ OBJECT * name;
+ char is_file;
+ char is_dir;
+ char exists;
+ timestamp time;
+ LIST * files;
+} file_info_t;
+
+typedef void (*scanback)( void * closure, OBJECT * path, int found,
+ timestamp const * const );
+
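+/* A minimal illustrative scanback callback (the function name is made up):
+ *
+ *   static void print_entry( void * closure, OBJECT * path, int found,
+ *       timestamp const * const time )
+ *   {
+ *       if ( found )
+ *           printf( "%s\n", object_str( path ) );
+ *   }
+ *
+ * which could then be passed to file_dirscan( dir, &print_entry, 0 ).
+ */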
+
+void file_archscan( char const * arch, scanback func, void * closure );
+void file_build1( PATHNAME * const f, string * file ) ;
+void file_dirscan( OBJECT * dir, scanback func, void * closure );
+file_info_t * file_info( OBJECT * const path, int * found );
+int file_is_file( OBJECT * const path );
+int file_mkdir( char const * const path );
+file_info_t * file_query( OBJECT * const path );
+void file_remove_atexit( OBJECT * const path );
+void file_supported_fmt_resolution( timestamp * const );
+int file_time( OBJECT * const path, timestamp * const );
+
+/* Internal utility worker functions. */
+void file_query_posix_( file_info_t * const );
+
+void file_done();
+
+#endif
diff --git a/tools/build/src/engine/fileunix.c b/tools/build/src/engine/fileunix.c
new file mode 100644
index 0000000000..bff3a42f52
--- /dev/null
+++ b/tools/build/src/engine/fileunix.c
@@ -0,0 +1,464 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS
+ *
+ * External routines:
+ * file_archscan() - scan an archive for files
+ * file_mkdir() - create a directory
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * External routines called only via routines in filesys.c:
+ * file_collect_dir_content_() - collects directory content information
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ * file_query_() - query information about a path from the OS
+ */
+
+#include "jam.h"
+#ifdef USE_FILEUNIX
+#include "filesys.h"
+
+#include "object.h"
+#include "pathsys.h"
+#include "strings.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <sys/stat.h> /* needed for mkdir() */
+
+#if defined( sun ) || defined( __sun ) || defined( linux )
+# include <unistd.h> /* needed for read and close prototype */
+#endif
+
+#if defined( OS_SEQUENT ) || \
+ defined( OS_DGUX ) || \
+ defined( OS_SCO ) || \
+ defined( OS_ISC )
+# define PORTAR 1
+#endif
+
+#if defined( OS_RHAPSODY ) || defined( OS_MACOSX ) || defined( OS_NEXT )
+# include <sys/dir.h>
+# include <unistd.h> /* need unistd for rhapsody's proper lseek */
+# define STRUCT_DIRENT struct direct
+#else
+# include <dirent.h>
+# define STRUCT_DIRENT struct dirent
+#endif
+
+#ifdef OS_COHERENT
+# include <arcoff.h>
+# define HAVE_AR
+#endif
+
+#if defined( OS_MVS ) || defined( OS_INTERIX )
+#define ARMAG "!<arch>\n"
+#define SARMAG 8
+#define ARFMAG "`\n"
+#define HAVE_AR
+
+struct ar_hdr /* archive file member header - printable ascii */
+{
+ char ar_name[ 16 ]; /* file member name - `/' terminated */
+ char ar_date[ 12 ]; /* file member date - decimal */
+ char ar_uid[ 6 ]; /* file member user id - decimal */
+ char ar_gid[ 6 ]; /* file member group id - decimal */
+ char ar_mode[ 8 ]; /* file member mode - octal */
+ char ar_size[ 10 ]; /* file member size - decimal */
+ char ar_fmag[ 2 ]; /* ARFMAG - string to end header */
+};
+#endif
+
+#if defined( OS_QNX ) || defined( OS_BEOS ) || defined( OS_MPEIX )
+# define NO_AR
+# define HAVE_AR
+#endif
+
+#ifndef HAVE_AR
+# ifdef OS_AIX
+/* Define these for AIX to get the definitions for both small and big archive
+ * file format variants.
+ */
+# define __AR_SMALL__
+# define __AR_BIG__
+# endif
+# include <ar.h>
+#endif
+
+
+/*
+ * file_collect_dir_content_() - collects directory content information
+ */
+
+int file_collect_dir_content_( file_info_t * const d )
+{
+ LIST * files = L0;
+ PATHNAME f;
+ DIR * dd;
+ STRUCT_DIRENT * dirent;
+ string path[ 1 ];
+ char const * dirstr;
+
+ assert( d );
+ assert( d->is_dir );
+ assert( list_empty( d->files ) );
+
+ dirstr = object_str( d->name );
+
+ memset( (char *)&f, '\0', sizeof( f ) );
+ f.f_dir.ptr = dirstr;
+ f.f_dir.len = strlen( dirstr );
+
+ if ( !*dirstr ) dirstr = ".";
+
+ if ( !( dd = opendir( dirstr ) ) )
+ return -1;
+
+ string_new( path );
+ while ( ( dirent = readdir( dd ) ) )
+ {
+ OBJECT * name;
+ f.f_base.ptr = dirent->d_name
+ #ifdef old_sinix
+ - 2 /* Broken structure definition on sinix. */
+ #endif
+ ;
+ f.f_base.len = strlen( f.f_base.ptr );
+
+ string_truncate( path, 0 );
+ path_build( &f, path );
+ name = object_new( path->value );
+ /* Immediately stat the file to preserve invariants. */
+ if ( file_query( name ) )
+ files = list_push_back( files, name );
+ else
+ object_free( name );
+ }
+ string_free( path );
+
+ closedir( dd );
+
+ d->files = files;
+ return 0;
+}
+
+
+/*
+ * file_dirscan_() - OS specific file_dirscan() implementation
+ */
+
+void file_dirscan_( file_info_t * const d, scanback func, void * closure )
+{
+ assert( d );
+ assert( d->is_dir );
+
+ /* Special case / : enter it */
+ if ( !strcmp( object_str( d->name ), "/" ) )
+ (*func)( closure, d->name, 1 /* stat()'ed */, &d->time );
+}
+
+
+/*
+ * file_mkdir() - create a directory
+ */
+
+int file_mkdir( char const * const path )
+{
+ /* Explicit cast to remove const modifiers and avoid related compiler
+ * warnings displayed when using the intel compiler.
+ */
+ return mkdir( (char *)path, 0777 );
+}
+
+
+/*
+ * file_query_() - query information about a path from the OS
+ */
+
+void file_query_( file_info_t * const info )
+{
+ file_query_posix_( info );
+}
+
+
+/*
+ * file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * Returns the minimum file modification timestamp resolution supported by this
+ * Boost Jam implementation. File modification timestamp changes of less than
+ * the returned value might not be recognized.
+ *
+ * Does not take into consideration any OS or file system related restrictions.
+ *
+ * Return value 0 indicates that any value supported by the OS is also supported
+ * here.
+ */
+
+void file_supported_fmt_resolution( timestamp * const t )
+{
+ /* The current implementation does not support file modification timestamp
+ * resolution of less than one second.
+ */
+ timestamp_init( t, 1, 0 );
+}
+
+
+/*
+ * file_archscan() - scan an archive for files
+ */
+
+#ifndef AIAMAG /* God-fearing UNIX */
+
+#define SARFMAG 2
+#define SARHDR sizeof( struct ar_hdr )
+
+void file_archscan( char const * archive, scanback func, void * closure )
+{
+#ifndef NO_AR
+ struct ar_hdr ar_hdr;
+ char * string_table = 0;
+ char buf[ MAXJPATH ];
+ long offset;
+ int fd;
+
+ if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 )
+ return;
+
+ if ( read( fd, buf, SARMAG ) != SARMAG ||
+ strncmp( ARMAG, buf, SARMAG ) )
+ {
+ close( fd );
+ return;
+ }
+
+ offset = SARMAG;
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
+ !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG )
+#ifdef ARFZMAG
+ /* OSF also has a compressed format */
+ && memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG )
+#endif
+ ) )
+ {
+ char lar_name_[ 257 ];
+ char * lar_name = lar_name_ + 1;
+ long lar_date;
+ long lar_size;
+ long lar_offset;
+ char * c;
+ char * src;
+ char * dest;
+
+ strncpy( lar_name, ar_hdr.ar_name, sizeof( ar_hdr.ar_name ) );
+
+ sscanf( ar_hdr.ar_date, "%ld", &lar_date );
+ sscanf( ar_hdr.ar_size, "%ld", &lar_size );
+
+ if ( ar_hdr.ar_name[ 0 ] == '/' )
+ {
+ if ( ar_hdr.ar_name[ 1 ] == '/' )
+ {
+ /* This is the "string table" entry of the symbol table, holding
+ * filename strings longer than 15 characters, i.e. those that
+ * do not fit into ar_name.
+ */
+ string_table = (char *)BJAM_MALLOC_ATOMIC( lar_size );
+ lseek( fd, offset + SARHDR, 0 );
+ if ( read( fd, string_table, lar_size ) != lar_size )
+ printf("error reading string table\n");
+ }
+ else if ( string_table && ar_hdr.ar_name[ 1 ] != ' ' )
+ {
+ /* Long filenames are recognized by "/nnnn" where nnnn is the
+ * offset of the string in the string table represented in ASCII
+ * decimals.
+ */
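+ /* For example, a member whose ar_name is "/36" (hypothetical offset)
+ * takes its real name from the string table starting at offset 36, up
+ * to the terminating '/'.
+ */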
+ dest = lar_name;
+ lar_offset = atoi( lar_name + 1 );
+ src = &string_table[ lar_offset ];
+ while ( *src != '/' )
+ *dest++ = *src++;
+ *dest = '/';
+ }
+ }
+
+ c = lar_name - 1;
+ while ( ( *++c != ' ' ) && ( *c != '/' ) );
+ *c = '\0';
+
+ if ( DEBUG_BINDSCAN )
+ printf( "archive name %s found\n", lar_name );
+
+ sprintf( buf, "%s(%s)", archive, lar_name );
+
+ {
+ OBJECT * const member = object_new( buf );
+ timestamp time;
+ timestamp_init( &time, (time_t)lar_date, 0 );
+ (*func)( closure, member, 1 /* time valid */, &time );
+ object_free( member );
+ }
+
+ offset += SARHDR + ( ( lar_size + 1 ) & ~1 );
+ lseek( fd, offset, 0 );
+ }
+
+ if ( string_table )
+ BJAM_FREE( string_table );
+
+ close( fd );
+#endif /* NO_AR */
+}
+
+#else /* AIAMAG - RS6000 AIX */
+
+static void file_archscan_small( int fd, char const * archive, scanback func,
+ void * closure )
+{
+ struct fl_hdr fl_hdr;
+
+ struct {
+ struct ar_hdr hdr;
+ char pad[ 256 ];
+ } ar_hdr ;
+
+ char buf[ MAXJPATH ];
+ long offset;
+
+ if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ )
+ return;
+
+ sscanf( fl_hdr.fl_fstmoff, "%ld", &offset );
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 &&
+ read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= (int)sizeof( ar_hdr.hdr ) )
+ {
+ long lar_date;
+ int lar_namlen;
+
+ sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
+ sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date );
+ sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset );
+
+ if ( !lar_namlen )
+ continue;
+
+ ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
+
+ sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
+
+ {
+ OBJECT * const member = object_new( buf );
+ timestamp time;
+ timestamp_init( &time, (time_t)lar_date, 0 );
+ (*func)( closure, member, 1 /* time valid */, &time );
+ object_free( member );
+ }
+ }
+}
+
+/* Check for OS versions supporting the big variant. */
+#ifdef AR_HSZ_BIG
+
+static void file_archscan_big( int fd, char const * archive, scanback func,
+ void * closure )
+{
+ struct fl_hdr_big fl_hdr;
+
+ struct {
+ struct ar_hdr_big hdr;
+ char pad[ 256 ];
+ } ar_hdr ;
+
+ char buf[ MAXJPATH ];
+ long long offset;
+
+ if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG ) != FL_HSZ_BIG )
+ return;
+
+ sscanf( fl_hdr.fl_fstmoff, "%lld", &offset );
+
+ if ( DEBUG_BINDSCAN )
+ printf( "scan archive %s\n", archive );
+
+ while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 &&
+ read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) )
+ {
+ long lar_date;
+ int lar_namlen;
+
+ sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
+ sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date );
+ sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset );
+
+ if ( !lar_namlen )
+ continue;
+
+ ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
+
+ sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
+
+ {
+ OBJECT * const member = object_new( buf );
+ timestamp time;
+ timestamp_init( &time, (time_t)lar_date, 0 );
+ (*func)( closure, member, 1 /* time valid */, &time );
+ object_free( member );
+ }
+ }
+}
+
+#endif /* AR_HSZ_BIG */
+
+void file_archscan( char const * archive, scanback func, void * closure )
+{
+ int fd;
+ char fl_magic[ SAIAMAG ];
+
+ if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 )
+ return;
+
+ if ( read( fd, fl_magic, SAIAMAG ) != SAIAMAG ||
+ lseek( fd, 0, SEEK_SET ) == -1 )
+ {
+ close( fd );
+ return;
+ }
+
+ if ( !strncmp( AIAMAG, fl_magic, SAIAMAG ) )
+ {
+ /* read small variant */
+ file_archscan_small( fd, archive, func, closure );
+ }
+#ifdef AR_HSZ_BIG
+ else if ( !strncmp( AIAMAGBIG, fl_magic, SAIAMAG ) )
+ {
+ /* read big variant */
+ file_archscan_big( fd, archive, func, closure );
+ }
+#endif
+
+ close( fd );
+}
+
+#endif /* AIAMAG - RS6000 AIX */
+
+#endif /* USE_FILEUNIX */
diff --git a/tools/build/src/engine/frames.c b/tools/build/src/engine/frames.c
new file mode 100644
index 0000000000..0491c5c322
--- /dev/null
+++ b/tools/build/src/engine/frames.c
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "frames.h"
+
+
+FRAME * frame_before_python_call;
+
+
+void frame_init( FRAME * frame )
+{
+ frame->prev = 0;
+ frame->prev_user = 0;
+ lol_init( frame->args );
+ frame->module = root_module();
+ frame->rulename = "module scope";
+ frame->file = 0;
+ frame->line = -1;
+}
+
+
+void frame_free( FRAME * frame )
+{
+ lol_free( frame->args );
+}
diff --git a/tools/build/src/engine/frames.h b/tools/build/src/engine/frames.h
new file mode 100644
index 0000000000..2e99f17370
--- /dev/null
+++ b/tools/build/src/engine/frames.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef FRAMES_DWA20011021_H
+#define FRAMES_DWA20011021_H
+
+#include "lists.h"
+#include "modules.h"
+#include "object.h"
+
+
+typedef struct frame FRAME;
+
+struct frame
+{
+ FRAME * prev;
+ FRAME * prev_user; /* The nearest enclosing frame for which
+ module->user_module is true. */
+ LOL args[ 1 ];
+ module_t * module;
+ OBJECT * file;
+ int line;
+ char const * rulename;
+};
+
+
+/* When a call into Python is in progress, this variable points to the bjam
+ * frame that was current at the moment of the call. When the call completes,
+ * the variable is not defined. Furthermore, if Jam calls Python which calls Jam
+ * and so on, this variable only keeps the most recent Jam frame.
+ */
+extern FRAME * frame_before_python_call;
+
+
+void frame_init( FRAME * );
+void frame_free( FRAME * );
+
+#endif
diff --git a/tools/build/src/engine/function.c b/tools/build/src/engine/function.c
new file mode 100644
index 0000000000..690855e14c
--- /dev/null
+++ b/tools/build/src/engine/function.c
@@ -0,0 +1,4870 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "function.h"
+
+#include "class.h"
+#include "compile.h"
+#include "constants.h"
+#include "filesys.h"
+#include "frames.h"
+#include "lists.h"
+#include "mem.h"
+#include "pathsys.h"
+#include "rules.h"
+#include "search.h"
+#include "variable.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef OS_CYGWIN
+# include <cygwin/version.h>
+# include <sys/cygwin.h>
+# ifdef CYGWIN_VERSION_CYGWIN_CONV
+# include <errno.h>
+# endif
+# include <windows.h>
+#endif
+
+int glob( char const * s, char const * c );
+void backtrace( FRAME * );
+void backtrace_line( FRAME * );
+
+#define INSTR_PUSH_EMPTY 0
+#define INSTR_PUSH_CONSTANT 1
+#define INSTR_PUSH_ARG 2
+#define INSTR_PUSH_VAR 3
+#define INSTR_PUSH_VAR_FIXED 57
+#define INSTR_PUSH_GROUP 4
+#define INSTR_PUSH_RESULT 5
+#define INSTR_PUSH_APPEND 6
+#define INSTR_SWAP 7
+
+#define INSTR_JUMP_EMPTY 8
+#define INSTR_JUMP_NOT_EMPTY 9
+
+#define INSTR_JUMP 10
+#define INSTR_JUMP_LT 11
+#define INSTR_JUMP_LE 12
+#define INSTR_JUMP_GT 13
+#define INSTR_JUMP_GE 14
+#define INSTR_JUMP_EQ 15
+#define INSTR_JUMP_NE 16
+#define INSTR_JUMP_IN 17
+#define INSTR_JUMP_NOT_IN 18
+
+#define INSTR_JUMP_NOT_GLOB 19
+
+#define INSTR_FOR_INIT 56
+#define INSTR_FOR_LOOP 20
+
+#define INSTR_SET_RESULT 21
+#define INSTR_RETURN 22
+#define INSTR_POP 23
+
+#define INSTR_PUSH_LOCAL 24
+#define INSTR_POP_LOCAL 25
+#define INSTR_SET 26
+#define INSTR_APPEND 27
+#define INSTR_DEFAULT 28
+
+#define INSTR_PUSH_LOCAL_FIXED 58
+#define INSTR_POP_LOCAL_FIXED 59
+#define INSTR_SET_FIXED 60
+#define INSTR_APPEND_FIXED 61
+#define INSTR_DEFAULT_FIXED 62
+
+#define INSTR_PUSH_LOCAL_GROUP 29
+#define INSTR_POP_LOCAL_GROUP 30
+#define INSTR_SET_GROUP 31
+#define INSTR_APPEND_GROUP 32
+#define INSTR_DEFAULT_GROUP 33
+
+#define INSTR_PUSH_ON 34
+#define INSTR_POP_ON 35
+#define INSTR_SET_ON 36
+#define INSTR_APPEND_ON 37
+#define INSTR_DEFAULT_ON 38
+#define INSTR_GET_ON 65
+
+#define INSTR_CALL_RULE 39
+#define INSTR_CALL_MEMBER_RULE 66
+
+#define INSTR_APPLY_MODIFIERS 40
+#define INSTR_APPLY_INDEX 41
+#define INSTR_APPLY_INDEX_MODIFIERS 42
+#define INSTR_APPLY_MODIFIERS_GROUP 43
+#define INSTR_APPLY_INDEX_GROUP 44
+#define INSTR_APPLY_INDEX_MODIFIERS_GROUP 45
+#define INSTR_COMBINE_STRINGS 46
+#define INSTR_GET_GRIST 64
+
+#define INSTR_INCLUDE 47
+#define INSTR_RULE 48
+#define INSTR_ACTIONS 49
+#define INSTR_PUSH_MODULE 50
+#define INSTR_POP_MODULE 51
+#define INSTR_CLASS 52
+#define INSTR_BIND_MODULE_VARIABLES 63
+
+#define INSTR_APPEND_STRINGS 53
+#define INSTR_WRITE_FILE 54
+#define INSTR_OUTPUT_STRINGS 55
+
+typedef struct instruction
+{
+ unsigned int op_code;
+ int arg;
+} instruction;
+
+typedef struct _subfunction
+{
+ OBJECT * name;
+ FUNCTION * code;
+ int local;
+} SUBFUNCTION;
+
+typedef struct _subaction
+{
+ OBJECT * name;
+ FUNCTION * command;
+ int flags;
+} SUBACTION;
+
+#define FUNCTION_BUILTIN 0
+#define FUNCTION_JAM 1
+
+struct argument
+{
+ int flags;
+#define ARG_ONE 0
+#define ARG_OPTIONAL 1
+#define ARG_PLUS 2
+#define ARG_STAR 3
+#define ARG_VARIADIC 4
+ OBJECT * type_name;
+ OBJECT * arg_name;
+ int index;
+};
+
+struct arg_list
+{
+ int size;
+ struct argument * args;
+};
+
+struct _function
+{
+ int type;
+ int reference_count;
+ OBJECT * rulename;
+ struct arg_list * formal_arguments;
+ int num_formal_arguments;
+};
+
+typedef struct _builtin_function
+{
+ FUNCTION base;
+ LIST * ( * func )( FRAME *, int flags );
+ int flags;
+} BUILTIN_FUNCTION;
+
+typedef struct _jam_function
+{
+ FUNCTION base;
+ int code_size;
+ instruction * code;
+ int num_constants;
+ OBJECT * * constants;
+ int num_subfunctions;
+ SUBFUNCTION * functions;
+ int num_subactions;
+ SUBACTION * actions;
+ FUNCTION * generic;
+ OBJECT * file;
+ int line;
+} JAM_FUNCTION;
+
+
+#ifdef HAVE_PYTHON
+
+#define FUNCTION_PYTHON 2
+
+typedef struct _python_function
+{
+ FUNCTION base;
+ PyObject * python_function;
+} PYTHON_FUNCTION;
+
+static LIST * call_python_function( PYTHON_FUNCTION *, FRAME * );
+
+#endif
+
+
+struct _stack
+{
+ void * data;
+};
+
+static void * stack;
+
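+/* The interpreter stack lives in a single 2 MB block: stack_global() points
+ * the STACK at the end of that block and stack_allocate()/stack_deallocate()
+ * move the data pointer down/up, i.e. the stack grows towards lower
+ * addresses.
+ */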
+STACK * stack_global()
+{
+ static STACK result;
+ if ( !stack )
+ {
+ int const size = 1 << 21;
+ stack = BJAM_MALLOC( size );
+ result.data = (char *)stack + size;
+ }
+ return &result;
+}
+
+static void check_alignment( STACK * s )
+{
+ assert( (size_t)s->data % sizeof( LIST * ) == 0 );
+}
+
+void * stack_allocate( STACK * s, int size )
+{
+ check_alignment( s );
+ s->data = (char *)s->data - size;
+ check_alignment( s );
+ return s->data;
+}
+
+void stack_deallocate( STACK * s, int size )
+{
+ check_alignment( s );
+ s->data = (char *)s->data + size;
+ check_alignment( s );
+}
+
+void stack_push( STACK * s, LIST * l )
+{
+ *(LIST * *)stack_allocate( s, sizeof( LIST * ) ) = l;
+}
+
+LIST * stack_pop( STACK * s )
+{
+ LIST * const result = *(LIST * *)s->data;
+ stack_deallocate( s, sizeof( LIST * ) );
+ return result;
+}
+
+LIST * stack_top( STACK * s )
+{
+ check_alignment( s );
+ return *(LIST * *)s->data;
+}
+
+LIST * stack_at( STACK * s, int n )
+{
+ check_alignment( s );
+ return *( (LIST * *)s->data + n );
+}
+
+void stack_set( STACK * s, int n, LIST * value )
+{
+ check_alignment( s );
+ *((LIST * *)s->data + n) = value;
+}
+
+void * stack_get( STACK * s )
+{
+ check_alignment( s );
+ return s->data;
+}
+
+LIST * frame_get_local( FRAME * frame, int idx )
+{
+ /* The only local variables are the arguments. */
+ return list_copy( lol_get( frame->args, idx ) );
+}
+
+static OBJECT * function_get_constant( JAM_FUNCTION * function, int idx )
+{
+ return function->constants[ idx ];
+}
+
+static LIST * function_get_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx )
+{
+ return list_copy( var_get( frame->module, function->constants[ idx ] ) );
+}
+
+static void function_set_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ var_set( frame->module, function->constants[ idx ], value, VAR_SET );
+}
+
+static LIST * function_swap_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ return var_swap( frame->module, function->constants[ idx ], value );
+}
+
+static void function_append_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ var_set( frame->module, function->constants[ idx ], value, VAR_APPEND );
+}
+
+static void function_default_variable( JAM_FUNCTION * function, FRAME * frame,
+ int idx, LIST * value )
+{
+ var_set( frame->module, function->constants[ idx ], value, VAR_DEFAULT );
+}
+
+static void function_set_rule( JAM_FUNCTION * function, FRAME * frame,
+ STACK * s, int idx )
+{
+ SUBFUNCTION * sub = function->functions + idx;
+ new_rule_body( frame->module, sub->name, sub->code, !sub->local );
+}
+
+static void function_set_actions( JAM_FUNCTION * function, FRAME * frame,
+ STACK * s, int idx )
+{
+ SUBACTION * sub = function->actions + idx;
+ LIST * bindlist = stack_pop( s );
+ new_rule_actions( frame->module, sub->name, sub->command, bindlist,
+ sub->flags );
+}
+
+
+/*
+ * Returns the argument index if name is "<", ">", "1", "2", ... or "19";
+ * otherwise returns -1.
+ */
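+/* For example: "<" -> 0, ">" -> 1, "3" -> 2 and "12" -> 11, while names such
+ * as "0", "23" or "x" yield -1.
+ */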
+
+static int get_argument_index( char const * s )
+{
+ if ( s[ 0 ] != '\0')
+ {
+ if ( s[ 1 ] == '\0' )
+ {
+ switch ( s[ 0 ] )
+ {
+ case '<': return 0;
+ case '>': return 1;
+
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ return s[ 0 ] - '1';
+ }
+ }
+ else if ( s[ 0 ] == '1' && s[ 2 ] == '\0' )
+ {
+ switch( s[ 1 ] )
+ {
+ case '0':
+ case '1':
+ case '2':
+ case '3':
+ case '4':
+ case '5':
+ case '6':
+ case '7':
+ case '8':
+ case '9':
+ return s[ 1 ] - '0' + 10 - 1;
+ }
+ }
+ }
+ return -1;
+}
+
+static LIST * function_get_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name )
+{
+ int const idx = get_argument_index( object_str( name ) );
+ return idx == -1
+ ? list_copy( var_get( frame->module, name ) )
+ : list_copy( lol_get( frame->args, idx ) );
+}
+
+static void function_set_named_variable( JAM_FUNCTION * function, FRAME * frame,
+ OBJECT * name, LIST * value)
+{
+ var_set( frame->module, name, value, VAR_SET );
+}
+
+static LIST * function_swap_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name, LIST * value )
+{
+ return var_swap( frame->module, name, value );
+}
+
+static void function_append_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name, LIST * value)
+{
+ var_set( frame->module, name, value, VAR_APPEND );
+}
+
+static void function_default_named_variable( JAM_FUNCTION * function,
+ FRAME * frame, OBJECT * name, LIST * value )
+{
+ var_set( frame->module, name, value, VAR_DEFAULT );
+}
+
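+/* Calls a rule whose expanded name list is on the stack, followed by n_args
+ * argument lists. Only the first element of the name list is used as the rule
+ * name; any remaining elements are prepended to the rule's first argument
+ * list.
+ */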
+static LIST * function_call_rule( JAM_FUNCTION * function, FRAME * frame,
+ STACK * s, int n_args, char const * unexpanded, OBJECT * file, int line )
+{
+ FRAME inner[ 1 ];
+ int i;
+ LIST * first = stack_pop( s );
+ LIST * result = L0;
+ OBJECT * rulename;
+ LIST * trailing;
+
+ frame->file = file;
+ frame->line = line;
+
+ if ( list_empty( first ) )
+ {
+ backtrace_line( frame );
+ printf( "warning: rulename %s expands to empty string\n", unexpanded );
+ backtrace( frame );
+ list_free( first );
+ for ( i = 0; i < n_args; ++i )
+ list_free( stack_pop( s ) );
+ return result;
+ }
+
+ rulename = object_copy( list_front( first ) );
+
+ frame_init( inner );
+ inner->prev = frame;
+ inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
+ inner->module = frame->module; /* This gets fixed up in evaluate_rule(). */
+
+ for ( i = 0; i < n_args; ++i )
+ lol_add( inner->args, stack_at( s, n_args - i - 1 ) );
+
+ for ( i = 0; i < n_args; ++i )
+ stack_pop( s );
+
+ trailing = list_pop_front( first );
+ if ( trailing )
+ {
+ if ( inner->args->count == 0 )
+ lol_add( inner->args, trailing );
+ else
+ {
+ LIST * * const l = &inner->args->list[ 0 ];
+ *l = list_append( trailing, *l );
+ }
+ }
+
+ result = evaluate_rule( bindrule( rulename, inner->module ), rulename, inner );
+ frame_free( inner );
+ object_free( rulename );
+ return result;
+}
+
+static LIST * function_call_member_rule( JAM_FUNCTION * function, FRAME * frame, STACK * s, int n_args, OBJECT * rulename, OBJECT * file, int line )
+{
+ FRAME inner[ 1 ];
+ int i;
+ LIST * first = stack_pop( s );
+ LIST * result = L0;
+ LIST * trailing;
+ RULE * rule;
+ module_t * module;
+ OBJECT * real_rulename = 0;
+
+ frame->file = file;
+ frame->line = line;
+
+ if ( list_empty( first ) )
+ {
+ backtrace_line( frame );
+ printf( "warning: object is empty\n" );
+ backtrace( frame );
+
+ list_free( first );
+
+ for( i = 0; i < n_args; ++i )
+ {
+ list_free( stack_pop( s ) );
+ }
+
+ return result;
+ }
+
+ /* FIXME: handle generic case */
+ assert( list_length( first ) == 1 );
+
+ module = bindmodule( list_front( first ) );
+ if ( module->class_module )
+ {
+ rule = bindrule( rulename, module );
+ real_rulename = object_copy( function_rulename( rule->procedure ) );
+ }
+ else
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append( buf, object_str( list_front( first ) ) );
+ string_push_back( buf, '.' );
+ string_append( buf, object_str( rulename ) );
+ real_rulename = object_new( buf->value );
+ string_free( buf );
+ rule = bindrule( real_rulename, frame->module );
+ }
+
+ frame_init( inner );
+
+ inner->prev = frame;
+ inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
+ inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */
+
+ for( i = 0; i < n_args; ++i )
+ {
+ lol_add( inner->args, stack_at( s, n_args - i - 1 ) );
+ }
+
+ for( i = 0; i < n_args; ++i )
+ {
+ stack_pop( s );
+ }
+
+ if ( list_length( first ) > 1 )
+ {
+ string buf[ 1 ];
+ LIST * trailing = L0;
+ LISTITER iter = list_begin( first ), end = list_end( first );
+ iter = list_next( iter );
+ string_new( buf );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ string_append( buf, object_str( list_item( iter ) ) );
+ string_push_back( buf, '.' );
+ string_append( buf, object_str( rulename ) );
+ trailing = list_push_back( trailing, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ string_free( buf );
+ if ( inner->args->count == 0 )
+ lol_add( inner->args, trailing );
+ else
+ {
+ LIST * * const l = &inner->args->list[ 0 ];
+ *l = list_append( trailing, *l );
+ }
+ }
+
+ result = evaluate_rule( rule, real_rulename, inner );
+ frame_free( inner );
+ object_free( rulename );
+ object_free( real_rulename );
+ return result;
+}
+
+
+/* Variable expansion */
+
+typedef struct
+{
+ int sub1;
+ int sub2;
+} subscript_t;
+
+typedef struct
+{
+ PATHNAME f; /* :GDBSMR -- pieces */
+ char parent; /* :P -- go to parent directory */
+ char filemods; /* one of the above applied */
+ char downshift; /* :L -- downshift result */
+ char upshift; /* :U -- upshift result */
+ char to_slashes; /* :T -- convert "\" to "/" */
+ char to_windows; /* :W -- convert cygwin to native paths */
+ PATHPART empty; /* :E -- default for empties */
+ PATHPART join; /* :J -- join list with char */
+} VAR_EDITS;
+
+static LIST * apply_modifiers_impl( LIST * result, string * buf,
+ VAR_EDITS * edits, int n, LISTITER iter, LISTITER end );
+static void get_iters( subscript_t const subscript, LISTITER * const first,
+ LISTITER * const last, int const length );
+
+
+/*
+ * var_edit_parse() - parse : modifiers into PATHNAME structure
+ *
+ * The : modifiers in a $(varname:modifier) currently support replacing or
+ * omitting elements of a filename, and so they are parsed into a PATHNAME
+ * structure (which contains pointers into the original string).
+ *
+ * Modifiers of the form "X=value" replace the component X with the given value.
+ * Modifiers without the "=value" cause everything but the component X to be
+ * omitted. X is one of:
+ *
+ * G <grist>
+ * D directory name
+ * B base name
+ * S .suffix
+ * M (member)
+ * R root directory - prepended to whole path
+ *
+ * This routine sets:
+ *
+ * f->f_xxx.ptr = 0
+ * f->f_xxx.len = 0
+ * -> leave the original component xxx
+ *
+ * f->f_xxx.ptr = string
+ * f->f_xxx.len = strlen( string )
+ * -> replace component xxx with string
+ *
+ * f->f_xxx.ptr = ""
+ * f->f_xxx.len = 0
+ * -> omit component xxx
+ *
+ * var_edit_file() below and path_build() obligingly follow this convention.
+ */
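+/* Illustrative examples, assuming a value of "dir/name.c":
+ *
+ * :S=.obj -> "dir/name.obj" (replace the suffix)
+ * :B -> "name" (keep only the base name)
+ * :D -> "dir" (keep only the directory)
+ * :BS -> "name.c" (keep base name and suffix)
+ */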
+
+static int var_edit_parse( char const * mods, VAR_EDITS * edits, int havezeroed
+ )
+{
+ while ( *mods )
+ {
+ PATHPART * fp;
+
+ switch ( *mods++ )
+ {
+ case 'L': edits->downshift = 1; continue;
+ case 'U': edits->upshift = 1; continue;
+ case 'P': edits->parent = edits->filemods = 1; continue;
+ case 'E': fp = &edits->empty; goto strval;
+ case 'J': fp = &edits->join; goto strval;
+ case 'G': fp = &edits->f.f_grist; goto fileval;
+ case 'R': fp = &edits->f.f_root; goto fileval;
+ case 'D': fp = &edits->f.f_dir; goto fileval;
+ case 'B': fp = &edits->f.f_base; goto fileval;
+ case 'S': fp = &edits->f.f_suffix; goto fileval;
+ case 'M': fp = &edits->f.f_member; goto fileval;
+ case 'T': edits->to_slashes = 1; continue;
+ case 'W': edits->to_windows = 1; continue;
+ default:
+ continue; /* Should complain, but so what... */
+ }
+
+ fileval:
+ /* Handle :CHARS, where each char (without a following =) selects a
+ * particular file path element. On the first such char, we deselect all
+ * others (by setting ptr = "", len = 0) and for each char we select
+ * that element (by setting ptr = 0).
+ */
+ edits->filemods = 1;
+
+ if ( *mods != '=' )
+ {
+ if ( !havezeroed++ )
+ {
+ int i;
+ for ( i = 0; i < 6; ++i )
+ {
+ edits->f.part[ i ].len = 0;
+ edits->f.part[ i ].ptr = "";
+ }
+ }
+
+ fp->ptr = 0;
+ continue;
+ }
+
+ strval:
+ /* Handle :X=value, or :X */
+ if ( *mods != '=' )
+ {
+ fp->ptr = "";
+ fp->len = 0;
+ }
+ else
+ {
+ fp->ptr = ++mods;
+ fp->len = strlen( mods );
+ mods += fp->len;
+ }
+ }
+
+ return havezeroed;
+}
+
+
+/*
+ * var_edit_file() - copy input target name to output, modifying filename.
+ */
+
+static void var_edit_file( char const * in, string * out, VAR_EDITS * edits )
+{
+ if ( edits->filemods )
+ {
+ PATHNAME pathname;
+
+ /* Parse apart original filename, putting parts into "pathname". */
+ path_parse( in, &pathname );
+
+ /* Replace any pathname with edits->f */
+ if ( edits->f.f_grist .ptr ) pathname.f_grist = edits->f.f_grist;
+ if ( edits->f.f_root .ptr ) pathname.f_root = edits->f.f_root;
+ if ( edits->f.f_dir .ptr ) pathname.f_dir = edits->f.f_dir;
+ if ( edits->f.f_base .ptr ) pathname.f_base = edits->f.f_base;
+ if ( edits->f.f_suffix.ptr ) pathname.f_suffix = edits->f.f_suffix;
+ if ( edits->f.f_member.ptr ) pathname.f_member = edits->f.f_member;
+
+ /* If requested, modify pathname to point to parent. */
+ if ( edits->parent )
+ path_parent( &pathname );
+
+ /* Put filename back together. */
+ path_build( &pathname, out );
+ }
+ else
+ string_append( out, in );
+}
+
+/*
+ * var_edit_cyg2win() - conversion of a cygwin to a Windows path.
+ *
+ * FIXME: skip grist
+ */
+
+#ifdef OS_CYGWIN
+static void var_edit_cyg2win( string * out, size_t pos, VAR_EDITS * edits )
+{
+ if ( edits->to_windows )
+ {
+ #ifdef CYGWIN_VERSION_CYGWIN_CONV
+ /* Use new Cygwin API added with Cygwin 1.7. Old one had no error
+ * handling and has been deprecated.
+ */
+ char * dynamicBuffer = 0;
+ char buffer[ MAX_PATH + 1001 ];
+ char const * result = buffer;
+ cygwin_conv_path_t const conv_type = CCP_POSIX_TO_WIN_A | CCP_RELATIVE;
+ ssize_t const apiResult = cygwin_conv_path( conv_type, out->value + pos,
+ buffer, sizeof( buffer ) / sizeof( *buffer ) );
+ assert( apiResult == 0 || apiResult == -1 );
+ assert( apiResult || strlen( result ) < sizeof( buffer ) / sizeof(
+ *buffer ) );
+ if ( apiResult )
+ {
+ result = 0;
+ if ( errno == ENOSPC )
+ {
+ ssize_t const size = cygwin_conv_path( conv_type, out->value +
+ pos, NULL, 0 );
+ assert( size >= -1 );
+ if ( size > 0 )
+ {
+ dynamicBuffer = (char *)BJAM_MALLOC_ATOMIC( size );
+ if ( dynamicBuffer )
+ {
+ ssize_t const apiResult = cygwin_conv_path( conv_type,
+ out->value + pos, dynamicBuffer, size );
+ assert( apiResult == 0 || apiResult == -1 );
+ if ( !apiResult )
+ {
+ result = dynamicBuffer;
+ assert( strlen( result ) < size );
+ }
+ }
+ }
+ }
+ }
+ #else /* CYGWIN_VERSION_CYGWIN_CONV */
+ /* Use old Cygwin API deprecated with Cygwin 1.7. */
+ char result[ MAX_PATH + 1 ];
+ cygwin_conv_to_win32_path( out->value + pos, result );
+ assert( strlen( result ) <= MAX_PATH );
+ #endif /* CYGWIN_VERSION_CYGWIN_CONV */
+ if ( result )
+ {
+ string_truncate( out, pos );
+ string_append( out, result );
+ edits->to_slashes = 0;
+ }
+ #ifdef CYGWIN_VERSION_CYGWIN_CONV
+ if ( dynamicBuffer )
+ BJAM_FREE( dynamicBuffer );
+ #endif
+ }
+}
+#endif /* OS_CYGWIN */
+
+
+/*
+ * var_edit_shift() - do upshift/downshift & other mods.
+ */
+
+static void var_edit_shift( string * out, size_t pos, VAR_EDITS * edits )
+{
+#ifdef OS_CYGWIN
+ var_edit_cyg2win( out, pos, edits );
+#endif
+
+ if ( edits->upshift || edits->downshift || edits->to_slashes )
+ {
+ /* Handle upshifting, downshifting and slash translation now. */
+ char * p;
+ for ( p = out->value + pos; *p; ++p )
+ {
+ if ( edits->upshift )
+ *p = toupper( *p );
+ else if ( edits->downshift )
+ *p = tolower( *p );
+ if ( edits->to_slashes && ( *p == '\\' ) )
+ *p = '/';
+ }
+ }
+}
+
+
+/*
+ * Reads n LISTs from the top of the STACK and combines them to form VAR_EDITS.
+ * Returns the number of VAR_EDITS pushed onto the STACK.
+ */
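+/* For example, with two modifier lists ( "B" "S" ) and ( "U" ) on the stack,
+ * 2 * 1 = 2 VAR_EDITS are produced: one parsed from "B" and "U", and one
+ * parsed from "S" and "U".
+ */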
+
+static int expand_modifiers( STACK * s, int n )
+{
+ int i;
+ int total = 1;
+ LIST * * args = stack_get( s );
+ for ( i = 0; i < n; ++i )
+ total *= list_length( args[ i ] );
+
+ if ( total != 0 )
+ {
+ VAR_EDITS * out = stack_allocate( s, total * sizeof( VAR_EDITS ) );
+ LISTITER * iter = stack_allocate( s, n * sizeof( LIST * ) );
+ for ( i = 0; i < n; ++i )
+ iter[ i ] = list_begin( args[ i ] );
+ i = 0;
+ {
+ int havezeroed;
+ loop:
+ memset( out, 0, sizeof( *out ) );
+ havezeroed = 0;
+ for ( i = 0; i < n; ++i )
+ havezeroed = var_edit_parse( object_str( list_item( iter[ i ] )
+ ), out, havezeroed );
+ ++out;
+ while ( --i >= 0 )
+ {
+ if ( list_next( iter[ i ] ) != list_end( args[ i ] ) )
+ {
+ iter[ i ] = list_next( iter[ i ] );
+ goto loop;
+ }
+ iter[ i ] = list_begin( args[ i ] );
+ }
+ }
+ stack_deallocate( s, n * sizeof( LIST * ) );
+ }
+ return total;
+}
+
+static LIST * apply_modifiers( STACK * s, int n )
+{
+ LIST * value = stack_top( s );
+ LIST * result = L0;
+ VAR_EDITS * const edits = (VAR_EDITS *)( (LIST * *)stack_get( s ) + 1 );
+ string buf[ 1 ];
+ string_new( buf );
+ result = apply_modifiers_impl( result, buf, edits, n, list_begin( value ),
+ list_end( value ) );
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * Parse a string of the form "1-2", "-2--1", "2-" and return the two
+ * subscripts.
+ */
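+/* For example: "2" selects just the second element, "2-" selects from the
+ * second element to the end, "1-2" selects the first two elements and
+ * "-2--1" selects the last two (negative indices count from the end of the
+ * list).
+ */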
+
+subscript_t parse_subscript( char const * s )
+{
+ subscript_t result;
+ result.sub1 = 0;
+ result.sub2 = 0;
+ do /* so we can use "break" */
+ {
+ /* Allow negative subscripts. */
+ if ( !isdigit( *s ) && ( *s != '-' ) )
+ {
+ result.sub2 = 0;
+ break;
+ }
+ result.sub1 = atoi( s );
+
+ /* Skip over the first symbol, which is either a digit or dash. */
+ ++s;
+ while ( isdigit( *s ) ) ++s;
+
+ if ( *s == '\0' )
+ {
+ result.sub2 = result.sub1;
+ break;
+ }
+
+ if ( *s != '-' )
+ {
+ result.sub2 = 0;
+ break;
+ }
+
+ ++s;
+
+ if ( *s == '\0' )
+ {
+ result.sub2 = -1;
+ break;
+ }
+
+ if ( !isdigit( *s ) && ( *s != '-' ) )
+ {
+ result.sub2 = 0;
+ break;
+ }
+
+ /* First, compute the index of the last element. */
+ result.sub2 = atoi( s );
+ while ( isdigit( *++s ) );
+
+ if ( *s != '\0' )
+ result.sub2 = 0;
+
+ } while ( 0 );
+ return result;
+}
+
+static LIST * apply_subscript( STACK * s )
+{
+ LIST * value = stack_top( s );
+ LIST * indices = stack_at( s, 1 );
+ LIST * result = L0;
+ int length = list_length( value );
+ string buf[ 1 ];
+ LISTITER indices_iter = list_begin( indices );
+ LISTITER const indices_end = list_end( indices );
+ string_new( buf );
+ for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter
+ ) )
+ {
+ LISTITER iter = list_begin( value );
+ LISTITER end = list_end( value );
+ subscript_t const subscript = parse_subscript( object_str( list_item(
+ indices_iter ) ) );
+ get_iters( subscript, &iter, &end, length );
+ for ( ; iter != end; iter = list_next( iter ) )
+ result = list_push_back( result, object_copy( list_item( iter ) ) );
+ }
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * Takes the list beginning at *first (with the given length) and computes the
+ * iterator range selected by subscript. The resulting range is written back
+ * to *first and *last.
+ */
+
+static void get_iters( subscript_t const subscript, LISTITER * const first,
+ LISTITER * const last, int const length )
+{
+ int start;
+ int size;
+ LISTITER iter;
+ LISTITER end;
+ {
+
+ if ( subscript.sub1 < 0 )
+ start = length + subscript.sub1;
+ else if ( subscript.sub1 > length )
+ start = length;
+ else
+ start = subscript.sub1 - 1;
+
+ size = subscript.sub2 < 0
+ ? length + 1 + subscript.sub2 - start
+ : subscript.sub2 - start;
+
+ /*
+ * HACK: When the first subscript is before the start of the list, it
+ * magically becomes the beginning of the list. This is inconsistent,
+ * but needed for backwards compatibility.
+ */
+ if ( start < 0 )
+ start = 0;
+
+ /* The "sub2 < 0" test handles the semantic error of sub2 < sub1. */
+ if ( size < 0 )
+ size = 0;
+
+ if ( start + size > length )
+ size = length - start;
+ }
+
+ iter = *first;
+ while ( start-- > 0 )
+ iter = list_next( iter );
+
+ end = iter;
+ while ( size-- > 0 )
+ end = list_next( end );
+
+ *first = iter;
+ *last = end;
+}
+
+static LIST * apply_modifiers_empty( LIST * result, string * buf,
+ VAR_EDITS * edits, int n )
+{
+ int i;
+ for ( i = 0; i < n; ++i )
+ {
+ if ( edits[ i ].empty.ptr )
+ {
+ /** FIXME: is empty.ptr always null-terminated? */
+ var_edit_file( edits[ i ].empty.ptr, buf, edits + i );
+ var_edit_shift( buf, 0, edits + i );
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ return result;
+}
+
+static LIST * apply_modifiers_non_empty( LIST * result, string * buf,
+ VAR_EDITS * edits, int n, LISTITER begin, LISTITER end )
+{
+ int i;
+ LISTITER iter;
+ for ( i = 0; i < n; ++i )
+ {
+ if ( edits[ i ].join.ptr )
+ {
+ var_edit_file( object_str( list_item( begin ) ), buf, edits + i );
+ var_edit_shift( buf, 0, edits + i );
+ for ( iter = list_next( begin ); iter != end; iter = list_next( iter
+ ) )
+ {
+ size_t size;
+ string_append( buf, edits[ i ].join.ptr );
+ size = buf->size;
+ var_edit_file( object_str( list_item( iter ) ), buf, edits + i
+ );
+ var_edit_shift( buf, size, edits + i );
+ }
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ else
+ {
+ for ( iter = begin; iter != end; iter = list_next( iter ) )
+ {
+ var_edit_file( object_str( list_item( iter ) ), buf, edits + i );
+ var_edit_shift( buf, 0, edits + i );
+ result = list_push_back( result, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ return result;
+}
+
+static LIST * apply_modifiers_impl( LIST * result, string * buf,
+ VAR_EDITS * edits, int n, LISTITER iter, LISTITER end )
+{
+ return iter == end
+ ? apply_modifiers_empty( result, buf, edits, n )
+ : apply_modifiers_non_empty( result, buf, edits, n, iter, end );
+}
+
+static LIST * apply_subscript_and_modifiers( STACK * s, int n )
+{
+ LIST * const value = stack_top( s );
+ LIST * const indices = stack_at( s, 1 );
+ LIST * result = L0;
+ VAR_EDITS * const edits = (VAR_EDITS *)((LIST * *)stack_get( s ) + 2);
+ int const length = list_length( value );
+ string buf[ 1 ];
+ LISTITER indices_iter = list_begin( indices );
+ LISTITER const indices_end = list_end( indices );
+ string_new( buf );
+ for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter
+ ) )
+ {
+ LISTITER iter = list_begin( value );
+ LISTITER end = list_end( value );
+ subscript_t const sub = parse_subscript( object_str( list_item(
+ indices_iter ) ) );
+ get_iters( sub, &iter, &end, length );
+ result = apply_modifiers_impl( result, buf, edits, n, iter, end );
+ }
+ string_free( buf );
+ return result;
+}
+
+
+/*
+ * expand() - expands a list of concatenated strings and variable references
+ *
+ * Takes a list of expansion items - each representing one element to be
+ * concatenated and each containing a list of its values. Returns a list of all
+ * possible values constructed by selecting a single value from each of the
+ * elements and concatenating them together.
+ *
+ * For example, in the following code:
+ *
+ * local a = one two three four ;
+ * local b = foo bar ;
+ * ECHO /$(a)/$(b)/$(a)/ ;
+ *
+ * When constructing the result of /$(a)/$(b)/$(a)/ this function would get
+ * called with the following 7 expansion items:
+ * 1. /
+ * 2. one two three four
+ * 3. /
+ * 4. foo bar
+ * 5. /
+ * 6. one two three four
+ * 7. /
+ *
+ * And would result in a list containing 32 values:
+ * 1. /one/foo/one/
+ * 2. /one/foo/two/
+ * 3. /one/foo/three/
+ * 4. /one/foo/four/
+ * 5. /one/bar/one/
+ * ...
+ *
+ */
+
+typedef struct expansion_item
+{
+ /* Item's value list initialized prior to calling expand(). */
+ LIST * values;
+
+ /* Internal data initialized and used inside expand(). */
+ LISTITER current; /* Currently used value. */
+ int size; /* Concatenated string length prior to concatenating the
+ * item's current value.
+ */
+} expansion_item;
+
+static LIST * expand( expansion_item * items, int const length )
+{
+ LIST * result = L0;
+ string buf[ 1 ];
+ int size = 0;
+ int i;
+
+ assert( length > 0 );
+ for ( i = 0; i < length; ++i )
+ {
+ LISTITER iter = list_begin( items[ i ].values );
+ LISTITER const end = list_end( items[ i ].values );
+
+ /* If any of the items has no values - the result is an empty list. */
+ if ( iter == end ) return L0;
+
+ /* Set each item's 'current' to its first listed value. This indicates
+ * each item's next value to be used when constructing the list of all
+ * possible concatenated values.
+ */
+ items[ i ].current = iter;
+
+ /* Calculate the longest concatenated string length - to know how much
+ * memory we need to allocate as a buffer for holding the concatenated
+ * strings.
+ */
+ {
+ int max = 0;
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ int const len = strlen( object_str( list_item( iter ) ) );
+ if ( len > max ) max = len;
+ }
+ size += max;
+ }
+ }
+
+ string_new( buf );
+ string_reserve( buf, size );
+
+ i = 0;
+ while ( i >= 0 )
+ {
+ for ( ; i < length; ++i )
+ {
+ items[ i ].size = buf->size;
+ string_append( buf, object_str( list_item( items[ i ].current ) ) );
+ }
+ result = list_push_back( result, object_new( buf->value ) );
+ while ( --i >= 0 )
+ {
+ if ( list_next( items[ i ].current ) != list_end( items[ i ].values
+ ) )
+ {
+ items[ i ].current = list_next( items[ i ].current );
+ string_truncate( buf, items[ i ].size );
+ break;
+ }
+ else
+ items[ i ].current = list_begin( items[ i ].values );
+ }
+ }
+
+ string_free( buf );
+ return result;
+}
+
+static void combine_strings( STACK * s, int n, string * out )
+{
+ int i;
+ for ( i = 0; i < n; ++i )
+ {
+ LIST * const values = stack_pop( s );
+ LISTITER iter = list_begin( values );
+ LISTITER const end = list_end( values );
+ if ( iter != end )
+ {
+ string_append( out, object_str( list_item( iter ) ) );
+ for ( iter = list_next( iter ); iter != end; iter = list_next( iter
+ ) )
+ {
+ string_push_back( out, ' ' );
+ string_append( out, object_str( list_item( iter ) ) );
+ }
+ list_free( values );
+ }
+ }
+}
+
+struct dynamic_array
+{
+ int size;
+ int capacity;
+ void * data;
+};
+
+static void dynamic_array_init( struct dynamic_array * array )
+{
+ array->size = 0;
+ array->capacity = 0;
+ array->data = 0;
+}
+
+static void dynamic_array_free( struct dynamic_array * array )
+{
+ BJAM_FREE( array->data );
+}
+
+static void dynamic_array_push_impl( struct dynamic_array * const array,
+ void const * const value, int const unit_size )
+{
+ if ( array->capacity == 0 )
+ {
+ array->capacity = 2;
+ array->data = BJAM_MALLOC( array->capacity * unit_size );
+ }
+ else if ( array->capacity == array->size )
+ {
+ void * new_data;
+ array->capacity *= 2;
+ new_data = BJAM_MALLOC( array->capacity * unit_size );
+ memcpy( new_data, array->data, array->size * unit_size );
+ BJAM_FREE( array->data );
+ array->data = new_data;
+ }
+ memcpy( (char *)array->data + array->size * unit_size, value, unit_size );
+ ++array->size;
+}
+
+#define dynamic_array_push( array, value ) (dynamic_array_push_impl(array, &value, sizeof(value)))
+#define dynamic_array_at( type, array, idx ) (((type *)(array)->data)[idx])
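+/* Minimal usage sketch (illustrative):
+ *
+ *     struct dynamic_array a[ 1 ];
+ *     int x = 42;
+ *     dynamic_array_init( a );
+ *     dynamic_array_push( a, x );            appends a copy of x
+ *     ... dynamic_array_at( int, a, 0 ) ...  reads it back as an int
+ *     dynamic_array_free( a );
+ */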
+
+
+/*
+ * struct compiler
+ */
+
+struct label_info
+{
+ int absolute_position;
+ struct dynamic_array uses[ 1 ];
+};
+
+struct stored_rule
+{
+ OBJECT * name;
+ PARSE * parse;
+ int num_arguments;
+ struct arg_list * arguments;
+ int local;
+};
+
+typedef struct compiler
+{
+ struct dynamic_array code[ 1 ];
+ struct dynamic_array constants[ 1 ];
+ struct dynamic_array labels[ 1 ];
+ struct dynamic_array rules[ 1 ];
+ struct dynamic_array actions[ 1 ];
+} compiler;
+
+static void compiler_init( compiler * c )
+{
+ dynamic_array_init( c->code );
+ dynamic_array_init( c->constants );
+ dynamic_array_init( c->labels );
+ dynamic_array_init( c->rules );
+ dynamic_array_init( c->actions );
+}
+
+static void compiler_free( compiler * c )
+{
+ int i;
+ dynamic_array_free( c->actions );
+ dynamic_array_free( c->rules );
+ for ( i = 0; i < c->labels->size; ++i )
+ dynamic_array_free( dynamic_array_at( struct label_info, c->labels, i
+ ).uses );
+ dynamic_array_free( c->labels );
+ dynamic_array_free( c->constants );
+ dynamic_array_free( c->code );
+}
+
+static void compile_emit_instruction( compiler * c, instruction instr )
+{
+ dynamic_array_push( c->code, instr );
+}
+
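+/* Branch targets are resolved in two steps: compile_new_label() reserves a
+ * label, compile_emit_branch() emits a jump whose relative offset is either
+ * computed immediately (if the label is already placed) or recorded in the
+ * label's "uses" array, and compile_set_label() later patches all recorded
+ * forward branches with the final offset.
+ */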
+static int compile_new_label( compiler * c )
+{
+ int result = c->labels->size;
+ struct label_info info;
+ info.absolute_position = -1;
+ dynamic_array_init( info.uses );
+ dynamic_array_push( c->labels, info );
+ return result;
+}
+
+static void compile_set_label( compiler * c, int label )
+{
+ struct label_info * const l = &dynamic_array_at( struct label_info,
+ c->labels, label );
+ int const pos = c->code->size;
+ int i;
+ assert( l->absolute_position == -1 );
+ l->absolute_position = pos;
+ for ( i = 0; i < l->uses->size; ++i )
+ {
+ int id = dynamic_array_at( int, l->uses, i );
+ int offset = (int)( pos - id - 1 );
+ dynamic_array_at( instruction, c->code, id ).arg = offset;
+ }
+}
+
+static void compile_emit( compiler * c, unsigned int op_code, int arg )
+{
+ instruction instr;
+ instr.op_code = op_code;
+ instr.arg = arg;
+ compile_emit_instruction( c, instr );
+}
+
+static void compile_emit_branch( compiler * c, unsigned int op_code, int label )
+{
+ struct label_info * const l = &dynamic_array_at( struct label_info,
+ c->labels, label );
+ int const pos = c->code->size;
+ instruction instr;
+ instr.op_code = op_code;
+ if ( l->absolute_position == -1 )
+ {
+ instr.arg = 0;
+ dynamic_array_push( l->uses, pos );
+ }
+ else
+ instr.arg = (int)( l->absolute_position - pos - 1 );
+ compile_emit_instruction( c, instr );
+}
+
+static int compile_emit_constant( compiler * c, OBJECT * value )
+{
+ OBJECT * copy = object_copy( value );
+ dynamic_array_push( c->constants, copy );
+ return c->constants->size - 1;
+}
+
+static int compile_emit_rule( compiler * c, OBJECT * name, PARSE * parse,
+ int num_arguments, struct arg_list * arguments, int local )
+{
+ struct stored_rule rule;
+ rule.name = object_copy( name );
+ rule.parse = parse;
+ rule.num_arguments = num_arguments;
+ rule.arguments = arguments;
+ rule.local = local;
+ dynamic_array_push( c->rules, rule );
+ return (int)( c->rules->size - 1 );
+}
+
+static int compile_emit_actions( compiler * c, PARSE * parse )
+{
+ SUBACTION a;
+ a.name = object_copy( parse->string );
+ a.command = function_compile_actions( object_str( parse->string1 ),
+ parse->file, parse->line );
+ a.flags = parse->num;
+ dynamic_array_push( c->actions, a );
+ return (int)( c->actions->size - 1 );
+}
+
+static JAM_FUNCTION * compile_to_function( compiler * c )
+{
+ JAM_FUNCTION * const result = BJAM_MALLOC( sizeof( JAM_FUNCTION ) );
+ int i;
+ result->base.type = FUNCTION_JAM;
+ result->base.reference_count = 1;
+ result->base.formal_arguments = 0;
+ result->base.num_formal_arguments = 0;
+
+ result->base.rulename = 0;
+
+ result->code_size = c->code->size;
+ result->code = BJAM_MALLOC( c->code->size * sizeof( instruction ) );
+ memcpy( result->code, c->code->data, c->code->size * sizeof( instruction ) );
+
+ result->constants = BJAM_MALLOC( c->constants->size * sizeof( OBJECT * ) );
+ memcpy( result->constants, c->constants->data, c->constants->size * sizeof(
+ OBJECT * ) );
+ result->num_constants = c->constants->size;
+
+ result->num_subfunctions = c->rules->size;
+ result->functions = BJAM_MALLOC( c->rules->size * sizeof( SUBFUNCTION ) );
+ for ( i = 0; i < c->rules->size; ++i )
+ {
+ struct stored_rule * const rule = &dynamic_array_at( struct stored_rule,
+ c->rules, i );
+ result->functions[ i ].name = rule->name;
+ result->functions[ i ].code = function_compile( rule->parse );
+ result->functions[ i ].code->num_formal_arguments = rule->num_arguments;
+ result->functions[ i ].code->formal_arguments = rule->arguments;
+ result->functions[ i ].local = rule->local;
+ }
+
+ result->actions = BJAM_MALLOC( c->actions->size * sizeof( SUBACTION ) );
+ memcpy( result->actions, c->actions->data, c->actions->size * sizeof(
+ SUBACTION ) );
+ result->num_subactions = c->actions->size;
+
+ result->generic = 0;
+
+ result->file = 0;
+ result->line = -1;
+
+ return result;
+}
+
+
+/*
+ * Parsing of variable expansions
+ */
+
+typedef struct VAR_PARSE_GROUP
+{
+ struct dynamic_array elems[ 1 ];
+} VAR_PARSE_GROUP;
+
+typedef struct VAR_PARSE_ACTIONS
+{
+ struct dynamic_array elems[ 1 ];
+} VAR_PARSE_ACTIONS;
+
+#define VAR_PARSE_TYPE_VAR 0
+#define VAR_PARSE_TYPE_STRING 1
+#define VAR_PARSE_TYPE_FILE 2
+
+typedef struct _var_parse
+{
+ int type; /* string, variable or file */
+} VAR_PARSE;
+
+typedef struct
+{
+ VAR_PARSE base;
+ VAR_PARSE_GROUP * name;
+ VAR_PARSE_GROUP * subscript;
+ struct dynamic_array modifiers[ 1 ];
+} VAR_PARSE_VAR;
+
+typedef struct
+{
+ VAR_PARSE base;
+ OBJECT * s;
+} VAR_PARSE_STRING;
+
+typedef struct
+{
+ VAR_PARSE base;
+ struct dynamic_array filename[ 1 ];
+ struct dynamic_array contents[ 1 ];
+} VAR_PARSE_FILE;
+
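+/* A VAR_PARSE_GROUP represents a concatenation of parsed nodes: literal text
+ * (VAR_PARSE_TYPE_STRING), "$(...)" variable expansions (VAR_PARSE_TYPE_VAR,
+ * holding a name group plus optional subscript and modifier groups) and
+ * "@(...)" response file sections (VAR_PARSE_TYPE_FILE, holding filename and
+ * contents groups).
+ */
+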
+static void var_parse_free( VAR_PARSE * );
+
+
+/*
+ * VAR_PARSE_GROUP
+ */
+
+static VAR_PARSE_GROUP * var_parse_group_new()
+{
+ VAR_PARSE_GROUP * const result = BJAM_MALLOC( sizeof( VAR_PARSE_GROUP ) );
+ dynamic_array_init( result->elems );
+ return result;
+}
+
+static void var_parse_group_free( VAR_PARSE_GROUP * group )
+{
+ int i;
+ for ( i = 0; i < group->elems->size; ++i )
+ var_parse_free( dynamic_array_at( VAR_PARSE *, group->elems, i ) );
+ dynamic_array_free( group->elems );
+ BJAM_FREE( group );
+}
+
+static void var_parse_group_add( VAR_PARSE_GROUP * group, VAR_PARSE * elem )
+{
+ dynamic_array_push( group->elems, elem );
+}
+
+static void var_parse_group_maybe_add_constant( VAR_PARSE_GROUP * group,
+ char const * start, char const * end )
+{
+ if ( start != end )
+ {
+ string buf[ 1 ];
+ VAR_PARSE_STRING * const value = (VAR_PARSE_STRING *)BJAM_MALLOC(
+ sizeof(VAR_PARSE_STRING) );
+ value->base.type = VAR_PARSE_TYPE_STRING;
+ string_new( buf );
+ string_append_range( buf, start, end );
+ value->s = object_new( buf->value );
+ string_free( buf );
+ var_parse_group_add( group, (VAR_PARSE *)value );
+ }
+}
+
+VAR_PARSE_STRING * var_parse_group_as_literal( VAR_PARSE_GROUP * group )
+{
+ if ( group->elems->size == 1 )
+ {
+ VAR_PARSE * result = dynamic_array_at( VAR_PARSE *, group->elems, 0 );
+ if ( result->type == VAR_PARSE_TYPE_STRING )
+ return (VAR_PARSE_STRING *)result;
+ }
+ return 0;
+}
+
+
+/*
+ * VAR_PARSE_ACTIONS
+ */
+
+static VAR_PARSE_ACTIONS * var_parse_actions_new()
+{
+ VAR_PARSE_ACTIONS * const result = (VAR_PARSE_ACTIONS *)BJAM_MALLOC(
+ sizeof(VAR_PARSE_ACTIONS) );
+ dynamic_array_init( result->elems );
+ return result;
+}
+
+static void var_parse_actions_free( VAR_PARSE_ACTIONS * actions )
+{
+ int i;
+ for ( i = 0; i < actions->elems->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ actions->elems, i ) );
+ dynamic_array_free( actions->elems );
+ BJAM_FREE( actions );
+}
+
+
+/*
+ * VAR_PARSE_VAR
+ */
+
+static VAR_PARSE_VAR * var_parse_var_new()
+{
+ VAR_PARSE_VAR * result = BJAM_MALLOC( sizeof( VAR_PARSE_VAR ) );
+ result->base.type = VAR_PARSE_TYPE_VAR;
+ result->name = var_parse_group_new();
+ result->subscript = 0;
+ dynamic_array_init( result->modifiers );
+ return result;
+}
+
+static void var_parse_var_free( VAR_PARSE_VAR * var )
+{
+ int i;
+ var_parse_group_free( var->name );
+ if ( var->subscript )
+ var_parse_group_free( var->subscript );
+ for ( i = 0; i < var->modifiers->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ var->modifiers, i ) );
+ dynamic_array_free( var->modifiers );
+ BJAM_FREE( var );
+}
+
+static VAR_PARSE_GROUP * var_parse_var_new_modifier( VAR_PARSE_VAR * var )
+{
+ VAR_PARSE_GROUP * result = var_parse_group_new();
+ dynamic_array_push( var->modifiers, result );
+ return result;
+}
+
+
+/*
+ * VAR_PARSE_STRING
+ */
+
+static void var_parse_string_free( VAR_PARSE_STRING * string )
+{
+ object_free( string->s );
+ BJAM_FREE( string );
+}
+
+
+/*
+ * VAR_PARSE_FILE
+ */
+
+static VAR_PARSE_FILE * var_parse_file_new( void )
+{
+ VAR_PARSE_FILE * const result = (VAR_PARSE_FILE *)BJAM_MALLOC( sizeof(
+ VAR_PARSE_FILE ) );
+ result->base.type = VAR_PARSE_TYPE_FILE;
+ dynamic_array_init( result->filename );
+ dynamic_array_init( result->contents );
+ return result;
+}
+
+static void var_parse_file_free( VAR_PARSE_FILE * file )
+{
+ int i;
+ for ( i = 0; i < file->filename->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ file->filename, i ) );
+ dynamic_array_free( file->filename );
+ for ( i = 0; i < file->contents->size; ++i )
+ var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *,
+ file->contents, i ) );
+ dynamic_array_free( file->contents );
+ BJAM_FREE( file );
+}
+
+
+/*
+ * VAR_PARSE
+ */
+
+static void var_parse_free( VAR_PARSE * parse )
+{
+ switch ( parse->type )
+ {
+ case VAR_PARSE_TYPE_VAR:
+ var_parse_var_free( (VAR_PARSE_VAR *)parse );
+ break;
+
+ case VAR_PARSE_TYPE_STRING:
+ var_parse_string_free( (VAR_PARSE_STRING *)parse );
+ break;
+
+ case VAR_PARSE_TYPE_FILE:
+ var_parse_file_free( (VAR_PARSE_FILE *)parse );
+ break;
+
+ default:
+ assert( !"Invalid type" );
+ }
+}
+
+
+/*
+ * Compile VAR_PARSE
+ */
+
+static void var_parse_group_compile( VAR_PARSE_GROUP const * parse,
+ compiler * c );
+
+static void var_parse_var_compile( VAR_PARSE_VAR const * parse, compiler * c )
+{
+ int expand_name = 0;
+ int is_get_grist = 0;
+ int has_modifiers = 0;
+ /* Special case common modifiers */
+ if ( parse->modifiers->size == 1 )
+ {
+ VAR_PARSE_GROUP * mod = dynamic_array_at( VAR_PARSE_GROUP *, parse->modifiers, 0 );
+ if ( mod->elems->size == 1 )
+ {
+ VAR_PARSE * mod1 = dynamic_array_at( VAR_PARSE *, mod->elems, 0 );
+ if ( mod1->type == VAR_PARSE_TYPE_STRING )
+ {
+ OBJECT * s = ( (VAR_PARSE_STRING *)mod1 )->s;
+ if ( ! strcmp ( object_str( s ), "G" ) )
+ {
+ is_get_grist = 1;
+ }
+ }
+ }
+ }
+ /* If there are modifiers, emit them in reverse order. */
+ if ( parse->modifiers->size > 0 && !is_get_grist )
+ {
+ int i;
+ has_modifiers = 1;
+ for ( i = 0; i < parse->modifiers->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ parse->modifiers, parse->modifiers->size - i - 1 ), c );
+ }
+
+ /* If there is a subscript, emit it. */
+ if ( parse->subscript )
+ var_parse_group_compile( parse->subscript, c );
+
+ /* If the variable name is empty, look it up. */
+ if ( parse->name->elems->size == 0 )
+ compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c,
+ constant_empty ) );
+ /* If the variable name does not need to be expanded, look it up. */
+ else if ( parse->name->elems->size == 1 && dynamic_array_at( VAR_PARSE *,
+ parse->name->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
+ {
+ OBJECT * const name = ( (VAR_PARSE_STRING *)dynamic_array_at(
+ VAR_PARSE *, parse->name->elems, 0 ) )->s;
+ int const idx = get_argument_index( object_str( name ) );
+ if ( idx != -1 )
+ compile_emit( c, INSTR_PUSH_ARG, idx );
+ else
+ compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c, name ) );
+ }
+ /* Otherwise, push the var names and use the group instruction. */
+ else
+ {
+ var_parse_group_compile( parse->name, c );
+ expand_name = 1;
+ }
+
+ /** Select the instruction for expanding the variable. */
+ if ( !has_modifiers && !parse->subscript && !expand_name )
+ ;
+ else if ( !has_modifiers && !parse->subscript && expand_name )
+ compile_emit( c, INSTR_PUSH_GROUP, 0 );
+ else if ( !has_modifiers && parse->subscript && !expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX, 0 );
+ else if ( !has_modifiers && parse->subscript && expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX_GROUP, 0 );
+ else if ( has_modifiers && !parse->subscript && !expand_name )
+ compile_emit( c, INSTR_APPLY_MODIFIERS, parse->modifiers->size );
+ else if ( has_modifiers && !parse->subscript && expand_name )
+ compile_emit( c, INSTR_APPLY_MODIFIERS_GROUP, parse->modifiers->size );
+ else if ( has_modifiers && parse->subscript && !expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS, parse->modifiers->size );
+ else if ( has_modifiers && parse->subscript && expand_name )
+ compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS_GROUP,
+ parse->modifiers->size );
+
+ /* Now apply any special modifiers */
+ if ( is_get_grist )
+ {
+ compile_emit( c, INSTR_GET_GRIST, 0 );
+ }
+}
+
+static void var_parse_string_compile( VAR_PARSE_STRING const * parse,
+ compiler * c )
+{
+ compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, parse->s )
+ );
+}
+
+static void var_parse_file_compile( VAR_PARSE_FILE const * parse, compiler * c )
+{
+ int i;
+ for ( i = 0; i < parse->filename->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ parse->filename, parse->filename->size - i - 1 ), c );
+ compile_emit( c, INSTR_APPEND_STRINGS, parse->filename->size );
+ for ( i = 0; i < parse->contents->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ parse->contents, parse->contents->size - i - 1 ), c );
+ compile_emit( c, INSTR_WRITE_FILE, parse->contents->size );
+}
+
+static void var_parse_compile( VAR_PARSE const * parse, compiler * c )
+{
+ switch ( parse->type )
+ {
+ case VAR_PARSE_TYPE_VAR:
+ var_parse_var_compile( (VAR_PARSE_VAR const *)parse, c );
+ break;
+
+ case VAR_PARSE_TYPE_STRING:
+ var_parse_string_compile( (VAR_PARSE_STRING const *)parse, c );
+ break;
+
+ case VAR_PARSE_TYPE_FILE:
+ var_parse_file_compile( (VAR_PARSE_FILE const *)parse, c );
+ break;
+
+ default:
+ assert( !"Unknown var parse type." );
+ }
+}
+
+static void var_parse_group_compile( VAR_PARSE_GROUP const * parse, compiler * c
+ )
+{
+ /* Emit the elements in reverse order. */
+ int i;
+ for ( i = 0; i < parse->elems->size; ++i )
+ var_parse_compile( dynamic_array_at( VAR_PARSE *, parse->elems,
+ parse->elems->size - i - 1 ), c );
+ /* If there are no elements, emit an empty string. */
+ if ( parse->elems->size == 0 )
+ compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c,
+ constant_empty ) );
+ /* If there is more than one element, combine them. */
+ if ( parse->elems->size > 1 )
+ compile_emit( c, INSTR_COMBINE_STRINGS, parse->elems->size );
+}
+
+static void var_parse_actions_compile( VAR_PARSE_ACTIONS const * actions,
+ compiler * c )
+{
+ int i;
+ for ( i = 0; i < actions->elems->size; ++i )
+ var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *,
+ actions->elems, actions->elems->size - i - 1 ), c );
+ compile_emit( c, INSTR_OUTPUT_STRINGS, actions->elems->size );
+}
+
+
+/*
+ * Parse VAR_PARSE_VAR
+ */
+
+static VAR_PARSE * parse_at_file( char const * start, char const * mid,
+ char const * end );
+static VAR_PARSE * parse_variable( char const * * string );
+static int try_parse_variable( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out );
+static void balance_parentheses( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out );
+static void parse_var_string( char const * first, char const * last,
+ struct dynamic_array * out );
+
+
+/*
+ * Parses a string that can contain variables to expand.
+ */
+
+static VAR_PARSE_GROUP * parse_expansion( char const * * string )
+{
+ VAR_PARSE_GROUP * result = var_parse_group_new();
+ char const * s = *string;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, result ) ) {}
+ else if ( s[ 0 ] == '\0' )
+ {
+ var_parse_group_maybe_add_constant( result, *string, s );
+ return result;
+ }
+ else
+ ++s;
+ }
+}
+
+static VAR_PARSE_ACTIONS * parse_actions( char const * string )
+{
+ VAR_PARSE_ACTIONS * const result = var_parse_actions_new();
+ parse_var_string( string, string + strlen( string ), result->elems );
+ return result;
+}
+
+/*
+ * Checks whether the string at *s_ starts with a variable expansion "$(" or a
+ * response file section "@(". *string should point to the first unemitted
+ * character before *s_. If it does, appends elements to out up to the closing
+ * ")", and adjusts *s_ and *string to point to the next character. Returns 1
+ * if *s_ starts with such an expansion, 0 otherwise.
+ */
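+/* For example, "$(OBJECTS:S=.o)" produces a VAR_PARSE_VAR node, while
+ * "@(out.rsp:E=--foo)" (hypothetical names) produces a VAR_PARSE_FILE node
+ * whose filename part is "out.rsp" and whose contents part is "--foo".
+ */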
+
+static int try_parse_variable( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out )
+{
+ char const * s = *s_;
+ if ( s[ 0 ] == '$' && s[ 1 ] == '(' )
+ {
+ var_parse_group_maybe_add_constant( out, *string, s );
+ s += 2;
+ var_parse_group_add( out, parse_variable( &s ) );
+ *string = s;
+ *s_ = s;
+ return 1;
+ }
+ if ( s[ 0 ] == '@' && s[ 1 ] == '(' )
+ {
+ int depth = 1;
+ char const * ine;
+ char const * split = 0;
+ var_parse_group_maybe_add_constant( out, *string, s );
+ s += 2;
+ ine = s;
+
+ /* Scan the content of the response file @() section. */
+ while ( *ine && ( depth > 0 ) )
+ {
+ switch ( *ine )
+ {
+ case '(': ++depth; break;
+ case ')': --depth; break;
+ case ':':
+ if ( ( depth == 1 ) && ( ine[ 1 ] == 'E' ) && ( ine[ 2 ] == '='
+ ) )
+ split = ine;
+ break;
+ }
+ ++ine;
+ }
+
+ if ( !split || depth )
+ return 0;
+
+ var_parse_group_add( out, parse_at_file( s, split, ine - 1 ) );
+ *string = ine;
+ *s_ = ine;
+ return 1;
+ }
+ return 0;
+}
+
+
+static char const * current_file = "";
+static int current_line;
+
+static void parse_error( char const * message )
+{
+ printf( "%s:%d: %s\n", current_file, current_line, message );
+}
+
+
+/*
+ * Parses a single variable up to the closing ")" and adjusts *string to point
+ * to the next character. *string should point to the character immediately
+ * after the initial "$(".
+ */
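+
+/*
+ * E.g. for "$(X[1]:U)" this is entered just after the "$(" and produces a
+ * VAR_PARSE_VAR whose name group holds "X", whose subscript group holds "1"
+ * and which carries a single modifier group holding "U".
+ */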
+
+static VAR_PARSE * parse_variable( char const * * string )
+{
+ VAR_PARSE_VAR * const result = var_parse_var_new();
+ VAR_PARSE_GROUP * const name = result->name;
+ char const * s = *string;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, name ) ) {}
+ else if ( s[ 0 ] == ':' )
+ {
+ VAR_PARSE_GROUP * mod;
+ var_parse_group_maybe_add_constant( name, *string, s );
+ ++s;
+ *string = s;
+ mod = var_parse_var_new_modifier( result );
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, mod ) ) {}
+ else if ( s[ 0 ] == ')' )
+ {
+ var_parse_group_maybe_add_constant( mod, *string, s );
+ *string = ++s;
+ return (VAR_PARSE *)result;
+ }
+ else if ( s[ 0 ] == '(' )
+ {
+ ++s;
+ balance_parentheses( &s, string, mod );
+ }
+ else if ( s[ 0 ] == ':' )
+ {
+ var_parse_group_maybe_add_constant( mod, *string, s );
+ *string = ++s;
+ mod = var_parse_var_new_modifier( result );
+ }
+ else if ( s[ 0 ] == '[' )
+ {
+ parse_error("unexpected subscript");
+ ++s;
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "unbalanced parentheses" );
+ var_parse_group_maybe_add_constant( mod, *string, s );
+ *string = s;
+ return (VAR_PARSE *)result;
+ }
+ else
+ ++s;
+ }
+ }
+ else if ( s[ 0 ] == '[' )
+ {
+ VAR_PARSE_GROUP * subscript = var_parse_group_new();
+ result->subscript = subscript;
+ var_parse_group_maybe_add_constant( name, *string, s );
+ *string = ++s;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, subscript ) ) {}
+ else if ( s[ 0 ] == ']' )
+ {
+ var_parse_group_maybe_add_constant( subscript, *string, s );
+ *string = ++s;
+ if ( s[ 0 ] != ')' && s[ 0 ] != ':' && s[ 0 ] != '\0' )
+ parse_error( "unexpected text following []" );
+ break;
+ }
+ else if ( isdigit( s[ 0 ] ) || s[ 0 ] == '-' )
+ {
+ ++s;
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "malformed subscript" );
+ break;
+ }
+ else
+ {
+ parse_error( "malformed subscript" );
+ ++s;
+ }
+ }
+ }
+ else if ( s[ 0 ] == ')' )
+ {
+ var_parse_group_maybe_add_constant( name, *string, s );
+ *string = ++s;
+ return (VAR_PARSE *)result;
+ }
+ else if ( s[ 0 ] == '(' )
+ {
+ ++s;
+ balance_parentheses( &s, string, name );
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "unbalanced parentheses" );
+ var_parse_group_maybe_add_constant( name, *string, s );
+ *string = s;
+ return (VAR_PARSE *)result;
+ }
+ else
+ ++s;
+ }
+}
+
+static void parse_var_string( char const * first, char const * last,
+ struct dynamic_array * out )
+{
+ char const * saved = first;
+ while ( first != last )
+ {
+ /* Handle whitespace. */
+ while ( first != last && isspace( *first ) ) ++first;
+ if ( saved != first )
+ {
+ VAR_PARSE_GROUP * const group = var_parse_group_new();
+ var_parse_group_maybe_add_constant( group, saved, first );
+ saved = first;
+ dynamic_array_push( out, group );
+ }
+ if ( first == last ) break;
+
+ /* Handle non-whitespace */
+ {
+ VAR_PARSE_GROUP * group = var_parse_group_new();
+ for ( ; ; )
+ {
+ if ( first == last || isspace( *first ) )
+ {
+ var_parse_group_maybe_add_constant( group, saved, first );
+ saved = first;
+ break;
+ }
+ if ( try_parse_variable( &first, &saved, group ) )
+ assert( first <= last );
+ else
+ ++first;
+ }
+ dynamic_array_push( out, group );
+ }
+ }
+}
+
+/*
+ * start should point to the character immediately following the opening "@(",
+ * mid should point to the ":E=", and end should point to the closing ")".
+ */
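+
+/*
+ * E.g. for "@(out.rsp:E=hello)" start points at "out.rsp...", mid at ":E="
+ * and end at the closing ")", giving a filename parsed from "out.rsp" and
+ * contents parsed from "hello".
+ */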
+
+static VAR_PARSE * parse_at_file( char const * start, char const * mid,
+ char const * end )
+{
+ VAR_PARSE_FILE * result = var_parse_file_new();
+ parse_var_string( start, mid, result->filename );
+ parse_var_string( mid + 3, end, result->contents );
+ return (VAR_PARSE *)result;
+}
+
+/*
+ * Given that *s_ points to the character after a "(", parses up to the matching
+ * ")". *string should point to the first unemitted character before *s_.
+ *
+ * When the function returns, *s_ will point to the character after the ")", and
+ * *string will point to the first unemitted character before *s_. The range
+ * from *string to *s_ does not contain any variables that need to be expanded.
+ */
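+
+/*
+ * E.g. while parsing "$(X:J=(a b))" the "(" after "J=" lands here; the
+ * matching ")" is skipped (recursing into any nested "$(" expansions), so
+ * the inner text stays part of the modifier instead of terminating the
+ * expansion.
+ */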
+
+static void balance_parentheses( char const * * s_, char const * * string,
+ VAR_PARSE_GROUP * out )
+{
+ int depth = 1;
+ char const * s = *s_;
+ for ( ; ; )
+ {
+ if ( try_parse_variable( &s, string, out ) ) { }
+ else if ( s[ 0 ] == ':' || s[ 0 ] == '[' )
+ {
+ parse_error( "unbalanced parentheses" );
+ ++s;
+ }
+ else if ( s[ 0 ] == '\0' )
+ {
+ parse_error( "unbalanced parentheses" );
+ break;
+ }
+ else if ( s[ 0 ] == ')' )
+ {
+ ++s;
+ if ( --depth == 0 ) break;
+ }
+ else if ( s[ 0 ] == '(' )
+ {
+ ++depth;
+ ++s;
+ }
+ else
+ ++s;
+ }
+ *s_ = s;
+}
+
+
+/*
+ * Main compile.
+ */
+
+#define RESULT_STACK 0
+#define RESULT_RETURN 1
+#define RESULT_NONE 2
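+
+/*
+ * These name the locations where compile_parse() may be asked to leave the
+ * value it produces: pushed on the expression stack, stored in the result
+ * register (INSTR_SET_RESULT), or discarded. adjust_result() below converts
+ * from the location a construct naturally produces to the one requested.
+ */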
+
+static void compile_parse( PARSE * parse, compiler * c, int result_location );
+static struct arg_list * arg_list_compile( PARSE * parse, int * num_arguments );
+
+static void compile_condition( PARSE * parse, compiler * c, int branch_true, int label )
+{
+ assert( parse->type == PARSE_EVAL );
+ switch ( parse->num )
+ {
+ case EXPR_EXISTS:
+ compile_parse( parse->left, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_NOT_EMPTY, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_EMPTY, label );
+ break;
+
+ case EXPR_EQUALS:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_EQ, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_NE, label );
+ break;
+
+ case EXPR_NOTEQ:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_NE, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_EQ, label );
+ break;
+
+ case EXPR_LESS:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_LT, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_GE, label );
+ break;
+
+ case EXPR_LESSEQ:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_LE, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_GT, label );
+ break;
+
+ case EXPR_MORE:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_GT, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_LE, label );
+ break;
+
+ case EXPR_MOREEQ:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_GE, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_LT, label );
+ break;
+
+ case EXPR_IN:
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( branch_true )
+ compile_emit_branch( c, INSTR_JUMP_IN, label );
+ else
+ compile_emit_branch( c, INSTR_JUMP_NOT_IN, label );
+ break;
+
+ case EXPR_AND:
+ if ( branch_true )
+ {
+ int f = compile_new_label( c );
+ compile_condition( parse->left, c, 0, f );
+ compile_condition( parse->right, c, 1, label );
+ compile_set_label( c, f );
+ }
+ else
+ {
+ compile_condition( parse->left, c, 0, label );
+ compile_condition( parse->right, c, 0, label );
+ }
+ break;
+
+ case EXPR_OR:
+ if ( branch_true )
+ {
+ compile_condition( parse->left, c, 1, label );
+ compile_condition( parse->right, c, 1, label );
+ }
+ else
+ {
+ int t = compile_new_label( c );
+ compile_condition( parse->left, c, 1, t );
+ compile_condition( parse->right, c, 0, label );
+ compile_set_label( c, t );
+ }
+ break;
+
+ case EXPR_NOT:
+ compile_condition( parse->left, c, !branch_true, label );
+ break;
+ }
+}
+
+static void adjust_result( compiler * c, int actual_location,
+ int desired_location )
+{
+ if ( actual_location == desired_location )
+ ;
+ else if ( actual_location == RESULT_STACK && desired_location == RESULT_RETURN )
+ compile_emit( c, INSTR_SET_RESULT, 0 );
+ else if ( actual_location == RESULT_STACK && desired_location == RESULT_NONE )
+ compile_emit( c, INSTR_POP, 0 );
+ else if ( actual_location == RESULT_RETURN && desired_location == RESULT_STACK )
+ compile_emit( c, INSTR_PUSH_RESULT, 0 );
+ else if ( actual_location == RESULT_RETURN && desired_location == RESULT_NONE )
+ ;
+ else if ( actual_location == RESULT_NONE && desired_location == RESULT_STACK )
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ else if ( actual_location == RESULT_NONE && desired_location == RESULT_RETURN )
+ {
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_emit( c, INSTR_SET_RESULT, 0 );
+ }
+ else
+ assert( !"invalid result location" );
+}
+
+static char const * parse_type( PARSE * parse )
+{
+ switch ( parse->type )
+ {
+ case PARSE_APPEND: return "append";
+ case PARSE_EVAL: return "eval";
+ case PARSE_RULES: return "rules";
+ default: return "unknown";
+ }
+}
+
+static void compile_append_chain( PARSE * parse, compiler * c )
+{
+ assert( parse->type == PARSE_APPEND );
+ if ( parse->left->type == PARSE_NULL )
+ compile_parse( parse->right, c, RESULT_STACK );
+ else
+ {
+ if ( parse->left->type == PARSE_APPEND )
+ compile_append_chain( parse->left, c );
+ else
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_APPEND, 0 );
+ }
+}
+
+static void compile_parse( PARSE * parse, compiler * c, int result_location )
+{
+ if ( parse->type == PARSE_APPEND )
+ {
+ compile_append_chain( parse, c );
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_EVAL )
+ {
+ /* FIXME: This is only needed because of the bizarre parsing of
+ * conditions.
+ */
+ if ( parse->num == EXPR_EXISTS )
+ compile_parse( parse->left, c, result_location );
+ else
+ {
+ int f = compile_new_label( c );
+ int end = compile_new_label( c );
+
+ printf( "%s:%d: Conditional used as list (check operator "
+ "precedence).\n", object_str( parse->file ), parse->line );
+
+ /* Emit the condition */
+ compile_condition( parse, c, 0, f );
+ compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c,
+ constant_true ) );
+ compile_emit_branch( c, INSTR_JUMP, end );
+ compile_set_label( c, f );
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_set_label( c, end );
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ }
+ else if ( parse->type == PARSE_FOREACH )
+ {
+ int var = compile_emit_constant( c, parse->string );
+ int top = compile_new_label( c );
+ int end = compile_new_label( c );
+
+ /*
+ * Evaluate the list.
+ */
+ compile_parse( parse->left, c, RESULT_STACK );
+
+ /* Localize the loop variable */
+ if ( parse->num )
+ {
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_emit( c, INSTR_PUSH_LOCAL, var );
+ compile_emit( c, INSTR_SWAP, 1 );
+ }
+
+ compile_emit( c, INSTR_FOR_INIT, 0 );
+ compile_set_label( c, top );
+ compile_emit_branch( c, INSTR_FOR_LOOP, end );
+ compile_emit( c, INSTR_SET, var );
+
+ /* Run the loop body */
+ compile_parse( parse->right, c, RESULT_NONE );
+
+ compile_emit_branch( c, INSTR_JUMP, top );
+ compile_set_label( c, end );
+
+ if ( parse->num )
+ compile_emit( c, INSTR_POP_LOCAL, var );
+
+ adjust_result( c, RESULT_NONE, result_location);
+ }
+ else if ( parse->type == PARSE_IF )
+ {
+ int f = compile_new_label( c );
+ /* Emit the condition */
+ compile_condition( parse->left, c, 0, f );
+ /* Emit the if block */
+ compile_parse( parse->right, c, result_location );
+ if ( parse->third->type != PARSE_NULL || result_location != RESULT_NONE )
+ {
+ /* Emit the else block */
+ int end = compile_new_label( c );
+ compile_emit_branch( c, INSTR_JUMP, end );
+ compile_set_label( c, f );
+ compile_parse( parse->third, c, result_location );
+ compile_set_label( c, end );
+ }
+ else
+ compile_set_label( c, f );
+
+ }
+ else if ( parse->type == PARSE_WHILE )
+ {
+ int nested_result = result_location == RESULT_NONE
+ ? RESULT_NONE
+ : RESULT_RETURN;
+ int test = compile_new_label( c );
+ int top = compile_new_label( c );
+ /* Make sure that we return an empty list if the loop runs zero times.
+ */
+ adjust_result( c, RESULT_NONE, nested_result );
+ /* Jump to the loop test. */
+ compile_emit_branch( c, INSTR_JUMP, test );
+ compile_set_label( c, top );
+ /* Emit the loop body. */
+ compile_parse( parse->right, c, nested_result );
+ /* Emit the condition. */
+ compile_set_label( c, test );
+ compile_condition( parse->left, c, 1, top );
+
+ adjust_result( c, nested_result, result_location );
+ }
+ else if ( parse->type == PARSE_INCLUDE )
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_INCLUDE, 0 );
+ compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 );
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_MODULE )
+ {
+ int const nested_result = result_location == RESULT_NONE
+ ? RESULT_NONE
+ : RESULT_RETURN;
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_MODULE, 0 );
+ compile_parse( parse->right, c, nested_result );
+ compile_emit( c, INSTR_POP_MODULE, 0 );
+ adjust_result( c, nested_result, result_location );
+ }
+ else if ( parse->type == PARSE_CLASS )
+ {
+ /* Evaluate the class name. */
+ compile_parse( parse->left->right, c, RESULT_STACK );
+ /* Evaluate the base classes. */
+ if ( parse->left->left )
+ compile_parse( parse->left->left->right, c, RESULT_STACK );
+ else
+ compile_emit( c, INSTR_PUSH_EMPTY, 0 );
+ compile_emit( c, INSTR_CLASS, 0 );
+ compile_parse( parse->right, c, RESULT_NONE );
+ compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 );
+ compile_emit( c, INSTR_POP_MODULE, 0 );
+
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_LIST )
+ {
+ OBJECT * const o = parse->string;
+ char const * s = object_str( o );
+ VAR_PARSE_GROUP * group;
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_LOCAL )
+ {
+ int nested_result = result_location == RESULT_NONE
+ ? RESULT_NONE
+ : RESULT_RETURN;
+ /* This should be a left-recursive chain of PARSE_APPEND nodes. */
+ PARSE * vars = parse->left;
+
+ /* Special case an empty list of vars */
+ if ( vars->type == PARSE_NULL )
+ {
+ compile_parse( parse->right, c, RESULT_NONE );
+ compile_parse( parse->third, c, result_location );
+ nested_result = result_location;
+ }
+ /* Check whether there is exactly one variable with a constant name. */
+ else if ( vars->left->type == PARSE_NULL &&
+ vars->right->type == PARSE_LIST )
+ {
+ char const * s = object_str( vars->right->string );
+ VAR_PARSE_GROUP * group;
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+ if ( group->elems->size == 1 && dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
+ {
+ int const name = compile_emit_constant( c, (
+ (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 ) )->s );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_LOCAL, name );
+ compile_parse( parse->third, c, nested_result );
+ compile_emit( c, INSTR_POP_LOCAL, name );
+ }
+ else
+ {
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
+ compile_parse( parse->third, c, nested_result );
+ compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 );
+ }
+ }
+ else
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
+ compile_parse( parse->third, c, nested_result );
+ compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 );
+ }
+ adjust_result( c, nested_result, result_location );
+ }
+ else if ( parse->type == PARSE_ON )
+ {
+ if ( parse->right->type == PARSE_APPEND &&
+ parse->right->left->type == PARSE_NULL &&
+ parse->right->right->type == PARSE_LIST )
+ {
+ /* [ on $(target) return $(variable) ] */
+ PARSE * value = parse->right->right;
+ OBJECT * const o = value->string;
+ char const * s = object_str( o );
+ VAR_PARSE_GROUP * group;
+ OBJECT * varname = 0;
+ current_file = object_str( value->file );
+ current_line = value->line;
+ group = parse_expansion( &s );
+ if ( group->elems->size == 1 )
+ {
+ VAR_PARSE * one = dynamic_array_at( VAR_PARSE *, group->elems, 0 );
+ if ( one->type == VAR_PARSE_TYPE_VAR )
+ {
+ VAR_PARSE_VAR * var = ( VAR_PARSE_VAR * )one;
+ if ( var->modifiers->size == 0 && !var->subscript && var->name->elems->size == 1 )
+ {
+ VAR_PARSE * name = dynamic_array_at( VAR_PARSE *, var->name->elems, 0 );
+ if ( name->type == VAR_PARSE_TYPE_STRING )
+ {
+ varname = ( ( VAR_PARSE_STRING * )name )->s;
+ }
+ }
+ }
+ }
+ if ( varname )
+ {
+ /* We have one variable with a fixed name and no modifiers. */
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_GET_ON, compile_emit_constant( c, varname ) );
+ }
+ else
+ {
+ /* Too complex. Fall back on push/pop. */
+ int end = compile_new_label( c );
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit_branch( c, INSTR_PUSH_ON, end );
+ var_parse_group_compile( group, c );
+ compile_emit( c, INSTR_POP_ON, 0 );
+ compile_set_label( c, end );
+ }
+ var_parse_group_free( group );
+ }
+ else
+ {
+ int end = compile_new_label( c );
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit_branch( c, INSTR_PUSH_ON, end );
+ compile_parse( parse->right, c, RESULT_STACK );
+ compile_emit( c, INSTR_POP_ON, 0 );
+ compile_set_label( c, end );
+ }
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_RULE )
+ {
+ PARSE * p;
+ int n = 0;
+ VAR_PARSE_GROUP * group;
+ char const * s = object_str( parse->string );
+
+ if ( parse->left->left || parse->left->right->type != PARSE_NULL )
+ for ( p = parse->left; p; p = p->left )
+ {
+ compile_parse( p->right, c, RESULT_STACK );
+ ++n;
+ }
+
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+
+ if ( group->elems->size == 2 &&
+ dynamic_array_at( VAR_PARSE *, group->elems, 0 )->type == VAR_PARSE_TYPE_VAR &&
+ dynamic_array_at( VAR_PARSE *, group->elems, 1 )->type == VAR_PARSE_TYPE_STRING &&
+ ( object_str( ( (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 1 ) )->s )[ 0 ] == '.' ) )
+ {
+ VAR_PARSE_STRING * access = (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 1 );
+ OBJECT * member = object_new( object_str( access->s ) + 1 );
+ /* Emit the object */
+ var_parse_var_compile( (VAR_PARSE_VAR *)dynamic_array_at( VAR_PARSE *, group->elems, 0 ), c );
+ var_parse_group_free( group );
+ compile_emit( c, INSTR_CALL_MEMBER_RULE, n );
+ compile_emit( c, compile_emit_constant( c, member ), parse->line );
+ object_free( member );
+ }
+ else
+ {
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ compile_emit( c, INSTR_CALL_RULE, n );
+ compile_emit( c, compile_emit_constant( c, parse->string ), parse->line );
+ }
+
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_RULES )
+ {
+ do compile_parse( parse->left, c, RESULT_NONE );
+ while ( ( parse = parse->right )->type == PARSE_RULES );
+ compile_parse( parse, c, result_location );
+ }
+ else if ( parse->type == PARSE_SET )
+ {
+ PARSE * vars = parse->left;
+ unsigned int op_code;
+ unsigned int op_code_group;
+
+ switch ( parse->num )
+ {
+ case ASSIGN_APPEND: op_code = INSTR_APPEND; op_code_group = INSTR_APPEND_GROUP; break;
+ case ASSIGN_DEFAULT: op_code = INSTR_DEFAULT; op_code_group = INSTR_DEFAULT_GROUP; break;
+ default: op_code = INSTR_SET; op_code_group = INSTR_SET_GROUP; break;
+ }
+
+ /* Check whether there is exactly one variable with a constant name. */
+ if ( vars->type == PARSE_LIST )
+ {
+ char const * s = object_str( vars->string );
+ VAR_PARSE_GROUP * group;
+ current_file = object_str( parse->file );
+ current_line = parse->line;
+ group = parse_expansion( &s );
+ if ( group->elems->size == 1 && dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
+ {
+ int const name = compile_emit_constant( c, (
+ (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *,
+ group->elems, 0 ) )->s );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( result_location != RESULT_NONE )
+ {
+ compile_emit( c, INSTR_SET_RESULT, 1 );
+ }
+ compile_emit( c, op_code, name );
+ }
+ else
+ {
+ var_parse_group_compile( group, c );
+ var_parse_group_free( group );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( result_location != RESULT_NONE )
+ {
+ compile_emit( c, INSTR_SET_RESULT, 1 );
+ }
+ compile_emit( c, op_code_group, 0 );
+ }
+ }
+ else
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+ if ( result_location != RESULT_NONE )
+ {
+ compile_emit( c, INSTR_SET_RESULT, 1 );
+ }
+ compile_emit( c, op_code_group, 0 );
+ }
+ if ( result_location != RESULT_NONE )
+ {
+ adjust_result( c, RESULT_RETURN, result_location );
+ }
+ }
+ else if ( parse->type == PARSE_SETCOMP )
+ {
+ int n_args;
+ struct arg_list * args = arg_list_compile( parse->right, &n_args );
+ int const rule_id = compile_emit_rule( c, parse->string, parse->left,
+ n_args, args, parse->num );
+ compile_emit( c, INSTR_RULE, rule_id );
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_SETEXEC )
+ {
+ int const actions_id = compile_emit_actions( c, parse );
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_emit( c, INSTR_ACTIONS, actions_id );
+ adjust_result( c, RESULT_NONE, result_location );
+ }
+ else if ( parse->type == PARSE_SETTINGS )
+ {
+ compile_parse( parse->left, c, RESULT_STACK );
+ compile_parse( parse->third, c, RESULT_STACK );
+ compile_parse( parse->right, c, RESULT_STACK );
+
+ switch ( parse->num )
+ {
+ case ASSIGN_APPEND: compile_emit( c, INSTR_APPEND_ON, 0 ); break;
+ case ASSIGN_DEFAULT: compile_emit( c, INSTR_DEFAULT_ON, 0 ); break;
+ default: compile_emit( c, INSTR_SET_ON, 0 ); break;
+ }
+
+ adjust_result( c, RESULT_STACK, result_location );
+ }
+ else if ( parse->type == PARSE_SWITCH )
+ {
+ int const switch_end = compile_new_label( c );
+ compile_parse( parse->left, c, RESULT_STACK );
+
+ for ( parse = parse->right; parse; parse = parse->right )
+ {
+ int const id = compile_emit_constant( c, parse->left->string );
+ int const next_case = compile_new_label( c );
+ compile_emit( c, INSTR_PUSH_CONSTANT, id );
+ compile_emit_branch( c, INSTR_JUMP_NOT_GLOB, next_case );
+ compile_parse( parse->left->left, c, result_location );
+ compile_emit_branch( c, INSTR_JUMP, switch_end );
+ compile_set_label( c, next_case );
+ }
+ compile_emit( c, INSTR_POP, 0 );
+ adjust_result( c, RESULT_NONE, result_location );
+ compile_set_label( c, switch_end );
+ }
+ else if ( parse->type == PARSE_NULL )
+ adjust_result( c, RESULT_NONE, result_location );
+ else
+ assert( !"unknown PARSE type." );
+}
+
+OBJECT * function_rulename( FUNCTION * function )
+{
+ return function->rulename;
+}
+
+void function_set_rulename( FUNCTION * function, OBJECT * rulename )
+{
+ function->rulename = rulename;
+}
+
+void function_location( FUNCTION * function_, OBJECT * * file, int * line )
+{
+ if ( function_->type == FUNCTION_BUILTIN )
+ {
+ *file = constant_builtin;
+ *line = -1;
+ }
+#ifdef HAVE_PYTHON
+ else if ( function_->type == FUNCTION_PYTHON )
+ {
+ *file = constant_builtin;
+ *line = -1;
+ }
+#endif
+ else
+ {
+ JAM_FUNCTION * function = (JAM_FUNCTION *)function_;
+ assert( function_->type == FUNCTION_JAM );
+ *file = function->file;
+ *line = function->line;
+ }
+}
+
+static struct arg_list * arg_list_compile_builtin( char const * * args,
+ int * num_arguments );
+
+FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int flags ),
+ int flags, char const * * args )
+{
+ BUILTIN_FUNCTION * result = BJAM_MALLOC( sizeof( BUILTIN_FUNCTION ) );
+ result->base.type = FUNCTION_BUILTIN;
+ result->base.reference_count = 1;
+ result->base.rulename = 0;
+ result->base.formal_arguments = arg_list_compile_builtin( args,
+ &result->base.num_formal_arguments );
+ result->func = func;
+ result->flags = flags;
+ return (FUNCTION *)result;
+}
+
+FUNCTION * function_compile( PARSE * parse )
+{
+ compiler c[ 1 ];
+ JAM_FUNCTION * result;
+ compiler_init( c );
+ compile_parse( parse, c, RESULT_RETURN );
+ compile_emit( c, INSTR_RETURN, 0 );
+ result = compile_to_function( c );
+ compiler_free( c );
+ result->file = object_copy( parse->file );
+ result->line = parse->line;
+ return (FUNCTION *)result;
+}
+
+FUNCTION * function_compile_actions( char const * actions, OBJECT * file,
+ int line )
+{
+ compiler c[ 1 ];
+ JAM_FUNCTION * result;
+ VAR_PARSE_ACTIONS * parse;
+ current_file = object_str( file );
+ current_line = line;
+ parse = parse_actions( actions );
+ compiler_init( c );
+ var_parse_actions_compile( parse, c );
+ var_parse_actions_free( parse );
+ compile_emit( c, INSTR_RETURN, 0 );
+ result = compile_to_function( c );
+ compiler_free( c );
+ result->file = object_copy( file );
+ result->line = line;
+ return (FUNCTION *)result;
+}
+
+static void argument_list_print( struct arg_list * args, int num_args );
+
+
+/* Define delimiters for type check elements in argument lists (and return type
+ * specifications, eventually).
+ */
+# define TYPE_OPEN_DELIM '['
+# define TYPE_CLOSE_DELIM ']'
+
+/*
+ * is_type_name() - true iff the given string represents a type check
+ * specification.
+ */
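+
+/*
+ * E.g. "[X]" qualifies (first character '[' and last character ']'), while
+ * "X" or "[X" does not.
+ */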
+
+int is_type_name( char const * s )
+{
+ return s[ 0 ] == TYPE_OPEN_DELIM && s[ strlen( s ) - 1 ] ==
+ TYPE_CLOSE_DELIM;
+}
+
+static void argument_error( char const * message, FUNCTION * procedure,
+ FRAME * frame, OBJECT * arg )
+{
+ extern void print_source_line( FRAME * );
+ LOL * actual = frame->args;
+ backtrace_line( frame->prev );
+ printf( "*** argument error\n* rule %s ( ", frame->rulename );
+ argument_list_print( procedure->formal_arguments,
+ procedure->num_formal_arguments );
+ printf( " )\n* called with: ( " );
+ lol_print( actual );
+ printf( " )\n* %s %s\n", message, arg ? object_str ( arg ) : "" );
+ function_location( procedure, &frame->file, &frame->line );
+ print_source_line( frame );
+ printf( "see definition of rule '%s' being called\n", frame->rulename );
+ backtrace( frame->prev );
+ exit( 1 );
+}
+
+static void type_check_range( OBJECT * type_name, LISTITER iter, LISTITER end,
+ FRAME * caller, FUNCTION * called, OBJECT * arg_name )
+{
+ static module_t * typecheck = 0;
+
+ /* If nothing to check, bail now. */
+ if ( iter == end || !type_name )
+ return;
+
+ if ( !typecheck )
+ typecheck = bindmodule( constant_typecheck );
+
+ /* If the checking rule can not be found, also bail. */
+ if ( !typecheck->rules || !hash_find( typecheck->rules, type_name ) )
+ return;
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ LIST * error;
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ frame->module = typecheck;
+ frame->prev = caller;
+ frame->prev_user = caller->module->user_module
+ ? caller
+ : caller->prev_user;
+
+ /* Prepare the argument list */
+ lol_add( frame->args, list_new( object_copy( list_item( iter ) ) ) );
+ error = evaluate_rule( bindrule( type_name, frame->module ), type_name, frame );
+
+ if ( !list_empty( error ) )
+ argument_error( object_str( list_front( error ) ), called, caller,
+ arg_name );
+
+ frame_free( frame );
+ }
+}
+
+static void type_check( OBJECT * type_name, LIST * values, FRAME * caller,
+ FUNCTION * called, OBJECT * arg_name )
+{
+ type_check_range( type_name, list_begin( values ), list_end( values ),
+ caller, called, arg_name );
+}
+
+void argument_list_check( struct arg_list * formal, int formal_count,
+ FUNCTION * function, FRAME * frame )
+{
+ LOL * all_actual = frame->args;
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ LISTITER actual_iter = list_begin( actual );
+ LISTITER const actual_end = list_end( actual );
+ int j;
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+ LIST * value;
+
+ switch ( formal_arg->flags )
+ {
+ case ARG_ONE:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ type_check_range( formal_arg->type_name, actual_iter,
+ list_next( actual_iter ), frame, function,
+ formal_arg->arg_name );
+ actual_iter = list_next( actual_iter );
+ break;
+ case ARG_OPTIONAL:
+ if ( actual_iter == actual_end )
+ value = L0;
+ else
+ {
+ type_check_range( formal_arg->type_name, actual_iter,
+ list_next( actual_iter ), frame, function,
+ formal_arg->arg_name );
+ actual_iter = list_next( actual_iter );
+ }
+ break;
+ case ARG_PLUS:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ /* fallthrough */
+ case ARG_STAR:
+ type_check_range( formal_arg->type_name, actual_iter,
+ actual_end, frame, function, formal_arg->arg_name );
+ actual_iter = actual_end;
+ break;
+ case ARG_VARIADIC:
+ return;
+ }
+ }
+
+ if ( actual_iter != actual_end )
+ argument_error( "extra argument", function, frame, list_item(
+ actual_iter ) );
+ }
+
+ for ( ; i < all_actual->count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ if ( !list_empty( actual ) )
+ argument_error( "extra argument", function, frame, list_front(
+ actual ) );
+ }
+}
+
+void argument_list_push( struct arg_list * formal, int formal_count,
+ FUNCTION * function, FRAME * frame, STACK * s )
+{
+ LOL * all_actual = frame->args;
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ LISTITER actual_iter = list_begin( actual );
+ LISTITER const actual_end = list_end( actual );
+ int j;
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+ LIST * value;
+
+ switch ( formal_arg->flags )
+ {
+ case ARG_ONE:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ value = list_new( object_copy( list_item( actual_iter ) ) );
+ actual_iter = list_next( actual_iter );
+ break;
+ case ARG_OPTIONAL:
+ if ( actual_iter == actual_end )
+ value = L0;
+ else
+ {
+ value = list_new( object_copy( list_item( actual_iter ) ) );
+ actual_iter = list_next( actual_iter );
+ }
+ break;
+ case ARG_PLUS:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ /* fallthrough */
+ case ARG_STAR:
+ value = list_copy_range( actual, actual_iter, actual_end );
+ actual_iter = actual_end;
+ break;
+ case ARG_VARIADIC:
+ return;
+ }
+
+ type_check( formal_arg->type_name, value, frame, function,
+ formal_arg->arg_name );
+
+ if ( formal_arg->index != -1 )
+ {
+ LIST * * const old = &frame->module->fixed_variables[
+ formal_arg->index ];
+ stack_push( s, *old );
+ *old = value;
+ }
+ else
+ stack_push( s, var_swap( frame->module, formal_arg->arg_name,
+ value ) );
+ }
+
+ if ( actual_iter != actual_end )
+ argument_error( "extra argument", function, frame, list_item(
+ actual_iter ) );
+ }
+
+ for ( ; i < all_actual->count; ++i )
+ {
+ LIST * const actual = lol_get( all_actual, i );
+ if ( !list_empty( actual ) )
+ argument_error( "extra argument", function, frame, list_front(
+ actual ) );
+ }
+}
+
+void argument_list_pop( struct arg_list * formal, int formal_count,
+ FRAME * frame, STACK * s )
+{
+ int i;
+ for ( i = formal_count - 1; i >= 0; --i )
+ {
+ int j;
+ for ( j = formal[ i ].size - 1; j >= 0 ; --j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+
+ if ( formal_arg->flags == ARG_VARIADIC )
+ continue;
+ if ( formal_arg->index != -1 )
+ {
+ LIST * const old = stack_pop( s );
+ LIST * * const pos = &frame->module->fixed_variables[
+ formal_arg->index ];
+ list_free( *pos );
+ *pos = old;
+ }
+ else
+ var_set( frame->module, formal_arg->arg_name, stack_pop( s ),
+ VAR_SET );
+ }
+ }
+}
+
+
+struct argument_compiler
+{
+ struct dynamic_array args[ 1 ];
+ struct argument arg;
+ int state;
+#define ARGUMENT_COMPILER_START 0
+#define ARGUMENT_COMPILER_FOUND_TYPE 1
+#define ARGUMENT_COMPILER_FOUND_OBJECT 2
+#define ARGUMENT_COMPILER_DONE 3
+};
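+
+/*
+ * Rough sketch of the state machine driven by argument_compiler_add():
+ * START expects either a type name ("[...]") or an argument name;
+ * FOUND_TYPE expects the argument name the type applies to; FOUND_OBJECT
+ * expects an optional "?", "+" or "*" flag before the argument is pushed;
+ * DONE is entered after a bare "*" argument (variadic), after which any
+ * remaining tokens are ignored.
+ */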
+
+
+static void argument_compiler_init( struct argument_compiler * c )
+{
+ dynamic_array_init( c->args );
+ c->state = ARGUMENT_COMPILER_START;
+}
+
+static void argument_compiler_free( struct argument_compiler * c )
+{
+ dynamic_array_free( c->args );
+}
+
+static void argument_compiler_add( struct argument_compiler * c, OBJECT * arg,
+ OBJECT * file, int line )
+{
+ switch ( c->state )
+ {
+ case ARGUMENT_COMPILER_FOUND_OBJECT:
+
+ if ( object_equal( arg, constant_question_mark ) )
+ {
+ c->arg.flags = ARG_OPTIONAL;
+ }
+ else if ( object_equal( arg, constant_plus ) )
+ {
+ c->arg.flags = ARG_PLUS;
+ }
+ else if ( object_equal( arg, constant_star ) )
+ {
+ c->arg.flags = ARG_STAR;
+ }
+
+ dynamic_array_push( c->args, c->arg );
+ c->state = ARGUMENT_COMPILER_START;
+
+ if ( c->arg.flags != ARG_ONE )
+ break;
+ /* fall-through */
+
+ case ARGUMENT_COMPILER_START:
+
+ c->arg.type_name = 0;
+ c->arg.index = -1;
+ c->arg.flags = ARG_ONE;
+
+ if ( is_type_name( object_str( arg ) ) )
+ {
+ c->arg.type_name = object_copy( arg );
+ c->state = ARGUMENT_COMPILER_FOUND_TYPE;
+ break;
+ }
+ /* fall-through */
+
+ case ARGUMENT_COMPILER_FOUND_TYPE:
+
+ if ( is_type_name( object_str( arg ) ) )
+ {
+ printf( "%s:%d: missing argument name before type name: %s\n",
+ object_str( file ), line, object_str( arg ) );
+ exit( 1 );
+ }
+
+ c->arg.arg_name = object_copy( arg );
+ if ( object_equal( arg, constant_star ) )
+ {
+ c->arg.flags = ARG_VARIADIC;
+ dynamic_array_push( c->args, c->arg );
+ c->state = ARGUMENT_COMPILER_DONE;
+ }
+ else
+ {
+ c->state = ARGUMENT_COMPILER_FOUND_OBJECT;
+ }
+ break;
+
+ case ARGUMENT_COMPILER_DONE:
+ break;
+ }
+}
+
+static void argument_compiler_recurse( struct argument_compiler * c,
+ PARSE * parse )
+{
+ if ( parse->type == PARSE_APPEND )
+ {
+ argument_compiler_recurse( c, parse->left );
+ argument_compiler_recurse( c, parse->right );
+ }
+ else if ( parse->type != PARSE_NULL )
+ {
+ assert( parse->type == PARSE_LIST );
+ argument_compiler_add( c, parse->string, parse->file, parse->line );
+ }
+}
+
+static struct arg_list arg_compile_impl( struct argument_compiler * c,
+ OBJECT * file, int line )
+{
+ struct arg_list result;
+ switch ( c->state )
+ {
+ case ARGUMENT_COMPILER_START:
+ case ARGUMENT_COMPILER_DONE:
+ break;
+ case ARGUMENT_COMPILER_FOUND_TYPE:
+ printf( "%s:%d: missing argument name after type name: %s\n",
+ object_str( file ), line, object_str( c->arg.type_name ) );
+ exit( 1 );
+ case ARGUMENT_COMPILER_FOUND_OBJECT:
+ dynamic_array_push( c->args, c->arg );
+ break;
+ }
+ result.size = c->args->size;
+ result.args = BJAM_MALLOC( c->args->size * sizeof( struct argument ) );
+ memcpy( result.args, c->args->data, c->args->size * sizeof( struct argument
+ ) );
+ return result;
+}
+
+static struct arg_list arg_compile( PARSE * parse )
+{
+ struct argument_compiler c[ 1 ];
+ struct arg_list result;
+ argument_compiler_init( c );
+ argument_compiler_recurse( c, parse );
+ result = arg_compile_impl( c, parse->file, parse->line );
+ argument_compiler_free( c );
+ return result;
+}
+
+struct argument_list_compiler
+{
+ struct dynamic_array args[ 1 ];
+};
+
+static void argument_list_compiler_init( struct argument_list_compiler * c )
+{
+ dynamic_array_init( c->args );
+}
+
+static void argument_list_compiler_free( struct argument_list_compiler * c )
+{
+ dynamic_array_free( c->args );
+}
+
+static void argument_list_compiler_add( struct argument_list_compiler * c,
+ PARSE * parse )
+{
+ struct arg_list args = arg_compile( parse );
+ dynamic_array_push( c->args, args );
+}
+
+static void argument_list_compiler_recurse( struct argument_list_compiler * c,
+ PARSE * parse )
+{
+ if ( parse )
+ {
+ argument_list_compiler_add( c, parse->right );
+ argument_list_compiler_recurse( c, parse->left );
+ }
+}
+
+static struct arg_list * arg_list_compile( PARSE * parse, int * num_arguments )
+{
+ if ( parse )
+ {
+ struct argument_list_compiler c[ 1 ];
+ struct arg_list * result;
+ argument_list_compiler_init( c );
+ argument_list_compiler_recurse( c, parse );
+ *num_arguments = c->args->size;
+ result = BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
+ memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list )
+ );
+ argument_list_compiler_free( c );
+ return result;
+ }
+ *num_arguments = 0;
+ return 0;
+}
+
+static struct arg_list * arg_list_compile_builtin( char const * * args,
+ int * num_arguments )
+{
+ if ( args )
+ {
+ struct argument_list_compiler c[ 1 ];
+ struct arg_list * result;
+ argument_list_compiler_init( c );
+ while ( *args )
+ {
+ struct argument_compiler arg_comp[ 1 ];
+ struct arg_list arg;
+ argument_compiler_init( arg_comp );
+ for ( ; *args; ++args )
+ {
+ OBJECT * token;
+ if ( strcmp( *args, ":" ) == 0 )
+ {
+ ++args;
+ break;
+ }
+ token = object_new( *args );
+ argument_compiler_add( arg_comp, token, constant_builtin, -1 );
+ object_free( token );
+ }
+ arg = arg_compile_impl( arg_comp, constant_builtin, -1 );
+ dynamic_array_push( c->args, arg );
+ argument_compiler_free( arg_comp );
+ }
+ *num_arguments = c->args->size;
+ result = BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
+ memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list )
+ );
+ argument_list_compiler_free( c );
+ return result;
+ }
+ *num_arguments = 0;
+ return 0;
+}
+
+static void argument_list_print( struct arg_list * args, int num_args )
+{
+ if ( args )
+ {
+ int i;
+ for ( i = 0; i < num_args; ++i )
+ {
+ int j;
+ if ( i ) printf( " : " );
+ for ( j = 0; j < args[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &args[ i ].args[ j ];
+ if ( j ) printf( " " );
+ if ( formal_arg->type_name )
+ printf( "%s ", object_str( formal_arg->type_name ) );
+ printf( "%s", object_str( formal_arg->arg_name ) );
+ switch ( formal_arg->flags )
+ {
+ case ARG_OPTIONAL: printf( " ?" ); break;
+ case ARG_PLUS: printf( " +" ); break;
+ case ARG_STAR: printf( " *" ); break;
+ }
+ }
+ }
+ }
+}
+
+
+struct arg_list * argument_list_bind_variables( struct arg_list * formal,
+ int formal_count, module_t * module, int * counter )
+{
+ if ( formal )
+ {
+ struct arg_list * result = (struct arg_list *)BJAM_MALLOC( sizeof(
+ struct arg_list ) * formal_count );
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ int j;
+ struct argument * args = (struct argument *)BJAM_MALLOC( sizeof(
+ struct argument ) * formal[ i ].size );
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ args[ j ] = formal[ i ].args[ j ];
+ if ( args[ j ].type_name )
+ args[ j ].type_name = object_copy( args[ j ].type_name );
+ args[ j ].arg_name = object_copy( args[ j ].arg_name );
+ if ( args[ j ].flags != ARG_VARIADIC )
+ args[ j ].index = module_add_fixed_var( module,
+ args[ j ].arg_name, counter );
+ }
+ result[ i ].args = args;
+ result[ i ].size = formal[ i ].size;
+ }
+
+ return result;
+ }
+ return 0;
+}
+
+
+void argument_list_free( struct arg_list * args, int args_count )
+{
+ int i;
+ for ( i = 0; i < args_count; ++i )
+ {
+ int j;
+ for ( j = 0; j < args[ i ].size; ++j )
+ {
+ if ( args[ i ].args[ j ].type_name )
+ object_free( args[ i ].args[ j ].type_name );
+ object_free( args[ i ].args[ j ].arg_name );
+ }
+ BJAM_FREE( args[ i ].args );
+ }
+ BJAM_FREE( args );
+}
+
+
+FUNCTION * function_unbind_variables( FUNCTION * f )
+{
+ if ( f->type == FUNCTION_JAM )
+ {
+ JAM_FUNCTION * const func = (JAM_FUNCTION *)f;
+ return func->generic ? func->generic : f;
+ }
+#ifdef HAVE_PYTHON
+ if ( f->type == FUNCTION_PYTHON )
+ return f;
+#endif
+ assert( f->type == FUNCTION_BUILTIN );
+ return f;
+}
+
+FUNCTION * function_bind_variables( FUNCTION * f, module_t * module,
+ int * counter )
+{
+ if ( f->type == FUNCTION_BUILTIN )
+ return f;
+#ifdef HAVE_PYTHON
+ if ( f->type == FUNCTION_PYTHON )
+ return f;
+#endif
+ {
+ JAM_FUNCTION * func = (JAM_FUNCTION *)f;
+ JAM_FUNCTION * new_func = BJAM_MALLOC( sizeof( JAM_FUNCTION ) );
+ instruction * code;
+ int i;
+ assert( f->type == FUNCTION_JAM );
+ memcpy( new_func, func, sizeof( JAM_FUNCTION ) );
+ new_func->base.reference_count = 1;
+ new_func->base.formal_arguments = argument_list_bind_variables(
+ f->formal_arguments, f->num_formal_arguments, module, counter );
+ new_func->code = BJAM_MALLOC( func->code_size * sizeof( instruction ) );
+ memcpy( new_func->code, func->code, func->code_size * sizeof(
+ instruction ) );
+ new_func->generic = (FUNCTION *)func;
+ func = new_func;
+ for ( i = 0; ; ++i )
+ {
+ OBJECT * key;
+ int op_code;
+ code = func->code + i;
+ switch ( code->op_code )
+ {
+ case INSTR_PUSH_VAR: op_code = INSTR_PUSH_VAR_FIXED; break;
+ case INSTR_PUSH_LOCAL: op_code = INSTR_PUSH_LOCAL_FIXED; break;
+ case INSTR_POP_LOCAL: op_code = INSTR_POP_LOCAL_FIXED; break;
+ case INSTR_SET: op_code = INSTR_SET_FIXED; break;
+ case INSTR_APPEND: op_code = INSTR_APPEND_FIXED; break;
+ case INSTR_DEFAULT: op_code = INSTR_DEFAULT_FIXED; break;
+ case INSTR_RETURN: return (FUNCTION *)new_func;
+ case INSTR_CALL_MEMBER_RULE:
+ case INSTR_CALL_RULE: ++i; continue;
+ case INSTR_PUSH_MODULE:
+ {
+ int depth = 1;
+ ++i;
+ while ( depth > 0 )
+ {
+ code = func->code + i;
+ switch ( code->op_code )
+ {
+ case INSTR_PUSH_MODULE:
+ case INSTR_CLASS:
+ ++depth;
+ break;
+ case INSTR_POP_MODULE:
+ --depth;
+ break;
+ case INSTR_CALL_RULE:
+ ++i;
+ break;
+ }
+ ++i;
+ }
+ --i;
+ }
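+ /* fall through */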
+ default: continue;
+ }
+ key = func->constants[ code->arg ];
+ if ( !( object_equal( key, constant_TMPDIR ) ||
+ object_equal( key, constant_TMPNAME ) ||
+ object_equal( key, constant_TMPFILE ) ||
+ object_equal( key, constant_STDOUT ) ||
+ object_equal( key, constant_STDERR ) ) )
+ {
+ code->op_code = op_code;
+ code->arg = module_add_fixed_var( module, key, counter );
+ }
+ }
+ }
+}
+
+void function_refer( FUNCTION * func )
+{
+ ++func->reference_count;
+}
+
+void function_free( FUNCTION * function_ )
+{
+ int i;
+
+ if ( --function_->reference_count != 0 )
+ return;
+
+ if ( function_->formal_arguments )
+ argument_list_free( function_->formal_arguments,
+ function_->num_formal_arguments );
+
+ if ( function_->type == FUNCTION_JAM )
+ {
+ JAM_FUNCTION * func = (JAM_FUNCTION *)function_;
+
+ BJAM_FREE( func->code );
+
+ if ( func->generic )
+ function_free( func->generic );
+ else
+ {
+ if ( function_->rulename ) object_free( function_->rulename );
+
+ for ( i = 0; i < func->num_constants; ++i )
+ object_free( func->constants[ i ] );
+ BJAM_FREE( func->constants );
+
+ for ( i = 0; i < func->num_subfunctions; ++i )
+ {
+ object_free( func->functions[ i ].name );
+ function_free( func->functions[ i ].code );
+ }
+ BJAM_FREE( func->functions );
+
+ for ( i = 0; i < func->num_subactions; ++i )
+ {
+ object_free( func->actions[ i ].name );
+ function_free( func->actions[ i ].command );
+ }
+ BJAM_FREE( func->actions );
+
+ object_free( func->file );
+ }
+ }
+#ifdef HAVE_PYTHON
+ else if ( function_->type == FUNCTION_PYTHON )
+ {
+ PYTHON_FUNCTION * func = (PYTHON_FUNCTION *)function_;
+ Py_DECREF( func->python_function );
+ if ( function_->rulename ) object_free( function_->rulename );
+ }
+#endif
+ else
+ {
+ assert( function_->type == FUNCTION_BUILTIN );
+ if ( function_->rulename ) object_free( function_->rulename );
+ }
+
+ BJAM_FREE( function_ );
+}
+
+
+/* Alignment check for stack */
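+/* These arrays get a negative (invalid) size when the checked condition does
+ * not hold, so a violation fails to compile -- a pre-C11 stand-in for a
+ * static assertion.
+ */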
+
+struct align_var_edits
+{
+ char ch;
+ VAR_EDITS e;
+};
+
+struct align_expansion_item
+{
+ char ch;
+ expansion_item e;
+};
+
+static char check_align_var_edits[ sizeof(struct align_var_edits) <= sizeof(VAR_EDITS) + sizeof(void *) ? 1 : -1 ];
+static char check_align_expansion_item[ sizeof(struct align_expansion_item) <= sizeof(expansion_item) + sizeof(void *) ? 1 : -1 ];
+
+static char check_ptr_size1[ sizeof(LIST *) <= sizeof(void *) ? 1 : -1 ];
+static char check_ptr_size2[ sizeof(char *) <= sizeof(void *) ? 1 : -1 ];
+
+void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s,
+ string * out )
+{
+ *(string * *)stack_allocate( s, sizeof( string * ) ) = out;
+ list_free( function_run( function, frame, s ) );
+ stack_deallocate( s, sizeof( string * ) );
+}
+
+/*
+ * WARNING: The instruction set is tuned for Jam and is not really generic. Be
+ * especially careful about stack push/pop.
+ */
+
+LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s )
+{
+ JAM_FUNCTION * function;
+ instruction * code;
+ LIST * l;
+ LIST * r;
+ LIST * result = L0;
+ void * saved_stack = s->data;
+
+ if ( function_->type == FUNCTION_BUILTIN )
+ {
+ BUILTIN_FUNCTION const * const f = (BUILTIN_FUNCTION *)function_;
+ if ( function_->formal_arguments )
+ argument_list_check( function_->formal_arguments,
+ function_->num_formal_arguments, function_, frame );
+ return f->func( frame, f->flags );
+ }
+
+#ifdef HAVE_PYTHON
+ else if ( function_->type == FUNCTION_PYTHON )
+ {
+ PYTHON_FUNCTION * f = (PYTHON_FUNCTION *)function_;
+ return call_python_function( f, frame );
+ }
+#endif
+
+ assert( function_->type == FUNCTION_JAM );
+
+ if ( function_->formal_arguments )
+ argument_list_push( function_->formal_arguments,
+ function_->num_formal_arguments, function_, frame, s );
+
+ function = (JAM_FUNCTION *)function_;
+ code = function->code;
+ for ( ; ; )
+ {
+ switch ( code->op_code )
+ {
+
+ /*
+ * Basic stack manipulation
+ */
+
+ case INSTR_PUSH_EMPTY:
+ stack_push( s, L0 );
+ break;
+
+ case INSTR_PUSH_CONSTANT:
+ {
+ OBJECT * value = function_get_constant( function, code->arg );
+ stack_push( s, list_new( object_copy( value ) ) );
+ break;
+ }
+
+ case INSTR_PUSH_ARG:
+ stack_push( s, frame_get_local( frame, code->arg ) );
+ break;
+
+ case INSTR_PUSH_VAR:
+ stack_push( s, function_get_variable( function, frame, code->arg ) );
+ break;
+
+ case INSTR_PUSH_VAR_FIXED:
+ stack_push( s, list_copy( frame->module->fixed_variables[ code->arg
+ ] ) );
+ break;
+
+ case INSTR_PUSH_GROUP:
+ {
+ LIST * value = L0;
+ LISTITER iter;
+ LISTITER end;
+ l = stack_pop( s );
+ for ( iter = list_begin( l ), end = list_end( l ); iter != end;
+ iter = list_next( iter ) )
+ value = list_append( value, function_get_named_variable(
+ function, frame, list_item( iter ) ) );
+ list_free( l );
+ stack_push( s, value );
+ break;
+ }
+
+ case INSTR_PUSH_APPEND:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ stack_push( s, list_append( l, r ) );
+ break;
+
+ case INSTR_SWAP:
+ l = stack_top( s );
+ stack_set( s, 0, stack_at( s, code->arg ) );
+ stack_set( s, code->arg, l );
+ break;
+
+ case INSTR_POP:
+ list_free( stack_pop( s ) );
+ break;
+
+ /*
+ * Branch instructions
+ */
+
+ case INSTR_JUMP:
+ code += code->arg;
+ break;
+
+ case INSTR_JUMP_EMPTY:
+ l = stack_pop( s );
+ if ( !list_cmp( l, L0 ) ) code += code->arg;
+ list_free( l );
+ break;
+
+ case INSTR_JUMP_NOT_EMPTY:
+ l = stack_pop( s );
+ if ( list_cmp( l, L0 ) ) code += code->arg;
+ list_free( l );
+ break;
+
+ case INSTR_JUMP_LT:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) < 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ break;
+
+ case INSTR_JUMP_LE:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) <= 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ break;
+
+ case INSTR_JUMP_GT:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) > 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ break;
+
+ case INSTR_JUMP_GE:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) >= 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ break;
+
+ case INSTR_JUMP_EQ:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( list_cmp( l, r ) == 0 ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ break;
+
+ case INSTR_JUMP_NE:
+ r = stack_pop(s);
+ l = stack_pop(s);
+ if ( list_cmp(l, r) != 0 ) code += code->arg;
+ list_free(l);
+ list_free(r);
+ break;
+
+ case INSTR_JUMP_IN:
+ r = stack_pop(s);
+ l = stack_pop(s);
+ if ( list_is_sublist( l, r ) ) code += code->arg;
+ list_free(l);
+ list_free(r);
+ break;
+
+ case INSTR_JUMP_NOT_IN:
+ r = stack_pop( s );
+ l = stack_pop( s );
+ if ( !list_is_sublist( l, r ) ) code += code->arg;
+ list_free( l );
+ list_free( r );
+ break;
+
+ /*
+ * For
+ */
+
+ case INSTR_FOR_INIT:
+ l = stack_top( s );
+ *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) =
+ list_begin( l );
+ break;
+
+ case INSTR_FOR_LOOP:
+ {
+ LISTITER iter = *(LISTITER *)stack_get( s );
+ stack_deallocate( s, sizeof( LISTITER ) );
+ l = stack_top( s );
+ if ( iter == list_end( l ) )
+ {
+ list_free( stack_pop( s ) );
+ code += code->arg;
+ }
+ else
+ {
+ r = list_new( object_copy( list_item( iter ) ) );
+ iter = list_next( iter );
+ *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) = iter;
+ stack_push( s, r );
+ }
+ break;
+ }
+
+ /*
+ * Switch
+ */
+
+ case INSTR_JUMP_NOT_GLOB:
+ {
+ char const * pattern;
+ char const * match;
+ l = stack_pop( s );
+ r = stack_top( s );
+ pattern = list_empty( l ) ? "" : object_str( list_front( l ) );
+ match = list_empty( r ) ? "" : object_str( list_front( r ) );
+ if ( glob( pattern, match ) )
+ code += code->arg;
+ else
+ list_free( stack_pop( s ) );
+ list_free( l );
+ break;
+ }
+
+ /*
+ * Return
+ */
+
+ case INSTR_SET_RESULT:
+ list_free( result );
+ if ( !code->arg )
+ result = stack_pop( s );
+ else
+ result = list_copy( stack_top( s ) );
+ break;
+
+ case INSTR_PUSH_RESULT:
+ stack_push( s, result );
+ result = L0;
+ break;
+
+ case INSTR_RETURN:
+ {
+ if ( function_->formal_arguments )
+ argument_list_pop( function_->formal_arguments,
+ function_->num_formal_arguments, frame, s );
+#ifndef NDEBUG
+ if ( !( saved_stack == s->data ) )
+ {
+ frame->file = function->file;
+ frame->line = function->line;
+ backtrace_line( frame );
+ printf( "error: stack check failed.\n" );
+ backtrace( frame );
+ assert( saved_stack == s->data );
+ }
+#endif
+ assert( saved_stack == s->data );
+ return result;
+ }
+
+ /*
+ * Local variables
+ */
+
+ case INSTR_PUSH_LOCAL:
+ {
+ LIST * value = stack_pop( s );
+ stack_push( s, function_swap_variable( function, frame, code->arg,
+ value ) );
+ break;
+ }
+
+ case INSTR_POP_LOCAL:
+ function_set_variable( function, frame, code->arg, stack_pop( s ) );
+ break;
+
+ case INSTR_PUSH_LOCAL_FIXED:
+ {
+ LIST * value = stack_pop( s );
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ stack_push( s, *ptr );
+ *ptr = value;
+ break;
+ }
+
+ case INSTR_POP_LOCAL_FIXED:
+ {
+ LIST * value = stack_pop( s );
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ list_free( *ptr );
+ *ptr = value;
+ break;
+ }
+
+ case INSTR_PUSH_LOCAL_GROUP:
+ {
+ LIST * const value = stack_pop( s );
+ LISTITER iter;
+ LISTITER end;
+ l = stack_pop( s );
+ for ( iter = list_begin( l ), end = list_end( l ); iter != end;
+ iter = list_next( iter ) )
+ stack_push( s, function_swap_named_variable( function, frame,
+ list_item( iter ), list_copy( value ) ) );
+ list_free( value );
+ stack_push( s, l );
+ break;
+ }
+
+ case INSTR_POP_LOCAL_GROUP:
+ {
+ LISTITER iter;
+ LISTITER end;
+ r = stack_pop( s );
+ l = list_reverse( r );
+ list_free( r );
+ for ( iter = list_begin( l ), end = list_end( l ); iter != end;
+ iter = list_next( iter ) )
+ function_set_named_variable( function, frame, list_item( iter ),
+ stack_pop( s ) );
+ list_free( l );
+ break;
+ }
+
+ /*
+ * on $(TARGET) variables
+ */
+
+ case INSTR_PUSH_ON:
+ {
+ LIST * targets = stack_top( s );
+ if ( !list_empty( targets ) )
+ {
+ /* FIXME: push the state onto the stack instead of using
+ * pushsettings.
+ */
+ TARGET * t = bindtarget( list_front( targets ) );
+ pushsettings( frame->module, t->settings );
+ }
+ else
+ {
+ /* [ on $(TARGET) ... ] is ignored if $(TARGET) is empty. */
+ list_free( stack_pop( s ) );
+ stack_push( s, L0 );
+ code += code->arg;
+ }
+ break;
+ }
+
+ case INSTR_POP_ON:
+ {
+ LIST * result = stack_pop( s );
+ LIST * targets = stack_pop( s );
+ if ( !list_empty( targets ) )
+ {
+ TARGET * t = bindtarget( list_front( targets ) );
+ popsettings( frame->module, t->settings );
+ }
+ list_free( targets );
+ stack_push( s, result );
+ break;
+ }
+
+ case INSTR_SET_ON:
+ {
+ LIST * targets = stack_pop( s );
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * t = bindtarget( list_item( iter ) );
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter
+ ) )
+ t->settings = addsettings( t->settings, VAR_SET, list_item(
+ vars_iter ), list_copy( value ) );
+ }
+ list_free( vars );
+ list_free( targets );
+ stack_push( s, value );
+ break;
+ }
+
+ case INSTR_APPEND_ON:
+ {
+ LIST * targets = stack_pop( s );
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter
+ ) )
+ t->settings = addsettings( t->settings, VAR_APPEND,
+ list_item( vars_iter ), list_copy( value ) );
+ }
+ list_free( vars );
+ list_free( targets );
+ stack_push( s, value );
+ break;
+ }
+
+ case INSTR_DEFAULT_ON:
+ {
+ LIST * targets = stack_pop( s );
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * t = bindtarget( list_item( iter ) );
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter
+ ) )
+ t->settings = addsettings( t->settings, VAR_DEFAULT,
+ list_item( vars_iter ), list_copy( value ) );
+ }
+ list_free( vars );
+ list_free( targets );
+ stack_push( s, value );
+ break;
+ }
+
+ /* [ on $(target) return $(variable) ] */
+ case INSTR_GET_ON:
+ {
+ LIST * targets = stack_pop( s );
+ LIST * result = L0;
+ if ( !list_empty( targets ) )
+ {
+ OBJECT * varname = function->constants[ code->arg ];
+ TARGET * t = bindtarget( list_front( targets ) );
+ SETTINGS * s = t->settings;
+ int found = 0;
+ for ( ; s != 0; s = s->next )
+ {
+ if ( object_equal( s->symbol, varname ) )
+ {
+ result = s->value;
+ found = 1;
+ break;
+ }
+ }
+ if ( !found )
+ {
+ result = var_get( frame->module, varname ) ;
+ }
+ }
+ stack_push( s, list_copy( result ) );
+ break;
+ }
+
+ /*
+ * Variable setting
+ */
+
+ case INSTR_SET:
+ function_set_variable( function, frame, code->arg,
+ stack_pop( s ) );
+ break;
+
+ case INSTR_APPEND:
+ function_append_variable( function, frame, code->arg,
+ stack_pop( s ) );
+ break;
+
+ case INSTR_DEFAULT:
+ function_default_variable( function, frame, code->arg,
+ stack_pop( s ) );
+ break;
+
+ case INSTR_SET_FIXED:
+ {
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ list_free( *ptr );
+ *ptr = stack_pop( s );
+ break;
+ }
+
+ case INSTR_APPEND_FIXED:
+ {
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ assert( code->arg < frame->module->num_fixed_variables );
+ *ptr = list_append( *ptr, stack_pop( s ) );
+ break;
+ }
+
+ case INSTR_DEFAULT_FIXED:
+ {
+ LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
+ LIST * value = stack_pop( s );
+ assert( code->arg < frame->module->num_fixed_variables );
+ if ( list_empty( *ptr ) )
+ *ptr = value;
+ else
+ list_free( value );
+ break;
+ }
+
+ case INSTR_SET_GROUP:
+ {
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ function_set_named_variable( function, frame, list_item( iter ),
+ list_copy( value ) );
+ list_free( vars );
+ list_free( value );
+ break;
+ }
+
+ case INSTR_APPEND_GROUP:
+ {
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ function_append_named_variable( function, frame, list_item( iter
+ ), list_copy( value ) );
+ list_free( vars );
+ list_free( value );
+ break;
+ }
+
+ case INSTR_DEFAULT_GROUP:
+ {
+ LIST * value = stack_pop( s );
+ LIST * vars = stack_pop( s );
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ function_default_named_variable( function, frame, list_item(
+ iter ), list_copy( value ) );
+ list_free( vars );
+ list_free( value );
+ break;
+ }
+
+ /*
+ * Rules
+ */
+
+ case INSTR_CALL_RULE:
+ {
+ char const * unexpanded = object_str( function_get_constant(
+ function, code[ 1 ].op_code ) );
+ LIST * result = function_call_rule( function, frame, s, code->arg,
+ unexpanded, function->file, code[ 1 ].arg );
+ stack_push( s, result );
+ ++code;
+ break;
+ }
+
+ case INSTR_CALL_MEMBER_RULE:
+ {
+ OBJECT * rule_name = function_get_constant( function, code[1].op_code );
+ LIST * result = function_call_member_rule( function, frame, s, code->arg, rule_name, function->file, code[1].arg );
+ stack_push( s, result );
+ ++code;
+ break;
+ }
+
+ case INSTR_RULE:
+ function_set_rule( function, frame, s, code->arg );
+ break;
+
+ case INSTR_ACTIONS:
+ function_set_actions( function, frame, s, code->arg );
+ break;
+
+ /*
+ * Variable expansion
+ */
+
+ case INSTR_APPLY_MODIFIERS:
+ {
+ int n;
+ int i;
+ l = stack_pop( s );
+ n = expand_modifiers( s, code->arg );
+ stack_push( s, l );
+ l = apply_modifiers( s, n );
+ list_free( stack_pop( s ) );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, l );
+ break;
+ }
+
+ case INSTR_APPLY_INDEX:
+ l = apply_subscript( s );
+ list_free( stack_pop( s ) );
+ list_free( stack_pop( s ) );
+ stack_push( s, l );
+ break;
+
+ case INSTR_APPLY_INDEX_MODIFIERS:
+ {
+ int i;
+ int n;
+ l = stack_pop( s );
+ r = stack_pop( s );
+ n = expand_modifiers( s, code->arg );
+ stack_push( s, r );
+ stack_push( s, l );
+ l = apply_subscript_and_modifiers( s, n );
+ list_free( stack_pop( s ) );
+ list_free( stack_pop( s ) );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, l );
+ break;
+ }
+
+ case INSTR_APPLY_MODIFIERS_GROUP:
+ {
+ int i;
+ LIST * const vars = stack_pop( s );
+ int const n = expand_modifiers( s, code->arg );
+ LIST * result = L0;
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ stack_push( s, function_get_named_variable( function, frame,
+ list_item( iter ) ) );
+ result = list_append( result, apply_modifiers( s, n ) );
+ list_free( stack_pop( s ) );
+ }
+ list_free( vars );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, result );
+ break;
+ }
+
+ case INSTR_APPLY_INDEX_GROUP:
+ {
+ LIST * vars = stack_pop( s );
+ LIST * result = L0;
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ stack_push( s, function_get_named_variable( function, frame,
+ list_item( iter ) ) );
+ result = list_append( result, apply_subscript( s ) );
+ list_free( stack_pop( s ) );
+ }
+ list_free( vars );
+ list_free( stack_pop( s ) );
+ stack_push( s, result );
+ break;
+ }
+
+ case INSTR_APPLY_INDEX_MODIFIERS_GROUP:
+ {
+ int i;
+ LIST * const vars = stack_pop( s );
+ LIST * const r = stack_pop( s );
+ int const n = expand_modifiers( s, code->arg );
+ LIST * result = L0;
+ LISTITER iter = list_begin( vars );
+ LISTITER const end = list_end( vars );
+ stack_push( s, r );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ stack_push( s, function_get_named_variable( function, frame,
+ list_item( iter ) ) );
+ result = list_append( result, apply_subscript_and_modifiers( s,
+ n ) );
+ list_free( stack_pop( s ) );
+ }
+ list_free( stack_pop( s ) );
+ list_free( vars );
+ stack_deallocate( s, n * sizeof( VAR_EDITS ) );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) ); /* pop modifiers */
+ stack_push( s, result );
+ break;
+ }
+
+ case INSTR_COMBINE_STRINGS:
+ {
+ size_t const buffer_size = code->arg * sizeof( expansion_item );
+ LIST * * const stack_pos = stack_get( s );
+ expansion_item * items = stack_allocate( s, buffer_size );
+ LIST * result;
+ int i;
+ for ( i = 0; i < code->arg; ++i )
+ items[ i ].values = stack_pos[ i ];
+ result = expand( items, code->arg );
+ stack_deallocate( s, buffer_size );
+ for ( i = 0; i < code->arg; ++i )
+ list_free( stack_pop( s ) );
+ stack_push( s, result );
+ break;
+ }
+
+ case INSTR_GET_GRIST:
+ {
+ LIST * vals = stack_pop( s );
+ LIST * result = L0;
+ LISTITER iter, end;
+
+ for ( iter = list_begin( vals ), end = list_end( vals ); iter != end; ++iter )
+ {
+ OBJECT * new_object;
+ const char * value = object_str( list_item( iter ) );
+ const char * p;
+ if ( value[ 0 ] == '<' && ( p = strchr( value, '>' ) ) )
+ {
+ if( p[ 1 ] )
+ new_object = object_new_range( value, p - value + 1 );
+ else
+ new_object = object_copy( list_item( iter ) );
+ }
+ else
+ {
+ new_object = object_copy( constant_empty );
+ }
+ result = list_push_back( result, new_object );
+ }
+
+ list_free( vals );
+ stack_push( s, result );
+ break;
+ }
+
+ case INSTR_INCLUDE:
+ {
+ LIST * nt = stack_pop( s );
+ if ( !list_empty( nt ) )
+ {
+ TARGET * const t = bindtarget( list_front( nt ) );
+ list_free( nt );
+
+ /* DWA 2001/10/22 - Perforce Jam cleared the arguments here,
+ * which prevented an included file from being treated as part
+ * of the body of a rule. I did not see any reason to do that,
+ * so I lifted the restriction.
+ */
+
+ /* Bind the include file under the influence of "on-target"
+ * variables. Though they are targets, include files are not
+ * built with make().
+ */
+
+ pushsettings( root_module(), t->settings );
+ /* We do not expect an included file to be generated by some
+ * action, so pass 0 as the third argument. If the name
+ * resolves to a directory, let it error out.
+ */
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( root_module(), t->settings );
+
+ parse_file( t->boundname, frame );
+ }
+ break;
+ }
+
+ /*
+ * Classes and modules
+ */
+
+ case INSTR_PUSH_MODULE:
+ {
+ LIST * const module_name = stack_pop( s );
+ module_t * const outer_module = frame->module;
+ frame->module = !list_empty( module_name )
+ ? bindmodule( list_front( module_name ) )
+ : root_module();
+ list_free( module_name );
+ *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) =
+ outer_module;
+ break;
+ }
+
+ case INSTR_POP_MODULE:
+ {
+ module_t * const outer_module = *(module_t * *)stack_get( s );
+ stack_deallocate( s, sizeof( module_t * ) );
+ frame->module = outer_module;
+ break;
+ }
+
+ case INSTR_CLASS:
+ {
+ LIST * bases = stack_pop( s );
+ LIST * name = stack_pop( s );
+ OBJECT * class_module = make_class_module( name, bases, frame );
+
+ module_t * const outer_module = frame->module;
+ frame->module = bindmodule( class_module );
+ object_free( class_module );
+
+ *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) =
+ outer_module;
+ break;
+ }
+
+ case INSTR_BIND_MODULE_VARIABLES:
+ module_bind_variables( frame->module );
+ break;
+
+ case INSTR_APPEND_STRINGS:
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ combine_strings( s, code->arg, buf );
+ stack_push( s, list_new( object_new( buf->value ) ) );
+ string_free( buf );
+ break;
+ }
+
+ case INSTR_WRITE_FILE:
+ {
+ string buf[ 1 ];
+ char const * out;
+ OBJECT * tmp_filename = 0;
+ int out_debug = DEBUG_EXEC ? 1 : 0;
+ FILE * out_file = 0;
+ string_new( buf );
+ combine_strings( s, code->arg, buf );
+ out = object_str( list_front( stack_top( s ) ) );
+
+ /* For stdout/stderr we will create a temp file and generate a
+ * command that outputs the content as needed.
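+ * E.g. for STDERR the pushed command ends up roughly as
+ * cat "<tmpfile>" 1>&2 (with "type" instead of "cat" on NT).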
+ */
+ if ( ( strcmp( "STDOUT", out ) == 0 ) ||
+ ( strcmp( "STDERR", out ) == 0 ) )
+ {
+ int err_redir = strcmp( "STDERR", out ) == 0;
+ string result[ 1 ];
+ tmp_filename = path_tmpfile();
+ string_new( result );
+ #ifdef OS_NT
+ string_append( result, "type \"" );
+ #else
+ string_append( result, "cat \"" );
+ #endif
+ string_append( result, object_str( tmp_filename ) );
+ string_push_back( result, '\"' );
+ if ( err_redir )
+ string_append( result, " 1>&2" );
+
+ /* Replace STDXXX with the temporary file. */
+ list_free( stack_pop( s ) );
+ stack_push( s, list_new( object_new( result->value ) ) );
+ out = object_str( tmp_filename );
+
+ string_free( result );
+
+ /* Make sure temp files created by this get nuked eventually. */
+ file_remove_atexit( tmp_filename );
+ }
+
+ if ( !globs.noexec )
+ {
+ string out_name[ 1 ];
+ /* Handle "path to file" filenames. */
+ if ( ( out[ 0 ] == '"' ) && ( out[ strlen( out ) - 1 ] == '"' )
+ )
+ {
+ string_copy( out_name, out + 1 );
+ string_truncate( out_name, out_name->size - 1 );
+ }
+ else
+ string_copy( out_name, out );
+ out_file = fopen( out_name->value, "w" );
+
+ if ( !out_file )
+ {
+ printf( "failed to write output file '%s'!\n",
+ out_name->value );
+ exit( EXITBAD );
+ }
+ string_free( out_name );
+ }
+
+ if ( out_debug ) printf( "\nfile %s\n", out );
+ if ( out_file ) fputs( buf->value, out_file );
+ if ( out_debug ) fputs( buf->value, stdout );
+ if ( out_file )
+ {
+ fflush( out_file );
+ fclose( out_file );
+ }
+ string_free( buf );
+ if ( tmp_filename )
+ object_free( tmp_filename );
+
+ if ( out_debug ) fputc( '\n', stdout );
+ break;
+ }
+
+ case INSTR_OUTPUT_STRINGS:
+ {
+ string * const buf = *(string * *)( (char *)stack_get( s ) + (
+ code->arg * sizeof( LIST * ) ) );
+ combine_strings( s, code->arg, buf );
+ break;
+ }
+
+ }
+ ++code;
+ }
+}
+
+
+#ifdef HAVE_PYTHON
+
+static struct arg_list * arg_list_compile_python( PyObject * bjam_signature,
+ int * num_arguments )
+{
+ if ( bjam_signature )
+ {
+ struct argument_list_compiler c[ 1 ];
+ struct arg_list * result;
+ Py_ssize_t s;
+ Py_ssize_t i;
+ argument_list_compiler_init( c );
+
+ s = PySequence_Size( bjam_signature );
+ for ( i = 0; i < s; ++i )
+ {
+ struct argument_compiler arg_comp[ 1 ];
+ struct arg_list arg;
+ PyObject * v = PySequence_GetItem( bjam_signature, i );
+ Py_ssize_t j;
+ Py_ssize_t inner;
+ argument_compiler_init( arg_comp );
+
+ inner = PySequence_Size( v );
+ for ( j = 0; j < inner; ++j )
+ argument_compiler_add( arg_comp, object_new( PyString_AsString(
+ PySequence_GetItem( v, j ) ) ), constant_builtin, -1 );
+
+ arg = arg_compile_impl( arg_comp, constant_builtin, -1 );
+ dynamic_array_push( c->args, arg );
+ argument_compiler_free( arg_comp );
+ Py_DECREF( v );
+ }
+
+ *num_arguments = c->args->size;
+ result = BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
+ memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list )
+ );
+ argument_list_compiler_free( c );
+ return result;
+ }
+ *num_arguments = 0;
+ return 0;
+}
+
+FUNCTION * function_python( PyObject * function, PyObject * bjam_signature )
+{
+ PYTHON_FUNCTION * result = BJAM_MALLOC( sizeof( PYTHON_FUNCTION ) );
+
+ result->base.type = FUNCTION_PYTHON;
+ result->base.reference_count = 1;
+ result->base.rulename = 0;
+ result->base.formal_arguments = arg_list_compile_python( bjam_signature,
+ &result->base.num_formal_arguments );
+ Py_INCREF( function );
+ result->python_function = function;
+
+ return (FUNCTION *)result;
+}
+
+
+static void argument_list_to_python( struct arg_list * formal, int formal_count,
+ FUNCTION * function, FRAME * frame, PyObject * kw )
+{
+ LOL * all_actual = frame->args;
+ int i;
+
+ for ( i = 0; i < formal_count; ++i )
+ {
+ LIST * actual = lol_get( all_actual, i );
+ LISTITER actual_iter = list_begin( actual );
+ LISTITER const actual_end = list_end( actual );
+ int j;
+ for ( j = 0; j < formal[ i ].size; ++j )
+ {
+ struct argument * formal_arg = &formal[ i ].args[ j ];
+ PyObject * value;
+ LIST * l;
+
+ switch ( formal_arg->flags )
+ {
+ case ARG_ONE:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ type_check_range( formal_arg->type_name, actual_iter, list_next(
+ actual_iter ), frame, function, formal_arg->arg_name );
+ value = PyString_FromString( object_str( list_item( actual_iter
+ ) ) );
+ actual_iter = list_next( actual_iter );
+ break;
+ case ARG_OPTIONAL:
+ if ( actual_iter == actual_end )
+ value = 0;
+ else
+ {
+ type_check_range( formal_arg->type_name, actual_iter,
+ list_next( actual_iter ), frame, function,
+ formal_arg->arg_name );
+ value = PyString_FromString( object_str( list_item(
+ actual_iter ) ) );
+ actual_iter = list_next( actual_iter );
+ }
+ break;
+ case ARG_PLUS:
+ if ( actual_iter == actual_end )
+ argument_error( "missing argument", function, frame,
+ formal_arg->arg_name );
+ /* fallthrough */
+ case ARG_STAR:
+ type_check_range( formal_arg->type_name, actual_iter,
+ actual_end, frame, function, formal_arg->arg_name );
+ l = list_copy_range( actual, actual_iter, actual_end );
+ value = list_to_python( l );
+ list_free( l );
+ actual_iter = actual_end;
+ break;
+ case ARG_VARIADIC:
+ return;
+ }
+
+ if ( value )
+ {
+ PyObject * key = PyString_FromString( object_str(
+ formal_arg->arg_name ) );
+ PyDict_SetItem( kw, key, value );
+ Py_DECREF( key );
+ Py_DECREF( value );
+ }
+ }
+
+ if ( actual_iter != actual_end )
+ argument_error( "extra argument", function, frame, list_item(
+ actual_iter ) );
+ }
+
+ for ( ; i < all_actual->count; ++i )
+ {
+ LIST * const actual = lol_get( all_actual, i );
+ if ( !list_empty( actual ) )
+ argument_error( "extra argument", function, frame, list_front(
+ actual ) );
+ }
+}
+
+
+/* Given a Python object, return a string to use in Jam code in place of that
+ * object.
+ *
+ * If the object is a string, use the string value.
+ * If the object implements the __jam_repr__ method, use its result.
+ * Otherwise return 0.
+ */
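+
+/* Sketch of the protocol described above (the class name and value are made
+ * up):
+ *
+ *     class Version:                  # old-style class, so PyInstance_Check
+ *         def __jam_repr__( self ):   # succeeds under Python 2
+ *             return "1.57.0"
+ *
+ * Instances of such a class show up in Jam lists as the string "1.57.0".
+ */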
+
+OBJECT * python_to_string( PyObject * value )
+{
+ if ( PyString_Check( value ) )
+ return object_new( PyString_AS_STRING( value ) );
+
+ /* See if this instance defines the special __jam_repr__ method. */
+ if ( PyInstance_Check( value )
+ && PyObject_HasAttrString( value, "__jam_repr__" ) )
+ {
+ PyObject * repr = PyObject_GetAttrString( value, "__jam_repr__" );
+ if ( repr )
+ {
+ PyObject * arguments2 = PyTuple_New( 0 );
+ PyObject * value2 = PyObject_Call( repr, arguments2, 0 );
+ Py_DECREF( repr );
+ Py_DECREF( arguments2 );
+ if ( PyString_Check( value2 ) )
+ return object_new( PyString_AS_STRING( value2 ) );
+ Py_DECREF( value2 );
+ }
+ }
+ return 0;
+}
+
+
+static module_t * python_module()
+{
+ static module_t * python = 0;
+ if ( !python )
+ python = bindmodule( constant_python );
+ return python;
+}
+
+
+static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame )
+{
+ LIST * result = 0;
+ PyObject * arguments = 0;
+ PyObject * kw = NULL;
+ int i;
+ PyObject * py_result;
+ FRAME * prev_frame_before_python_call;
+
+ if ( function->base.formal_arguments )
+ {
+ arguments = PyTuple_New( 0 );
+ kw = PyDict_New();
+ argument_list_to_python( function->base.formal_arguments,
+ function->base.num_formal_arguments, &function->base, frame, kw );
+ }
+ else
+ {
+ arguments = PyTuple_New( frame->args->count );
+ for ( i = 0; i < frame->args->count; ++i )
+ PyTuple_SetItem( arguments, i, list_to_python( lol_get( frame->args,
+ i ) ) );
+ }
+
+ frame->module = python_module();
+
+ prev_frame_before_python_call = frame_before_python_call;
+ frame_before_python_call = frame;
+ py_result = PyObject_Call( function->python_function, arguments, kw );
+ frame_before_python_call = prev_frame_before_python_call;
+ Py_DECREF( arguments );
+ Py_XDECREF( kw );
+ if ( py_result != NULL )
+ {
+ if ( PyList_Check( py_result ) )
+ {
+ int size = PyList_Size( py_result );
+ int i;
+ for ( i = 0; i < size; ++i )
+ {
+ OBJECT * s = python_to_string( PyList_GetItem( py_result, i ) );
+ if ( !s )
+ fprintf( stderr,
+ "Non-string object returned by Python call.\n" );
+ else
+ result = list_push_back( result, s );
+ }
+ }
+ else if ( py_result == Py_None )
+ {
+ result = L0;
+ }
+ else
+ {
+ OBJECT * const s = python_to_string( py_result );
+ if ( s )
+ result = list_new( s );
+ else
+ /* We have tried all we could. Return empty list. There are
+ * cases, e.g. feature.feature function that should return a
+ * value for the benefit of Python code and which also can be
+ * called by Jam code, where no sensible value can be returned.
+ * We cannot even emit a warning, since there would be a pile of
+ * them.
+ */
+ result = L0;
+ }
+
+ Py_DECREF( py_result );
+ }
+ else
+ {
+ PyErr_Print();
+ fprintf( stderr, "Call failed\n" );
+ }
+
+ return result;
+}
+
+#endif
+
+
+void function_done( void )
+{
+ BJAM_FREE( stack );
+}
diff --git a/tools/build/v2/engine/function.h b/tools/build/src/engine/function.h
index 64f26b3cf7..64f26b3cf7 100644
--- a/tools/build/v2/engine/function.h
+++ b/tools/build/src/engine/function.h
diff --git a/tools/build/v2/engine/glob.c b/tools/build/src/engine/glob.c
index bec00ee562..bec00ee562 100644
--- a/tools/build/v2/engine/glob.c
+++ b/tools/build/src/engine/glob.c
diff --git a/tools/build/src/engine/hash.c b/tools/build/src/engine/hash.c
new file mode 100644
index 0000000000..36f8366685
--- /dev/null
+++ b/tools/build/src/engine/hash.c
@@ -0,0 +1,387 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hash.c - simple in-memory hashing routines
+ *
+ * External routines:
+ * hashinit() - initialize a hash table, returning a handle
+ * hashitem() - find a record in the table, and optionally enter a new one
+ * hashdone() - free a hash table, given its handle
+ *
+ * Internal routines:
+ * hashrehash() - resize and rebuild hp->tab, the hash table
+ */
+
+#include "jam.h"
+#include "hash.h"
+
+#include "compile.h"
+
+#include <assert.h>
+
+/* */
+#define HASH_DEBUG_PROFILE 1
+/* */
+
+/* Header attached to all hash table data items. */
+
+typedef struct item ITEM;
+struct item
+{
+ ITEM * next;
+};
+
+#define MAX_LISTS 32
+
+struct hash
+{
+ /*
+ * the hash table, just an array of item pointers
+ */
+ struct
+ {
+ int nel;
+ ITEM * * base;
+ } tab;
+
+ int bloat; /* tab.nel / items.nel */
+ int inel; /* initial number of elements */
+
+ /*
+ * the array of records, maintained by these routines - essentially a
+ * microallocator
+ */
+ struct
+ {
+ int more; /* how many more ITEMs fit in lists[ list ] */
+ ITEM * free; /* free list of items */
+ char * next; /* where to put more ITEMs in lists[ list ] */
+ int size; /* sizeof( ITEM ) + aligned datalen */
+ int nel; /* total ITEMs held by all lists[] */
+ int list; /* index into lists[] */
+
+ struct
+ {
+ int nel; /* total ITEMs held by this list */
+ char * base; /* base of ITEMs array */
+ } lists[ MAX_LISTS ];
+ } items;
+
+ char const * name; /* just for hashstats() */
+};
+
+static void hashrehash( struct hash * );
+static void hashstat( struct hash * );
+
+static unsigned int hash_keyval( OBJECT * key )
+{
+ return object_hash( key );
+}
+
+#define hash_bucket(hp, keyval) ((hp)->tab.base + ((keyval) % (hp)->tab.nel))
+
+#define hash_data_key(data) (*(OBJECT * *)(data))
+#define hash_item_data(item) ((HASHDATA *)((char *)item + sizeof(ITEM)))
+#define hash_item_key(item) (hash_data_key(hash_item_data(item)))
+
+
+#define ALIGNED(x) ((x + sizeof(ITEM) - 1) & ~(sizeof(ITEM) - 1))
+
+/*
+ * hashinit() - initialize a hash table, returning a handle
+ */
+
+struct hash * hashinit( int datalen, char const * name )
+{
+ struct hash * hp = (struct hash *)BJAM_MALLOC( sizeof( *hp ) );
+
+ hp->bloat = 3;
+ hp->tab.nel = 0;
+ hp->tab.base = 0;
+ hp->items.more = 0;
+ hp->items.free = 0;
+ hp->items.size = sizeof( ITEM ) + ALIGNED( datalen );
+ hp->items.list = -1;
+ hp->items.nel = 0;
+ hp->inel = 11; /* 47 */
+ hp->name = name;
+
+ return hp;
+}
+
+
+/*
+ * hash_search() - Find the hash item for the given data.
+ *
+ * Returns a pointer to a hashed item with the given key. If given a 'previous'
+ * pointer, makes it point to the item prior to the found item in the same
+ * bucket or to 0 if our item is the first item in its bucket.
+ */
+
+static ITEM * hash_search( struct hash * hp, unsigned int keyval,
+ OBJECT * keydata, ITEM * * previous )
+{
+ ITEM * i = *hash_bucket( hp, keyval );
+ ITEM * p = 0;
+ for ( ; i; i = i->next )
+ {
+ if ( object_equal( hash_item_key( i ), keydata ) )
+ {
+ if ( previous )
+ *previous = p;
+ return i;
+ }
+ p = i;
+ }
+ return 0;
+}
+
+
+/*
+ * hash_insert() - insert a record in the table or return the existing one
+ */
+
+HASHDATA * hash_insert( struct hash * hp, OBJECT * key, int * found )
+{
+ ITEM * i;
+ unsigned int keyval = hash_keyval( key );
+
+ #ifdef HASH_DEBUG_PROFILE
+ profile_frame prof[ 1 ];
+ if ( DEBUG_PROFILE )
+ profile_enter( 0, prof );
+ #endif
+
+ if ( !hp->items.more )
+ hashrehash( hp );
+
+ i = hash_search( hp, keyval, key, 0 );
+ if ( i )
+ *found = 1;
+ else
+ {
+ ITEM * * base = hash_bucket( hp, keyval );
+
+ /* Try to grab one from the free list. */
+ if ( hp->items.free )
+ {
+ i = hp->items.free;
+ hp->items.free = i->next;
+ assert( !hash_item_key( i ) );
+ }
+ else
+ {
+ i = (ITEM *)hp->items.next;
+ hp->items.next += hp->items.size;
+ }
+ --hp->items.more;
+ i->next = *base;
+ *base = i;
+ *found = 0;
+ }
+
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+
+ return hash_item_data( i );
+}
+
+
+/*
+ * hash_find() - find a record in the table or NULL if none exists
+ */
+
+HASHDATA * hash_find( struct hash * hp, OBJECT * key )
+{
+ ITEM * i;
+ unsigned int keyval = hash_keyval( key );
+
+ #ifdef HASH_DEBUG_PROFILE
+ profile_frame prof[ 1 ];
+ if ( DEBUG_PROFILE )
+ profile_enter( 0, prof );
+ #endif
+
+ if ( !hp->items.nel )
+ {
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+ return 0;
+ }
+
+ i = hash_search( hp, keyval, key, 0 );
+
+ #ifdef HASH_DEBUG_PROFILE
+ if ( DEBUG_PROFILE )
+ profile_exit( prof );
+ #endif
+
+ return i ? hash_item_data( i ) : 0;
+}
+
+
+/*
+ * hashrehash() - resize and rebuild hp->tab, the hash table
+ */
+
+static void hashrehash( struct hash * hp )
+{
+ int i = ++hp->items.list;
+ hp->items.more = i ? 2 * hp->items.nel : hp->inel;
+ hp->items.next = (char *)BJAM_MALLOC( hp->items.more * hp->items.size );
+ hp->items.free = 0;
+
+ hp->items.lists[ i ].nel = hp->items.more;
+ hp->items.lists[ i ].base = hp->items.next;
+ hp->items.nel += hp->items.more;
+
+ if ( hp->tab.base )
+ BJAM_FREE( (char *)hp->tab.base );
+
+ hp->tab.nel = hp->items.nel * hp->bloat;
+ hp->tab.base = (ITEM * *)BJAM_MALLOC( hp->tab.nel * sizeof( ITEM * * ) );
+
+ memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) );
+
+ for ( i = 0; i < hp->items.list; ++i )
+ {
+ int nel = hp->items.lists[ i ].nel;
+ char * next = hp->items.lists[ i ].base;
+
+ for ( ; nel--; next += hp->items.size )
+ {
+ ITEM * i = (ITEM *)next;
+ ITEM * * ip = hp->tab.base + object_hash( hash_item_key( i ) ) %
+ hp->tab.nel;
+ /* code currently assumes rehashing only when there are no free
+ * items
+ */
+ assert( hash_item_key( i ) );
+
+ i->next = *ip;
+ *ip = i;
+ }
+ }
+}
+
+
+void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data
+ )
+{
+ int i;
+ for ( i = 0; i <= hp->items.list; ++i )
+ {
+ char * next = hp->items.lists[ i ].base;
+ int nel = hp->items.lists[ i ].nel;
+ if ( i == hp->items.list )
+ nel -= hp->items.more;
+
+ for ( ; nel--; next += hp->items.size )
+ {
+ ITEM * const i = (ITEM *)next;
+ if ( hash_item_key( i ) != 0 ) /* Do not enumerate freed items. */
+ f( hash_item_data( i ), data );
+ }
+ }
+}
+
+
+/*
+ * hash_free() - free a hash table, given its handle
+ */
+
+void hash_free( struct hash * hp )
+{
+ int i;
+ if ( !hp )
+ return;
+ if ( hp->tab.base )
+ BJAM_FREE( (char *)hp->tab.base );
+ for ( i = 0; i <= hp->items.list; ++i )
+ BJAM_FREE( hp->items.lists[ i ].base );
+ BJAM_FREE( (char *)hp );
+}
+
+
+static void hashstat( struct hash * hp )
+{
+ struct hashstats stats[ 1 ];
+ hashstats_init( stats );
+ hashstats_add( stats, hp );
+ hashstats_print( stats, hp->name );
+}
+
+
+void hashstats_init( struct hashstats * stats )
+{
+ stats->count = 0;
+ stats->num_items = 0;
+ stats->tab_size = 0;
+ stats->item_size = 0;
+ stats->sets = 0;
+ stats->num_hashes = 0;
+}
+
+
+void hashstats_add( struct hashstats * stats, struct hash * hp )
+{
+ if ( hp )
+ {
+ ITEM * * tab = hp->tab.base;
+ int nel = hp->tab.nel;
+ int count = 0;
+ int sets = 0;
+ int i;
+
+ for ( i = 0; i < nel; ++i )
+ {
+ ITEM * item;
+ int here = 0;
+ for ( item = tab[ i ]; item; item = item->next )
+ ++here;
+
+ count += here;
+ if ( here > 0 )
+ ++sets;
+ }
+
+ stats->count += count;
+ stats->sets += sets;
+ stats->num_items += hp->items.nel;
+ stats->tab_size += hp->tab.nel;
+ stats->item_size = hp->items.size;
+ ++stats->num_hashes;
+ }
+}
+
+
+void hashstats_print( struct hashstats * stats, char const * name )
+{
+ printf( "%s table: %d+%d+%d (%dK+%luK+%luK) items+table+hash, %f density\n",
+ name,
+ stats->count,
+ stats->num_items,
+ stats->tab_size,
+ stats->num_items * stats->item_size / 1024,
+ (long unsigned)stats->tab_size * sizeof( ITEM * * ) / 1024,
+ (long unsigned)stats->num_hashes * sizeof( struct hash ) / 1024,
+ (float)stats->count / (float)stats->sets );
+}
+
+
+void hashdone( struct hash * hp )
+{
+ if ( !hp )
+ return;
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ hashstat( hp );
+ hash_free( hp );
+}
diff --git a/tools/build/src/engine/hash.h b/tools/build/src/engine/hash.h
new file mode 100644
index 0000000000..7c40e8c4a1
--- /dev/null
+++ b/tools/build/src/engine/hash.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hash.h - simple in-memory hashing routines
+ */
+
+#ifndef BOOST_JAM_HASH_H
+#define BOOST_JAM_HASH_H
+
+#include "object.h"
+
+/*
+ * An opaque struct representing an item in the hash table. The first element of
+ * every struct stored in the table must be an OBJECT * which is treated as the
+ * key.
+ */
+typedef struct hashdata HASHDATA;
+
+/*
+ * hashinit() - initialize a hash table, returning a handle.
+ *
+ * Parameters:
+ * datalen - item size
+ * name - used for debugging
+ */
+struct hash * hashinit( int datalen, char const * name );
+
+/*
+ * hash_free() - free a hash table, given its handle
+ */
+void hash_free( struct hash * );
+void hashdone( struct hash * );
+
+/*
+ * hashenumerate() - call f(i, data) on each item, i in the hash table. The
+ * enumeration order is unspecified.
+ */
+void hashenumerate( struct hash *, void (* f)( void *, void * ), void * data );
+
+/*
+ * hash_insert() - insert a new item in a hash table, or return an existing one.
+ *
+ * Preconditions:
+ * - hp must be a hash table created by hashinit()
+ * - key must be an object created by object_new()
+ *
+ * Postconditions:
+ * - if the key does not already exist in the hash table, *found == 0 and the
+ * result will be a pointer to an uninitialized item. The key of the new
+ * item must be set to a value equal to key before any further operations on
+ * the hash table except hashdone().
+ * - if the key is present then *found == 1 and the result is a pointer to the
+ * existing record.
+ */
+HASHDATA * hash_insert( struct hash *, OBJECT * key, int * found );
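+
+/* Typical usage sketch (MYDATA and the variable names are illustrative only;
+ * the OBJECT * key must be the first member, as required above):
+ *
+ *     typedef struct { OBJECT * name; int value; } MYDATA;
+ *     struct hash * tab = hashinit( sizeof( MYDATA ), "mydata" );
+ *     int found;
+ *     MYDATA * d = (MYDATA *)hash_insert( tab, key, &found );
+ *     if ( !found )
+ *     {
+ *         d->name = object_copy( key );
+ *         d->value = 0;
+ *     }
+ */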
+
+/*
+ * hash_find() - find a record in the table or NULL if none exists
+ */
+HASHDATA * hash_find( struct hash *, OBJECT * key );
+
+struct hashstats {
+ int count;
+ int num_items;
+ int tab_size;
+ int item_size;
+ int sets;
+ int num_hashes;
+};
+
+void hashstats_init( struct hashstats * stats );
+void hashstats_add( struct hashstats * stats, struct hash * );
+void hashstats_print( struct hashstats * stats, char const * name );
+
+#endif
diff --git a/tools/build/src/engine/hcache.c b/tools/build/src/engine/hcache.c
new file mode 100644
index 0000000000..3cf15f7766
--- /dev/null
+++ b/tools/build/src/engine/hcache.c
@@ -0,0 +1,519 @@
+/*
+ * This file has been donated to Jam.
+ */
+
+/*
+ * Craig W. McPheeters, Alias|Wavefront.
+ *
+ * hcache.c hcache.h - handle caching of #includes in source files.
+ *
+ * Create a cache of files scanned for headers. When starting jam, look for the
+ * cache file and load it if present. When finished the binding phase, create a
+ * new header cache. The cache contains files, their timestamps and the header
+ * files found in their scan. During the binding phase of jam, look in the
+ * header cache first for the headers contained in a file. If the cache is
+ * present and valid, use its contents. This results in dramatic speedups with
+ * large projects (e.g. 3min -> 1min startup for one project.)
+ *
+ * External routines:
+ * hcache_init() - read and parse the local .jamdeps file.
+ * hcache_done() - write a new .jamdeps file.
+ * hcache() - return list of headers on target. Use cache or do a scan.
+ *
+ * The dependency cache is an ASCII file with one line per target. Each line
+ * has the following fields:
+ * @boundname@ timestamp_sec timestamp_nsec @file@ @file@ @file@ ...
+ */
+
+#ifdef OPT_HEADER_CACHE_EXT
+
+#include "jam.h"
+#include "hcache.h"
+
+#include "hash.h"
+#include "headers.h"
+#include "lists.h"
+#include "modules.h"
+#include "object.h"
+#include "parse.h"
+#include "regexp.h"
+#include "rules.h"
+#include "search.h"
+#include "timestamp.h"
+#include "variable.h"
+
+typedef struct hcachedata HCACHEDATA ;
+
+struct hcachedata
+{
+ OBJECT * boundname;
+ timestamp time;
+ LIST * includes;
+ LIST * hdrscan; /* the HDRSCAN value for this target */
+ int age; /* if too old, we will remove it from cache */
+ HCACHEDATA * next;
+};
+
+
+static struct hash * hcachehash = 0;
+static HCACHEDATA * hcachelist = 0;
+
+static int queries = 0;
+static int hits = 0;
+
+#define CACHE_FILE_VERSION "version 5"
+#define CACHE_RECORD_HEADER "header"
+#define CACHE_RECORD_END "end"
+
+
+/*
+ * Return the name of the header cache file. May return NULL.
+ *
+ * The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache
+ * the result so the user can not change the cache file during header scanning.
+ */
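+
+/* For example (illustrative values), a Jamfile might enable the cache with:
+ *
+ *     HCACHEFILE = .jamdeps ;
+ *     HCACHEMAXAGE = 100 ;
+ *
+ * HCACHEMAXAGE is read by cache_maxage() below; 100 is its default.
+ */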
+
+static const char * cache_name( void )
+{
+ static OBJECT * name = 0;
+ if ( !name )
+ {
+ LIST * const hcachevar = var_get( root_module(), constant_HCACHEFILE );
+
+ if ( !list_empty( hcachevar ) )
+ {
+ TARGET * const t = bindtarget( list_front( hcachevar ) );
+
+ pushsettings( root_module(), t->settings );
+ /* Do not expect the cache file to be generated, so pass 0 as the
+ * third argument to search. Expect the location to be specified via
+ * LOCATE, so pass 0 as the fourth argument.
+ */
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, 0, 0 );
+ popsettings( root_module(), t->settings );
+
+ name = object_copy( t->boundname );
+ }
+ }
+ return name ? object_str( name ) : 0;
+}
+
+
+/*
+ * Return the maximum age a cache entry can have before it is purged from the
+ * cache.
+ */
+
+static int cache_maxage( void )
+{
+ int age = 100;
+ LIST * const var = var_get( root_module(), constant_HCACHEMAXAGE );
+ if ( !list_empty( var ) )
+ {
+ age = atoi( object_str( list_front( var ) ) );
+ if ( age < 0 )
+ age = 0;
+ }
+ return age;
+}
+
+
+/*
+ * Read a netstring. The caveat is that the string can not contain ASCII 0. The
+ * returned value is as returned by object_new().
+ */
+
+OBJECT * read_netstring( FILE * f )
+{
+ unsigned long len;
+ static char * buf = NULL;
+ static unsigned long buf_len = 0;
+
+ if ( fscanf( f, " %9lu", &len ) != 1 )
+ return NULL;
+ if ( fgetc( f ) != (int)'\t' )
+ return NULL;
+
+ if ( len > 1024 * 64 )
+ return NULL; /* sanity check */
+
+ if ( len > buf_len )
+ {
+ unsigned long new_len = buf_len * 2;
+ if ( new_len < len )
+ new_len = len;
+ buf = (char *)BJAM_REALLOC( buf, new_len + 1 );
+ if ( buf )
+ buf_len = new_len;
+ }
+
+ if ( !buf )
+ return NULL;
+
+ if ( fread( buf, 1, len, f ) != len )
+ return NULL;
+ if ( fgetc( f ) != (int)'\n' )
+ return NULL;
+
+ buf[ len ] = 0;
+ return object_new( buf );
+}
+
+
+/*
+ * Write a netstring.
+ */
+
+void write_netstring( FILE * f, char const * s )
+{
+ if ( !s )
+ s = "";
+ fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
+}
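+
+/* Wire-format sketch: write_netstring( f, "version 5" ) emits the bytes
+ *
+ *     9\tversion 5\n
+ *
+ * i.e. the decimal length, a tab, the payload and a trailing newline, which is
+ * exactly what read_netstring() above parses back.
+ */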
+
+
+void hcache_init()
+{
+ FILE * f;
+ OBJECT * version = 0;
+ int header_count = 0;
+ const char * hcachename;
+
+ if ( hcachehash )
+ return;
+
+ hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" );
+
+ if ( !( hcachename = cache_name() ) )
+ return;
+
+ if ( !( f = fopen( hcachename, "rb" ) ) )
+ return;
+
+ version = read_netstring( f );
+
+ if ( !version || strcmp( object_str( version ), CACHE_FILE_VERSION ) )
+ goto bail;
+
+ while ( 1 )
+ {
+ HCACHEDATA cachedata;
+ HCACHEDATA * c;
+ OBJECT * record_type = 0;
+ OBJECT * time_secs_str = 0;
+ OBJECT * time_nsecs_str = 0;
+ OBJECT * age_str = 0;
+ OBJECT * includes_count_str = 0;
+ OBJECT * hdrscan_count_str = 0;
+ int i;
+ int count;
+ LIST * l;
+ int found;
+
+ cachedata.boundname = 0;
+ cachedata.includes = 0;
+ cachedata.hdrscan = 0;
+
+ record_type = read_netstring( f );
+ if ( !record_type )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto cleanup;
+ }
+ if ( !strcmp( object_str( record_type ), CACHE_RECORD_END ) )
+ {
+ object_free( record_type );
+ break;
+ }
+ if ( strcmp( object_str( record_type ), CACHE_RECORD_HEADER ) )
+ {
+ fprintf( stderr, "invalid %s with record separator <%s>\n",
+ hcachename, record_type ? object_str( record_type ) : "<null>" );
+ goto cleanup;
+ }
+
+ cachedata.boundname = read_netstring( f );
+ time_secs_str = read_netstring( f );
+ time_nsecs_str = read_netstring( f );
+ age_str = read_netstring( f );
+ includes_count_str = read_netstring( f );
+
+ if ( !cachedata.boundname || !time_secs_str || !time_nsecs_str ||
+ !age_str || !includes_count_str )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto cleanup;
+ }
+
+ timestamp_init( &cachedata.time, atoi( object_str( time_secs_str ) ),
+ atoi( object_str( time_nsecs_str ) ) );
+ cachedata.age = atoi( object_str( age_str ) ) + 1;
+
+ count = atoi( object_str( includes_count_str ) );
+ for ( l = L0, i = 0; i < count; ++i )
+ {
+ OBJECT * const s = read_netstring( f );
+ if ( !s )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ list_free( l );
+ goto cleanup;
+ }
+ l = list_push_back( l, s );
+ }
+ cachedata.includes = l;
+
+ hdrscan_count_str = read_netstring( f );
+ if ( !hdrscan_count_str )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ goto cleanup;
+ }
+
+ count = atoi( object_str( hdrscan_count_str ) );
+ for ( l = L0, i = 0; i < count; ++i )
+ {
+ OBJECT * const s = read_netstring( f );
+ if ( !s )
+ {
+ fprintf( stderr, "invalid %s\n", hcachename );
+ list_free( l );
+ goto cleanup;
+ }
+ l = list_push_back( l, s );
+ }
+ cachedata.hdrscan = l;
+
+ c = (HCACHEDATA *)hash_insert( hcachehash, cachedata.boundname, &found )
+ ;
+ if ( !found )
+ {
+ c->boundname = cachedata.boundname;
+ c->includes = cachedata.includes;
+ c->hdrscan = cachedata.hdrscan;
+ c->age = cachedata.age;
+ timestamp_copy( &c->time, &cachedata.time );
+ }
+ else
+ {
+ fprintf( stderr, "can not insert header cache item, bailing on %s"
+ "\n", hcachename );
+ goto cleanup;
+ }
+
+ c->next = hcachelist;
+ hcachelist = c;
+
+ ++header_count;
+
+ object_free( record_type );
+ object_free( time_secs_str );
+ object_free( time_nsecs_str );
+ object_free( age_str );
+ object_free( includes_count_str );
+ object_free( hdrscan_count_str );
+ continue;
+
+cleanup:
+
+ if ( record_type ) object_free( record_type );
+ if ( time_secs_str ) object_free( time_secs_str );
+ if ( time_nsecs_str ) object_free( time_nsecs_str );
+ if ( age_str ) object_free( age_str );
+ if ( includes_count_str ) object_free( includes_count_str );
+ if ( hdrscan_count_str ) object_free( hdrscan_count_str );
+
+ if ( cachedata.boundname ) object_free( cachedata.boundname );
+ if ( cachedata.includes ) list_free( cachedata.includes );
+ if ( cachedata.hdrscan ) list_free( cachedata.hdrscan );
+
+ goto bail;
+ }
+
+ if ( DEBUG_HEADER )
+ printf( "hcache read from file %s\n", hcachename );
+
+bail:
+ if ( version )
+ object_free( version );
+ fclose( f );
+}
+
+
+void hcache_done()
+{
+ FILE * f;
+ HCACHEDATA * c;
+ int header_count = 0;
+ const char * hcachename;
+ int maxage;
+
+ if ( !hcachehash )
+ return;
+
+ if ( !( hcachename = cache_name() ) )
+ goto cleanup;
+
+ if ( !( f = fopen( hcachename, "wb" ) ) )
+ goto cleanup;
+
+ maxage = cache_maxage();
+
+ /* Print out the version. */
+ write_netstring( f, CACHE_FILE_VERSION );
+
+ c = hcachelist;
+ for ( c = hcachelist; c; c = c->next )
+ {
+ LISTITER iter;
+ LISTITER end;
+ char time_secs_str[ 30 ];
+ char time_nsecs_str[ 30 ];
+ char age_str[ 30 ];
+ char includes_count_str[ 30 ];
+ char hdrscan_count_str[ 30 ];
+
+ if ( maxage == 0 )
+ c->age = 0;
+ else if ( c->age > maxage )
+ continue;
+
+ sprintf( includes_count_str, "%lu", (long unsigned)list_length(
+ c->includes ) );
+ sprintf( hdrscan_count_str, "%lu", (long unsigned)list_length(
+ c->hdrscan ) );
+ sprintf( time_secs_str, "%lu", (long unsigned)c->time.secs );
+ sprintf( time_nsecs_str, "%lu", (long unsigned)c->time.nsecs );
+ sprintf( age_str, "%lu", (long unsigned)c->age );
+
+ write_netstring( f, CACHE_RECORD_HEADER );
+ write_netstring( f, object_str( c->boundname ) );
+ write_netstring( f, time_secs_str );
+ write_netstring( f, time_nsecs_str );
+ write_netstring( f, age_str );
+ write_netstring( f, includes_count_str );
+ for ( iter = list_begin( c->includes ), end = list_end( c->includes );
+ iter != end; iter = list_next( iter ) )
+ write_netstring( f, object_str( list_item( iter ) ) );
+ write_netstring( f, hdrscan_count_str );
+ for ( iter = list_begin( c->hdrscan ), end = list_end( c->hdrscan );
+ iter != end; iter = list_next( iter ) )
+ write_netstring( f, object_str( list_item( iter ) ) );
+ fputs( "\n", f );
+ ++header_count;
+ }
+ write_netstring( f, CACHE_RECORD_END );
+
+ if ( DEBUG_HEADER )
+ printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n",
+ hcachename, header_count, queries ? 100.0 * hits / queries : 0 );
+
+ fclose ( f );
+
+cleanup:
+ for ( c = hcachelist; c; c = c->next )
+ {
+ list_free( c->includes );
+ list_free( c->hdrscan );
+ object_free( c->boundname );
+ }
+
+ hcachelist = 0;
+ if ( hcachehash )
+ hashdone( hcachehash );
+ hcachehash = 0;
+}
+
+
+LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan )
+{
+ HCACHEDATA * c;
+
+ ++queries;
+
+ if ( ( c = (HCACHEDATA *)hash_find( hcachehash, t->boundname ) ) )
+ {
+ if ( !timestamp_cmp( &c->time, &t->time ) )
+ {
+ LIST * const l1 = hdrscan;
+ LIST * const l2 = c->hdrscan;
+ LISTITER iter1 = list_begin( l1 );
+ LISTITER const end1 = list_end( l1 );
+ LISTITER iter2 = list_begin( l2 );
+ LISTITER const end2 = list_end( l2 );
+ while ( iter1 != end1 && iter2 != end2 )
+ {
+ if ( !object_equal( list_item( iter1 ), list_item( iter2 ) ) )
+ iter1 = end1;
+ else
+ {
+ iter1 = list_next( iter1 );
+ iter2 = list_next( iter2 );
+ }
+ }
+ if ( iter1 != end1 || iter2 != end2 )
+ {
+ if ( DEBUG_HEADER )
+ {
+ printf( "HDRSCAN out of date in cache for %s\n",
+ object_str( t->boundname ) );
+ printf(" real : ");
+ list_print( hdrscan );
+ printf( "\n cached: " );
+ list_print( c->hdrscan );
+ printf( "\n" );
+ }
+
+ list_free( c->includes );
+ list_free( c->hdrscan );
+ c->includes = L0;
+ c->hdrscan = L0;
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ printf( "using header cache for %s\n", object_str(
+ t->boundname ) );
+ c->age = 0;
+ ++hits;
+ return list_copy( c->includes );
+ }
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ printf ("header cache out of date for %s\n", object_str(
+ t->boundname ) );
+ list_free( c->includes );
+ list_free( c->hdrscan );
+ c->includes = L0;
+ c->hdrscan = L0;
+ }
+ }
+ else
+ {
+ int found;
+ c = (HCACHEDATA *)hash_insert( hcachehash, t->boundname, &found );
+ if ( !found )
+ {
+ c->boundname = object_copy( t->boundname );
+ c->next = hcachelist;
+ hcachelist = c;
+ }
+ }
+
+ /* 'c' points at the cache entry. It is out of date. */
+ {
+ LIST * const l = headers1( L0, t->boundname, rec, re );
+
+ timestamp_copy( &c->time, &t->time );
+ c->age = 0;
+ c->includes = list_copy( l );
+ c->hdrscan = list_copy( hdrscan );
+
+ return l;
+ }
+}
+
+#endif /* OPT_HEADER_CACHE_EXT */
diff --git a/tools/build/src/engine/hcache.h b/tools/build/src/engine/hcache.h
new file mode 100644
index 0000000000..a9d929d517
--- /dev/null
+++ b/tools/build/src/engine/hcache.h
@@ -0,0 +1,19 @@
+/*
+ * This file is not part of Jam
+ */
+
+/*
+ * hcache.h - handle #includes in source files
+ */
+#ifndef HCACHE_H
+#define HCACHE_H
+
+#include "lists.h"
+#include "regexp.h"
+#include "rules.h"
+
+void hcache_init( void );
+void hcache_done( void );
+LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan );
+
+#endif
diff --git a/tools/build/src/engine/hdrmacro.c b/tools/build/src/engine/hdrmacro.c
new file mode 100644
index 0000000000..eb4fe90f4b
--- /dev/null
+++ b/tools/build/src/engine/hdrmacro.c
@@ -0,0 +1,139 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * hdrmacro.c - handle header files that define macros used in #include
+ * statements.
+ *
+ * we look for lines like "#define MACRO <....>" or '#define MACRO " "' in
+ * the target file. When found, we then phony up a rule invocation like:
+ *
+ * $(HDRRULE) <target> : <resolved included files> ;
+ *
+ * External routines:
+ * macro_headers() - scan a target for "#define MACRO <filename>" lines and
+ * record the macro -> filename mappings found
+ * macro_header_get() - return the filename recorded for a given macro name,
+ * if any
+ */
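+
+/* Illustrative input (the names are made up): a header containing
+ *
+ *     #define MYPROJECT_CONFIG <myproject/config.h>
+ *
+ * lets a later "#include MYPROJECT_CONFIG" in some source file be resolved to
+ * myproject/config.h via macro_header_get().
+ */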
+
+#include "jam.h"
+#include "hdrmacro.h"
+
+#include "compile.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "strings.h"
+#include "subst.h"
+#include "variable.h"
+
+
+/* this type is used to store a dictionary of file header macros */
+typedef struct header_macro
+{
+ OBJECT * symbol;
+ OBJECT * filename; /* we could maybe use a LIST here ?? */
+} HEADER_MACRO;
+
+static struct hash * header_macros_hash = 0;
+
+
+/*
+ * headers() - scan a target for include files and call HDRRULE
+ */
+
+#define MAXINC 10
+
+void macro_headers( TARGET * t )
+{
+ static regexp * re = 0;
+ FILE * f;
+ char buf[ 1024 ];
+
+ if ( DEBUG_HEADER )
+ printf( "macro header scan for %s\n", object_str( t->name ) );
+
+ /* This regexp is used to detect lines of the form
+ * "#define MACRO <....>" or "#define MACRO "....."
+ * in the header macro files.
+ */
+ if ( !re )
+ {
+ OBJECT * const re_str = object_new(
+ "^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*"
+ "[<\"]([^\">]*)[\">].*$" );
+ re = regex_compile( re_str );
+ object_free( re_str );
+ }
+
+ if ( !( f = fopen( object_str( t->boundname ), "r" ) ) )
+ return;
+
+ while ( fgets( buf, sizeof( buf ), f ) )
+ {
+ HEADER_MACRO var;
+ HEADER_MACRO * v = &var;
+
+ if ( regexec( re, buf ) && re->startp[ 1 ] )
+ {
+ OBJECT * symbol;
+ int found;
+ /* we detected a line that looks like "#define MACRO filename" */
+ ( (char *)re->endp[ 1 ] )[ 0 ] = '\0';
+ ( (char *)re->endp[ 2 ] )[ 0 ] = '\0';
+
+ if ( DEBUG_HEADER )
+ printf( "macro '%s' used to define filename '%s' in '%s'\n",
+ re->startp[ 1 ], re->startp[ 2 ], object_str( t->boundname )
+ );
+
+ /* add macro definition to hash table */
+ if ( !header_macros_hash )
+ header_macros_hash = hashinit( sizeof( HEADER_MACRO ),
+ "hdrmacros" );
+
+ symbol = object_new( re->startp[ 1 ] );
+ v = (HEADER_MACRO *)hash_insert( header_macros_hash, symbol, &found
+ );
+ if ( !found )
+ {
+ v->symbol = symbol;
+ v->filename = object_new( re->startp[ 2 ] ); /* never freed */
+ }
+ else
+ object_free( symbol );
+ /* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */
+ /* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */
+ }
+ }
+
+ fclose( f );
+}
+
+
+OBJECT * macro_header_get( OBJECT * macro_name )
+{
+ HEADER_MACRO * v;
+ if ( header_macros_hash && ( v = (HEADER_MACRO *)hash_find(
+ header_macros_hash, macro_name ) ) )
+ {
+ if ( DEBUG_HEADER )
+ printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name
+ ), object_str( v->filename ) );
+ return v->filename;
+ }
+ return 0;
+}
diff --git a/tools/build/src/engine/hdrmacro.h b/tools/build/src/engine/hdrmacro.h
new file mode 100644
index 0000000000..7595ede26a
--- /dev/null
+++ b/tools/build/src/engine/hdrmacro.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * hdrmacro.h - parses header files for #define MACRO <filename> or
+ * #define MACRO "filename" definitions
+ */
+
+#ifndef HDRMACRO_SW20111118_H
+#define HDRMACRO_SW20111118_H
+
+#include "object.h"
+#include "rules.h"
+
+void macro_headers( TARGET * );
+OBJECT * macro_header_get( OBJECT * macro_name );
+
+#endif
diff --git a/tools/build/src/engine/headers.c b/tools/build/src/engine/headers.c
new file mode 100644
index 0000000000..0d9558d5da
--- /dev/null
+++ b/tools/build/src/engine/headers.c
@@ -0,0 +1,197 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * headers.c - handle #includes in source files
+ *
+ * Using regular expressions provided as the variable $(HDRSCAN), headers()
+ * searches a file for #include files and phonies up a rule invocation:
+ * $(HDRRULE) <target> : <include files> ;
+ *
+ * External routines:
+ * headers() - scan a target for include files and call HDRRULE
+ *
+ * Internal routines:
+ * headers1() - using regexp, scan a file and build include LIST
+ */
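+
+/* Usage sketch (the rule name and the simplified pattern are illustrative): a
+ * Jamfile would typically set, for some target $(t),
+ *
+ *     HDRSCAN on $(t) = "^#[ ]*include[ ]*<([^>]*)>" ;
+ *     HDRRULE on $(t) = my-include-rule ;
+ *
+ * so that headers() below scans $(t) with the regexp and invokes
+ * my-include-rule on whatever include files it finds.
+ */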
+
+#include "jam.h"
+#include "headers.h"
+
+#include "compile.h"
+#include "hdrmacro.h"
+#include "lists.h"
+#include "modules.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "subst.h"
+#include "variable.h"
+
+#ifdef OPT_HEADER_CACHE_EXT
+# include "hcache.h"
+#endif
+
+#ifndef OPT_HEADER_CACHE_EXT
+static LIST * headers1( LIST *, OBJECT * file, int rec, regexp * re[] );
+#endif
+
+
+/*
+ * headers() - scan a target for include files and call HDRRULE
+ */
+
+#define MAXINC 10
+
+void headers( TARGET * t )
+{
+ LIST * hdrscan;
+ LIST * hdrrule;
+ #ifndef OPT_HEADER_CACHE_EXT
+ LIST * headlist = L0;
+ #endif
+ regexp * re[ MAXINC ];
+ int rec = 0;
+ LISTITER iter;
+ LISTITER end;
+
+ hdrscan = var_get( root_module(), constant_HDRSCAN );
+ if ( list_empty( hdrscan ) )
+ return;
+
+ hdrrule = var_get( root_module(), constant_HDRRULE );
+ if ( list_empty( hdrrule ) )
+ return;
+
+ if ( DEBUG_HEADER )
+ printf( "header scan %s\n", object_str( t->name ) );
+
+ /* Compile all regular expressions in HDRSCAN */
+ iter = list_begin( hdrscan );
+ end = list_end( hdrscan );
+ for ( ; ( rec < MAXINC ) && iter != end; iter = list_next( iter ) )
+ {
+ re[ rec++ ] = regex_compile( list_item( iter ) );
+ }
+
+ /* Doctor up call to HDRRULE rule */
+ /* Call headers1() to get LIST of included files. */
+ {
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ lol_add( frame->args, list_new( object_copy( t->name ) ) );
+#ifdef OPT_HEADER_CACHE_EXT
+ lol_add( frame->args, hcache( t, rec, re, hdrscan ) );
+#else
+ lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) );
+#endif
+
+ if ( lol_get( frame->args, 1 ) )
+ {
+ OBJECT * rulename = list_front( hdrrule );
+ /* The third argument to HDRRULE is the bound name of $(<). */
+ lol_add( frame->args, list_new( object_copy( t->boundname ) ) );
+ list_free( evaluate_rule( bindrule( rulename, frame->module ), rulename, frame ) );
+ }
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * headers1() - using regexp, scan a file and build include LIST.
+ */
+
+#ifndef OPT_HEADER_CACHE_EXT
+static
+#endif
+LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] )
+{
+ FILE * f;
+ char buf[ 1024 ];
+ int i;
+ static regexp * re_macros = 0;
+
+#ifdef OPT_IMPROVED_PATIENCE_EXT
+ static int count = 0;
+ ++count;
+ if ( ( ( count == 100 ) || !( count % 1000 ) ) && DEBUG_MAKE )
+ {
+ printf( "...patience...\n" );
+ fflush( stdout );
+ }
+#endif
+
+ /* The following regexp is used to detect cases where a file is included
+ * through a line like "#include MACRO".
+ */
+ if ( re_macros == 0 )
+ {
+ OBJECT * const re_str = object_new(
+ "#[ \t]*include[ \t]*([A-Za-z][A-Za-z0-9_]*).*$" );
+ re_macros = regex_compile( re_str );
+ object_free( re_str );
+ }
+
+ if ( !( f = fopen( object_str( file ), "r" ) ) )
+ return l;
+
+ while ( fgets( buf, sizeof( buf ), f ) )
+ {
+ for ( i = 0; i < rec; ++i )
+ if ( regexec( re[ i ], buf ) && re[ i ]->startp[ 1 ] )
+ {
+ ( (char *)re[ i ]->endp[ 1 ] )[ 0 ] = '\0';
+ if ( DEBUG_HEADER )
+ printf( "header found: %s\n", re[ i ]->startp[ 1 ] );
+ l = list_push_back( l, object_new( re[ i ]->startp[ 1 ] ) );
+ }
+
+ /* Special treatment for #include MACRO. */
+ if ( regexec( re_macros, buf ) && re_macros->startp[ 1 ] )
+ {
+ OBJECT * header_filename;
+ OBJECT * macro_name;
+
+ ( (char *)re_macros->endp[ 1 ] )[ 0 ] = '\0';
+
+ if ( DEBUG_HEADER )
+ printf( "macro header found: %s", re_macros->startp[ 1 ] );
+
+ macro_name = object_new( re_macros->startp[ 1 ] );
+ header_filename = macro_header_get( macro_name );
+ object_free( macro_name );
+ if ( header_filename )
+ {
+ if ( DEBUG_HEADER )
+ printf( " resolved to '%s'\n", object_str( header_filename )
+ );
+ l = list_push_back( l, object_copy( header_filename ) );
+ }
+ else
+ {
+ if ( DEBUG_HEADER )
+ printf( " ignored !!\n" );
+ }
+ }
+ }
+
+ fclose( f );
+ return l;
+}
+
+
+void regerror( char const * s )
+{
+ printf( "re error %s\n", s );
+}
diff --git a/tools/build/v2/engine/headers.h b/tools/build/src/engine/headers.h
index 1c0a642df8..1c0a642df8 100644
--- a/tools/build/v2/engine/headers.h
+++ b/tools/build/src/engine/headers.h
diff --git a/tools/build/src/engine/jam.c b/tools/build/src/engine/jam.c
new file mode 100644
index 0000000000..1c80eec50b
--- /dev/null
+++ b/tools/build/src/engine/jam.c
@@ -0,0 +1,656 @@
+/*
+ * /+\
+ * +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ * \+/
+ *
+ * This file is part of jam.
+ *
+ * License is hereby granted to use this software and distribute it freely, as
+ * long as this copyright notice is retained and modifications are clearly
+ * marked.
+ *
+ * ALL WARRANTIES ARE HEREBY DISCLAIMED.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jam.c - make redux
+ *
+ * See Jam.html for usage information.
+ *
+ * These comments document the code.
+ *
+ * The top half of the code is structured such:
+ *
+ * jam
+ * / | \
+ * +---+ | \
+ * / | \
+ * jamgram option \
+ * / | \ \
+ * / | \ \
+ * / | \ |
+ * scan | compile make
+ * | | / | \ / | \
+ * | | / | \ / | \
+ * | | / | \ / | \
+ * jambase parse | rules search make1
+ * | | | \
+ * | | | \
+ * | | | \
+ * builtins timestamp command execute
+ * |
+ * |
+ * |
+ * filesys
+ *
+ *
+ * The support routines are called by all of the above, but themselves are
+ * layered thus:
+ *
+ * variable|expand
+ * / | |
+ * / | |
+ * / | |
+ * lists | pathsys
+ * \ |
+ * \ hash
+ * \ |
+ * \ |
+ * \ |
+ * \ |
+ * \ |
+ * object
+ *
+ * Roughly, the modules are:
+ *
+ * builtins.c - jam's built-in rules
+ * command.c - maintain lists of commands
+ * compile.c - compile parsed jam statements
+ * exec*.c - execute a shell script on a specific OS
+ * file*.c - scan directories and archives on a specific OS
+ * hash.c - simple in-memory hashing routines
+ * hdrmacro.c - handle header file parsing for filename macro definitions
+ * headers.c - handle #includes in source files
+ * jambase.c - compilable copy of Jambase
+ * jamgram.y - jam grammar
+ * lists.c - maintain lists of strings
+ * make.c - bring a target up to date, once rules are in place
+ * make1.c - execute command to bring targets up to date
+ * object.c - string manipulation routines
+ * option.c - command line option processing
+ * parse.c - make and destroy parse trees as driven by the parser
+ * path*.c - manipulate file names on a specific OS
+ * regexp.c - Henry Spencer's regexp
+ * rules.c - access to RULEs, TARGETs, and ACTIONs
+ * scan.c - the jam yacc scanner
+ * search.c - find a target along $(SEARCH) or $(LOCATE)
+ * timestamp.c - get the timestamp of a file or archive member
+ * variable.c - handle jam multi-element variables
+ */
+
+
+#include "jam.h"
+#include "patchlevel.h"
+
+#include "builtins.h"
+#include "class.h"
+#include "compile.h"
+#include "constants.h"
+#include "filesys.h"
+#include "function.h"
+#include "hcache.h"
+#include "lists.h"
+#include "make.h"
+#include "object.h"
+#include "option.h"
+#include "output.h"
+#include "parse.h"
+#include "cwd.h"
+#include "rules.h"
+#include "scan.h"
+#include "search.h"
+#include "strings.h"
+#include "timestamp.h"
+#include "variable.h"
+
+/* Macintosh is "special" */
+#ifdef OS_MAC
+# include <QuickDraw.h>
+#endif
+
+/* And UNIX for this. */
+#ifdef unix
+# include <sys/utsname.h>
+# include <signal.h>
+#endif
+
+struct globs globs =
+{
+ 0, /* noexec */
+ 1, /* jobs */
+ 0, /* quitquick */
+ 0, /* newestfirst */
+ 0, /* pipes action stdout and stderr merged to action output */
+#ifdef OS_MAC
+ { 0, 0 }, /* debug - suppress tracing output */
+#else
+ { 0, 1 }, /* debug ... */
+#endif
+ 0, /* output commands, not run them */
+ 0, /* action timeout */
+ 0 /* maximum buffer size zero is all output */
+};
+
+/* Symbols to be defined as true for use in Jambase. */
+static char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 };
+
+
+/* Known for sure:
+ * mac needs arg_environ
+ * OS2 needs extern environ
+ */
+
+#ifdef OS_MAC
+# define use_environ arg_environ
+# ifdef MPW
+ QDGlobals qd;
+# endif
+#endif
+
+/* on Win32-LCC */
+#if defined( OS_NT ) && defined( __LCC__ )
+# define use_environ _environ
+#endif
+
+#if defined( __MWERKS__)
+# define use_environ _environ
+ extern char * * _environ;
+#endif
+
+#ifndef use_environ
+# define use_environ environ
+# if !defined( __WATCOM__ ) && !defined( OS_OS2 ) && !defined( OS_NT )
+ extern char **environ;
+# endif
+#endif
+
+#if YYDEBUG != 0
+ extern int yydebug;
+#endif
+
+#ifndef NDEBUG
+static void run_unit_tests()
+{
+# if defined( USE_EXECNT )
+ extern void execnt_unit_test();
+ execnt_unit_test();
+# endif
+ string_unit_test();
+}
+#endif
+
+int anyhow = 0;
+
+#ifdef HAVE_PYTHON
+ extern PyObject * bjam_call ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_import_rule ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_define_action( PyObject * self, PyObject * args );
+ extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
+ extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
+#endif
+
+void regex_done();
+
+char const * saved_argv0;
+
+int main( int argc, char * * argv, char * * arg_environ )
+{
+ int n;
+ char * s;
+ struct bjam_option optv[ N_OPTS ];
+ char const * all = "all";
+ int status;
+ int arg_c = argc;
+ char * * arg_v = argv;
+ char const * progname = argv[ 0 ];
+ module_t * environ_module;
+
+ saved_argv0 = argv[ 0 ];
+
+ BJAM_MEM_INIT();
+
+#ifdef OS_MAC
+ InitGraf( &qd.thePort );
+#endif
+
+ --argc;
+ ++argv;
+
+ if ( getoptions( argc, argv, "-:l:m:d:j:p:f:gs:t:ano:qv", optv ) < 0 )
+ {
+ printf( "\nusage: %s [ options ] targets...\n\n", progname );
+
+ printf( "-a Build all targets, even if they are current.\n" );
+ printf( "-dx Set the debug level to x (0-9).\n" );
+ printf( "-fx Read x instead of Jambase.\n" );
+ /* printf( "-g Build from newest sources first.\n" ); */
+ printf( "-jx Run up to x shell commands concurrently.\n" );
+ printf( "-lx Limit actions to x number of seconds after which they are stopped.\n" );
+ printf( "-mx Maximum target output saved (kb), default is to save all output.\n" );
+ printf( "-n Don't actually execute the updating actions.\n" );
+ printf( "-ox Write the updating actions to file x.\n" );
+ printf( "-px x=0, pipes action stdout and stderr merged into action output.\n" );
+ printf( "-q Quit quickly as soon as a target fails.\n" );
+ printf( "-sx=y Set variable x=y, overriding environment.\n" );
+ printf( "-tx Rebuild x, even if it is up-to-date.\n" );
+ printf( "-v Print the version of jam and exit.\n" );
+ printf( "--x Option is ignored.\n\n" );
+
+ exit( EXITBAD );
+ }
+
+ /* Version info. */
+ if ( ( s = getoptval( optv, 'v', 0 ) ) )
+ {
+ printf( "Boost.Jam Version %s. %s.\n", VERSION, OSMINOR );
+ printf( " Copyright 1993-2002 Christopher Seiwald and Perforce "
+ "Software, Inc.\n" );
+ printf( " Copyright 2001 David Turner.\n" );
+ printf( " Copyright 2001-2004 David Abrahams.\n" );
+ printf( " Copyright 2002-2008 Rene Rivera.\n" );
+ printf( " Copyright 2003-2008 Vladimir Prus.\n" );
+ return EXITOK;
+ }
+
+ /* Pick up interesting options. */
+ if ( ( s = getoptval( optv, 'n', 0 ) ) )
+ {
+ ++globs.noexec;
+ globs.debug[ 2 ] = 1;
+ }
+
+ if ( ( s = getoptval( optv, 'p', 0 ) ) )
+ {
+ /* Undocumented -p3 (acts like both -p1 -p2) means separate pipe action
+ * stdout and stderr.
+ */
+ globs.pipe_action = atoi( s );
+ if ( globs.pipe_action < 0 || 3 < globs.pipe_action )
+ {
+ printf( "Invalid pipe descriptor '%d', valid values are -p[0..3]."
+ "\n", globs.pipe_action );
+ exit( EXITBAD );
+ }
+ }
+
+ if ( ( s = getoptval( optv, 'q', 0 ) ) )
+ globs.quitquick = 1;
+
+ if ( ( s = getoptval( optv, 'a', 0 ) ) )
+ anyhow++;
+
+ if ( ( s = getoptval( optv, 'j', 0 ) ) )
+ {
+ globs.jobs = atoi( s );
+ if ( globs.jobs < 1 || globs.jobs > MAXJOBS )
+ {
+ printf( "Invalid value for the '-j' option, valid values are 1 "
+ "through %d.\n", MAXJOBS );
+ exit( EXITBAD );
+ }
+ }
+
+ if ( ( s = getoptval( optv, 'g', 0 ) ) )
+ globs.newestfirst = 1;
+
+ if ( ( s = getoptval( optv, 'l', 0 ) ) )
+ globs.timeout = atoi( s );
+
+ if ( ( s = getoptval( optv, 'm', 0 ) ) )
+ globs.max_buf = atoi( s ) * 1024; /* convert to kb */
+
+ /* Turn on/off debugging */
+ for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n )
+ {
+ int i;
+
+ /* First -d, turn off defaults. */
+ if ( !n )
+ for ( i = 0; i < DEBUG_MAX; ++i )
+ globs.debug[i] = 0;
+
+ i = atoi( s );
+
+ if ( ( i < 0 ) || ( i >= DEBUG_MAX ) )
+ {
+ printf( "Invalid debug level '%s'.\n", s );
+ continue;
+ }
+
+ /* n turns on levels 1-n. */
+ /* +n turns on level n. */
+ if ( *s == '+' )
+ globs.debug[ i ] = 1;
+ else while ( i )
+ globs.debug[ i-- ] = 1;
+ }
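+
+ /* Worked example of the debug option handling above (illustrative):
+ * "-d3" clears the defaults and then enables levels 1 through 3, while
+ * "-d+5" clears the defaults and enables only level 5. Repeated options
+ * accumulate, so "-d+3 -d+12" enables exactly levels 3 and 12.
+ */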
+
+ constants_init();
+ cwd_init();
+
+ {
+ PROFILE_ENTER( MAIN );
+
+#ifdef HAVE_PYTHON
+ {
+ PROFILE_ENTER( MAIN_PYTHON );
+ Py_Initialize();
+ {
+ static PyMethodDef BjamMethods[] = {
+ {"call", bjam_call, METH_VARARGS,
+ "Call the specified bjam rule."},
+ {"import_rule", bjam_import_rule, METH_VARARGS,
+ "Imports Python callable to bjam."},
+ {"define_action", bjam_define_action, METH_VARARGS,
+ "Defines a command line action."},
+ {"variable", bjam_variable, METH_VARARGS,
+ "Obtains a variable from bjam's global module."},
+ {"backtrace", bjam_backtrace, METH_VARARGS,
+ "Returns bjam backtrace from the last call into Python."},
+ {"caller", bjam_caller, METH_VARARGS,
+ "Returns the module from which the last call into Python is made."},
+ {NULL, NULL, 0, NULL}
+ };
+
+ Py_InitModule( "bjam", BjamMethods );
+ }
+ PROFILE_EXIT( MAIN_PYTHON );
+ }
+#endif
+
+#ifndef NDEBUG
+ run_unit_tests();
+#endif
+#if YYDEBUG != 0
+ if ( DEBUG_PARSE )
+ yydebug = 1;
+#endif
+
+ /* Set JAMDATE. */
+ {
+ timestamp current;
+ timestamp_current( &current );
+ var_set( root_module(), constant_JAMDATE, list_new( outf_time(
+ &current ) ), VAR_SET );
+ }
+
+ /* Set JAM_VERSION. */
+ var_set( root_module(), constant_JAM_VERSION,
+ list_push_back( list_push_back( list_new(
+ object_new( VERSION_MAJOR_SYM ) ),
+ object_new( VERSION_MINOR_SYM ) ),
+ object_new( VERSION_PATCH_SYM ) ),
+ VAR_SET );
+
+ /* Set JAMUNAME. */
+#ifdef unix
+ {
+ struct utsname u;
+
+ if ( uname( &u ) >= 0 )
+ {
+ var_set( root_module(), constant_JAMUNAME,
+ list_push_back(
+ list_push_back(
+ list_push_back(
+ list_push_back(
+ list_new(
+ object_new( u.sysname ) ),
+ object_new( u.nodename ) ),
+ object_new( u.release ) ),
+ object_new( u.version ) ),
+ object_new( u.machine ) ), VAR_SET );
+ }
+ }
+#endif /* unix */
+
+ /* Set JAM_TIMESTAMP_RESOLUTION. */
+ {
+ timestamp fmt_resolution[ 1 ];
+ file_supported_fmt_resolution( fmt_resolution );
+ var_set( root_module(), constant_JAM_TIMESTAMP_RESOLUTION, list_new(
+ object_new( timestamp_timestr( fmt_resolution ) ) ), VAR_SET );
+ }
+
+ /* Load up environment variables. */
+
+ /* First into the global module, with splitting, for backward
+ * compatibility.
+ */
+ var_defines( root_module(), use_environ, 1 );
+
+ environ_module = bindmodule( constant_ENVIRON );
+ /* Then into .ENVIRON, without splitting. */
+ var_defines( environ_module, use_environ, 0 );
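+
+ /* Sketch of the difference between the two loads above (illustrative
+ * values): with PATH=/usr/bin:/bin in the environment, the global module
+ * sees PATH as the split list "/usr/bin" "/bin", while the .ENVIRON module
+ * keeps the single unsplit string "/usr/bin:/bin".
+ */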
+
+ /*
+ * Jam-defined variables OS and OSPLAT. We load them after the environment so
+ * that setting OS in the environment does not change Jam's notion of the
+ * current platform.
+ */
+ var_defines( root_module(), othersyms, 1 );
+
+ /* Load up variables set on command line. */
+ for ( n = 0; ( s = getoptval( optv, 's', n ) ); ++n )
+ {
+ char * symv[ 2 ];
+ symv[ 0 ] = s;
+ symv[ 1 ] = 0;
+ var_defines( root_module(), symv, 1 );
+ var_defines( environ_module, symv, 0 );
+ }
+
+ /* Set ARGV to reflect the complete list of invocation arguments.
+ */
+ for ( n = 0; n < arg_c; ++n )
+ var_set( root_module(), constant_ARGV, list_new( object_new(
+ arg_v[ n ] ) ), VAR_APPEND );
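+
+ /* Illustrative example: invoking "b2 -j4 release" leaves
+ * $(ARGV) = "b2" "-j4" "release", one list element per argument of the
+ * original command line.
+ */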
+
+ /* Initialize built-in rules. */
+ load_builtins();
+
+ /* Add the targets in the command line to the update list. */
+ for ( n = 1; n < arg_c; ++n )
+ {
+ if ( arg_v[ n ][ 0 ] == '-' )
+ {
+ char * f = "-:l:m:d:j:p:f:gs:t:ano:qv";
+ for ( ; *f; ++f ) if ( *f == arg_v[ n ][ 1 ] ) break;
+ if ( ( f[ 1 ] == ':' ) && ( arg_v[ n ][ 2 ] == '\0' ) ) ++n;
+ }
+ else
+ {
+ OBJECT * const target = object_new( arg_v[ n ] );
+ mark_target_for_updating( target );
+ object_free( target );
+ }
+ }
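+
+ /* Sketch of the scan above (illustrative): given "jam -f custom.jam all",
+ * "-f" takes a value, so "custom.jam" is skipped and only "all" is marked
+ * for updating; "-fcustom.jam" already carries its value inline, so nothing
+ * extra is skipped.
+ */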
+
+ if ( list_empty( targets_to_update() ) )
+ mark_target_for_updating( constant_all );
+
+ /* Parse ruleset. */
+ {
+ FRAME frame[ 1 ];
+ frame_init( frame );
+ for ( n = 0; ( s = getoptval( optv, 'f', n ) ); ++n )
+ {
+ OBJECT * const filename = object_new( s );
+ parse_file( filename, frame );
+ object_free( filename );
+ }
+
+ if ( !n )
+ parse_file( constant_plus, frame );
+ }
+
+ status = yyanyerrors();
+
+ /* Manually touch -t targets. */
+ for ( n = 0; ( s = getoptval( optv, 't', n ) ); ++n )
+ {
+ OBJECT * const target = object_new( s );
+ touch_target( target );
+ object_free( target );
+ }
+
+ /* If an output file is specified, set globs.cmdout to that. */
+ if ( ( s = getoptval( optv, 'o', 0 ) ) )
+ {
+ if ( !( globs.cmdout = fopen( s, "w" ) ) )
+ {
+ printf( "Failed to write to '%s'\n", s );
+ exit( EXITBAD );
+ }
+ ++globs.noexec;
+ }
+
+ /* The build system may set the PARALLELISM variable to override -j
+ * options.
+ */
+ {
+ LIST * const p = var_get( root_module(), constant_PARALLELISM );
+ if ( !list_empty( p ) )
+ {
+ int const j = atoi( object_str( list_front( p ) ) );
+ if ( j < 1 || j > MAXJOBS )
+ printf( "Invalid value of PARALLELISM: %s. Valid values "
+ "are 1 through %d.\n", object_str( list_front( p ) ),
+ MAXJOBS );
+ else
+ globs.jobs = j;
+ }
+ }
+
+ /* KEEP_GOING overrides -q option. */
+ {
+ LIST * const p = var_get( root_module(), constant_KEEP_GOING );
+ if ( !list_empty( p ) )
+ globs.quitquick = atoi( object_str( list_front( p ) ) ) ? 0 : 1;
+ }
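+
+ /* Illustrative effect of the two overrides above: with "-j2 -q" on the
+ * command line, a Jamfile setting PARALLELISM = 8 raises globs.jobs to 8,
+ * and KEEP_GOING = 1 clears globs.quitquick so the build continues past a
+ * failing target.
+ */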
+
+ /* Now make target. */
+ {
+ PROFILE_ENTER( MAIN_MAKE );
+ LIST * const targets = targets_to_update();
+ if ( !list_empty( targets ) )
+ status |= make( targets, anyhow );
+ else
+ status = last_update_now_status;
+ PROFILE_EXIT( MAIN_MAKE );
+ }
+
+ PROFILE_EXIT( MAIN );
+ }
+
+ if ( DEBUG_PROFILE )
+ profile_dump();
+
+
+#ifdef OPT_HEADER_CACHE_EXT
+ hcache_done();
+#endif
+
+ clear_targets_to_update();
+
+ /* Widely scattered cleanup. */
+ property_set_done();
+ file_done();
+ rules_done();
+ timestamp_done();
+ search_done();
+ class_done();
+ modules_done();
+ regex_done();
+ cwd_done();
+ path_done();
+ function_done();
+ list_done();
+ constants_done();
+ object_done();
+
+ /* Close cmdout. */
+ if ( globs.cmdout )
+ fclose( globs.cmdout );
+
+#ifdef HAVE_PYTHON
+ Py_Finalize();
+#endif
+
+ BJAM_MEM_CLOSE();
+
+ return status ? EXITBAD : EXITOK;
+}
+
+
+/*
+ * executable_path()
+ */
+
+#if defined(_WIN32)
+# define WIN32_LEAN_AND_MEAN
+# include <windows.h>
+char * executable_path( char const * argv0 )
+{
+ char buf[ 1024 ];
+ DWORD const ret = GetModuleFileName( NULL, buf, sizeof( buf ) );
+ return ( !ret || ret == sizeof( buf ) ) ? NULL : strdup( buf );
+}
+#elif defined(__APPLE__) /* Not tested */
+# include <mach-o/dyld.h>
+char *executable_path( char const * argv0 )
+{
+ char buf[ 1024 ];
+ uint32_t size = sizeof( buf );
+ return _NSGetExecutablePath( buf, &size ) ? NULL : strdup( buf );
+}
+#elif defined(sun) || defined(__sun) /* Not tested */
+# include <stdlib.h>
+char * executable_path( char const * argv0 )
+{
+ return strdup( getexecname() );
+}
+#elif defined(__FreeBSD__)
+# include <sys/sysctl.h>
+char * executable_path( char const * argv0 )
+{
+ int mib[ 4 ] = { CTL_KERN, KERN_PROC, KERN_PROC_PATHNAME, -1 };
+ char buf[ 1024 ];
+ size_t size = sizeof( buf );
+ sysctl( mib, 4, buf, &size, NULL, 0 );
+ return ( !size || size == sizeof( buf ) ) ? NULL : strndup( buf, size );
+}
+#elif defined(__linux__)
+# include <unistd.h>
+char * executable_path( char const * argv0 )
+{
+ char buf[ 1024 ];
+ ssize_t const ret = readlink( "/proc/self/exe", buf, sizeof( buf ) );
+ return ( ret <= 0 || ret == sizeof( buf ) ) ? NULL : strndup( buf, ret );
+}
+#else
+char * executable_path( char const * argv0 )
+{
+ /* If argv0 is an absolute path, assume it is the right absolute path. */
+ return argv0[ 0 ] == '/' ? strdup( argv0 ) : NULL;
+}
+#endif
diff --git a/tools/build/src/engine/jam.h b/tools/build/src/engine/jam.h
new file mode 100644
index 0000000000..497a5bfb1a
--- /dev/null
+++ b/tools/build/src/engine/jam.h
@@ -0,0 +1,475 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * jam.h - includes and globals for jam
+ */
+
+#ifndef JAM_H_VP_2003_08_01
+#define JAM_H_VP_2003_08_01
+
+#ifdef HAVE_PYTHON
+#include <Python.h>
+#endif
+
+/* Assume popen support is available unless known otherwise. */
+#define HAVE_POPEN 1
+
+/*
+ * Windows NT
+ */
+
+#ifdef NT
+
+#include <ctype.h>
+#include <fcntl.h>
+#include <malloc.h>
+#ifndef __MWERKS__
+ #include <memory.h>
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "NT=true"
+#define OSMINOR "OS=NT"
+#define OS_NT
+#define SPLITPATH ';'
+#define MAXLINE (undefined__see_execnt_c) /* max chars per command line */
+#define USE_EXECNT
+#define PATH_DELIM '\\'
+
+/* AS400 cross-compile from NT. */
+
+#ifdef AS400
+ #undef OSMINOR
+ #undef OSMAJOR
+ #define OSMAJOR "AS400=true"
+ #define OSMINOR "OS=AS400"
+ #define OS_AS400
+#endif
+
+/* Metrowerks Standard Library on Windows. */
+
+#ifdef __MSL__
+ #undef HAVE_POPEN
+#endif
+
+#endif /* #ifdef NT */
+
+
+/*
+ * Windows MingW32
+ */
+
+#ifdef MINGW
+
+#include <fcntl.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <malloc.h>
+#include <memory.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+
+#define OSMAJOR "MINGW=true"
+#define OSMINOR "OS=MINGW"
+#define OS_NT
+#define SPLITPATH ';'
+#define MAXLINE 996 /* max chars per command line */
+#define USE_EXECUNIX
+#define PATH_DELIM '\\'
+
+#endif /* #ifdef MINGW */
+
+
+/*
+ * God fearing UNIX.
+ */
+
+#ifndef OSMINOR
+
+#define OSMAJOR "UNIX=true"
+#define USE_EXECUNIX
+#define USE_FILEUNIX
+#define PATH_DELIM '/'
+
+#ifdef _AIX
+ #define unix
+ #define MAXLINE 23552 /* 24k - 1k, max chars per command line */
+ #define OSMINOR "OS=AIX"
+ #define OS_AIX
+ #define NO_VFORK
+#endif
+#ifdef AMIGA
+ #define OSMINOR "OS=AMIGA"
+ #define OS_AMIGA
+#endif
+#ifdef __BEOS__
+ #define unix
+ #define OSMINOR "OS=BEOS"
+ #define OS_BEOS
+ #define NO_VFORK
+#endif
+#ifdef __bsdi__
+ #define OSMINOR "OS=BSDI"
+ #define OS_BSDI
+#endif
+#if defined (COHERENT) && defined (_I386)
+ #define OSMINOR "OS=COHERENT"
+ #define OS_COHERENT
+ #define NO_VFORK
+#endif
+#if defined(__cygwin__) || defined(__CYGWIN__)
+ #define OSMINOR "OS=CYGWIN"
+ #define OS_CYGWIN
+#endif
+#if defined(__FreeBSD__) && !defined(__DragonFly__)
+ #define OSMINOR "OS=FREEBSD"
+ #define OS_FREEBSD
+#endif
+#ifdef __DragonFly__
+ #define OSMINOR "OS=DRAGONFLYBSD"
+ #define OS_DRAGONFLYBSD
+#endif
+#ifdef __DGUX__
+ #define OSMINOR "OS=DGUX"
+ #define OS_DGUX
+#endif
+#ifdef __hpux
+ #define OSMINOR "OS=HPUX"
+ #define OS_HPUX
+#endif
+#ifdef __OPENNT
+ #define unix
+ #define OSMINOR "OS=INTERIX"
+ #define OS_INTERIX
+ #define NO_VFORK
+#endif
+#ifdef __sgi
+ #define OSMINOR "OS=IRIX"
+ #define OS_IRIX
+ #define NO_VFORK
+#endif
+#ifdef __ISC
+ #define OSMINOR "OS=ISC"
+ #define OS_ISC
+ #define NO_VFORK
+#endif
+#ifdef linux
+ #define OSMINOR "OS=LINUX"
+ #define OS_LINUX
+#endif
+#ifdef __Lynx__
+ #define OSMINOR "OS=LYNX"
+ #define OS_LYNX
+ #define NO_VFORK
+ #define unix
+#endif
+#ifdef __MACHTEN__
+ #define OSMINOR "OS=MACHTEN"
+ #define OS_MACHTEN
+#endif
+#ifdef mpeix
+ #define unix
+ #define OSMINOR "OS=MPEIX"
+ #define OS_MPEIX
+ #define NO_VFORK
+#endif
+#ifdef __MVS__
+ #define unix
+ #define OSMINOR "OS=MVS"
+ #define OS_MVS
+#endif
+#ifdef _ATT4
+ #define OSMINOR "OS=NCR"
+ #define OS_NCR
+#endif
+#ifdef __NetBSD__
+ #define unix
+ #define OSMINOR "OS=NETBSD"
+ #define OS_NETBSD
+ #define NO_VFORK
+#endif
+#ifdef __QNX__
+ #define unix
+ #ifdef __QNXNTO__
+ #define OSMINOR "OS=QNXNTO"
+ #define OS_QNXNTO
+ #else
+ #define OSMINOR "OS=QNX"
+ #define OS_QNX
+ #define NO_VFORK
+ #define MAXLINE 996 /* max chars per command line */
+ #endif
+#endif
+#ifdef NeXT
+ #ifdef __APPLE__
+ #define OSMINOR "OS=RHAPSODY"
+ #define OS_RHAPSODY
+ #else
+ #define OSMINOR "OS=NEXT"
+ #define OS_NEXT
+ #endif
+#endif
+#ifdef __APPLE__
+ #define unix
+ #define OSMINOR "OS=MACOSX"
+ #define OS_MACOSX
+#endif
+#ifdef __osf__
+ #ifndef unix
+ #define unix
+ #endif
+ #define OSMINOR "OS=OSF"
+ #define OS_OSF
+#endif
+#ifdef _SEQUENT_
+ #define OSMINOR "OS=PTX"
+ #define OS_PTX
+#endif
+#ifdef M_XENIX
+ #define OSMINOR "OS=SCO"
+ #define OS_SCO
+ #define NO_VFORK
+#endif
+#ifdef sinix
+ #define unix
+ #define OSMINOR "OS=SINIX"
+ #define OS_SINIX
+#endif
+#ifdef sun
+ #if defined(__svr4__) || defined(__SVR4)
+ #define OSMINOR "OS=SOLARIS"
+ #define OS_SOLARIS
+ #else
+ #define OSMINOR "OS=SUNOS"
+ #define OS_SUNOS
+ #endif
+#endif
+#ifdef ultrix
+ #define OSMINOR "OS=ULTRIX"
+ #define OS_ULTRIX
+#endif
+#ifdef _UNICOS
+ #define OSMINOR "OS=UNICOS"
+ #define OS_UNICOS
+#endif
+#if defined(__USLC__) && !defined(M_XENIX)
+ #define OSMINOR "OS=UNIXWARE"
+ #define OS_UNIXWARE
+#endif
+#ifdef __OpenBSD__
+ #define OSMINOR "OS=OPENBSD"
+ #define OS_OPENBSD
+ #define unix
+#endif
+#if defined (__FreeBSD_kernel__) && !defined(__FreeBSD__)
+ #define OSMINOR "OS=KFREEBSD"
+ #define OS_KFREEBSD
+#endif
+#ifndef OSMINOR
+ #define OSMINOR "OS=UNKNOWN"
+#endif
+
+/* All the UNIX includes */
+
+#include <sys/types.h>
+
+#ifndef OS_MPEIX
+ #include <sys/file.h>
+#endif
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <ctype.h>
+#include <signal.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+
+#ifndef OS_QNX
+ #include <memory.h>
+#endif
+
+#ifndef OS_ULTRIX
+ #include <stdlib.h>
+#endif
+
+#if !defined( OS_BSDI ) && \
+ !defined( OS_FREEBSD ) && \
+ !defined( OS_DRAGONFLYBSD ) && \
+ !defined( OS_NEXT ) && \
+ !defined( OS_MACHTEN ) && \
+ !defined( OS_MACOSX ) && \
+ !defined( OS_RHAPSODY ) && \
+ !defined( OS_MVS ) && \
+ !defined( OS_OPENBSD )
+ #include <malloc.h>
+#endif
+
+#endif /* #ifndef OSMINOR */
+
+
+/*
+ * OSPLAT definitions - suppressed when it is a one-of-a-kind.
+ */
+
+#if defined( _M_PPC ) || \
+ defined( PPC ) || \
+ defined( ppc ) || \
+ defined( __powerpc__ ) || \
+ defined( __ppc__ )
+ #define OSPLAT "OSPLAT=PPC"
+#endif
+
+#if defined( _ALPHA_ ) || \
+ defined( __alpha__ )
+ #define OSPLAT "OSPLAT=AXP"
+#endif
+
+#if defined( _i386_ ) || \
+ defined( __i386__ ) || \
+ defined( __i386 ) || \
+ defined( _M_IX86 )
+ #define OSPLAT "OSPLAT=X86"
+#endif
+
+#if defined( __ia64__ ) || \
+ defined( __IA64__ ) || \
+ defined( __ia64 )
+ #define OSPLAT "OSPLAT=IA64"
+#endif
+
+#if defined( __x86_64__ ) || \
+ defined( __amd64__ ) || \
+ defined( _M_AMD64 )
+ #define OSPLAT "OSPLAT=X86_64"
+#endif
+
+#if defined( __sparc__ ) || \
+ defined( __sparc )
+ #define OSPLAT "OSPLAT=SPARC"
+#endif
+
+#ifdef __mips__
+ #define OSPLAT "OSPLAT=MIPS"
+#endif
+
+#ifdef __arm__
+ #define OSPLAT "OSPLAT=ARM"
+#endif
+
+#ifdef __s390__
+ #define OSPLAT "OSPLAT=390"
+#endif
+
+#ifdef __hppa
+ #define OSPLAT "OSPLAT=PARISC"
+#endif
+
+#ifndef OSPLAT
+ #define OSPLAT ""
+#endif
+
+
+/*
+ * Jam implementation misc.
+ */
+
+#ifndef MAXLINE
+ #define MAXLINE 102400 /* max chars per command line */
+#endif
+
+#ifndef EXITOK
+ #define EXITOK 0
+ #define EXITBAD 1
+#endif
+
+#ifndef SPLITPATH
+ #define SPLITPATH ':'
+#endif
+
+/* You probably do not need to muck with these. */
+
+#define MAXSYM 1024 /* longest symbol in the environment */
+#define MAXJPATH 1024 /* longest filename */
+
+#define MAXJOBS 256 /* internally enforced -j limit */
+#define MAXARGC 32 /* words in $(JAMSHELL) */
+
+/* Jam private definitions below. */
+
+#define DEBUG_MAX 14
+
+
+struct globs
+{
+ int noexec;
+ int jobs;
+ int quitquick;
+ int newestfirst; /* build newest sources first */
+ int pipe_action;
+ char debug[ DEBUG_MAX ];
+ FILE * cmdout; /* print cmds, not run them */
+ long timeout; /* number of seconds to limit actions to,
+ * default 0 for no limit.
+ */
+ int dart; /* output build and test results formatted for
+ * Dart
+ */
+ int max_buf; /* maximum amount of output saved from target
+ * (kb)
+ */
+};
+
+extern struct globs globs;
+
+#define DEBUG_MAKE ( globs.debug[ 1 ] ) /* show actions when executed */
+#define DEBUG_MAKEQ ( globs.debug[ 2 ] ) /* show even quiet actions */
+#define DEBUG_EXEC ( globs.debug[ 2 ] ) /* show text of actions */
+#define DEBUG_MAKEPROG ( globs.debug[ 3 ] ) /* show make0 progress */
+#define DEBUG_BIND ( globs.debug[ 3 ] ) /* show when files bound */
+
+#define DEBUG_EXECCMD ( globs.debug[ 4 ] ) /* show execcmds()'s work */
+
+#define DEBUG_COMPILE ( globs.debug[ 5 ] ) /* show rule invocations */
+
+#define DEBUG_HEADER ( globs.debug[ 6 ] ) /* show result of header scan */
+#define DEBUG_BINDSCAN ( globs.debug[ 6 ] ) /* show result of dir scan */
+#define DEBUG_SEARCH ( globs.debug[ 6 ] ) /* show binding attempts */
+
+#define DEBUG_VARSET ( globs.debug[ 7 ] ) /* show variable settings */
+#define DEBUG_VARGET ( globs.debug[ 8 ] ) /* show variable fetches */
+#define DEBUG_VAREXP ( globs.debug[ 8 ] ) /* show variable expansions */
+#define DEBUG_IF ( globs.debug[ 8 ] ) /* show 'if' calculations */
+#define DEBUG_LISTS ( globs.debug[ 9 ] ) /* show list manipulation */
+#define DEBUG_SCAN ( globs.debug[ 9 ] ) /* show scanner tokens */
+#define DEBUG_MEM ( globs.debug[ 9 ] ) /* show memory use */
+
+#define DEBUG_PROFILE ( globs.debug[ 10 ] ) /* dump rule execution times */
+#define DEBUG_PARSE ( globs.debug[ 11 ] ) /* debug parsing */
+#define DEBUG_GRAPH ( globs.debug[ 12 ] ) /* debug dependencies */
+#define DEBUG_FATE ( globs.debug[ 13 ] ) /* show fate changes in make0() */
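+
+/* Illustrative mapping between -d levels and the macros above: "-d5" turns
+ * on levels 1 through 5 (DEBUG_MAKE, DEBUG_MAKEQ/DEBUG_EXEC,
+ * DEBUG_MAKEPROG/DEBUG_BIND, DEBUG_EXECCMD and DEBUG_COMPILE), while
+ * "-d+10" enables only DEBUG_PROFILE.
+ */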
+
+/* Everyone gets the memory definitions. */
+#include "mem.h"
+
+/* They also get the profile functions. */
+#include "debug.h"
+
+#endif
diff --git a/tools/build/v2/engine/jambase.c b/tools/build/src/engine/jambase.c
index b15282bc32..b15282bc32 100644
--- a/tools/build/v2/engine/jambase.c
+++ b/tools/build/src/engine/jambase.c
diff --git a/tools/build/v2/engine/jambase.h b/tools/build/src/engine/jambase.h
index c05ec79225..c05ec79225 100644
--- a/tools/build/v2/engine/jambase.h
+++ b/tools/build/src/engine/jambase.h
diff --git a/tools/build/v2/engine/jamgram.c b/tools/build/src/engine/jamgram.c
index 48c85228e8..48c85228e8 100644
--- a/tools/build/v2/engine/jamgram.c
+++ b/tools/build/src/engine/jamgram.c
diff --git a/tools/build/v2/engine/jamgram.h b/tools/build/src/engine/jamgram.h
index 97f117535d..97f117535d 100644
--- a/tools/build/v2/engine/jamgram.h
+++ b/tools/build/src/engine/jamgram.h
diff --git a/tools/build/v2/engine/jamgram.y b/tools/build/src/engine/jamgram.y
index 543f1561a4..543f1561a4 100644
--- a/tools/build/v2/engine/jamgram.y
+++ b/tools/build/src/engine/jamgram.y
diff --git a/tools/build/v2/engine/jamgram.yy b/tools/build/src/engine/jamgram.yy
index 8d20e3896e..8d20e3896e 100644
--- a/tools/build/v2/engine/jamgram.yy
+++ b/tools/build/src/engine/jamgram.yy
diff --git a/tools/build/v2/engine/jamgramtab.h b/tools/build/src/engine/jamgramtab.h
index a0fd43f6aa..a0fd43f6aa 100644
--- a/tools/build/v2/engine/jamgramtab.h
+++ b/tools/build/src/engine/jamgramtab.h
diff --git a/tools/build/src/engine/lists.c b/tools/build/src/engine/lists.c
new file mode 100644
index 0000000000..3f2309b05e
--- /dev/null
+++ b/tools/build/src/engine/lists.c
@@ -0,0 +1,475 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * lists.c - maintain lists of objects
+ */
+
+#include "jam.h"
+#include "lists.h"
+
+#include <assert.h>
+
+static LIST * freelist[ 32 ]; /* junkpile for list_dealloc() */
+
+static unsigned get_bucket( unsigned size )
+{
+ unsigned bucket = 0;
+ while ( size > ( 1u << bucket ) ) ++bucket;
+ return bucket;
+}
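+
+/* Worked example (illustrative): get_bucket() returns the smallest exponent
+ * whose power-of-two capacity holds `size` objects, so get_bucket( 1 ) == 0,
+ * get_bucket( 2 ) == 1, get_bucket( 3 ) == 2 and get_bucket( 5 ) == 3, i.e.
+ * nodes are allocated with room for 1, 2, 4 or 8 elements and later recycled
+ * onto the matching freelist[] bucket.
+ */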
+
+static LIST * list_alloc( unsigned const size )
+{
+ unsigned const bucket = get_bucket( size );
+ if ( freelist[ bucket ] )
+ {
+ LIST * result = freelist[ bucket ];
+ freelist[ bucket ] = result->impl.next;
+ return result;
+ }
+ return (LIST *)BJAM_MALLOC( sizeof( LIST ) + ( 1u << bucket ) *
+ sizeof( OBJECT * ) );
+}
+
+static void list_dealloc( LIST * l )
+{
+ unsigned size = list_length( l );
+ unsigned bucket;
+ LIST * node = l;
+
+ if ( size == 0 ) return;
+
+ bucket = get_bucket( size );
+
+#ifdef BJAM_NO_MEM_CACHE
+ BJAM_FREE( node );
+#else
+ node->impl.next = freelist[ bucket ];
+ freelist[ bucket ] = node;
+#endif
+}
+
+/*
+ * list_append() - append a list onto another one, returning total
+ */
+
+LIST * list_append( LIST * l, LIST * nl )
+{
+ if ( list_empty( l ) )
+ return nl;
+ if ( !list_empty( nl ) )
+ {
+ int const l_size = list_length( l );
+ int const nl_size = list_length( nl );
+ int const size = l_size + nl_size;
+ unsigned const bucket = get_bucket( size );
+
+ /* Do we need to reallocate? */
+ if ( l_size <= ( 1u << ( bucket - 1 ) ) )
+ {
+ LIST * result = list_alloc( size );
+ memcpy( list_begin( result ), list_begin( l ), l_size * sizeof(
+ OBJECT * ) );
+ list_dealloc( l );
+ l = result;
+ }
+
+ l->impl.size = size;
+ memcpy( list_begin( l ) + l_size, list_begin( nl ), nl_size * sizeof(
+ OBJECT * ) );
+ list_dealloc( nl );
+ }
+ return l;
+}
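+
+/* Worked example of the reallocation test above (illustrative): appending a
+ * 3-element list to a 2-element list gives size == 5 and bucket == 3
+ * (capacity 8); since l_size == 2 <= ( 1u << 2 ), the old 2-slot node cannot
+ * hold 5 objects and a fresh node is taken from list_alloc(). Appending one
+ * element to a 3-element list (size == 4, bucket == 2, l_size == 3 > 2)
+ * reuses the existing 4-slot node.
+ */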
+
+LISTITER list_begin( LIST * l )
+{
+ return l ? (LISTITER)( (char *)l + sizeof( LIST ) ) : 0;
+}
+
+LISTITER list_end( LIST * l )
+{
+ return l ? list_begin( l ) + l->impl.size : 0;
+}
+
+LIST * list_new( OBJECT * value )
+{
+ LIST * const head = list_alloc( 1 ) ;
+ head->impl.size = 1;
+ list_begin( head )[ 0 ] = value;
+ return head;
+}
+
+/*
+ * list_push_back() - tack an object onto the end of a list of objects
+ */
+
+LIST * list_push_back( LIST * head, OBJECT * value )
+{
+ unsigned int size = list_length( head );
+ unsigned int i;
+
+ if ( DEBUG_LISTS )
+ printf( "list > %s <\n", object_str( value ) );
+
+ /* If the size is a power of 2, reallocate. */
+ if ( size == 0 )
+ {
+ head = list_alloc( 1 );
+ }
+ else if ( ( ( size - 1 ) & size ) == 0 )
+ {
+ LIST * l = list_alloc( size + 1 );
+ memcpy( l, head, sizeof( LIST ) + size * sizeof( OBJECT * ) );
+ list_dealloc( head );
+ head = l;
+ }
+
+ list_begin( head )[ size ] = value;
+ head->impl.size = size + 1;
+
+ return head;
+}
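+
+/* Illustrative growth pattern: pushing onto a list of length 1, 2, 4, 8, ...
+ * (any power of two) trips the "( ( size - 1 ) & size ) == 0" test, so the
+ * node is copied into the next larger bucket; every other push writes into
+ * spare capacity that is already there.
+ */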
+
+
+/*
+ * list_copy() - copy a whole list of objects.
+ */
+
+LIST * list_copy( LIST * l )
+{
+ int size = list_length( l );
+ int i;
+ LIST * result;
+
+ if ( size == 0 ) return L0;
+
+ result = list_alloc( size );
+ result->impl.size = size;
+ for ( i = 0; i < size; ++i )
+ list_begin( result )[ i ] = object_copy( list_begin( l )[ i ] );
+ return result;
+}
+
+
+LIST * list_copy_range( LIST * l, LISTITER first, LISTITER last )
+{
+ if ( first == last )
+ return L0;
+ else
+ {
+ int size = last - first;
+ LIST * result = list_alloc( size );
+ LISTITER dest = list_begin( result );
+ result->impl.size = size;
+ for ( ; first != last; ++first, ++dest )
+ *dest = object_copy( *first );
+ return result;
+ }
+}
+
+
+/*
+ * list_sublist() - copy a subset of a list of strings.
+ */
+
+LIST * list_sublist( LIST * l, int start, int count )
+{
+ int end = start + count;
+ int size = list_length( l );
+ if ( start >= size ) return L0;
+ if ( end > size ) end = size;
+ return list_copy_range( l, list_begin( l ) + start, list_begin( l ) + end );
+}
+
+
+static int str_ptr_compare( void const * va, void const * vb )
+{
+ OBJECT * a = *( (OBJECT * *)va );
+ OBJECT * b = *( (OBJECT * *)vb );
+ return strcmp( object_str( a ), object_str( b ) );
+}
+
+
+LIST * list_sort( LIST * l )
+{
+ int len;
+ int ii;
+ LIST * result;
+
+ if ( !l )
+ return L0;
+
+ len = list_length( l );
+ result = list_copy( l );
+
+ qsort( list_begin( result ), len, sizeof( OBJECT * ), str_ptr_compare );
+
+ return result;
+}
+
+
+/*
+ * list_free() - free a list of strings
+ */
+
+void list_free( LIST * head )
+{
+ if ( !list_empty( head ) )
+ {
+ LISTITER iter = list_begin( head );
+ LISTITER const end = list_end( head );
+ for ( ; iter != end; iter = list_next( iter ) )
+ object_free( list_item( iter ) );
+ list_dealloc( head );
+ }
+}
+
+
+/*
+ * list_pop_front() - remove the front element from a list of strings
+ */
+
+LIST * list_pop_front( LIST * l )
+{
+ unsigned size = list_length( l );
+ assert( size );
+ --size;
+ object_free( list_front( l ) );
+
+ if ( size == 0 )
+ {
+ list_dealloc( l );
+ return L0;
+ }
+
+ if ( ( ( size - 1 ) & size ) == 0 )
+ {
+ LIST * const nl = list_alloc( size );
+ nl->impl.size = size;
+ memcpy( list_begin( nl ), list_begin( l ) + 1, size * sizeof( OBJECT * )
+ );
+ list_dealloc( l );
+ return nl;
+ }
+
+ l->impl.size = size;
+ memmove( list_begin( l ), list_begin( l ) + 1, size * sizeof( OBJECT * ) );
+ return l;
+}
+
+LIST * list_reverse( LIST * l )
+{
+ int size = list_length( l );
+ if ( size == 0 ) return L0;
+ {
+ LIST * const result = list_alloc( size );
+ int i;
+ result->impl.size = size;
+ for ( i = 0; i < size; ++i )
+ list_begin( result )[ i ] = object_copy( list_begin( l )[ size - i -
+ 1 ] );
+ return result;
+ }
+}
+
+int list_cmp( LIST * t, LIST * s )
+{
+ int status = 0;
+ LISTITER t_it = list_begin( t );
+ LISTITER const t_end = list_end( t );
+ LISTITER s_it = list_begin( s );
+ LISTITER const s_end = list_end( s );
+
+ while ( !status && ( t_it != t_end || s_it != s_end ) )
+ {
+ char const * st = t_it != t_end ? object_str( list_item( t_it ) ) : "";
+ char const * ss = s_it != s_end ? object_str( list_item( s_it ) ) : "";
+
+ status = strcmp( st, ss );
+
+ t_it = t_it != t_end ? list_next( t_it ) : t_it;
+ s_it = s_it != s_end ? list_next( s_it ) : s_it;
+ }
+
+ return status;
+}
+
+int list_is_sublist( LIST * sub, LIST * l )
+{
+ LISTITER iter = list_begin( sub );
+ LISTITER const end = list_end( sub );
+ for ( ; iter != end; iter = list_next( iter ) )
+ if ( !list_in( l, list_item( iter ) ) )
+ return 0;
+ return 1;
+}
+
+/*
+ * list_print() - print a list of strings to stdout
+ */
+
+void list_print( LIST * l )
+{
+ LISTITER iter = list_begin( l ), end = list_end( l );
+ if ( iter != end )
+ {
+ printf( "%s", object_str( list_item( iter ) ) );
+ iter = list_next( iter );
+ for ( ; iter != end; iter = list_next( iter ) )
+ printf( " %s", object_str( list_item( iter ) ) );
+ }
+}
+
+
+/*
+ * list_length() - return the number of items in the list
+ */
+
+int list_length( LIST * l )
+{
+ return l ? l->impl.size : 0;
+}
+
+
+int list_in( LIST * l, OBJECT * value )
+{
+ LISTITER iter = list_begin( l );
+ LISTITER end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ if ( object_equal( list_item( iter ), value ) )
+ return 1;
+ return 0;
+}
+
+
+LIST * list_unique( LIST * sorted_list )
+{
+ LIST * result = L0;
+ OBJECT * last_added = 0;
+
+ LISTITER iter = list_begin( sorted_list ), end = list_end( sorted_list );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( !last_added || !object_equal( list_item( iter ), last_added ) )
+ {
+ result = list_push_back( result, object_copy( list_item( iter ) ) );
+ last_added = list_item( iter );
+ }
+ }
+ return result;
+}
+
+void list_done()
+{
+ int i;
+ for ( i = 0; i < sizeof( freelist ) / sizeof( freelist[ 0 ] ); ++i )
+ {
+ LIST * l = freelist[ i ];
+ while ( l )
+ {
+ LIST * const tmp = l;
+ l = l->impl.next;
+ BJAM_FREE( tmp );
+ }
+ }
+}
+
+
+/*
+ * lol_init() - initialize a LOL (list of lists).
+ */
+
+void lol_init( LOL * lol )
+{
+ lol->count = 0;
+}
+
+
+/*
+ * lol_add() - append a LIST onto an LOL.
+ */
+
+void lol_add( LOL * lol, LIST * l )
+{
+ if ( lol->count < LOL_MAX )
+ lol->list[ lol->count++ ] = l;
+}
+
+
+/*
+ * lol_free() - free the LOL and its LISTs.
+ */
+
+void lol_free( LOL * lol )
+{
+ int i;
+ for ( i = 0; i < lol->count; ++i )
+ list_free( lol->list[ i ] );
+ lol->count = 0;
+}
+
+
+/*
+ * lol_get() - return one of the LISTs in the LOL.
+ */
+
+LIST * lol_get( LOL * lol, int i )
+{
+ return i < lol->count ? lol->list[ i ] : L0;
+}
+
+
+/*
+ * lol_print() - debug print LISTS separated by ":".
+ */
+
+void lol_print( LOL * lol )
+{
+ int i;
+ for ( i = 0; i < lol->count; ++i )
+ {
+ if ( i )
+ printf( " : " );
+ list_print( lol->list[ i ] );
+ }
+}
+
+#ifdef HAVE_PYTHON
+
+PyObject * list_to_python( LIST * l )
+{
+ PyObject * result = PyList_New( 0 );
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ PyObject * s = PyString_FromString( object_str( list_item( iter ) ) );
+ PyList_Append( result, s );
+ Py_DECREF( s );
+ }
+
+ return result;
+}
+
+LIST * list_from_python( PyObject * l )
+{
+ LIST * result = L0;
+
+ Py_ssize_t n = PySequence_Size( l );
+ Py_ssize_t i;
+ for ( i = 0; i < n; ++i )
+ {
+ PyObject * v = PySequence_GetItem( l, i );
+ result = list_push_back( result, object_new( PyString_AsString( v ) ) );
+ Py_DECREF( v );
+ }
+
+ return result;
+}
+
+#endif
diff --git a/tools/build/src/engine/lists.h b/tools/build/src/engine/lists.h
new file mode 100644
index 0000000000..3dd8fe873c
--- /dev/null
+++ b/tools/build/src/engine/lists.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * lists.h - the LIST structure and routines to manipulate them
+ *
+ * The whole of jam relies on lists of objects as a datatype. This module, in
+ * conjunction with object.c, handles these relatively efficiently.
+ *
+ * Structures defined:
+ *
+ * LIST - list of OBJECTs
+ * LOL - list of LISTs
+ *
+ * External routines:
+ *
+ * list_append() - append a list onto another one, returning total
+ * list_new() - create a new single-element list holding one object
+ * list_copy() - copy a whole list of objects
+ * list_sublist() - copy a subset of a list of objects
+ * list_free() - free a list of objects
+ * list_print() - print a list of objects to stdout
+ * list_length() - return the number of items in the list
+ *
+ * lol_init() - initialize a LOL (list of lists)
+ * lol_add() - append a LIST onto an LOL
+ * lol_free() - free the LOL and its LISTs
+ * lol_get() - return one of the LISTs in the LOL
+ * lol_print() - debug print LISTS separated by ":"
+ */
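+
+/* Minimal usage sketch (illustrative only, using the declarations below):
+ *
+ * LIST * l = list_new( object_new( "hello" ) );
+ * l = list_push_back( l, object_new( "world" ) );
+ * {
+ * LISTITER iter = list_begin( l ), end = list_end( l );
+ * for ( ; iter != end; iter = list_next( iter ) )
+ * printf( "%s ", object_str( list_item( iter ) ) );
+ * }
+ * list_free( l );
+ *
+ * list_free() releases the OBJECTs as well as the LIST node itself, so the
+ * values must not be used afterwards.
+ */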
+
+#ifndef LISTS_DWA20011022_H
+#define LISTS_DWA20011022_H
+
+#include "object.h"
+
+#ifdef HAVE_PYTHON
+# include <Python.h>
+#endif
+
+/*
+ * LIST - list of OBJECTs
+ */
+
+typedef struct _list {
+ union {
+ int size;
+ struct _list * next;
+ OBJECT * align;
+ } impl;
+} LIST;
+
+typedef OBJECT * * LISTITER;
+
+/*
+ * LOL - list of LISTs
+ */
+
+#define LOL_MAX 19
+typedef struct _lol {
+ int count;
+ LIST * list[ LOL_MAX ];
+} LOL;
+
+LIST * list_new( OBJECT * value );
+LIST * list_append( LIST * destination, LIST * source );
+LIST * list_copy( LIST * );
+LIST * list_copy_range( LIST * destination, LISTITER first, LISTITER last );
+void list_free( LIST * head );
+LIST * list_push_back( LIST * head, OBJECT * value );
+void list_print( LIST * );
+int list_length( LIST * );
+LIST * list_sublist( LIST *, int start, int count );
+LIST * list_pop_front( LIST * );
+LIST * list_sort( LIST * );
+LIST * list_unique( LIST * sorted_list );
+int list_in( LIST *, OBJECT * value );
+LIST * list_reverse( LIST * );
+int list_cmp( LIST * lhs, LIST * rhs );
+int list_is_sublist( LIST * sub, LIST * l );
+void list_done();
+
+LISTITER list_begin( LIST * );
+LISTITER list_end( LIST * );
+#define list_next( it ) ((it) + 1)
+#define list_item( it ) (*(it))
+#define list_empty( l ) ((l) == L0)
+#define list_front( l ) list_item( list_begin( l ) )
+
+#define L0 ((LIST *)0)
+
+void lol_add( LOL *, LIST * );
+void lol_init( LOL * );
+void lol_free( LOL * );
+LIST * lol_get( LOL *, int i );
+void lol_print( LOL * );
+void lol_build( LOL *, char const * * elements );
+
+#ifdef HAVE_PYTHON
+PyObject * list_to_python( LIST * );
+LIST * list_from_python( PyObject * );
+#endif
+
+#endif
diff --git a/tools/build/src/engine/make.c b/tools/build/src/engine/make.c
new file mode 100644
index 0000000000..c83f525c83
--- /dev/null
+++ b/tools/build/src/engine/make.c
@@ -0,0 +1,935 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * make.c - bring a target up to date, once rules are in place.
+ *
+ * This module controls the execution of rules to bring a target and its
+ * dependencies up to date. It is invoked after the targets, rules, etc.
+ * described in rules.h have been created by interpreting the Jamfiles.
+ *
+ * This file contains the main make() entry point and the first pass make0().
+ * The second pass, make1(), which actually does the command execution, is in
+ * make1.c.
+ *
+ * External routines:
+ * make() - make a target, given its name
+ *
+ * Internal routines:
+ * make0() - bind and scan everything to make a TARGET
+ * make0sort() - reorder TARGETS chain by their time (newest to oldest)
+ */
+
+#include "jam.h"
+#include "make.h"
+
+#include "command.h"
+#ifdef OPT_HEADER_CACHE_EXT
+# include "hcache.h"
+#endif
+#include "headers.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "timestamp.h"
+#include "variable.h"
+
+#include <assert.h>
+
+#ifndef max
+# define max(a,b) ((a)>(b)?(a):(b))
+#endif
+
+static TARGETS * make0sort( TARGETS * c );
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ static void dependGraphOutput( TARGET * t, int depth );
+#endif
+
+static char const * target_fate[] =
+{
+ "init", /* T_FATE_INIT */
+ "making", /* T_FATE_MAKING */
+ "stable", /* T_FATE_STABLE */
+ "newer", /* T_FATE_NEWER */
+ "temp", /* T_FATE_ISTMP */
+ "touched", /* T_FATE_TOUCHED */
+ "rebuild", /* T_FATE_REBUILD */
+ "missing", /* T_FATE_MISSING */
+ "needtmp", /* T_FATE_NEEDTMP */
+ "old", /* T_FATE_OUTDATED */
+ "update", /* T_FATE_UPDATE */
+ "nofind", /* T_FATE_CANTFIND */
+ "nomake" /* T_FATE_CANTMAKE */
+};
+
+static char const * target_bind[] =
+{
+ "unbound",
+ "missing",
+ "parents",
+ "exists",
+};
+
+#define spaces(x) ( "                    " + ( x > 20 ? 0 : 20-x ) )
+
+
+/*
+ * make() - make a target, given its name.
+ */
+
+int make( LIST * targets, int anyhow )
+{
+ COUNTS counts[ 1 ];
+ int status = 0; /* 1 if anything fails */
+
+#ifdef OPT_HEADER_CACHE_EXT
+ hcache_init();
+#endif
+
+ memset( (char *)counts, 0, sizeof( *counts ) );
+
+ /* First bind all targets with LOCATE_TARGET setting. This is needed to
+ * correctly handle dependencies to generated headers.
+ */
+ bind_explicitly_located_targets();
+
+ {
+ LISTITER iter, end;
+ PROFILE_ENTER( MAKE_MAKE0 );
+ for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
+ {
+ TARGET * t = bindtarget( list_item( iter ) );
+ if ( t->fate == T_FATE_INIT )
+ make0( t, 0, 0, counts, anyhow, 0 );
+ }
+ PROFILE_EXIT( MAKE_MAKE0 );
+ }
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_GRAPH )
+ {
+ LISTITER iter, end;
+ for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
+ dependGraphOutput( bindtarget( list_item( iter ) ), 0 );
+ }
+#endif
+
+ if ( DEBUG_MAKE )
+ {
+ if ( counts->targets )
+ printf( "...found %d target%s...\n", counts->targets,
+ counts->targets > 1 ? "s" : "" );
+ if ( counts->temp )
+ printf( "...using %d temp target%s...\n", counts->temp,
+ counts->temp > 1 ? "s" : "" );
+ if ( counts->updating )
+ printf( "...updating %d target%s...\n", counts->updating,
+ counts->updating > 1 ? "s" : "" );
+ if ( counts->cantfind )
+ printf( "...can't find %d target%s...\n", counts->cantfind,
+ counts->cantfind > 1 ? "s" : "" );
+ if ( counts->cantmake )
+ printf( "...can't make %d target%s...\n", counts->cantmake,
+ counts->cantmake > 1 ? "s" : "" );
+ }
+
+ status = counts->cantfind || counts->cantmake;
+
+ {
+ PROFILE_ENTER( MAKE_MAKE1 );
+ status |= make1( targets );
+ PROFILE_EXIT( MAKE_MAKE1 );
+ }
+
+ return status;
+}
+
+
+/* Force an update of any dependants of t whose make0() visit has already at
+ * least begun.
+ */
+
+static void force_rebuilds( TARGET * t );
+
+static void update_dependants( TARGET * t )
+{
+ TARGETS * q;
+
+ for ( q = t->dependants; q; q = q->next )
+ {
+ TARGET * p = q->target;
+ char fate0 = p->fate;
+
+ /* If we have already at least begun visiting it and we are not already
+ * rebuilding it for other reasons.
+ */
+ if ( ( fate0 != T_FATE_INIT ) && ( fate0 < T_FATE_BUILD ) )
+ {
+ p->fate = T_FATE_UPDATE;
+
+ if ( DEBUG_FATE )
+ {
+ printf( "fate change %s from %s to %s (as dependant of %s)\n",
+ object_str( p->name ), target_fate[ (int) fate0 ], target_fate[ (int) p->fate ], object_str( t->name ) );
+ }
+
+ /* If we are done visiting it, go back and make sure its dependants
+ * get rebuilt.
+ */
+ if ( fate0 > T_FATE_MAKING )
+ update_dependants( p );
+ }
+ }
+ /* Make sure that rebuilds can be chained. */
+ force_rebuilds( t );
+}
+
+
+/*
+ * Make sure that all of t's rebuilds get rebuilt.
+ */
+
+static void force_rebuilds( TARGET * t )
+{
+ TARGETS * d;
+ for ( d = t->rebuilds; d; d = d->next )
+ {
+ TARGET * r = d->target;
+
+ /* If it is not already being rebuilt for other reasons. */
+ if ( r->fate < T_FATE_BUILD )
+ {
+ if ( DEBUG_FATE )
+ printf( "fate change %s from %s to %s (by rebuild)\n",
+ object_str( r->name ), target_fate[ (int) r->fate ], target_fate[ T_FATE_REBUILD ] );
+
+ /* Force rebuild it. */
+ r->fate = T_FATE_REBUILD;
+
+ /* And make sure its dependants are updated too. */
+ update_dependants( r );
+ }
+ }
+}
+
+
+int make0rescan( TARGET * t, TARGET * rescanning )
+{
+ int result = 0;
+ TARGETS * c;
+
+ /* Check whether we have already found a cycle. */
+ if ( target_scc( t ) == rescanning )
+ return 1;
+
+ /* If we have already visited this node, ignore it. */
+ if ( t->rescanning == rescanning )
+ return 0;
+
+ /* If t is already updated, ignore it. */
+ if ( t->scc_root == NULL && t->progress > T_MAKE_ACTIVE )
+ return 0;
+
+ t->rescanning = rescanning;
+ for ( c = t->depends; c; c = c->next )
+ {
+ TARGET * dependency = c->target;
+ /* Always start at the root of each new strongly connected component. */
+ if ( target_scc( dependency ) != target_scc( t ) )
+ dependency = target_scc( dependency );
+ result |= make0rescan( dependency, rescanning );
+
+ /* Make sure that we pick up the new include node. */
+ if ( c->target->includes == rescanning )
+ result = 1;
+ }
+ if ( result && t->scc_root == NULL )
+ {
+ t->scc_root = rescanning;
+ rescanning->depends = targetentry( rescanning->depends, t );
+ }
+ return result;
+}
+
+
+/*
+ * make0() - bind and scan everything to make a TARGET.
+ *
+ * Recursively binds a target, searches for #included headers, calls itself on
+ * those headers and any dependencies.
+ */
+
+void make0
+(
+ TARGET * t,
+ TARGET * p, /* parent */
+ int depth, /* for display purposes */
+ COUNTS * counts, /* for reporting */
+ int anyhow,
+ TARGET * rescanning
+) /* forcibly touch all (real) targets */
+{
+ TARGETS * c;
+ TARGET * ptime = t;
+ TARGET * located_target = 0;
+ timestamp last;
+ timestamp leaf;
+ timestamp hlast;
+ int fate;
+ char const * flag = "";
+ SETTINGS * s;
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ int savedFate;
+ int oldTimeStamp;
+#endif
+
+ if ( DEBUG_MAKEPROG )
+ printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) );
+
+ /*
+ * Step 1: Initialize.
+ */
+
+ t->fate = T_FATE_MAKING;
+ t->depth = depth;
+
+ /*
+ * Step 2: Under the influence of "on target" variables, bind the target and
+ * search for headers.
+ */
+
+ /* Step 2a: Set "on target" variables. */
+ s = copysettings( t->settings );
+ pushsettings( root_module(), s );
+
+ /* Step 2b: Find and timestamp the target file (if it is a file). */
+ if ( ( t->binding == T_BIND_UNBOUND ) && !( t->flags & T_FLAG_NOTFILE ) )
+ {
+ OBJECT * another_target;
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, &another_target,
+ t->flags & T_FLAG_ISFILE );
+ /* If it was detected that this target refers to an already existing and
+ * bound target, we add a dependency so that every target depending on
+ * us will depend on that other target as well.
+ */
+ if ( another_target )
+ located_target = bindtarget( another_target );
+
+ t->binding = timestamp_empty( &t->time )
+ ? T_BIND_MISSING
+ : T_BIND_EXISTS;
+ }
+
+ /* INTERNAL, NOTFILE header nodes have the time of their parents. */
+ if ( p && ( t->flags & T_FLAG_INTERNAL ) )
+ ptime = p;
+
+ /* If temp file does not exist but parent does, use parent. */
+ if ( p && ( t->flags & T_FLAG_TEMP ) &&
+ ( t->binding == T_BIND_MISSING ) &&
+ ( p->binding != T_BIND_MISSING ) )
+ {
+ t->binding = T_BIND_PARENTS;
+ ptime = p;
+ }
+
+#ifdef OPT_SEMAPHORE
+ {
+ LIST * var = var_get( root_module(), constant_JAM_SEMAPHORE );
+ if ( !list_empty( var ) )
+ {
+ TARGET * const semaphore = bindtarget( list_front( var ) );
+ semaphore->progress = T_MAKE_SEMAPHORE;
+ t->semaphore = semaphore;
+ }
+ }
+#endif
+
+ /* Step 2c: If it is a file, search for headers. */
+ if ( t->binding == T_BIND_EXISTS )
+ headers( t );
+
+ /* Step 2d: reset "on target" variables. */
+ popsettings( root_module(), s );
+ freesettings( s );
+
+ /*
+ * Pause for a little progress reporting.
+ */
+
+ if ( DEBUG_BIND )
+ {
+ if ( !object_equal( t->name, t->boundname ) )
+ printf( "bind\t--\t%s%s: %s\n", spaces( depth ),
+ object_str( t->name ), object_str( t->boundname ) );
+
+ switch ( t->binding )
+ {
+ case T_BIND_UNBOUND:
+ case T_BIND_MISSING:
+ case T_BIND_PARENTS:
+ printf( "time\t--\t%s%s: %s\n", spaces( depth ),
+ object_str( t->name ), target_bind[ (int)t->binding ] );
+ break;
+
+ case T_BIND_EXISTS:
+ printf( "time\t--\t%s%s: %s\n", spaces( depth ),
+ object_str( t->name ), timestamp_str( &t->time ) );
+ break;
+ }
+ }
+
+ /*
+ * Step 3: Recursively make0() dependencies & headers.
+ */
+
+ /* Step 3a: Recursively make0() dependencies. */
+ for ( c = t->depends; c; c = c->next )
+ {
+ int const internal = t->flags & T_FLAG_INTERNAL;
+
+ /* Warn about circular deps, except for includes, which include each
+ * other a lot.
+ */
+ if ( c->target->fate == T_FATE_INIT )
+ make0( c->target, ptime, depth + 1, counts, anyhow, rescanning );
+ else if ( c->target->fate == T_FATE_MAKING && !internal )
+ printf( "warning: %s depends on itself\n", object_str(
+ c->target->name ) );
+ else if ( c->target->fate != T_FATE_MAKING && rescanning )
+ make0rescan( c->target, rescanning );
+ if ( rescanning && c->target->includes && c->target->includes->fate !=
+ T_FATE_MAKING )
+ make0rescan( target_scc( c->target->includes ), rescanning );
+ }
+
+ if ( located_target )
+ {
+ if ( located_target->fate == T_FATE_INIT )
+ make0( located_target, ptime, depth + 1, counts, anyhow, rescanning
+ );
+ else if ( located_target->fate != T_FATE_MAKING && rescanning )
+ make0rescan( located_target, rescanning );
+ }
+
+ /* Step 3b: Recursively make0() internal includes node. */
+ if ( t->includes )
+ make0( t->includes, p, depth + 1, counts, anyhow, rescanning );
+
+ /* Step 3c: Add dependencies' includes to our direct dependencies. */
+ {
+ TARGETS * incs = 0;
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->includes )
+ incs = targetentry( incs, c->target->includes );
+ t->depends = targetchain( t->depends, incs );
+ }
+
+ if ( located_target )
+ t->depends = targetentry( t->depends, located_target );
+
+ /* Step 3d: Detect cycles. */
+ {
+ int cycle_depth = depth;
+ for ( c = t->depends; c; c = c->next )
+ {
+ TARGET * scc_root = target_scc( c->target );
+ if ( scc_root->fate == T_FATE_MAKING &&
+ ( !scc_root->includes ||
+ scc_root->includes->fate != T_FATE_MAKING ) )
+ {
+ if ( scc_root->depth < cycle_depth )
+ {
+ cycle_depth = scc_root->depth;
+ t->scc_root = scc_root;
+ }
+ }
+ }
+ }
+
+ /*
+ * Step 4: Compute time & fate.
+ */
+
+ /* Step 4a: Pick up dependencies' time and fate. */
+ timestamp_clear( &last );
+ timestamp_clear( &leaf );
+ fate = T_FATE_STABLE;
+ for ( c = t->depends; c; c = c->next )
+ {
+ /* If we are in a different strongly connected component, pull
+ * timestamps from the root.
+ */
+ if ( c->target->scc_root )
+ {
+ TARGET * const scc_root = target_scc( c->target );
+ if ( scc_root != t->scc_root )
+ {
+ timestamp_max( &c->target->leaf, &c->target->leaf,
+ &scc_root->leaf );
+ timestamp_max( &c->target->time, &c->target->time,
+ &scc_root->time );
+ c->target->fate = max( c->target->fate, scc_root->fate );
+ }
+ }
+
+ /* If LEAVES has been applied, we only heed the timestamps of the leaf
+ * source nodes.
+ */
+ timestamp_max( &leaf, &leaf, &c->target->leaf );
+ if ( t->flags & T_FLAG_LEAVES )
+ {
+ timestamp_copy( &last, &leaf );
+ continue;
+ }
+ timestamp_max( &last, &last, &c->target->time );
+ fate = max( fate, c->target->fate );
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ if ( fate < c->target->fate )
+ printf( "fate change %s from %s to %s by dependency %s\n",
+ object_str( t->name ), target_fate[ (int)fate ],
+ target_fate[ (int)c->target->fate ], object_str(
+ c->target->name ) );
+#endif
+ }
+
+ /* Step 4b: Pick up included headers time. */
+
+ /*
+ * If a header is newer than a temp source that includes it, the temp source
+ * will need building.
+ */
+ if ( t->includes )
+ timestamp_copy( &hlast, &t->includes->time );
+ else
+ timestamp_clear( &hlast );
+
+ /* Step 4c: handle NOUPDATE oddity.
+ *
+ * If a NOUPDATE file exists, mark it as having eternally old dependencies.
+ * Do not inherit our fate from our dependencies. Decide fate based only on
+ * other flags and our binding (done later).
+ */
+ if ( t->flags & T_FLAG_NOUPDATE )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ if ( fate != T_FATE_STABLE )
+ printf( "fate change %s back to stable, NOUPDATE.\n",
+ object_str( t->name ) );
+#endif
+
+ timestamp_clear( &last );
+ timestamp_clear( &t->time );
+
+ /* Do not inherit our fate from our dependencies. Decide fate based only
+ * upon other flags and our binding (done later).
+ */
+ fate = T_FATE_STABLE;
+ }
+
+ /* Step 4d: Determine fate: rebuild target or what? */
+
+ /*
+ In English:
+ If can not find or make child, can not make target.
+ If children changed, make target.
+ If target missing, make it.
+ If children newer, make target.
+ If temp's children newer than parent, make temp.
+ If temp's headers newer than parent, make temp.
+ If deliberately touched, make it.
+ If up-to-date temp file present, use it.
+ If target newer than non-notfile parent, mark target newer.
+ Otherwise, stable!
+
+ Note this block runs from least to most stable: as we make it further
+ down the list, the target's fate gets more stable.
+ */
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+ savedFate = fate;
+ oldTimeStamp = 0;
+#endif
+
+ if ( fate >= T_FATE_BROKEN )
+ {
+ fate = T_FATE_CANTMAKE;
+ }
+ else if ( fate >= T_FATE_SPOIL )
+ {
+ fate = T_FATE_UPDATE;
+ }
+ else if ( t->binding == T_BIND_MISSING )
+ {
+ fate = T_FATE_MISSING;
+ }
+ else if ( t->binding == T_BIND_EXISTS && timestamp_cmp( &last, &t->time ) >
+ 0 )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_OUTDATED;
+ }
+ else if ( t->binding == T_BIND_PARENTS && timestamp_cmp( &last, &p->time ) >
+ 0 )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_NEEDTMP;
+ }
+ else if ( t->binding == T_BIND_PARENTS && timestamp_cmp( &hlast, &p->time )
+ > 0 )
+ {
+ fate = T_FATE_NEEDTMP;
+ }
+ else if ( t->flags & T_FLAG_TOUCHED )
+ {
+ fate = T_FATE_TOUCHED;
+ }
+ else if ( anyhow && !( t->flags & T_FLAG_NOUPDATE ) )
+ {
+ fate = T_FATE_TOUCHED;
+ }
+ else if ( t->binding == T_BIND_EXISTS && ( t->flags & T_FLAG_TEMP ) )
+ {
+ fate = T_FATE_ISTMP;
+ }
+ else if ( t->binding == T_BIND_EXISTS && p && p->binding != T_BIND_UNBOUND
+ && timestamp_cmp( &t->time, &p->time ) > 0 )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ oldTimeStamp = 1;
+#endif
+ fate = T_FATE_NEWER;
+ }
+ else
+ {
+ fate = T_FATE_STABLE;
+ }
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE && ( fate != savedFate ) )
+ {
+ if ( savedFate == T_FATE_STABLE )
+ printf( "fate change %s set to %s%s\n", object_str( t->name ),
+ target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" );
+ else
+ printf( "fate change %s from %s to %s%s\n", object_str( t->name ),
+ target_fate[ savedFate ], target_fate[ fate ], oldTimeStamp ?
+ " (by timestamp)" : "" );
+ }
+#endif
+
+ /* Step 4e: Handle missing files. */
+ /* If it is missing and there are no actions to create it, boom. */
+ /* If we can not make a target we do not care about, okay. */
+ /* We could insist that there are updating actions for all missing */
+ /* files, but if they have dependencies we just pretend it is a NOTFILE. */
+
+ if ( ( fate == T_FATE_MISSING ) && !t->actions && !t->depends )
+ {
+ if ( t->flags & T_FLAG_NOCARE )
+ {
+#ifdef OPT_GRAPH_DEBUG_EXT
+ if ( DEBUG_FATE )
+ printf( "fate change %s to STABLE from %s, "
+ "no actions, no dependencies and do not care\n",
+ object_str( t->name ), target_fate[ fate ] );
+#endif
+ fate = T_FATE_STABLE;
+ }
+ else
+ {
+ printf( "don't know how to make %s\n", object_str( t->name ) );
+ fate = T_FATE_CANTFIND;
+ }
+ }
+
+ /* Step 4f: Propagate dependencies' time & fate. */
+ /* Set leaf time to be our time only if this is a leaf. */
+
+ timestamp_max( &t->time, &t->time, &last );
+ timestamp_copy( &t->leaf, timestamp_empty( &leaf ) ? &t->time : &leaf );
+ /* This target's fate may have been updated by virtue of following some
+ * target's rebuilds list, so only allow it to be increased to the fate we
+ * have calculated. Otherwise, grab its new fate.
+ */
+ if ( fate > t->fate )
+ t->fate = fate;
+ else
+ fate = t->fate;
+
+ /*
+ * Step 4g: If this target needs to be built, make0 all targets
+ * that are updated by the same actions used to update this target.
+ * These have already been marked as REBUILDS, and make1 has
+ * special handling for them. We just need to make sure that
+ * they get make0ed.
+ */
+ if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
+ {
+ ACTIONS * a;
+ TARGETS * c;
+ for ( a = t->actions; a; a = a->next )
+ {
+ for ( c = a->action->targets; c; c = c->next )
+ {
+ if ( c->target->fate == T_FATE_INIT )
+ {
+ make0( c->target, ptime, depth + 1, counts, anyhow, rescanning );
+ }
+ }
+ }
+ }
+
+ /* Step 4h: If this target needs to be built, force rebuild everything in
+ * its rebuilds list.
+ */
+ if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
+ force_rebuilds( t );
+
+ /*
+ * Step 5: Sort dependencies by their update time.
+ */
+
+ if ( globs.newestfirst )
+ t->depends = make0sort( t->depends );
+
+ /*
+ * Step 6: A little harmless tabulating for tracing purposes.
+ */
+
+ /* Do not count or report internal includes nodes. */
+ if ( t->flags & T_FLAG_INTERNAL )
+ return;
+
+ if ( counts )
+ {
+#ifdef OPT_IMPROVED_PATIENCE_EXT
+ ++counts->targets;
+#else
+ if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE )
+ {
+ printf( "...patience...\n" );
+ fflush(stdout);
+ }
+#endif
+
+ if ( fate == T_FATE_ISTMP )
+ ++counts->temp;
+ else if ( fate == T_FATE_CANTFIND )
+ ++counts->cantfind;
+ else if ( ( fate == T_FATE_CANTMAKE ) && t->actions )
+ ++counts->cantmake;
+ else if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) &&
+ t->actions )
+ ++counts->updating;
+ }
+
+ if ( !( t->flags & T_FLAG_NOTFILE ) && ( fate >= T_FATE_SPOIL ) )
+ flag = "+";
+ else if ( t->binding == T_BIND_EXISTS && p && timestamp_cmp( &t->time,
+ &p->time ) > 0 )
+ flag = "*";
+
+ if ( DEBUG_MAKEPROG )
+ printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int)t->fate ],
+ spaces( depth ), object_str( t->name ) );
+}
+
+
+#ifdef OPT_GRAPH_DEBUG_EXT
+
+static char const * target_name( TARGET * t )
+{
+ static char buf[ 1000 ];
+ if ( t->flags & T_FLAG_INTERNAL )
+ {
+ sprintf( buf, "%s (internal node)", object_str( t->name ) );
+ return buf;
+ }
+ return object_str( t->name );
+}
+
+
+/*
+ * dependGraphOutput() - output the DG after make0 has run.
+ */
+
+static void dependGraphOutput( TARGET * t, int depth )
+{
+ TARGETS * c;
+
+ if ( ( t->flags & T_FLAG_VISITED ) || !t->name || !t->boundname )
+ return;
+
+ t->flags |= T_FLAG_VISITED;
+
+ switch ( t->fate )
+ {
+ case T_FATE_TOUCHED:
+ case T_FATE_MISSING:
+ case T_FATE_OUTDATED:
+ case T_FATE_UPDATE:
+ printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t
+ ) );
+ break;
+ default:
+ printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t
+ ) );
+ break;
+ }
+
+ if ( !object_equal( t->name, t->boundname ) )
+ printf( " %s Loc: %s\n", spaces( depth ), object_str( t->boundname )
+ );
+
+ switch ( t->fate )
+ {
+ case T_FATE_STABLE:
+ printf( " %s : Stable\n", spaces( depth ) );
+ break;
+ case T_FATE_NEWER:
+ printf( " %s : Newer\n", spaces( depth ) );
+ break;
+ case T_FATE_ISTMP:
+ printf( " %s : Up to date temp file\n", spaces( depth ) );
+ break;
+ case T_FATE_NEEDTMP:
+ printf( " %s : Temporary file, to be updated\n", spaces( depth )
+ );
+ break;
+ case T_FATE_TOUCHED:
+ printf( " %s : Been touched, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_MISSING:
+ printf( " %s : Missing, creating it\n", spaces( depth ) );
+ break;
+ case T_FATE_OUTDATED:
+ printf( " %s : Outdated, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_REBUILD:
+ printf( " %s : Rebuild, updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_UPDATE:
+ printf( " %s : Updating it\n", spaces( depth ) );
+ break;
+ case T_FATE_CANTFIND:
+ printf( " %s : Can not find it\n", spaces( depth ) );
+ break;
+ case T_FATE_CANTMAKE:
+ printf( " %s : Can make it\n", spaces( depth ) );
+ break;
+ }
+
+ if ( t->flags & ~T_FLAG_VISITED )
+ {
+ printf( " %s : ", spaces( depth ) );
+ if ( t->flags & T_FLAG_TEMP ) printf( "TEMPORARY " );
+ if ( t->flags & T_FLAG_NOCARE ) printf( "NOCARE " );
+ if ( t->flags & T_FLAG_NOTFILE ) printf( "NOTFILE " );
+ if ( t->flags & T_FLAG_TOUCHED ) printf( "TOUCHED " );
+ if ( t->flags & T_FLAG_LEAVES ) printf( "LEAVES " );
+ if ( t->flags & T_FLAG_NOUPDATE ) printf( "NOUPDATE " );
+ printf( "\n" );
+ }
+
+ for ( c = t->depends; c; c = c->next )
+ {
+ printf( " %s : Depends on %s (%s)", spaces( depth ),
+ target_name( c->target ), target_fate[ (int)c->target->fate ] );
+ if ( !timestamp_cmp( &c->target->time, &t->time ) )
+ printf( " (max time)");
+ printf( "\n" );
+ }
+
+ for ( c = t->depends; c; c = c->next )
+ dependGraphOutput( c->target, depth + 1 );
+}
+#endif
+
+
+/*
+ * make0sort() - reorder TARGETS chain by their time (newest to oldest).
+ *
+ * We walk the chain, taking each item and inserting it into the sorted result,
+ * with the newest items at the front. This involves updating each of the TARGETS'
+ * c->next and c->tail. Note that we make c->tail a valid prev pointer for every
+ * entry. Normally, it is only valid at the head, where prev == tail. Note also
+ * that while tail is a loop, next ends at the end of the chain.
+ */
+
+static TARGETS * make0sort( TARGETS * chain )
+{
+ PROFILE_ENTER( MAKE_MAKE0SORT );
+
+ TARGETS * result = 0;
+
+ /* Walk the current target list. */
+ while ( chain )
+ {
+ TARGETS * c = chain;
+ TARGETS * s = result;
+
+ chain = chain->next;
+
+ /* Find point s in result for c. */
+ while ( s && timestamp_cmp( &s->target->time, &c->target->time ) > 0 )
+ s = s->next;
+
+ /* Insert c in front of s (might be 0). */
+ c->next = s; /* good even if s = 0 */
+ if ( result == s ) result = c; /* new head of chain? */
+ if ( !s ) s = result; /* wrap to ensure a next */
+ if ( result != c ) s->tail->next = c; /* not head? be prev's next */
+ c->tail = s->tail; /* take on next's prev */
+ s->tail = c; /* make next's prev us */
+ }
+
+ PROFILE_EXIT( MAKE_MAKE0SORT );
+ return result;
+}
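
The insertion above is easier to follow without the circular tail/prev bookkeeping of the TARGETS chain. Below is a minimal, self-contained sketch of the same newest-first insertion sort over a plain singly linked list; the node type and names are invented for illustration and are not part of this patch.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for a TARGETS node: a time stamp plus a next link only. */
typedef struct node
{
    int time;                 /* larger value == newer */
    struct node * next;
} node;

/* Newest-first insertion sort, mirroring the walk performed by make0sort(). */
static node * sort_newest_first( node * chain )
{
    node * result = 0;
    while ( chain )
    {
        node * c = chain;             /* take the head of the unsorted chain */
        node * * s = &result;         /* find its spot in the sorted result */
        chain = chain->next;
        while ( *s && ( *s )->time >= c->time )
            s = &( *s )->next;
        c->next = *s;                 /* splice c in front of the older entry */
        *s = c;
    }
    return result;
}

int main()
{
    int const times[] = { 10, 30, 20 };
    node * chain = 0;
    node * p;
    int i;
    for ( i = 2; i >= 0; --i )        /* build the chain 10 -> 30 -> 20 */
    {
        node * n = (node *)malloc( sizeof( node ) );
        n->time = times[ i ];
        n->next = chain;
        chain = n;
    }
    chain = sort_newest_first( chain );
    for ( p = chain; p; p = p->next )
        printf( "%d\n", p->time );    /* prints 30, 20, 10 */
    return 0;
}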
+
+
+static LIST * targets_to_update_ = L0;
+
+
+void mark_target_for_updating( OBJECT * target )
+{
+ targets_to_update_ = list_push_back( targets_to_update_, object_copy(
+ target ) );
+}
+
+
+LIST * targets_to_update()
+{
+ return targets_to_update_;
+}
+
+
+void clear_targets_to_update()
+{
+ list_free( targets_to_update_ );
+ targets_to_update_ = L0;
+}
diff --git a/tools/build/src/engine/make.h b/tools/build/src/engine/make.h
new file mode 100644
index 0000000000..2c3ba16789
--- /dev/null
+++ b/tools/build/src/engine/make.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * make.h - bring a target up to date, once rules are in place
+ */
+
+#ifndef MAKE_SW20111118_H
+#define MAKE_SW20111118_H
+
+#include "lists.h"
+#include "object.h"
+#include "rules.h"
+
+int make( LIST * targets, int anyhow );
+int make1( LIST * t );
+
+typedef struct {
+ int temp;
+ int updating;
+ int cantfind;
+ int cantmake;
+ int targets;
+ int made;
+} COUNTS ;
+
+
+void make0( TARGET * t, TARGET * p, int depth, COUNTS * counts, int anyhow,
+ TARGET * rescanning );
+
+
+/* Specifies that the target should be updated. */
+void mark_target_for_updating( OBJECT * target );
+
+/* Returns targets previously passed to mark_target_for_updating(). */
+LIST * targets_to_update();
+
+/* Clears/unmarks all targets currently marked for update. */
+void clear_targets_to_update();
+
+#endif
diff --git a/tools/build/src/engine/make1.c b/tools/build/src/engine/make1.c
new file mode 100644
index 0000000000..5a96dc4e5d
--- /dev/null
+++ b/tools/build/src/engine/make1.c
@@ -0,0 +1,1460 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * make1.c - execute commands to bring targets up to date
+ *
+ * This module contains make1(), the entry point called by make() to recursively
+ * descend the dependency graph executing update actions as marked by make0().
+ *
+ * External routines:
+ * make1() - execute commands to update a TARGET and all of its dependencies
+ *
+ * Internal routines, the recursive/asynchronous command executors:
+ * make1a() - recursively schedules dependency builds and then goes to
+ * MAKE1B
+ * make1b() - if nothing is blocking this target's build, proceed to
+ * MAKE1C
+ * make1c() - launch target's next command, or go to parents' MAKE1B
+ * if none
+ * make1c_closure() - handle command execution completion and go to MAKE1C
+ *
+ * Internal support routines:
+ * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>)
+ * make1settings() - for vars with bound values, build up replacement lists
+ * make1bind() - bind targets that weren't bound in dependency analysis
+ */
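
For readers new to this file, the three MAKE1 phases listed above are driven by an explicit state stack rather than by recursion. The following is a minimal, self-contained sketch of that dispatch pattern only, with invented names and no Boost.Build types: a state either rewrites itself to the next phase, as make1a() does, or pops itself when its work is done, as make1c() does.

#include <stdio.h>
#include <stdlib.h>

enum phase { PHASE_A, PHASE_B, PHASE_C };

typedef struct step
{
    enum phase p;
    struct step * prev;
} step;

static step * stack = NULL;

static void push( enum phase p )
{
    step * const s = (step *)malloc( sizeof( step ) );
    s->p = p;
    s->prev = stack;
    stack = s;
}

static void pop()
{
    step * const s = stack;
    stack = s->prev;
    free( s );
}

int main()
{
    push( PHASE_A );
    while ( stack )
        switch ( stack->p )
        {
        case PHASE_A:
            printf( "A: schedule dependency processing\n" );
            stack->p = PHASE_B;        /* rewrite in place, like make1a() */
            break;
        case PHASE_B:
            printf( "B: dependencies accounted for, prepare commands\n" );
            stack->p = PHASE_C;
            break;
        case PHASE_C:
            printf( "C: run commands, then notify parents\n" );
            pop();                     /* done with this target */
            break;
        }
    return 0;
}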
+
+#include "jam.h"
+#include "make.h"
+
+#include "command.h"
+#include "compile.h"
+#include "execcmd.h"
+#include "headers.h"
+#include "lists.h"
+#include "object.h"
+#include "output.h"
+#include "parse.h"
+#include "rules.h"
+#include "search.h"
+#include "variable.h"
+
+#include <assert.h>
+#include <stdlib.h>
+
+#if !defined( NT ) || defined( __GNUC__ )
+ #include <unistd.h> /* for unlink */
+#endif
+
+static CMD * make1cmds ( TARGET * );
+static LIST * make1list ( LIST *, TARGETS *, int flags );
+static SETTINGS * make1settings ( struct module_t *, LIST * vars );
+static void make1bind ( TARGET * );
+static TARGET * make1findcycle ( TARGET * );
+static void make1breakcycle( TARGET *, TARGET * cycle_root );
+static void push_cmds( CMDLIST * cmds, int status );
+static int cmd_sem_lock( TARGET * t );
+static void cmd_sem_unlock( TARGET * t );
+
+static int targets_contains( TARGETS * l, TARGET * t );
+static int targets_equal( TARGETS * l1, TARGETS * l2 );
+
+/* Ugly static - it is too hard to carry it through the callbacks. */
+
+static struct
+{
+ int failed;
+ int skipped;
+ int total;
+ int made;
+} counts[ 1 ];
+
+/* Target state. */
+#define T_STATE_MAKE1A 0 /* make1a() should be called */
+#define T_STATE_MAKE1B 1 /* make1b() should be called */
+#define T_STATE_MAKE1C 2 /* make1c() should be called */
+
+typedef struct _state state;
+struct _state
+{
+ state * prev; /* previous state on stack */
+ TARGET * t; /* current target */
+ TARGET * parent; /* parent argument necessary for MAKE1A */
+ int curstate; /* current state */
+};
+
+static void make1a( state * const );
+static void make1b( state * const );
+static void make1c( state const * const );
+
+static void make1c_closure( void * const closure, int status,
+ timing_info const * const, char const * const cmd_stdout,
+ char const * const cmd_stderr, int const cmd_exit_reason );
+
+typedef struct _stack
+{
+ state * stack;
+} stack;
+
+static stack state_stack = { NULL };
+
+static state * state_freelist = NULL;
+
+/* Currently running command counter. */
+static int cmdsrunning;
+
+
+static state * alloc_state()
+{
+ if ( state_freelist )
+ {
+ state * const pState = state_freelist;
+ state_freelist = pState->prev;
+ memset( pState, 0, sizeof( state ) );
+ return pState;
+ }
+ return (state *)BJAM_MALLOC( sizeof( state ) );
+}
+
+
+static void free_state( state * const pState )
+{
+ pState->prev = state_freelist;
+ state_freelist = pState;
+}
+
+
+static void clear_state_freelist()
+{
+ while ( state_freelist )
+ {
+ state * const pState = state_freelist;
+ state_freelist = state_freelist->prev;
+ BJAM_FREE( pState );
+ }
+}
+
+
+static state * current_state( stack * const pStack )
+{
+ return pStack->stack;
+}
+
+
+static void pop_state( stack * const pStack )
+{
+ if ( pStack->stack )
+ {
+ state * const pState = pStack->stack->prev;
+ free_state( pStack->stack );
+ pStack->stack = pState;
+ }
+}
+
+
+static state * push_state( stack * const pStack, TARGET * const t,
+ TARGET * const parent, int const curstate )
+{
+ state * const pState = alloc_state();
+ pState->t = t;
+ pState->parent = parent;
+ pState->prev = pStack->stack;
+ pState->curstate = curstate;
+ return pStack->stack = pState;
+}
+
+
+/*
+ * Pushes a stack onto another stack, effectively reversing the order.
+ */
+
+static void push_stack_on_stack( stack * const pDest, stack * const pSrc )
+{
+ while ( pSrc->stack )
+ {
+ state * const pState = pSrc->stack;
+ pSrc->stack = pState->prev;
+ pState->prev = pDest->stack;
+ pDest->stack = pState;
+ }
+}
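
This transfer comes up again in make1() and make1a() below ("Using stacks reverses the order of execution. Reverse it back."). Here is a tiny self-contained sketch of the effect, using plain ints instead of states (illustrative only): items pushed onto a temporary stack come off in reverse, and moving that stack onto the main stack reverses them a second time, restoring the original order.

#include <stdio.h>

#define MAX_ITEMS 8

typedef struct { int items[ MAX_ITEMS ]; int top; } int_stack;

static void push ( int_stack * s, int v )  { s->items[ s->top++ ] = v; }
static int  pop  ( int_stack * s )         { return s->items[ --s->top ]; }
static int  empty( int_stack const * s )   { return s->top == 0; }

/* Move everything from src onto dest, reversing the order once more. */
static void transfer( int_stack * dest, int_stack * src )
{
    while ( !empty( src ) )
        push( dest, pop( src ) );
}

int main()
{
    int_stack temp = { { 0 }, 0 };
    int_stack work = { { 0 }, 0 };
    int i;
    for ( i = 1; i <= 3; ++i )        /* queue work items 1, 2, 3 */
        push( &temp, i );
    transfer( &work, &temp );         /* like push_stack_on_stack() */
    while ( !empty( &work ) )
        printf( "%d\n", pop( &work ) );   /* pops 1, 2, 3 - original order */
    return 0;
}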
+
+
+/*
+ * make1() - execute commands to update a list of targets and all of their dependencies
+ */
+
+static int intr = 0;
+static int quit = 0;
+
+int make1( LIST * targets )
+{
+ state * pState;
+ int status = 0;
+
+ memset( (char *)counts, 0, sizeof( *counts ) );
+
+ {
+ LISTITER iter, end;
+ stack temp_stack = { NULL };
+ for ( iter = list_begin( targets ), end = list_end( targets );
+ iter != end; iter = list_next( iter ) )
+ push_state( &temp_stack, bindtarget( list_item( iter ) ), NULL, T_STATE_MAKE1A );
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+
+ /* Clear any state left over from the past */
+ quit = 0;
+
+ /* Recursively make the target and its dependencies. */
+
+ while ( 1 )
+ {
+ while ( ( pState = current_state( &state_stack ) ) )
+ {
+ if ( quit )
+ pop_state( &state_stack );
+
+ switch ( pState->curstate )
+ {
+ case T_STATE_MAKE1A: make1a( pState ); break;
+ case T_STATE_MAKE1B: make1b( pState ); break;
+ case T_STATE_MAKE1C: make1c( pState ); break;
+ default:
+ assert( !"make1(): Invalid state detected." );
+ }
+ }
+ if ( !cmdsrunning )
+ break;
+ /* Wait for outstanding commands to finish running. */
+ exec_wait();
+ }
+
+ clear_state_freelist();
+
+ /* Talk about it. */
+ if ( counts->failed )
+ printf( "...failed updating %d target%s...\n", counts->failed,
+ counts->failed > 1 ? "s" : "" );
+ if ( DEBUG_MAKE && counts->skipped )
+ printf( "...skipped %d target%s...\n", counts->skipped,
+ counts->skipped > 1 ? "s" : "" );
+ if ( DEBUG_MAKE && counts->made )
+ printf( "...updated %d target%s...\n", counts->made,
+ counts->made > 1 ? "s" : "" );
+
+ /* If we were interrupted, exit now that all child processes
+ have finished. */
+ if ( intr )
+ exit( 1 );
+
+ {
+ LISTITER iter, end;
+ for ( iter = list_begin( targets ), end = list_end( targets );
+ iter != end; iter = list_next( iter ) )
+ {
+ /* Check that the target was updated and that the
+ update succeeded. */
+ TARGET * t = bindtarget( list_item( iter ) );
+ if (t->progress == T_MAKE_DONE)
+ {
+ if (t->status != EXEC_CMD_OK)
+ status = 1;
+ }
+ else if ( ! ( t->progress == T_MAKE_NOEXEC_DONE && globs.noexec ) )
+ {
+ status = 1;
+ }
+ }
+ }
+ return status;
+}
+
+
+/*
+ * make1a() - recursively schedules dependency builds and then goes to MAKE1B
+ *
+ * Called to start processing a specified target. Does nothing if the target is
+ * already being processed; otherwise, starts processing all of its
+ * dependencies.
+ */
+
+static void make1a( state * const pState )
+{
+ TARGET * t = pState->t;
+ TARGET * const scc_root = target_scc( t );
+
+ if ( !pState->parent || target_scc( pState->parent ) != scc_root )
+ pState->t = t = scc_root;
+
+ /* If the parent is the first to try to build this target or this target is
+ * in the MAKE1C quagmire, arrange for the parent to be notified when this
+ * target has been built.
+ */
+ if ( pState->parent && t->progress <= T_MAKE_RUNNING )
+ {
+ TARGET * const parent_scc = target_scc( pState->parent );
+ if ( t != parent_scc )
+ {
+ t->parents = targetentry( t->parents, parent_scc );
+ ++parent_scc->asynccnt;
+ }
+ }
+
+ /* If the target has been previously updated with -n in effect, and we are
+ * now ignoring -n, update it for real. E.g. if the UPDATE_NOW rule was
+ * called for it twice - first with the -n option and then without.
+ */
+ if ( !globs.noexec && t->progress == T_MAKE_NOEXEC_DONE )
+ t->progress = T_MAKE_INIT;
+
+ /* If this target is already being processed then do nothing. There is no
+ * need to start processing the same target all over again.
+ */
+ if ( t->progress != T_MAKE_INIT )
+ {
+ pop_state( &state_stack );
+ return;
+ }
+
+ /* Guard against circular dependencies. */
+ t->progress = T_MAKE_ONSTACK;
+
+ /* 'asynccnt' counts the dependencies preventing this target from proceeding
+ * to MAKE1C for actual building. We start off with a count of 1 to prevent
+ * anything from happening until we can notify all dependencies that they
+ * are needed. This 1 is then accounted for when we enter MAKE1B ourselves,
+ * below. Without this, if a dependency gets built before we finish
+ * processing all of our other dependencies, our build might be triggered
+ * prematurely.
+ */
+ t->asynccnt = 1;
+
+ /* Push dependency build requests (to be executed in the natural order). */
+ {
+ stack temp_stack = { NULL };
+ TARGETS * c;
+ for ( c = t->depends; c && !quit; c = c->next )
+ push_state( &temp_stack, c->target, t, T_STATE_MAKE1A );
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+
+ t->progress = T_MAKE_ACTIVE;
+
+ /* Once all of our dependencies have started getting processed we can move
+ * onto MAKE1B.
+ */
+ /* Implementation note:
+ * In theory this would be done by popping this state before pushing
+ * dependency target build requests but as a slight optimization we simply
+ * modify our current state and leave it on the stack instead.
+ */
+ pState->curstate = T_STATE_MAKE1B;
+}
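
The "start asynccnt at 1" convention explained above is a general guard against firing a completion step while registrations are still in progress. A minimal single-threaded sketch of just the counting follows; the on_ready() callback and the numbers are made up, and in make1 the extra count is consumed when the target itself reaches MAKE1B.

#include <stdio.h>

static int asynccnt;

/* Stand-in for moving on to the MAKE1B-like phase. */
static void on_ready()
{
    printf( "all dependencies accounted for - ready to proceed\n" );
}

/* Called once per completed dependency, and once to consume the guard count. */
static void release()
{
    if ( --asynccnt == 0 )
        on_ready();
}

int main()
{
    int const deps = 3;
    int i;

    asynccnt = 1;                  /* guard: nothing can fire while registering */
    for ( i = 0; i < deps; ++i )
        ++asynccnt;                /* one count per outstanding dependency */

    for ( i = 0; i < deps; ++i )   /* dependencies may finish in any order */
        release();

    release();                     /* consume the initial guard count */
    return 0;
}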
+
+
+/*
+ * make1b() - if nothing is blocking this target's build, proceed to MAKE1C
+ *
+ * Called after something stops blocking this target's build, e.g. all of its
+ * dependencies have started being processed, one of its dependencies has
+ * been built, or a semaphore this target has been waiting for is free again.
+ */
+
+static void make1b( state * const pState )
+{
+ TARGET * const t = pState->t;
+ TARGET * failed = 0;
+ char const * failed_name = "dependencies";
+
+ pop_state( &state_stack );
+
+ /* If any dependencies are still outstanding, wait until they signal their
+ * completion by pushing this same state for their parent targets.
+ */
+ if ( --t->asynccnt )
+ {
+ return;
+ }
+
+ /* Now ready to build target 't', if dependencies built OK. */
+
+ /* Collect status from dependencies. If -n was passed then act as though all
+ * dependencies built correctly (the only way they can fail is if UPDATE_NOW
+ * was called). If the dependencies can not be found or we got an interrupt,
+ * we can not get here.
+ */
+ if ( !globs.noexec )
+ {
+ TARGETS * c;
+ for ( c = t->depends; c; c = c->next )
+ if ( c->target->status > t->status && !( c->target->flags &
+ T_FLAG_NOCARE ) )
+ {
+ failed = c->target;
+ t->status = c->target->status;
+ }
+ }
+
+ /* If an internal header node failed to build, we want to output the target
+ * that it failed on.
+ */
+ if ( failed )
+ failed_name = failed->flags & T_FLAG_INTERNAL
+ ? failed->failed
+ : object_str( failed->name );
+ t->failed = failed_name;
+
+ /* If actions for building any of the dependencies have failed, bail.
+ * Otherwise, execute all actions to make the current target.
+ */
+ if ( ( t->status == EXEC_CMD_FAIL ) && t->actions )
+ {
+ ++counts->skipped;
+ if ( ( t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD )
+ {
+ if ( !unlink( object_str( t->boundname ) ) )
+ printf( "...removing outdated %s\n", object_str( t->boundname )
+ );
+ }
+ else
+ printf( "...skipped %s for lack of %s...\n", object_str( t->name ),
+ failed_name );
+ }
+
+ if ( t->status == EXEC_CMD_OK )
+ switch ( t->fate )
+ {
+ case T_FATE_STABLE:
+ case T_FATE_NEWER:
+ break;
+
+ case T_FATE_CANTFIND:
+ case T_FATE_CANTMAKE:
+ t->status = EXEC_CMD_FAIL;
+ break;
+
+ case T_FATE_ISTMP:
+ if ( DEBUG_MAKE )
+ printf( "...using %s...\n", object_str( t->name ) );
+ break;
+
+ case T_FATE_TOUCHED:
+ case T_FATE_MISSING:
+ case T_FATE_NEEDTMP:
+ case T_FATE_OUTDATED:
+ case T_FATE_UPDATE:
+ case T_FATE_REBUILD:
+ /* Prepare commands for executing actions scheduled for this target.
+ * Commands have their embedded variables automatically expanded,
+ * including making use of any "on target" variables.
+ */
+ if ( t->actions )
+ {
+ ++counts->total;
+ if ( DEBUG_MAKE && !( counts->total % 100 ) )
+ printf( "...on %dth target...\n", counts->total );
+
+ t->cmds = (char *)make1cmds( t );
+ /* Update the target's "progress" so MAKE1C processing counts it
+ * among its successes/failures.
+ */
+ t->progress = T_MAKE_RUNNING;
+ }
+ break;
+
+ /* All valid fates should have been accounted for by now. */
+ default:
+ printf( "ERROR: %s has bad fate %d", object_str( t->name ),
+ t->fate );
+ abort();
+ }
+
+ /* Proceed to MAKE1C to begin executing the chain of commands prepared for
+ * building the target. If we are not going to build the target (e.g. due to
+ * dependency failures or no commands needing to be run) the chain will be
+ * empty and MAKE1C processing will directly signal the target's completion.
+ */
+
+ if ( t->cmds == NULL || --( ( CMD * )t->cmds )->asynccnt == 0 )
+ push_state( &state_stack, t, NULL, T_STATE_MAKE1C );
+ else if ( DEBUG_EXECCMD )
+ {
+ CMD * cmd = ( CMD * )t->cmds;
+ printf( "Delaying %s %s: %d targets not ready\n", object_str( cmd->rule->name ), object_str( t->boundname ), cmd->asynccnt );
+ }
+}
+
+
+/*
+ * make1c() - launch target's next command, or go to parents' MAKE1B if none
+ *
+ * If there are (more) commands to run to build this target (and we have not hit
+ * an error running earlier commands), we launch the command using exec_cmd().
+ * Command execution signals its completion in exec_wait() by calling our
+ * make1c_closure() callback.
+ *
+ * If there are no more commands to run, we collect the status from all the
+ * actions and report our completion to all the parents.
+ */
+
+static void make1c( state const * const pState )
+{
+ TARGET * const t = pState->t;
+ CMD * const cmd = (CMD *)t->cmds;
+
+ if ( cmd )
+ {
+ /* Pop state first in case something below (e.g. exec_cmd(), exec_wait()
+ * or make1c_closure()) pushes a new state. Note that we must not access
+ * the popped state data after this as the same stack node might have
+ * been reused internally for some newly pushed state.
+ */
+ pop_state( &state_stack );
+
+ if ( cmd->status != EXEC_CMD_OK )
+ {
+ t->cmds = NULL;
+ push_cmds( cmd->next, cmd->status );
+ cmd_free( cmd );
+ return;
+ }
+
+#ifdef OPT_SEMAPHORE
+ if ( ! cmd_sem_lock( t ) )
+ {
+ return;
+ }
+#endif
+
+ /* Increment the jobs running counter. */
+ ++cmdsrunning;
+
+ /* Execute the actual build command or fake it if no-op. */
+ if ( globs.noexec || cmd->noop )
+ {
+ timing_info time_info = { 0 };
+ timestamp_current( &time_info.start );
+ timestamp_copy( &time_info.end, &time_info.start );
+ make1c_closure( t, EXEC_CMD_OK, &time_info, "", "", EXIT_OK );
+ }
+ else
+ {
+ exec_cmd( cmd->buf, make1c_closure, t, cmd->shell );
+
+ /* Wait until under the concurrent command count limit. */
+ /* FIXME: This wait could be skipped here and moved to just before
+ * trying to execute a command that would cross the command count
+ * limit. Note though that this might affect the order in which
+ * unrelated targets get built and would thus require that all
+ * affected Boost Build tests be updated.
+ */
+ assert( 0 < globs.jobs );
+ assert( globs.jobs <= MAXJOBS );
+ while ( cmdsrunning >= globs.jobs )
+ exec_wait();
+ }
+ }
+ else
+ {
+ ACTIONS * actions;
+
+ /* Tally success/failure for those we tried to update. */
+ if ( t->progress == T_MAKE_RUNNING )
+ switch ( t->status )
+ {
+ case EXEC_CMD_OK: ++counts->made; break;
+ case EXEC_CMD_FAIL: ++counts->failed; break;
+ }
+
+ /* Tell parents their dependency has been built. */
+ {
+ TARGETS * c;
+ stack temp_stack = { NULL };
+ TARGET * additional_includes = NULL;
+
+ t->progress = globs.noexec ? T_MAKE_NOEXEC_DONE : T_MAKE_DONE;
+
+ /* Target has been updated so rescan it for dependencies. */
+ if ( t->fate >= T_FATE_MISSING && t->status == EXEC_CMD_OK &&
+ !( t->flags & T_FLAG_INTERNAL ) )
+ {
+ TARGET * saved_includes;
+ SETTINGS * s;
+
+ t->rescanned = 1;
+
+ /* Clean current includes. */
+ saved_includes = t->includes;
+ t->includes = 0;
+
+ s = copysettings( t->settings );
+ pushsettings( root_module(), s );
+ headers( t );
+ popsettings( root_module(), s );
+ freesettings( s );
+
+ if ( t->includes )
+ {
+ /* Tricky. The parents have already been processed, but they
+ * have not seen the internal node, because it was just
+ * created. We need to:
+ * - push MAKE1A states that would have been pushed by the
+ * parents here
+ * - make sure all unprocessed parents will pick up the
+ * new includes
+ * - make sure processing the additional MAKE1A states is
+ * done before processing the MAKE1B state for our
+ * current target (which would mean this target has
+ * already been built), otherwise the parent would be
+ * considered built before the additional MAKE1A state
+ * processing even got a chance to start.
+ */
+ make0( t->includes, t->parents->target, 0, 0, 0, t->includes
+ );
+ /* Link the old includes on to make sure that it gets
+ * cleaned up correctly.
+ */
+ t->includes->includes = saved_includes;
+ for ( c = t->dependants; c; c = c->next )
+ c->target->depends = targetentry( c->target->depends,
+ t->includes );
+ /* Will be processed below. */
+ additional_includes = t->includes;
+ }
+ else
+ {
+ t->includes = saved_includes;
+ }
+ }
+
+ if ( additional_includes )
+ for ( c = t->parents; c; c = c->next )
+ push_state( &temp_stack, additional_includes, c->target,
+ T_STATE_MAKE1A );
+
+ if ( t->scc_root )
+ {
+ TARGET * const scc_root = target_scc( t );
+ assert( scc_root->progress < T_MAKE_DONE );
+ for ( c = t->parents; c; c = c->next )
+ {
+ if ( target_scc( c->target ) == scc_root )
+ push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B
+ );
+ else
+ scc_root->parents = targetentry( scc_root->parents,
+ c->target );
+ }
+ }
+ else
+ {
+ for ( c = t->parents; c; c = c->next )
+ push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B );
+ }
+
+ /* Must pop state before pushing any more. */
+ pop_state( &state_stack );
+
+ /* Using stacks reverses the order of execution. Reverse it back. */
+ push_stack_on_stack( &state_stack, &temp_stack );
+ }
+ }
+}
+
+
+/*
+ * call_timing_rule() - Look up the __TIMING_RULE__ variable on the given
+ * target, and if non-empty, invoke the rule it names, passing the given
+ * timing_info.
+ */
+
+static void call_timing_rule( TARGET * target, timing_info const * const time )
+{
+ LIST * timing_rule;
+
+ pushsettings( root_module(), target->settings );
+ timing_rule = var_get( root_module(), constant_TIMING_RULE );
+ popsettings( root_module(), target->settings );
+
+ if ( !list_empty( timing_rule ) )
+ {
+ /* rule timing-rule ( args * : target : start end user system ) */
+
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ OBJECT * rulename = list_front( timing_rule );
+ frame_init( frame );
+
+ /* args * :: $(__TIMING_RULE__[2-]) */
+ lol_add( frame->args, list_copy_range( timing_rule, list_next(
+ list_begin( timing_rule ) ), list_end( timing_rule ) ) );
+
+ /* target :: the name of the target */
+ lol_add( frame->args, list_new( object_copy( target->name ) ) );
+
+ /* start end user system :: info about the action command */
+ lol_add( frame->args, list_push_back( list_push_back( list_push_back( list_new(
+ outf_time( &time->start ) ),
+ outf_time( &time->end ) ),
+ outf_double( time->user ) ),
+ outf_double( time->system ) ) );
+
+ /* Call the rule. */
+ evaluate_rule( bindrule( rulename , root_module() ), rulename, frame );
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * call_action_rule() - Look up the __ACTION_RULE__ variable on the given
+ * target, and if non-empty, invoke the rule it names, passing the given info,
+ * timing_info, executed command and command output.
+ */
+
+static void call_action_rule
+(
+ TARGET * target,
+ int status,
+ timing_info const * time,
+ char const * executed_command,
+ char const * command_output
+)
+{
+ LIST * action_rule;
+
+ pushsettings( root_module(), target->settings );
+ action_rule = var_get( root_module(), constant_ACTION_RULE );
+ popsettings( root_module(), target->settings );
+
+ if ( !list_empty( action_rule ) )
+ {
+ /* rule action-rule (
+ args * :
+ target :
+ command status start end user system :
+ output ? ) */
+
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ OBJECT * rulename = list_front( action_rule );
+ frame_init( frame );
+
+ /* args * :: $(__ACTION_RULE__[2-]) */
+ lol_add( frame->args, list_copy_range( action_rule, list_next(
+ list_begin( action_rule ) ), list_end( action_rule ) ) );
+
+ /* target :: the name of the target */
+ lol_add( frame->args, list_new( object_copy( target->name ) ) );
+
+ /* command status start end user system :: info about the action command
+ */
+ lol_add( frame->args,
+ list_push_back( list_push_back( list_push_back( list_push_back( list_push_back( list_new(
+ object_new( executed_command ) ),
+ outf_int( status ) ),
+ outf_time( &time->start ) ),
+ outf_time( &time->end ) ),
+ outf_double( time->user ) ),
+ outf_double( time->system ) ) );
+
+ /* output ? :: the output of the action command */
+ if ( command_output )
+ lol_add( frame->args, list_new( object_new( command_output ) ) );
+ else
+ lol_add( frame->args, L0 );
+
+ /* Call the rule. */
+ evaluate_rule( bindrule( rulename, root_module() ), rulename, frame );
+
+ /* Clean up. */
+ frame_free( frame );
+ }
+}
+
+
+/*
+ * make1c_closure() - handle command execution completion and go to MAKE1C.
+ *
+ * Internal function passed as a notification callback for when a command
+ * finishes getting executed by the OS or called directly when faking that a
+ * command had been executed by the OS.
+ *
+ * Now all we need to do is fiddle with the command exit status and push a new
+ * MAKE1C state to execute the next command scheduled for building this target
+ * or close up the target's build process in case there are no more commands
+ * scheduled for it. On interrupts, we bail heavily.
+ */
+
+static void make1c_closure
+(
+ void * const closure,
+ int status_orig,
+ timing_info const * const time,
+ char const * const cmd_stdout,
+ char const * const cmd_stderr,
+ int const cmd_exit_reason
+)
+{
+ TARGET * const t = (TARGET *)closure;
+ CMD * const cmd = (CMD *)t->cmds;
+ char const * rule_name = 0;
+ char const * target_name = 0;
+
+ assert( cmd );
+
+ --cmdsrunning;
+
+ /* Calculate the target's status from the cmd execution result. */
+ {
+ /* Store the target's status. */
+ t->status = status_orig;
+
+ /* Invert OK/FAIL target status when FAIL_EXPECTED has been applied. */
+ if ( t->flags & T_FLAG_FAIL_EXPECTED && !globs.noexec )
+ {
+ switch ( t->status )
+ {
+ case EXEC_CMD_FAIL: t->status = EXEC_CMD_OK; break;
+ case EXEC_CMD_OK: t->status = EXEC_CMD_FAIL; break;
+ }
+ }
+
+ /* Ignore failures for actions marked as 'ignore'. */
+ if ( t->status == EXEC_CMD_FAIL && cmd->rule->actions->flags &
+ RULE_IGNORE )
+ t->status = EXEC_CMD_OK;
+ }
+
+ if ( DEBUG_MAKEQ ||
+ ( DEBUG_MAKE && !( cmd->rule->actions->flags & RULE_QUIETLY ) ) )
+ {
+ rule_name = object_str( cmd->rule->name );
+ target_name = object_str( list_front( lol_get( (LOL *)&cmd->args, 0 ) )
+ );
+ }
+
+ out_action( rule_name, target_name, cmd->buf->value, cmd_stdout, cmd_stderr,
+ cmd_exit_reason );
+
+ if ( !globs.noexec )
+ {
+ call_timing_rule( t, time );
+ if ( DEBUG_EXECCMD )
+ printf( "%f sec system; %f sec user\n", time->system, time->user );
+
+ /* Assume -p0 is in effect, i.e. cmd_stdout contains merged output. */
+ call_action_rule( t, status_orig, time, cmd->buf->value, cmd_stdout );
+ }
+
+ /* Print command text on failure. */
+ if ( t->status == EXEC_CMD_FAIL && DEBUG_MAKE )
+ {
+ if ( !DEBUG_EXEC )
+ printf( "%s\n", cmd->buf->value );
+
+ printf( "...failed %s ", object_str( cmd->rule->name ) );
+ list_print( lol_get( (LOL *)&cmd->args, 0 ) );
+ printf( "...\n" );
+ }
+
+ /* On interrupt, set quit so _everything_ fails. Do the same for failed
+ * commands if we were asked to stop the build in case of any errors.
+ */
+ if ( t->status == EXEC_CMD_INTR )
+ {
+ ++intr;
+ ++quit;
+ }
+ if ( t->status == EXEC_CMD_FAIL && globs.quitquick )
+ ++quit;
+
+ /* If the command was not successful remove all of its targets not marked as
+ * "precious".
+ */
+ if ( t->status != EXEC_CMD_OK )
+ {
+ LIST * const targets = lol_get( (LOL *)&cmd->args, 0 );
+ LISTITER iter = list_begin( targets );
+ LISTITER const end = list_end( targets );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ char const * const filename = object_str( list_item( iter ) );
+ TARGET const * const t = bindtarget( list_item( iter ) );
+ if ( !( t->flags & T_FLAG_PRECIOUS ) && !unlink( filename ) )
+ printf( "...removing %s\n", filename );
+ }
+ }
+
+#ifdef OPT_SEMAPHORE
+ /* Release any semaphores used by this action. */
+ cmd_sem_unlock( t );
+#endif
+
+ /* Free this command and push the MAKE1C state to execute the next one
+ * scheduled for building this same target.
+ */
+ t->cmds = NULL;
+ push_cmds( cmd->next, t->status );
+ cmd_free( cmd );
+}
+
+/* Push the next MAKE1C states after a command has run. */
+static void push_cmds( CMDLIST * cmds, int status )
+{
+ CMDLIST * cmd_iter;
+ for( cmd_iter = cmds; cmd_iter; cmd_iter = cmd_iter->next )
+ {
+ if ( cmd_iter->iscmd )
+ {
+ CMD * next_cmd = cmd_iter->impl.cmd;
+ /* Propagate the command status. */
+ if ( next_cmd->status < status )
+ next_cmd->status = status;
+ if ( --next_cmd->asynccnt == 0 )
+ {
+ /* Select the first target associated with the action.
+ * This is safe because sibling CMDs cannot have targets
+ * in common.
+ */
+ TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) );
+ first_target->cmds = (char *)next_cmd;
+ push_state( &state_stack, first_target, NULL, T_STATE_MAKE1C );
+ }
+ else if ( DEBUG_EXECCMD )
+ {
+ TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) );
+ printf( "Delaying %s %s: %d targets not ready\n", object_str( next_cmd->rule->name ), object_str( first_target->boundname ), next_cmd->asynccnt );
+ }
+ }
+ else
+ {
+ /* This is a target that we're finished updating */
+ TARGET * updated_target = cmd_iter->impl.t;
+ if ( updated_target->status < status )
+ updated_target->status = status;
+ updated_target->cmds = NULL;
+ push_state( &state_stack, updated_target, NULL, T_STATE_MAKE1C );
+ }
+ }
+}
+
+
+/*
+ * swap_settings() - replace the settings from the current module and target
+ * with those from the new module and target
+ */
+
+static void swap_settings
+(
+ module_t * * current_module,
+ TARGET * * current_target,
+ module_t * new_module,
+ TARGET * new_target
+)
+{
+ if ( ( new_target == *current_target ) &&
+ ( new_module == *current_module ) )
+ return;
+
+ if ( *current_target )
+ popsettings( *current_module, (*current_target)->settings );
+
+ if ( new_target )
+ pushsettings( new_module, new_target->settings );
+
+ *current_module = new_module;
+ *current_target = new_target;
+}
+
+
+/*
+ * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
+ *
+ * Essentially copies a chain of ACTIONs to a chain of CMDs, grouping
+ * RULE_TOGETHER actions, splitting RULE_PIECEMEAL actions, and handling
+ * RULE_NEWSRCS actions. The result is a chain of CMDs which has already had all
+ * of its embedded variable references expanded and can now be executed using
+ * exec_cmd().
+ */
+
+static CMD * make1cmds( TARGET * t )
+{
+ CMD * cmds = 0;
+ CMD * last_cmd;
+ LIST * shell = L0;
+ module_t * settings_module = 0;
+ TARGET * settings_target = 0;
+ ACTIONS * a0;
+ int const running_flag = globs.noexec ? A_RUNNING_NOEXEC : A_RUNNING;
+
+ /* Step through actions.
+ */
+ for ( a0 = t->actions; a0; a0 = a0->next )
+ {
+ RULE * rule = a0->action->rule;
+ rule_actions * actions = rule->actions;
+ SETTINGS * boundvars;
+ LIST * nt;
+ LIST * ns;
+ ACTIONS * a1;
+
+ /* Only do rules with commands to execute.
+ */
+ if ( !actions )
+ continue;
+
+ if ( a0->action->running >= running_flag )
+ {
+ CMD * first;
+ /* If this action was skipped either because it was
+ * combined with another action by RULE_TOGETHER, or
+ * because all of its sources were filtered out,
+ * then we don't have anything to do here.
+ */
+ if ( a0->action->first_cmd == NULL )
+ continue;
+ /* This action has already been processed for another target.
+ * Just set up the dependency graph correctly and move on.
+ */
+ first = a0->action->first_cmd;
+ if( cmds )
+ {
+ last_cmd->next = cmdlist_append_cmd( last_cmd->next, first );
+ }
+ else
+ {
+ cmds = first;
+ }
+ last_cmd = a0->action->last_cmd;
+ continue;
+ }
+
+ a0->action->running = running_flag;
+
+ /* Make LISTS of targets and sources. If `execute together` has been
+ * specified for this rule, tack on sources from each instance of this
+ * rule for this target.
+ */
+ nt = make1list( L0, a0->action->targets, 0 );
+ ns = make1list( L0, a0->action->sources, actions->flags );
+ if ( actions->flags & RULE_TOGETHER )
+ for ( a1 = a0->next; a1; a1 = a1->next )
+ if ( a1->action->rule == rule &&
+ a1->action->running < running_flag &&
+ targets_equal( a0->action->targets, a1->action->targets ) )
+ {
+ ns = make1list( ns, a1->action->sources, actions->flags );
+ a1->action->running = running_flag;
+ }
+
+ /* If doing only updated (or existing) sources, but none have been
+ * updated (or exist), skip this action.
+ */
+ if ( list_empty( ns ) &&
+ ( actions->flags & ( RULE_NEWSRCS | RULE_EXISTING ) ) )
+ {
+ list_free( nt );
+ continue;
+ }
+
+ swap_settings( &settings_module, &settings_target, rule->module, t );
+ if ( list_empty( shell ) )
+ {
+ /* shell is per-target */
+ shell = var_get( rule->module, constant_JAMSHELL );
+ }
+
+ /* If we had 'actions xxx bind vars' we bind the vars now. */
+ boundvars = make1settings( rule->module, actions->bindlist );
+ pushsettings( rule->module, boundvars );
+
+ /*
+ * Build command, starting with all source args.
+ *
+ * For actions that allow PIECEMEAL commands, if the constructed command
+ * string is too long, we retry constructing it with a reduced number of
+ * source arguments presented.
+ *
+ * While reducing slowly takes a bit of compute time to get things just
+ * right, it is worth it to get as close to the maximum allowed command
+ * string length as possible, because launching the commands we are
+ * executing is likely to be much more compute intensive.
+ *
+ * Note that we loop through at least once, for sourceless actions.
+ */
+ {
+ int const length = list_length( ns );
+ int start = 0;
+ int chunk = length;
+ int cmd_count = 0;
+ LIST * cmd_targets = L0;
+ LIST * cmd_shell = L0;
+ TARGETS * semaphores = NULL;
+ TARGETS * targets_iter;
+ int unique_targets;
+ do
+ {
+ CMD * cmd;
+ int cmd_check_result;
+ int cmd_error_length;
+ int cmd_error_max_length;
+ int retry = 0;
+ int accept_command = 0;
+
+ /* Build cmd: cmd_new() takes ownership of its lists. */
+ if ( list_empty( cmd_targets ) ) cmd_targets = list_copy( nt );
+ if ( list_empty( cmd_shell ) ) cmd_shell = list_copy( shell );
+ cmd = cmd_new( rule, cmd_targets, list_sublist( ns, start,
+ chunk ), cmd_shell );
+
+ cmd_check_result = exec_check( cmd->buf, &cmd->shell,
+ &cmd_error_length, &cmd_error_max_length );
+
+ if ( cmd_check_result == EXEC_CHECK_OK )
+ {
+ accept_command = 1;
+ }
+ else if ( cmd_check_result == EXEC_CHECK_NOOP )
+ {
+ accept_command = 1;
+ cmd->noop = 1;
+ }
+ else if ( ( actions->flags & RULE_PIECEMEAL ) && ( chunk > 1 ) )
+ {
+ /* Too long but splittable. Reduce chunk size slowly and
+ * retry.
+ */
+ assert( cmd_check_result == EXEC_CHECK_TOO_LONG ||
+ cmd_check_result == EXEC_CHECK_LINE_TOO_LONG );
+ chunk = chunk * 9 / 10;
+ retry = 1;
+ }
+ else
+ {
+ /* Too long and not splittable. */
+ char const * const error_message = cmd_check_result ==
+ EXEC_CHECK_TOO_LONG
+ ? "is too long"
+ : "contains a line that is too long";
+ assert( cmd_check_result == EXEC_CHECK_TOO_LONG ||
+ cmd_check_result == EXEC_CHECK_LINE_TOO_LONG );
+ printf( "%s action %s (%d, max %d):\n", object_str(
+ rule->name ), error_message, cmd_error_length,
+ cmd_error_max_length );
+
+ /* Tell the user what did not fit. */
+ fputs( cmd->buf->value, stdout );
+ exit( EXITBAD );
+ }
+
+ assert( !retry || !accept_command );
+
+ if ( accept_command )
+ {
+ /* Chain it up. */
+ if ( cmds )
+ {
+ last_cmd->next = cmdlist_append_cmd( last_cmd->next, cmd );
+ last_cmd = cmd;
+ }
+ else
+ {
+ cmds = last_cmd = cmd;
+ }
+
+ if ( cmd_count++ == 0 )
+ {
+ a0->action->first_cmd = cmd;
+ }
+
+ /* Mark lists we need recreated for the next command since
+ * they got consumed by the cmd object.
+ */
+ cmd_targets = L0;
+ cmd_shell = L0;
+ }
+ else
+ {
+ /* We can reuse targets & shell lists for the next command
+ * if we do not let them die with this cmd object.
+ */
+ cmd_release_targets_and_shell( cmd );
+ cmd_free( cmd );
+ }
+
+ if ( !retry )
+ start += chunk;
+ }
+ while ( start < length );
+
+ /* Record the end of the action's cmds. */
+ a0->action->last_cmd = last_cmd;
+
+ unique_targets = 0;
+ for ( targets_iter = a0->action->targets; targets_iter; targets_iter = targets_iter->next )
+ {
+ if ( targets_contains( targets_iter->next, targets_iter->target ) )
+ continue;
+ /* Add all targets produced by the action to the update list. */
+ push_state( &state_stack, targets_iter->target, NULL, T_STATE_MAKE1A );
+ ++unique_targets;
+ }
+ /* We need to wait until all the targets agree that
+ * it's okay to run this action.
+ */
+ ( ( CMD * )a0->action->first_cmd )->asynccnt = unique_targets;
+
+#ifdef OPT_SEMAPHORE
+ /* Collect semaphores */
+ for ( targets_iter = a0->action->targets; targets_iter; targets_iter = targets_iter->next )
+ {
+ TARGET * sem = targets_iter->target->semaphore;
+ if ( sem )
+ {
+ if ( ! targets_contains( semaphores, sem ) )
+ semaphores = targetentry( semaphores, sem );
+ }
+ }
+ ( ( CMD * )a0->action->first_cmd )->lock = semaphores;
+ ( ( CMD * )a0->action->last_cmd )->unlock = semaphores;
+#endif
+ }
+
+ /* These were always copied when used. */
+ list_free( nt );
+ list_free( ns );
+
+ /* Free variables with values bound by 'actions xxx bind vars'. */
+ popsettings( rule->module, boundvars );
+ freesettings( boundvars );
+ }
+
+ if ( cmds )
+ {
+ last_cmd->next = cmdlist_append_target( last_cmd->next, t );
+ }
+
+ swap_settings( &settings_module, &settings_target, 0, 0 );
+ return cmds;
+}
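
The sizing strategy in the piecemeal loop above (shrink the source chunk by roughly 10% on every "too long" result, advance start only once a chunk is accepted) can be modelled on its own. The sketch below is standalone and uses invented numbers and a fake command_fits() check in place of exec_check(); it is not the engine code.

#include <stdio.h>

#define ARG_LEN  10    /* pretend every source argument adds 10 characters  */
#define MAX_LEN  64    /* pretend limit on the generated command's length   */

/* Stand-in for exec_check(): would a command with n source arguments fit? */
static int command_fits( int n )
{
    return n * ARG_LEN <= MAX_LEN;
}

int main()
{
    int const length = 20;   /* total number of source arguments */
    int start = 0;
    int chunk = length;

    while ( start < length )
    {
        if ( chunk > length - start )
            chunk = length - start;
        if ( !command_fits( chunk ) && chunk > 1 )
        {
            chunk = chunk * 9 / 10;   /* same reduction step as above */
            continue;                 /* retry with a smaller chunk   */
        }
        printf( "command covers sources [%d, %d)\n", start, start + chunk );
        start += chunk;
    }
    return 0;
}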
+
+
+/*
+ * make1list() - turn a list of targets into a LIST, for $(<) and $(>)
+ */
+
+static LIST * make1list( LIST * l, TARGETS * targets, int flags )
+{
+ for ( ; targets; targets = targets->next )
+ {
+ TARGET * t = targets->target;
+
+ if ( t->binding == T_BIND_UNBOUND )
+ make1bind( t );
+
+ if ( ( flags & RULE_EXISTING ) && ( flags & RULE_NEWSRCS ) )
+ {
+ if ( ( t->binding != T_BIND_EXISTS ) &&
+ ( t->fate <= T_FATE_STABLE ) )
+ continue;
+ }
+ else if ( flags & RULE_EXISTING )
+ {
+ if ( t->binding != T_BIND_EXISTS )
+ continue;
+ }
+ else if ( flags & RULE_NEWSRCS )
+ {
+ if ( t->fate <= T_FATE_STABLE )
+ continue;
+ }
+
+ /* Prohibit duplicates for RULE_TOGETHER. */
+ if ( flags & RULE_TOGETHER )
+ {
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+ for ( ; iter != end; iter = list_next( iter ) )
+ if ( object_equal( list_item( iter ), t->boundname ) )
+ break;
+ if ( iter != end )
+ continue;
+ }
+
+ /* Build new list. */
+ l = list_push_back( l, object_copy( t->boundname ) );
+ }
+
+ return l;
+}
+
+
+/*
+ * make1settings() - for vars with bound values, build up replacement lists
+ */
+
+static SETTINGS * make1settings( struct module_t * module, LIST * vars )
+{
+ SETTINGS * settings = 0;
+
+ LISTITER vars_iter = list_begin( vars );
+ LISTITER const vars_end = list_end( vars );
+ for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) )
+ {
+ LIST * const l = var_get( module, list_item( vars_iter ) );
+ LIST * nl = L0;
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ TARGET * const t = bindtarget( list_item( iter ) );
+
+ /* Make sure the target is bound. */
+ if ( t->binding == T_BIND_UNBOUND )
+ make1bind( t );
+
+ /* Build a new list. */
+ nl = list_push_back( nl, object_copy( t->boundname ) );
+ }
+
+ /* Add to settings chain. */
+ settings = addsettings( settings, VAR_SET, list_item( vars_iter ), nl );
+ }
+
+ return settings;
+}
+
+
+/*
+ * make1bind() - bind targets that were not bound during dependency analysis
+ *
+ * Spot the kludge! If a target is not in the dependency tree, it did not get
+ * bound by make0(), so we have to do it here. Ugly.
+ */
+
+static void make1bind( TARGET * t )
+{
+ if ( t->flags & T_FLAG_NOTFILE )
+ return;
+
+ pushsettings( root_module(), t->settings );
+ object_free( t->boundname );
+ t->boundname = search( t->name, &t->time, 0, t->flags & T_FLAG_ISFILE );
+ t->binding = timestamp_empty( &t->time ) ? T_BIND_MISSING : T_BIND_EXISTS;
+ popsettings( root_module(), t->settings );
+}
+
+
+static int targets_contains( TARGETS * l, TARGET * t )
+{
+ for ( ; l; l = l->next )
+ {
+ if ( t == l->target )
+ {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+static int targets_equal( TARGETS * l1, TARGETS * l2 )
+{
+ for ( ; l1 && l2; l1 = l1->next, l2 = l2->next )
+ {
+ if ( l1->target != l2->target )
+ return 0;
+ }
+ return !l1 && !l2;
+}
+
+
+#ifdef OPT_SEMAPHORE
+
+static int cmd_sem_lock( TARGET * t )
+{
+ CMD * cmd = (CMD *)t->cmds;
+ TARGETS * iter;
+ /* Check whether all the semaphores required for updating
+ * this target are free.
+ */
+ for ( iter = cmd->lock; iter; iter = iter->next )
+ {
+ if ( iter->target->asynccnt > 0 )
+ {
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: %s is busy, delaying launch of %s\n",
+ object_str( iter->target->name ), object_str( t->name ) );
+ iter->target->parents = targetentry( iter->target->parents, t );
+ return 0;
+ }
+ }
+ /* Lock the semaphores. */
+ for ( iter = cmd->lock; iter; iter = iter->next )
+ {
+ ++iter->target->asynccnt;
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: %s now used by %s\n", object_str( iter->target->name
+ ), object_str( t->name ) );
+ }
+ /* A cmd only needs to be locked around its execution.
+ * Clearing cmd->lock here makes it safe to call cmd_sem_lock
+ * twice.
+ */
+ cmd->lock = NULL;
+ return 1;
+}
+
+static void cmd_sem_unlock( TARGET * t )
+{
+ CMD * cmd = ( CMD * )t->cmds;
+ TARGETS * iter;
+ /* Release the semaphores. */
+ for ( iter = cmd->unlock; iter; iter = iter->next )
+ {
+ if ( DEBUG_EXECCMD )
+ printf( "SEM: %s is now free\n", object_str(
+ iter->target->name ) );
+ --iter->target->asynccnt;
+ assert( iter->target->asynccnt <= 0 );
+ }
+ for ( iter = cmd->unlock; iter; iter = iter->next )
+ {
+ /* Find a waiting target that's ready */
+ while ( iter->target->parents )
+ {
+ TARGETS * first = iter->target->parents;
+ TARGET * t1 = first->target;
+
+ /* Pop the first waiting CMD */
+ if ( first->next )
+ first->next->tail = first->tail;
+ iter->target->parents = first->next;
+ BJAM_FREE( first );
+
+ if ( cmd_sem_lock( t1 ) )
+ {
+ push_state( &state_stack, t1, NULL, T_STATE_MAKE1C );
+ break;
+ }
+ }
+ }
+}
+
+#endif
diff --git a/tools/build/v2/engine/md5.c b/tools/build/src/engine/md5.c
index c35d96c5ef..c35d96c5ef 100644
--- a/tools/build/v2/engine/md5.c
+++ b/tools/build/src/engine/md5.c
diff --git a/tools/build/v2/engine/md5.h b/tools/build/src/engine/md5.h
index 698c995d8f..698c995d8f 100644
--- a/tools/build/v2/engine/md5.h
+++ b/tools/build/src/engine/md5.h
diff --git a/tools/build/v2/engine/mem.c b/tools/build/src/engine/mem.c
index 6a11fb38a5..6a11fb38a5 100644
--- a/tools/build/v2/engine/mem.c
+++ b/tools/build/src/engine/mem.c
diff --git a/tools/build/src/engine/mem.h b/tools/build/src/engine/mem.h
new file mode 100644
index 0000000000..8718b07fd4
--- /dev/null
+++ b/tools/build/src/engine/mem.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2006. Rene Rivera
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef BJAM_MEM_H
+#define BJAM_MEM_H
+
+#ifdef OPT_BOEHM_GC
+
+ /* Use Boehm GC memory allocator. */
+ #include <gc.h>
+
+ #define bjam_malloc_x(s) memset(GC_malloc(s),0,s)
+ #define bjam_malloc_atomic_x(s) memset(GC_malloc_atomic(s),0,s)
+ #define bjam_calloc_x(n,s) memset(GC_malloc((n)*(s)),0,(n)*(s))
+ #define bjam_calloc_atomic_x(n,s) memset(GC_malloc_atomic((n)*(s)),0,(n)*(s))
+ #define bjam_realloc_x(p,s) GC_realloc(p,s)
+ #define bjam_free_x(p) GC_free(p)
+ #define bjam_mem_init_x() GC_init(); GC_enable_incremental()
+
+ #define bjam_malloc_raw_x(s) malloc(s)
+ #define bjam_calloc_raw_x(n,s) calloc(n,s)
+ #define bjam_realloc_raw_x(p,s) realloc(p,s)
+ #define bjam_free_raw_x(p) free(p)
+
+ #ifndef BJAM_NEWSTR_NO_ALLOCATE
+ # define BJAM_NEWSTR_NO_ALLOCATE
+ #endif
+
+#elif defined( OPT_DUMA )
+
+ /* Use Duma memory debugging library. */
+ #include <stdlib.h>
+
+ #define _DUMA_CONFIG_H_
+ #define DUMA_NO_GLOBAL_MALLOC_FREE
+ #define DUMA_EXPLICIT_INIT
+ #define DUMA_NO_THREAD_SAFETY
+ #define DUMA_NO_CPP_SUPPORT
+ /* #define DUMA_NO_LEAKDETECTION */
+ /* #define DUMA_USE_FRAMENO */
+ /* #define DUMA_PREFER_ATEXIT */
+ /* #define DUMA_OLD_DEL_MACRO */
+ /* #define DUMA_NO_HANG_MSG */
+ #define DUMA_PAGE_SIZE 4096
+ #define DUMA_MIN_ALIGNMENT 1
+ /* #define DUMA_GNU_INIT_ATTR 0 */
+ typedef unsigned int DUMA_ADDR;
+ typedef unsigned int DUMA_SIZE;
+ #include <duma.h>
+
+ #define bjam_malloc_x(s) malloc(s)
+ #define bjam_calloc_x(n,s) calloc(n,s)
+ #define bjam_realloc_x(p,s) realloc(p,s)
+ #define bjam_free_x(p) free(p)
+
+ #ifndef BJAM_NEWSTR_NO_ALLOCATE
+ # define BJAM_NEWSTR_NO_ALLOCATE
+ #endif
+
+#else
+
+ /* Standard C memory allocation. */
+ #include <stdlib.h>
+
+ #define bjam_malloc_x(s) malloc(s)
+ #define bjam_calloc_x(n,s) calloc(n,s)
+ #define bjam_realloc_x(p,s) realloc(p,s)
+ #define bjam_free_x(p) free(p)
+
+#endif
+
+#ifndef bjam_malloc_atomic_x
+ #define bjam_malloc_atomic_x(s) bjam_malloc_x(s)
+#endif
+#ifndef bjam_calloc_atomic_x
+ #define bjam_calloc_atomic_x(n,s) bjam_calloc_x(n,s)
+#endif
+#ifndef bjam_mem_init_x
+ #define bjam_mem_init_x()
+#endif
+#ifndef bjam_mem_close_x
+ #define bjam_mem_close_x()
+#endif
+#ifndef bjam_malloc_raw_x
+ #define bjam_malloc_raw_x(s) bjam_malloc_x(s)
+#endif
+#ifndef bjam_calloc_raw_x
+ #define bjam_calloc_raw_x(n,s) bjam_calloc_x(n,s)
+#endif
+#ifndef bjam_realloc_raw_x
+ #define bjam_realloc_raw_x(p,s) bjam_realloc_x(p,s)
+#endif
+#ifndef bjam_free_raw_x
+ #define bjam_free_raw_x(p) bjam_free_x(p)
+#endif
+
+#ifdef OPT_DEBUG_PROFILE
+ /* Profile tracing of memory allocations. */
+ #include "debug.h"
+
+ #define BJAM_MALLOC(s) (profile_memory(s), bjam_malloc_x(s))
+ #define BJAM_MALLOC_ATOMIC(s) (profile_memory(s), bjam_malloc_atomic_x(s))
+ #define BJAM_CALLOC(n,s) (profile_memory(n*s), bjam_calloc_x(n,s))
+ #define BJAM_CALLOC_ATOMIC(n,s) (profile_memory(n*s), bjam_calloc_atomic_x(n,s))
+ #define BJAM_REALLOC(p,s) (profile_memory(s), bjam_realloc_x(p,s))
+
+ #define BJAM_MALLOC_RAW(s) (profile_memory(s), bjam_malloc_raw_x(s))
+ #define BJAM_CALLOC_RAW(n,s) (profile_memory(n*s), bjam_calloc_raw_x(n,s))
+ #define BJAM_REALLOC_RAW(p,s) (profile_memory(s), bjam_realloc_raw_x(p,s))
+#else
+ /* No mem tracing. */
+ #define BJAM_MALLOC(s) bjam_malloc_x(s)
+ #define BJAM_MALLOC_ATOMIC(s) bjam_malloc_atomic_x(s)
+ #define BJAM_CALLOC(n,s) bjam_calloc_x(n,s)
+ #define BJAM_CALLOC_ATOMIC(n,s) bjam_calloc_atomic_x(n,s)
+ #define BJAM_REALLOC(p,s) bjam_realloc_x(p,s)
+
+ #define BJAM_MALLOC_RAW(s) bjam_malloc_raw_x(s)
+ #define BJAM_CALLOC_RAW(n,s) bjam_calloc_raw_x(n,s)
+ #define BJAM_REALLOC_RAW(p,s) bjam_realloc_raw_x(p,s)
+#endif
+
+#define BJAM_MEM_INIT() bjam_mem_init_x()
+#define BJAM_MEM_CLOSE() bjam_mem_close_x()
+
+#define BJAM_FREE(p) bjam_free_x(p)
+#define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
+
+#endif
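
In the default configuration (none of OPT_BOEHM_GC, OPT_DUMA or OPT_DEBUG_PROFILE defined), the macros above reduce to plain malloc/calloc/realloc/free with no-op init/close hooks. A minimal usage sketch, assuming mem.h is on the include path:

#include "mem.h"      /* the header above */

#include <stdio.h>
#include <string.h>

int main()
{
    char * buf;

    BJAM_MEM_INIT();                  /* no-op unless building with a GC */

    buf = (char *)BJAM_MALLOC( 16 );  /* plain malloc( 16 ) by default */
    if ( !buf )
        return 1;
    strcpy( buf, "hello" );
    printf( "%s\n", buf );
    BJAM_FREE( buf );                 /* plain free() by default */

    BJAM_MEM_CLOSE();                 /* also a no-op by default */
    return 0;
}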
diff --git a/tools/build/v2/engine/mkjambase.c b/tools/build/src/engine/mkjambase.c
index cdf5998200..cdf5998200 100644
--- a/tools/build/v2/engine/mkjambase.c
+++ b/tools/build/src/engine/mkjambase.c
diff --git a/tools/build/src/engine/modules.c b/tools/build/src/engine/modules.c
new file mode 100644
index 0000000000..6be82fe12a
--- /dev/null
+++ b/tools/build/src/engine/modules.c
@@ -0,0 +1,431 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "modules.h"
+
+#include "hash.h"
+#include "lists.h"
+#include "native.h"
+#include "object.h"
+#include "parse.h"
+#include "rules.h"
+#include "strings.h"
+#include "variable.h"
+
+#include <assert.h>
+#include <string.h>
+
+static struct hash * module_hash = 0;
+static module_t root;
+
+
+module_t * bindmodule( OBJECT * name )
+{
+ if ( !name )
+ return &root;
+
+ {
+ PROFILE_ENTER( BINDMODULE );
+
+ module_t * m;
+ int found;
+
+ if ( !module_hash )
+ module_hash = hashinit( sizeof( module_t ), "modules" );
+
+ m = (module_t *)hash_insert( module_hash, name, &found );
+ if ( !found )
+ {
+ m->name = object_copy( name );
+ m->variables = 0;
+ m->variable_indices = 0;
+ m->num_fixed_variables = 0;
+ m->fixed_variables = 0;
+ m->rules = 0;
+ m->imported_modules = 0;
+ m->class_module = 0;
+ m->native_rules = 0;
+ m->user_module = 0;
+ }
+
+ PROFILE_EXIT( BINDMODULE );
+
+ return m;
+ }
+}
+
+
+/*
+ * demand_rules() - Get the module's "rules" hash on demand.
+ */
+struct hash * demand_rules( module_t * m )
+{
+ if ( !m->rules )
+ m->rules = hashinit( sizeof( RULE ), "rules" );
+ return m->rules;
+}
+
+
+/*
+ * delete_module() - wipe out the module's rules and variables.
+ */
+
+static void delete_rule_( void * xrule, void * data )
+{
+ rule_free( (RULE *)xrule );
+}
+
+
+static void delete_native_rule( void * xrule, void * data )
+{
+ native_rule_t * rule = (native_rule_t *)xrule;
+ object_free( rule->name );
+ if ( rule->procedure )
+ function_free( rule->procedure );
+}
+
+
+static void delete_imported_modules( void * xmodule_name, void * data )
+{
+ object_free( *(OBJECT * *)xmodule_name );
+}
+
+
+static void free_fixed_variable( void * xvar, void * data );
+
+void delete_module( module_t * m )
+{
+ /* Clear out all the rules. */
+ if ( m->rules )
+ {
+ hashenumerate( m->rules, delete_rule_, (void *)0 );
+ hash_free( m->rules );
+ m->rules = 0;
+ }
+
+ if ( m->native_rules )
+ {
+ hashenumerate( m->native_rules, delete_native_rule, (void *)0 );
+ hash_free( m->native_rules );
+ m->native_rules = 0;
+ }
+
+ if ( m->variables )
+ {
+ var_done( m );
+ m->variables = 0;
+ }
+
+ if ( m->fixed_variables )
+ {
+ int i;
+ for ( i = 0; i < m->num_fixed_variables; ++i )
+ {
+ list_free( m->fixed_variables[ i ] );
+ }
+ BJAM_FREE( m->fixed_variables );
+ m->fixed_variables = 0;
+ }
+
+ if ( m->variable_indices )
+ {
+ hashenumerate( m->variable_indices, &free_fixed_variable, (void *)0 );
+ hash_free( m->variable_indices );
+ m->variable_indices = 0;
+ }
+
+ if ( m->imported_modules )
+ {
+ hashenumerate( m->imported_modules, delete_imported_modules, (void *)0 );
+ hash_free( m->imported_modules );
+ m->imported_modules = 0;
+ }
+}
+
+
+struct module_stats
+{
+ OBJECT * module_name;
+ struct hashstats rules_stats[ 1 ];
+ struct hashstats variables_stats[ 1 ];
+ struct hashstats variable_indices_stats[ 1 ];
+ struct hashstats imported_modules_stats[ 1 ];
+};
+
+
+static void module_stat( struct hash * hp, OBJECT * module, const char * name )
+{
+ if ( hp )
+ {
+ struct hashstats stats[ 1 ];
+ string id[ 1 ];
+ hashstats_init( stats );
+ string_new( id );
+ string_append( id, object_str( module ) );
+ string_push_back( id, ' ' );
+ string_append( id, name );
+
+ hashstats_add( stats, hp );
+ hashstats_print( stats, id->value );
+
+ string_free( id );
+ }
+}
+
+
+static void class_module_stat( struct hashstats * stats, OBJECT * module, const char * name )
+{
+ if ( stats->item_size )
+ {
+ string id[ 1 ];
+ string_new( id );
+ string_append( id, object_str( module ) );
+ string_append( id, " object " );
+ string_append( id, name );
+
+ hashstats_print( stats, id->value );
+
+ string_free( id );
+ }
+}
+
+
+static void stat_module( void * xmodule, void * data )
+{
+ module_t *m = (module_t *)xmodule;
+
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ {
+ struct hash * class_info = (struct hash *)data;
+ if ( m->class_module )
+ {
+ int found;
+ struct module_stats * ms = (struct module_stats *)hash_insert( class_info, m->class_module->name, &found );
+ if ( !found )
+ {
+ ms->module_name = m->class_module->name;
+ hashstats_init( ms->rules_stats );
+ hashstats_init( ms->variables_stats );
+ hashstats_init( ms->variable_indices_stats );
+ hashstats_init( ms->imported_modules_stats );
+ }
+
+ hashstats_add( ms->rules_stats, m->rules );
+ hashstats_add( ms->variables_stats, m->variables );
+ hashstats_add( ms->variable_indices_stats, m->variable_indices );
+ hashstats_add( ms->imported_modules_stats, m->imported_modules );
+ }
+ else
+ {
+ module_stat( m->rules, m->name, "rules" );
+ module_stat( m->variables, m->name, "variables" );
+ module_stat( m->variable_indices, m->name, "fixed variables" );
+ module_stat( m->imported_modules, m->name, "imported modules" );
+ }
+ }
+
+ delete_module( m );
+ object_free( m->name );
+}
+
+static void print_class_stats( void * xstats, void * data )
+{
+ struct module_stats * stats = (struct module_stats *)xstats;
+ class_module_stat( stats->rules_stats, stats->module_name, "rules" );
+ class_module_stat( stats->variables_stats, stats->module_name, "variables" );
+ class_module_stat( stats->variable_indices_stats, stats->module_name, "fixed variables" );
+ class_module_stat( stats->imported_modules_stats, stats->module_name, "imported modules" );
+}
+
+
+static void delete_module_( void * xmodule, void * data )
+{
+ module_t *m = (module_t *)xmodule;
+
+ delete_module( m );
+ object_free( m->name );
+}
+
+
+void modules_done()
+{
+ if ( DEBUG_MEM || DEBUG_PROFILE )
+ {
+ struct hash * class_hash = hashinit( sizeof( struct module_stats ), "object info" );
+ hashenumerate( module_hash, stat_module, (void *)class_hash );
+ hashenumerate( class_hash, print_class_stats, (void *)0 );
+ hash_free( class_hash );
+ }
+ hashenumerate( module_hash, delete_module_, (void *)0 );
+ hashdone( module_hash );
+ module_hash = 0;
+ delete_module( &root );
+}
+
+module_t * root_module()
+{
+ return &root;
+}
+
+
+void import_module( LIST * module_names, module_t * target_module )
+{
+ PROFILE_ENTER( IMPORT_MODULE );
+
+ struct hash * h;
+ LISTITER iter;
+ LISTITER end;
+
+ if ( !target_module->imported_modules )
+ target_module->imported_modules = hashinit( sizeof( char * ), "imported"
+ );
+ h = target_module->imported_modules;
+
+ iter = list_begin( module_names );
+ end = list_end( module_names );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ int found;
+ OBJECT * const s = list_item( iter );
+ OBJECT * * const ss = (OBJECT * *)hash_insert( h, s, &found );
+ if ( !found )
+ *ss = object_copy( s );
+ }
+
+ PROFILE_EXIT( IMPORT_MODULE );
+}
+
+
+static void add_module_name( void * r_, void * result_ )
+{
+ OBJECT * * const r = (OBJECT * *)r_;
+ LIST * * const result = (LIST * *)result_;
+ *result = list_push_back( *result, object_copy( *r ) );
+}
+
+
+LIST * imported_modules( module_t * module )
+{
+ LIST * result = L0;
+ if ( module->imported_modules )
+ hashenumerate( module->imported_modules, add_module_name, &result );
+ return result;
+}
+
+
+FUNCTION * function_bind_variables( FUNCTION *, module_t *, int * counter );
+FUNCTION * function_unbind_variables( FUNCTION * );
+
+struct fixed_variable
+{
+ OBJECT * key;
+ int n;
+};
+
+struct bind_vars_t
+{
+ module_t * module;
+ int counter;
+};
+
+
+static void free_fixed_variable( void * xvar, void * data )
+{
+ object_free( ( (struct fixed_variable *)xvar )->key );
+}
+
+
+static void bind_variables_for_rule( void * xrule, void * xdata )
+{
+ RULE * rule = (RULE *)xrule;
+ struct bind_vars_t * data = (struct bind_vars_t *)xdata;
+ if ( rule->procedure && rule->module == data->module )
+ rule->procedure = function_bind_variables( rule->procedure,
+ data->module, &data->counter );
+}
+
+
+void module_bind_variables( struct module_t * m )
+{
+ if ( m != root_module() && m->rules )
+ {
+ struct bind_vars_t data;
+ data.module = m;
+ data.counter = m->num_fixed_variables;
+ hashenumerate( m->rules, &bind_variables_for_rule, &data );
+ module_set_fixed_variables( m, data.counter );
+ }
+}
+
+
+int module_add_fixed_var( struct module_t * m, OBJECT * name, int * counter )
+{
+ struct fixed_variable * v;
+ int found;
+
+ assert( !m->class_module );
+
+ if ( !m->variable_indices )
+ m->variable_indices = hashinit( sizeof( struct fixed_variable ), "variable index table" );
+
+ v = (struct fixed_variable *)hash_insert( m->variable_indices, name, &found );
+ if ( !found )
+ {
+ v->key = object_copy( name );
+ v->n = (*counter)++;
+ }
+
+ return v->n;
+}
+
+
+LIST * var_get_and_clear_raw( module_t * m, OBJECT * name );
+
+static void load_fixed_variable( void * xvar, void * data )
+{
+ struct fixed_variable * var = (struct fixed_variable *)xvar;
+ struct module_t * m = (struct module_t *)data;
+ if ( var->n >= m->num_fixed_variables )
+ m->fixed_variables[ var->n ] = var_get_and_clear_raw( m, var->key );
+}
+
+
+void module_set_fixed_variables( struct module_t * m, int n_variables )
+{
+ /* Reallocate the fixed-variable array to hold 'n_variables' slots,
+ * preserving any existing bindings and loading values for newly added
+ * indices from the module's dynamic variable table.
+ */
+ struct hash * variable_indices;
+ LIST * * fixed_variables = BJAM_MALLOC( n_variables * sizeof( LIST * ) );
+ if ( m->fixed_variables )
+ {
+ memcpy( fixed_variables, m->fixed_variables, m->num_fixed_variables * sizeof( LIST * ) );
+ BJAM_FREE( m->fixed_variables );
+ }
+ m->fixed_variables = fixed_variables;
+ variable_indices = m->class_module
+ ? m->class_module->variable_indices
+ : m->variable_indices;
+ if ( variable_indices )
+ hashenumerate( variable_indices, &load_fixed_variable, m );
+ m->num_fixed_variables = n_variables;
+}
+
+
+int module_get_fixed_var( struct module_t * m_, OBJECT * name )
+{
+ struct fixed_variable * v;
+ struct module_t * m = m_;
+
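+ /* Class instances share the variable index table of their class module,
+ * but only indices below the instance's own num_fixed_variables have
+ * actually been bound for it.
+ */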
+ if ( m->class_module )
+ m = m->class_module;
+
+ if ( !m->variable_indices )
+ return -1;
+
+ v = (struct fixed_variable *)hash_find( m->variable_indices, name );
+ return v && v->n < m_->num_fixed_variables ? v->n : -1;
+}
diff --git a/tools/build/src/engine/modules.h b/tools/build/src/engine/modules.h
new file mode 100644
index 0000000000..1b161c6e55
--- /dev/null
+++ b/tools/build/src/engine/modules.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+#ifndef MODULES_DWA10182001_H
+#define MODULES_DWA10182001_H
+
+#include "lists.h"
+#include "object.h"
+
+typedef struct module_t module_t ;
+struct module_t
+{
+ OBJECT * name;
+ struct hash * rules;
+ struct hash * variables;
+ struct hash * variable_indices;
+ int num_fixed_variables;
+ LIST * * fixed_variables;
+ struct hash * imported_modules;
+ module_t * class_module;
+ struct hash * native_rules;
+ int user_module;
+};
+
+module_t * bindmodule( OBJECT * name );
+module_t * root_module();
+void delete_module( module_t * );
+
+void import_module( LIST * module_names, module_t * target_module );
+LIST * imported_modules( module_t * );
+
+struct hash * demand_rules( module_t * );
+
+void module_bind_variables( module_t * );
+
+/*
+ * After calling module_add_fixed_var, module_set_fixed_variables must be called
+ * before accessing any variables in the module.
+ */
+int module_add_fixed_var( module_t *, OBJECT * name, int * n );
+void module_set_fixed_variables( module_t *, int n );
+
+/*
+ * Returns the index of the variable or -1 if none exists.
+ */
+int module_get_fixed_var( module_t *, OBJECT * name );
+
+void modules_done();
+
+#endif
diff --git a/tools/build/src/engine/modules/order.c b/tools/build/src/engine/modules/order.c
new file mode 100644
index 0000000000..3a83d3895c
--- /dev/null
+++ b/tools/build/src/engine/modules/order.c
@@ -0,0 +1,160 @@
+/* Copyright 2004. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../lists.h"
+#include "../mem.h"
+#include "../native.h"
+#include "../object.h"
+#include "../strings.h"
+#include "../variable.h"
+
+
+/* Use a rather kludgy approach: when we add an order dependency from 'a' to
+ * 'b', we simply append 'b' to the value of variable 'a'.
+ */
+LIST * add_pair( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+ var_set( frame->module, list_item( iter ), list_copy_range( arg, list_next(
+ iter ), end ), VAR_APPEND );
+ return L0;
+}
+
+
+/* Given a list and a value, returns position of that value in the list, or -1
+ * if not found.
+ */
+int list_index( LIST * list, OBJECT * value )
+{
+ int result = 0;
+ LISTITER iter = list_begin( list );
+ LISTITER const end = list_end( list );
+ for ( ; iter != end; iter = list_next( iter ), ++result )
+ if ( object_equal( list_item( iter ), value ) )
+ return result;
+ return -1;
+}
+
+enum colors { white, gray, black };
+
+
+/* Main routine for the topological sort. Calls itself recursively on all
+ * adjacent vertices that have not yet been visited. After they are done,
+ * 'current_vertex' is appended to '*result_ptr'.
+ */
+void do_ts( int * * graph, int current_vertex, int * colors, int * * result_ptr
+ )
+{
+ int i;
+
+ colors[ current_vertex ] = gray;
+ for ( i = 0; graph[ current_vertex ][ i ] != -1; ++i )
+ {
+ int adjacent_vertex = graph[ current_vertex ][ i ];
+ if ( colors[ adjacent_vertex ] == white )
+ do_ts( graph, adjacent_vertex, colors, result_ptr );
+ /* The vertex is either black, in which case we do not have to do
+ * anything, or gray, in which case we have a loop. If we have a loop,
+ * it is not clear what useful diagnostic we can emit, so we emit
+ * nothing.
+ */
+ }
+ colors[ current_vertex ] = black;
+ **result_ptr = current_vertex;
+ ( *result_ptr )++;
+}
+
+
+void topological_sort( int * * graph, int num_vertices, int * result )
+{
+ int i;
+ int * colors = ( int * )BJAM_CALLOC( num_vertices, sizeof( int ) );
+ for ( i = 0; i < num_vertices; ++i )
+ colors[ i ] = white;
+
+ for ( i = 0; i < num_vertices; ++i )
+ if ( colors[ i ] == white )
+ do_ts( graph, i, colors, &result );
+
+ BJAM_FREE( colors );
+}
+
+
+LIST * order( FRAME * frame, int flags )
+{
+ LIST * arg = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ int src;
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+
+ /* We need to create a graph of order dependencies between the passed
+ * objects. We assume there are no duplicates passed to 'add_pair'.
+ */
+ int length = list_length( arg );
+ int * * graph = ( int * * )BJAM_CALLOC( length, sizeof( int * ) );
+ int * order = ( int * )BJAM_MALLOC( ( length + 1 ) * sizeof( int ) );
+
+ for ( src = 0; iter != end; iter = list_next( iter ), ++src )
+ {
+ /* For all objects this one depends upon, add elements to 'graph'. */
+ LIST * dependencies = var_get( frame->module, list_item( iter ) );
+ int index = 0;
+ LISTITER dep_iter = list_begin( dependencies );
+ LISTITER const dep_end = list_end( dependencies );
+
+ graph[ src ] = ( int * )BJAM_CALLOC( list_length( dependencies ) + 1,
+ sizeof( int ) );
+ for ( ; dep_iter != dep_end; dep_iter = list_next( dep_iter ) )
+ {
+ int const dst = list_index( arg, list_item( dep_iter ) );
+ if ( dst != -1 )
+ graph[ src ][ index++ ] = dst;
+ }
+ graph[ src ][ index ] = -1;
+ }
+
+ topological_sort( graph, length, order );
+
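+ /* 'order' now holds each vertex after every vertex it points to in 'graph',
+ * so it is emitted in reverse; the inner loop merely advances 'iter' to the
+ * list element at position order[ index ].
+ */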
+ {
+ int index = length - 1;
+ for ( ; index >= 0; --index )
+ {
+ int i;
+ LISTITER iter = list_begin( arg );
+ LISTITER const end = list_end( arg );
+ for ( i = 0; i < order[ index ]; ++i, iter = list_next( iter ) );
+ result = list_push_back( result, object_copy( list_item( iter ) ) );
+ }
+ }
+
+ /* Clean up */
+ {
+ int i;
+ for ( i = 0; i < length; ++i )
+ BJAM_FREE( graph[ i ] );
+ BJAM_FREE( graph );
+ BJAM_FREE( order );
+ }
+
+ return result;
+}
+
+
+void init_order()
+{
+ {
+ char const * args[] = { "first", "second", 0 };
+ declare_native_rule( "class@order", "add-pair", args, add_pair, 1 );
+ }
+
+ {
+ char const * args[] = { "objects", "*", 0 };
+ declare_native_rule( "class@order", "order", args, order, 1 );
+ }
+}
diff --git a/tools/build/src/engine/modules/path.c b/tools/build/src/engine/modules/path.c
new file mode 100644
index 0000000000..f8dedaccd5
--- /dev/null
+++ b/tools/build/src/engine/modules/path.c
@@ -0,0 +1,25 @@
+/* Copyright Vladimir Prus 2003.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../constants.h"
+#include "../frames.h"
+#include "../lists.h"
+#include "../native.h"
+#include "../timestamp.h"
+
+
+LIST * path_exists( FRAME * frame, int flags )
+{
+ return file_query( list_front( lol_get( frame->args, 0 ) ) ) ?
+ list_new( object_copy( constant_true ) ) : L0;
+}
+
+
+void init_path()
+{
+ char const * args[] = { "location", 0 };
+ declare_native_rule( "path", "exists", args, path_exists, 1 );
+}
diff --git a/tools/build/src/engine/modules/property-set.c b/tools/build/src/engine/modules/property-set.c
new file mode 100644
index 0000000000..21e35d5ab7
--- /dev/null
+++ b/tools/build/src/engine/modules/property-set.c
@@ -0,0 +1,330 @@
+/*
+ * Copyright 2013 Steven Watanabe
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../object.h"
+#include "../lists.h"
+#include "../modules.h"
+#include "../rules.h"
+#include "../variable.h"
+#include "../native.h"
+#include "../compile.h"
+#include "../mem.h"
+#include "../constants.h"
+#include "string.h"
+
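+/* Cache of all property-set objects created so far. Maps a sorted,
+ * duplicate-free property LIST to the name of the property-set object created
+ * for it, using a chained hash table whose bucket count doubles once the
+ * number of entries reaches the number of buckets.
+ */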
+struct ps_map_entry
+{
+ struct ps_map_entry * next;
+ LIST * key;
+ OBJECT * value;
+};
+
+struct ps_map
+{
+ struct ps_map_entry * * table;
+ size_t table_size;
+ size_t num_elems;
+};
+
+static unsigned list_hash(LIST * key)
+{
+ unsigned int hash = 0;
+ LISTITER iter = list_begin( key ), end = list_end( key );
+ for ( ; iter != end; ++iter )
+ {
+ hash = hash * 2147059363 + object_hash( list_item( iter ) );
+ }
+ return hash;
+}
+
+static int list_equal( LIST * lhs, LIST * rhs )
+{
+ LISTITER lhs_iter, lhs_end, rhs_iter;
+ if ( list_length( lhs ) != list_length( rhs ) )
+ {
+ return 0;
+ }
+ lhs_iter = list_begin( lhs );
+ lhs_end = list_end( lhs );
+ rhs_iter = list_begin( rhs );
+ for ( ; lhs_iter != lhs_end; ++lhs_iter, ++rhs_iter )
+ {
+ if ( ! object_equal( list_item( lhs_iter ), list_item( rhs_iter ) ) )
+ {
+ return 0;
+ }
+ }
+ return 1;
+}
+
+static void ps_map_init( struct ps_map * map )
+{
+ size_t i;
+ map->table_size = 2;
+ map->num_elems = 0;
+ map->table = BJAM_MALLOC( map->table_size * sizeof( struct ps_map_entry * ) );
+ for ( i = 0; i < map->table_size; ++i )
+ {
+ map->table[ i ] = NULL;
+ }
+}
+
+static void ps_map_destroy( struct ps_map * map )
+{
+ size_t i;
+ for ( i = 0; i < map->table_size; ++i )
+ {
+ struct ps_map_entry * pos;
+ for ( pos = map->table[ i ]; pos; )
+ {
+ struct ps_map_entry * tmp = pos->next;
+ BJAM_FREE( pos );
+ pos = tmp;
+ }
+ }
+ BJAM_FREE( map->table );
+}
+
+static void ps_map_rehash( struct ps_map * map )
+{
+ struct ps_map old = *map;
+ size_t i;
+ map->table = BJAM_MALLOC( map->table_size * 2 * sizeof( struct ps_map_entry * ) );
+ map->table_size *= 2;
+ for ( i = 0; i < map->table_size; ++i )
+ {
+ map->table[ i ] = NULL;
+ }
+ for ( i = 0; i < old.table_size; ++i )
+ {
+ struct ps_map_entry * pos;
+ for ( pos = old.table[ i ]; pos; )
+ {
+ struct ps_map_entry * tmp = pos->next;
+
+ unsigned hash_val = list_hash( pos->key );
+ unsigned bucket = hash_val % map->table_size;
+ pos->next = map->table[ bucket ];
+ map->table[ bucket ] = pos;
+
+ pos = tmp;
+ }
+ }
+ BJAM_FREE( old.table );
+}
+
+static struct ps_map_entry * ps_map_insert(struct ps_map * map, LIST * key)
+{
+ unsigned hash_val = list_hash( key );
+ unsigned bucket = hash_val % map->table_size;
+ struct ps_map_entry * pos;
+ for ( pos = map->table[bucket]; pos ; pos = pos->next )
+ {
+ if ( list_equal( pos->key, key ) )
+ return pos;
+ }
+
+ if ( map->num_elems >= map->table_size )
+ {
+ ps_map_rehash( map );
+ bucket = hash_val % map->table_size;
+ }
+ pos = BJAM_MALLOC( sizeof( struct ps_map_entry ) );
+ pos->next = map->table[bucket];
+ pos->key = key;
+ pos->value = 0;
+ map->table[bucket] = pos;
+ ++map->num_elems;
+ return pos;
+}
+
+static struct ps_map all_property_sets;
+
+LIST * property_set_create( FRAME * frame, int flags )
+{
+ LIST * properties = lol_get( frame->args, 0 );
+ LIST * sorted = list_sort( properties );
+ LIST * unique = list_unique( sorted );
+ struct ps_map_entry * pos = ps_map_insert( &all_property_sets, unique );
+ list_free( sorted );
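+ /* A cache hit simply returns the name of the existing object. Otherwise a
+ * new property-set instance is created through the 'new' rule, its raw
+ * properties are stored in its 'self.raw' variable and every property is
+ * checked for the '<feature>value' form.
+ */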
+ if ( pos->value )
+ {
+ list_free( unique );
+ return list_new( object_copy( pos->value ) );
+ }
+ else
+ {
+ OBJECT * rulename = object_new( "new" );
+ OBJECT * varname = object_new( "self.raw" );
+ LIST * val = call_rule( rulename, frame,
+ list_new( object_new( "property-set" ) ), 0 );
+ LISTITER iter, end;
+ object_free( rulename );
+ pos->value = list_front( val );
+ var_set( bindmodule( pos->value ), varname, unique, VAR_SET );
+ object_free( varname );
+
+ for ( iter = list_begin( unique ), end = list_end( unique ); iter != end; ++iter )
+ {
+ const char * str = object_str( list_item( iter ) );
+ if ( str[ 0 ] != '<' || ! strchr( str, '>' ) )
+ {
+ string message[ 1 ];
+ string_new( message );
+ string_append( message, "Invalid property: '" );
+ string_append( message, str );
+ string_append( message, "'" );
+ rulename = object_new( "errors.error" );
+ call_rule( rulename, frame,
+ list_new( object_new( message->value ) ), 0 );
+ /* unreachable */
+ string_free( message );
+ object_free( rulename );
+ }
+ }
+
+ return val;
+ }
+}
+
+/* binary search for the property value */
+LIST * property_set_get( FRAME * frame, int flags )
+{
+ OBJECT * varname = object_new( "self.raw" );
+ LIST * props = var_get( frame->module, varname );
+ const char * name = object_str( list_front( lol_get( frame->args, 0 ) ) );
+ size_t name_len = strlen( name );
+ LISTITER begin, end;
+ LIST * result = L0;
+ object_free( varname );
+
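+ /* 'self.raw' was stored sorted by property_set_create(), so a binary search
+ * on the '<feature>' prefix is valid. Once a match is found, the whole run of
+ * properties naming the same feature is located and their values (the text
+ * following the feature name) are returned.
+ */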
+ /* Assumes random access */
+ begin = list_begin( props ), end = list_end( props );
+
+ while ( 1 )
+ {
+ ptrdiff_t diff = (end - begin);
+ LISTITER mid = begin + diff / 2;
+ int res;
+ if ( diff == 0 )
+ {
+ return L0;
+ }
+ res = strncmp( object_str( list_item( mid ) ), name, name_len );
+ if ( res < 0 )
+ {
+ begin = mid + 1;
+ }
+ else if ( res > 0 )
+ {
+ end = mid;
+ }
+ else /* We've found the property */
+ {
+ /* Find the beginning of the group */
+ LISTITER tmp = mid;
+ while ( tmp > begin )
+ {
+ --tmp;
+ res = strncmp( object_str( list_item( tmp ) ), name, name_len );
+ if ( res != 0 )
+ {
+ ++tmp;
+ break;
+ }
+ }
+ begin = tmp;
+ /* Find the end of the group */
+ tmp = mid + 1;
+ while ( tmp < end )
+ {
+ res = strncmp( object_str( list_item( tmp ) ), name, name_len );
+ if ( res != 0 ) break;
+ ++tmp;
+ }
+ end = tmp;
+ break;
+ }
+ }
+
+ for ( ; begin != end; ++begin )
+ {
+ result = list_push_back( result,
+ object_new( object_str( list_item( begin ) ) + name_len ) );
+ }
+
+ return result;
+}
+
+/* binary search for the property value */
+LIST * property_set_contains_features( FRAME * frame, int flags )
+{
+ OBJECT * varname = object_new( "self.raw" );
+ LIST * props = var_get( frame->module, varname );
+ LIST * features = lol_get( frame->args, 0 );
+ LIST * result = L0;
+ LISTITER features_iter = list_begin( features );
+ LISTITER features_end = list_end( features ) ;
+ object_free( varname );
+
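+ /* Binary-search the sorted 'self.raw' list for each requested feature.
+ * Return an empty list as soon as one feature is missing, and 'true' if all
+ * of them are present.
+ */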
+ for ( ; features_iter != features_end; ++features_iter )
+ {
+ const char * name = object_str( list_item( features_iter ) );
+ size_t name_len = strlen( name );
+ LISTITER begin, end;
+ /* Assumes random access */
+ begin = list_begin( props ), end = list_end( props );
+
+ while ( 1 )
+ {
+ ptrdiff_t diff = (end - begin);
+ LISTITER mid = begin + diff / 2;
+ int res;
+ if ( diff == 0 )
+ {
+ /* The feature is missing */
+ return L0;
+ }
+ res = strncmp( object_str( list_item( mid ) ), name, name_len );
+ if ( res < 0 )
+ {
+ begin = mid + 1;
+ }
+ else if ( res > 0 )
+ {
+ end = mid;
+ }
+ else /* We've found the property */
+ {
+ break;
+ }
+ }
+ }
+ return list_new( object_copy( constant_true ) );
+}
+
+void init_property_set()
+{
+ {
+ char const * args[] = { "raw-properties", "*", 0 };
+ declare_native_rule( "property-set", "create", args, property_set_create, 1 );
+ }
+ {
+ char const * args[] = { "feature", 0 };
+ declare_native_rule( "class@property-set", "get", args, property_set_get, 1 );
+ }
+ {
+ char const * args[] = { "features", "*", 0 };
+ declare_native_rule( "class@property-set", "contains-features", args, property_set_contains_features, 1 );
+ }
+ ps_map_init( &all_property_sets );
+}
+
+void property_set_done()
+{
+ ps_map_destroy( &all_property_sets );
+}
diff --git a/tools/build/v2/engine/modules/readme.txt b/tools/build/src/engine/modules/readme.txt
index 2edf6e17f8..2edf6e17f8 100644
--- a/tools/build/v2/engine/modules/readme.txt
+++ b/tools/build/src/engine/modules/readme.txt
diff --git a/tools/build/src/engine/modules/regex.c b/tools/build/src/engine/modules/regex.c
new file mode 100644
index 0000000000..d9f8177bfc
--- /dev/null
+++ b/tools/build/src/engine/modules/regex.c
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2003. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../mem.h"
+#include "../native.h"
+#include "../strings.h"
+#include "../subst.h"
+
+/*
+rule split ( string separator )
+{
+ local result ;
+ local s = $(string) ;
+
+ local match = 1 ;
+ while $(match)
+ {
+ match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
+ if $(match)
+ {
+ match += "" ; # in case 3rd item was empty - works around MATCH bug
+ result = $(match[3]) $(result) ;
+ s = $(match[1]) ;
+ }
+ }
+ return $(s) $(result) ;
+}
+*/
+
+LIST * regex_split( FRAME * frame, int flags )
+{
+ LIST * args = lol_get( frame->args, 0 );
+ OBJECT * s;
+ OBJECT * separator;
+ regexp * re;
+ const char * pos;
+ LIST * result = L0;
+ LISTITER iter = list_begin( args );
+ s = list_item( iter );
+ separator = list_item( list_next( iter ) );
+
+ re = regex_compile( separator );
+
+ pos = object_str( s );
+ while ( regexec( re, pos ) )
+ {
+ result = list_push_back( result, object_new_range( pos, re->startp[ 0 ] - pos ) );
+ pos = re->endp[ 0 ];
+ }
+
+ result = list_push_back( result, object_new( pos ) );
+
+ return result;
+}
+
+/*
+rule replace (
+ string # The string to modify.
+ match # The characters to replace.
+ replacement # The string to replace with.
+ )
+{
+ local result = "" ;
+ local parts = 1 ;
+ while $(parts)
+ {
+ parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
+ if $(parts)
+ {
+ parts += "" ;
+ result = "$(replacement)$(parts[3])$(result)" ;
+ string = $(parts[1]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(string)$(result)" ;
+ return $(result) ;
+}
+*/
+
+LIST * regex_replace( FRAME * frame, int flags )
+{
+ LIST * args = lol_get( frame->args, 0 );
+ OBJECT * s;
+ OBJECT * match;
+ OBJECT * replacement;
+ regexp * re;
+ const char * pos;
+ string buf[ 1 ];
+ LIST * result;
+ LISTITER iter = list_begin( args );
+ s = list_item( iter );
+ iter = list_next( iter );
+ match = list_item( iter );
+ iter = list_next( iter );
+ replacement = list_item(iter );
+
+ re = regex_compile( match );
+
+ string_new( buf );
+
+ pos = object_str( s );
+ while ( regexec( re, pos ) )
+ {
+ string_append_range( buf, pos, re->startp[ 0 ] );
+ string_append( buf, object_str( replacement ) );
+ pos = re->endp[ 0 ];
+ }
+ string_append( buf, pos );
+
+ result = list_new( object_new( buf->value ) );
+
+ string_free( buf );
+
+ return result;
+}
+
+/*
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+*/
+
+LIST * regex_transform( FRAME * frame, int flags )
+{
+ LIST * const l = lol_get( frame->args, 0 );
+ LIST * const pattern = lol_get( frame->args, 1 );
+ LIST * const indices_list = lol_get( frame->args, 2 );
+ int * indices = 0;
+ int size;
+ LIST * result = L0;
+
+ if ( !list_empty( indices_list ) )
+ {
+ int * p;
+ LISTITER iter = list_begin( indices_list );
+ LISTITER const end = list_end( indices_list );
+ size = list_length( indices_list );
+ indices = (int *)BJAM_MALLOC( size * sizeof( int ) );
+ for ( p = indices; iter != end; iter = list_next( iter ) )
+ *p++ = atoi( object_str( list_item( iter ) ) );
+ }
+ else
+ {
+ size = 1;
+ indices = (int *)BJAM_MALLOC( sizeof( int ) );
+ *indices = 1;
+ }
+
+ {
+ /* Result is cached and intentionally never freed */
+ regexp * const re = regex_compile( list_front( pattern ) );
+
+ LISTITER iter = list_begin( l );
+ LISTITER const end = list_end( l );
+
+ string buf[ 1 ];
+ string_new( buf );
+
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ if ( regexec( re, object_str( list_item( iter ) ) ) )
+ {
+ int i = 0;
+ for ( ; i < size; ++i )
+ {
+ int const index = indices[ i ];
+ /* Skip empty submatches. It is not clear this is right in all cases,
+ * but it is surely right for the case this routine is optimized for --
+ * header scanning.
+ */
+ if ( re->startp[ index ] != re->endp[ index ] )
+ {
+ string_append_range( buf, re->startp[ index ],
+ re->endp[ index ] );
+ result = list_push_back( result, object_new( buf->value
+ ) );
+ string_truncate( buf, 0 );
+ }
+ }
+ }
+ }
+ string_free( buf );
+ }
+
+ BJAM_FREE( indices );
+ return result;
+}
+
+
+void init_regex()
+{
+ {
+ char const * args[] = { "string", "separator", 0 };
+ declare_native_rule( "regex", "split", args, regex_split, 1 );
+ }
+ {
+ char const * args[] = { "string", "match", "replacement", 0 };
+ declare_native_rule( "regex", "replace", args, regex_replace, 1 );
+ }
+ {
+ char const * args[] = { "list", "*", ":", "pattern", ":", "indices", "*", 0 };
+ declare_native_rule( "regex", "transform", args, regex_transform, 2 );
+ }
+}
diff --git a/tools/build/src/engine/modules/sequence.c b/tools/build/src/engine/modules/sequence.c
new file mode 100644
index 0000000000..08ed305994
--- /dev/null
+++ b/tools/build/src/engine/modules/sequence.c
@@ -0,0 +1,97 @@
+/*
+ * Copyright Vladimir Prus 2003.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "../native.h"
+#include "../object.h"
+#include "../lists.h"
+#include "../compile.h"
+
+#include <stdlib.h>
+
+
+#ifndef max
+# define max(a,b) ((a)>(b)?(a):(b))
+#endif
+
+
+LIST * sequence_select_highest_ranked( FRAME * frame, int flags )
+{
+ /* Returns all elements of 'elements' for which the corresponding element
+ * in the parallel list 'rank' equals the maximum value in 'rank'.
+ */
+
+ LIST * const elements = lol_get( frame->args, 0 );
+ LIST * const rank = lol_get( frame->args, 1 );
+
+ LIST * result = L0;
+ int highest_rank = -1;
+
+ {
+ LISTITER iter = list_begin( rank );
+ LISTITER const end = list_end( rank );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ int const current = atoi( object_str( list_item( iter ) ) );
+ highest_rank = max( highest_rank, current );
+ }
+ }
+
+ {
+ LISTITER iter = list_begin( rank );
+ LISTITER const end = list_end( rank );
+ LISTITER elements_iter = list_begin( elements );
+ LISTITER const elements_end = list_end( elements );
+ for ( ; iter != end; iter = list_next( iter ), elements_iter =
+ list_next( elements_iter ) )
+ if ( atoi( object_str( list_item( iter ) ) ) == highest_rank )
+ result = list_push_back( result, object_copy( list_item(
+ elements_iter ) ) );
+ }
+
+ return result;
+}
+
+LIST * sequence_transform( FRAME * frame, int flags )
+{
+ LIST * function = lol_get( frame->args, 0 );
+ LIST * sequence = lol_get( frame->args, 1 );
+ LIST * result = L0;
+ OBJECT * function_name = list_front( function );
+ LISTITER args_begin = list_next( list_begin( function ) ), args_end = list_end( function );
+ LISTITER iter = list_begin( sequence ), end = list_end( sequence );
+ RULE * rule = bindrule( function_name, frame->prev->module );
+
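+ /* Call the bound rule once per sequence element. Each call receives any
+ * extra arguments bundled in 'function' followed by the element itself, and
+ * the individual results are concatenated into 'result'.
+ */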
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ FRAME inner[ 1 ];
+
+ frame_init( inner );
+ inner->prev = frame;
+ inner->prev_user = frame->prev_user;
+ inner->module = frame->prev->module;
+
+ lol_add( inner->args, list_push_back( list_copy_range( function, args_begin, args_end ), object_copy( list_item( iter ) ) ) );
+ result = list_append( result, evaluate_rule( rule, function_name, inner ) );
+
+ frame_free( inner );
+ }
+
+ return result;
+}
+
+void init_sequence()
+{
+ {
+ char const * args[] = { "elements", "*", ":", "rank", "*", 0 };
+ declare_native_rule( "sequence", "select-highest-ranked", args,
+ sequence_select_highest_ranked, 1 );
+ }
+ {
+ char const * args[] = { "function", "+", ":", "sequence", "*", 0 };
+ declare_native_rule( "sequence", "transform", args,
+ sequence_transform, 1 );
+ }
+}
diff --git a/tools/build/v2/engine/modules/set.c b/tools/build/src/engine/modules/set.c
index 77a314d57d..77a314d57d 100644
--- a/tools/build/v2/engine/modules/set.c
+++ b/tools/build/src/engine/modules/set.c
diff --git a/tools/build/src/engine/native.c b/tools/build/src/engine/native.c
new file mode 100644
index 0000000000..68828aa315
--- /dev/null
+++ b/tools/build/src/engine/native.c
@@ -0,0 +1,34 @@
+/* Copyright 2003. Vladimir Prus
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "native.h"
+
+#include "hash.h"
+
+#include <assert.h>
+
+
+void declare_native_rule( char const * module, char const * rule,
+ char const * * args, LIST * (*f)( FRAME *, int ), int version )
+{
+ OBJECT * const module_obj = module ? object_new( module ) : 0 ;
+ module_t * m = bindmodule( module_obj );
+ if ( module_obj )
+ object_free( module_obj );
+ if ( !m->native_rules )
+ m->native_rules = hashinit( sizeof( native_rule_t ), "native rules" );
+
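+ /* Enter the rule into the module's native-rule table and wrap the C
+ * callback, together with its argument specification, in a FUNCTION.
+ */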
+ {
+ OBJECT * const name = object_new( rule );
+ int found;
+ native_rule_t * const np = (native_rule_t *)hash_insert(
+ m->native_rules, name, &found );
+ np->name = name;
+ assert( !found );
+ np->procedure = function_builtin( f, 0, args );
+ np->version = version;
+ }
+}
diff --git a/tools/build/src/engine/native.h b/tools/build/src/engine/native.h
new file mode 100644
index 0000000000..6d38d01e02
--- /dev/null
+++ b/tools/build/src/engine/native.h
@@ -0,0 +1,34 @@
+/* Copyright 2003. David Abrahams
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef NATIVE_H_VP_2003_12_09
+#define NATIVE_H_VP_2003_12_09
+
+#include "function.h"
+#include "frames.h"
+#include "lists.h"
+#include "object.h"
+
+typedef struct native_rule_t
+{
+ OBJECT * name;
+ FUNCTION * procedure;
+
+ /* Version of the interface that the native rule provides. It is possible
+ * that we will want to change the set of parameters for an existing native
+ * rule. In that case, the version number should be incremented so that
+ * Boost.Build can check for the version it relies on.
+ *
+ * Versions are numbered from 1.
+ */
+ int version;
+} native_rule_t;
+/* MSVC debugger gets confused unless the native_rule_t typedef is provided. */
+
+void declare_native_rule( char const * module, char const * rule,
+ char const * * args, LIST * (*f)( FRAME *, int ), int version );
+
+#endif
diff --git a/tools/build/src/engine/object.c b/tools/build/src/engine/object.c
new file mode 100644
index 0000000000..ef46e4ae46
--- /dev/null
+++ b/tools/build/src/engine/object.c
@@ -0,0 +1,394 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * object.c - object manipulation routines
+ *
+ * External functions:
+ * object_new() - create an object from a string
+ * object_new_range() - create an object from a string of given length
+ * object_copy() - return a copy of an object
+ * object_free() - free an object
+ * object_str() - get the string value of an object
+ * object_done() - free string tables
+ *
+ * This implementation builds a hash table of all strings, so that multiple
+ * calls of object_new() on the same string allocate memory for the string once.
+ * Strings are never actually freed.
+ */
+
+#include "jam.h"
+#include "object.h"
+
+#include <assert.h>
+#include <stddef.h>
+#include <stdlib.h>
+
+
+#define OBJECT_MAGIC 0xa762e0e3u
+
+#ifndef object_copy
+
+struct hash_header
+{
+#ifndef NDEBUG
+ unsigned int magic;
+#endif
+ unsigned int hash;
+ struct hash_item * next;
+};
+
+#endif
+
+struct hash_item
+{
+ struct hash_header header;
+ char data[ 1 ];
+};
+
+#define ALLOC_ALIGNMENT (sizeof(struct hash_item) - sizeof(struct hash_header))
+
+typedef struct string_set
+{
+ unsigned int num;
+ unsigned int size;
+ struct hash_item * * data;
+} string_set;
+
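+/* Global table of interned strings plus statistics reported by object_done():
+ * total bytes of string storage and the number of OBJECT references created
+ * vs. released.
+ */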
+static string_set strhash;
+static int strtotal = 0;
+static int strcount_in = 0;
+static int strcount_out = 0;
+
+
+/*
+ * Immortal string allocator implementation speeds string allocation and cuts
+ * down on internal fragmentation.
+ */
+
+#define STRING_BLOCK 4096
+typedef struct strblock
+{
+ struct strblock * next;
+ char data[ STRING_BLOCK ];
+} strblock;
+
+static strblock * strblock_chain = 0;
+
+/* Storage remaining in the current strblock */
+static char * storage_start = 0;
+static char * storage_finish = 0;
+
+
+/*
+ * allocate() - Allocate n bytes of immortal string storage.
+ */
+
+static char * allocate( size_t n )
+{
+#ifdef BJAM_NEWSTR_NO_ALLOCATE
+ return (char *)BJAM_MALLOC( n );
+#else
+ /* See if we can grab storage from an existing block. */
+ size_t remaining = storage_finish - storage_start;
+ n = ( ( n + ALLOC_ALIGNMENT - 1 ) / ALLOC_ALIGNMENT ) * ALLOC_ALIGNMENT;
+ if ( remaining >= n )
+ {
+ char * result = storage_start;
+ storage_start += n;
+ return result;
+ }
+ else /* Must allocate a new block. */
+ {
+ strblock * new_block;
+ size_t nalloc = n;
+ if ( nalloc < STRING_BLOCK )
+ nalloc = STRING_BLOCK;
+
+ /* Allocate a new block and link into the chain. */
+ new_block = (strblock *)BJAM_MALLOC( offsetof( strblock, data[ 0 ] ) +
+ nalloc * sizeof( new_block->data[ 0 ] ) );
+ if ( new_block == 0 )
+ return 0;
+ new_block->next = strblock_chain;
+ strblock_chain = new_block;
+
+ /* Take future allocations out of the larger remaining space. */
+ if ( remaining < nalloc - n )
+ {
+ storage_start = new_block->data + n;
+ storage_finish = new_block->data + nalloc;
+ }
+ return new_block->data;
+ }
+#endif
+}
+
+
+static unsigned int hash_keyval( char const * key, int const size )
+{
+ unsigned int const magic = 2147059363;
+ unsigned int hash = 0;
+
+ unsigned int i;
+ for ( i = 0; i < size / sizeof( unsigned int ); ++i )
+ {
+ unsigned int val;
+ memcpy( &val, key, sizeof( unsigned int ) );
+ hash = hash * magic + val;
+ key += sizeof( unsigned int );
+ }
+
+ {
+ unsigned int val = 0;
+ memcpy( &val, key, size % sizeof( unsigned int ) );
+ hash = hash * magic + val;
+ }
+
+ return hash + ( hash >> 17 );
+}
+
+
+static void string_set_init( string_set * set )
+{
+ set->size = 0;
+ set->num = 4;
+ set->data = (struct hash_item * *)BJAM_MALLOC( set->num * sizeof( struct hash_item * ) );
+ memset( set->data, 0, set->num * sizeof( struct hash_item * ) );
+}
+
+
+static void string_set_done( string_set * set )
+{
+ BJAM_FREE( set->data );
+}
+
+
+static void string_set_resize( string_set * set )
+{
+ unsigned i;
+ string_set new_set;
+ new_set.num = set->num * 2;
+ new_set.size = set->size;
+ new_set.data = (struct hash_item * *)BJAM_MALLOC( sizeof( struct hash_item *
+ ) * new_set.num );
+ memset( new_set.data, 0, sizeof( struct hash_item * ) * new_set.num );
+ for ( i = 0; i < set->num; ++i )
+ {
+ while ( set->data[ i ] )
+ {
+ struct hash_item * temp = set->data[ i ];
+ unsigned pos = temp->header.hash % new_set.num;
+ set->data[ i ] = temp->header.next;
+ temp->header.next = new_set.data[ pos ];
+ new_set.data[ pos ] = temp;
+ }
+ }
+ BJAM_FREE( set->data );
+ *set = new_set;
+}
+
+
+static char const * string_set_insert( string_set * set, char const * string,
+ int const size )
+{
+ unsigned hash = hash_keyval( string, size );
+ unsigned pos = hash % set->num;
+
+ struct hash_item * result;
+
+ for ( result = set->data[ pos ]; result; result = result->header.next )
+ if ( !strncmp( result->data, string, size ) && !result->data[ size ] )
+ return result->data;
+
+ if ( set->size >= set->num )
+ {
+ string_set_resize( set );
+ pos = hash % set->num;
+ }
+
+ result = (struct hash_item *)allocate( sizeof( struct hash_header ) + size +
+ 1 );
+ result->header.hash = hash;
+ result->header.next = set->data[ pos ];
+#ifndef NDEBUG
+ result->header.magic = OBJECT_MAGIC;
+#endif
+ memcpy( result->data, string, size );
+ result->data[ size ] = '\0';
+ assert( hash_keyval( result->data, size ) == result->header.hash );
+ set->data[ pos ] = result;
+ strtotal += size + 1;
+ ++set->size;
+
+ return result->data;
+}
+
+
+static struct hash_item * object_get_item( OBJECT * obj )
+{
+ return (struct hash_item *)( (char *)obj - offsetof( struct hash_item, data
+ ) );
+}
+
+
+static void object_validate( OBJECT * obj )
+{
+ assert( obj );
+ assert( object_get_item( obj )->header.magic == OBJECT_MAGIC );
+}
+
+
+/*
+ * object_new_range() - create an object from a string of given length
+ */
+
+OBJECT * object_new_range( char const * const string, int const size )
+{
+ ++strcount_in;
+
+#ifdef BJAM_NO_MEM_CACHE
+ {
+ struct hash_item * const m = (struct hash_item *)BJAM_MALLOC( sizeof(
+ struct hash_header ) + size + 1 );
+ strtotal += size + 1;
+ memcpy( m->data, string, size );
+ m->data[ size ] = '\0';
+ m->header.magic = OBJECT_MAGIC;
+ return (OBJECT *)m->data;
+ }
+#else
+ if ( !strhash.data )
+ string_set_init( &strhash );
+ return (OBJECT *)string_set_insert( &strhash, string, size );
+#endif
+}
+
+
+/*
+ * object_new() - create an object from a string
+ */
+
+OBJECT * object_new( char const * const string )
+{
+ return object_new_range( string, strlen( string ) );
+}
+
+
+#ifndef object_copy
+
+/*
+ * object_copy() - return a copy of an object
+ */
+
+OBJECT * object_copy( OBJECT * obj )
+{
+ object_validate( obj );
+#ifdef BJAM_NO_MEM_CACHE
+ return object_new( object_str( obj ) );
+#else
+ ++strcount_in;
+ return obj;
+#endif
+}
+
+
+/*
+ * object_free() - free an object
+ */
+
+void object_free( OBJECT * obj )
+{
+ object_validate( obj );
+#ifdef BJAM_NO_MEM_CACHE
+ BJAM_FREE( object_get_item( obj ) );
+#endif
+ ++strcount_out;
+}
+
+
+/*
+ * object_str() - return the OBJECT's internal C string
+ */
+
+char const * object_str( OBJECT * obj )
+{
+ object_validate( obj );
+ return (char const *)obj;
+}
+
+
+/*
+ * object_equal() - compare two objects
+ */
+
+int object_equal( OBJECT * lhs, OBJECT * rhs )
+{
+ object_validate( lhs );
+ object_validate( rhs );
+#ifdef BJAM_NO_MEM_CACHE
+ return !strcmp( object_str( lhs ), object_str( rhs ) );
+#else
+ assert( ( lhs == rhs ) == !strcmp( object_str( lhs ), object_str( rhs ) ) );
+ return lhs == rhs;
+#endif
+}
+
+
+/*
+ * object_hash() - returns the hash value of an object
+ */
+
+unsigned int object_hash( OBJECT * obj )
+{
+ object_validate( obj );
+#ifdef BJAM_NO_MEM_CACHE
+ return hash_keyval( object_str( obj ), strlen( object_str( obj ) ) );
+#else
+ return object_get_item( obj )->header.hash;
+#endif
+}
+
+#endif
+
+/*
+ * object_done() - free string tables.
+ */
+
+void object_done()
+{
+#ifdef BJAM_NEWSTR_NO_ALLOCATE
+ unsigned i;
+ for ( i = 0; i < strhash.num; ++i )
+ {
+ while ( strhash.data[ i ] )
+ {
+ struct hash_item * item = strhash.data[ i ];
+ strhash.data[ i ] = item->header.next;
+ BJAM_FREE( item );
+ }
+ }
+#else
+ /* Reclaim string blocks. */
+ while ( strblock_chain )
+ {
+ strblock * const n = strblock_chain->next;
+ BJAM_FREE( strblock_chain );
+ strblock_chain = n;
+ }
+#endif
+
+ string_set_done( &strhash );
+
+ if ( DEBUG_MEM )
+ {
+ printf( "%dK in strings\n", strtotal / 1024 );
+ if ( strcount_in != strcount_out )
+ printf( "--- %d strings of %d dangling\n", strcount_in -
+ strcount_out, strcount_in );
+ }
+}
diff --git a/tools/build/src/engine/object.h b/tools/build/src/engine/object.h
new file mode 100644
index 0000000000..cabb9f6f20
--- /dev/null
+++ b/tools/build/src/engine/object.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2011 Steven Watanabe
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * object.h - object manipulation routines
+ */
+
+#ifndef BOOST_JAM_OBJECT_H
+#define BOOST_JAM_OBJECT_H
+
+typedef struct _object OBJECT;
+
+OBJECT * object_new( char const * const );
+OBJECT * object_new_range( char const * const, int const size );
+void object_done( void );
+
+#if defined(NDEBUG) && !defined(BJAM_NO_MEM_CACHE)
+
+struct hash_header
+{
+ unsigned int hash;
+ struct hash_item * next;
+};
+
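+/* With the string cache enabled, equal strings are represented by the same
+ * OBJECT pointer, so these operations reduce to trivial pointer and hash
+ * header manipulation.
+ */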
+#define object_str( obj ) ((char const *)(obj))
+#define object_copy( obj ) (obj)
+#define object_free( obj ) ((void)0)
+#define object_equal( lhs, rhs ) ((lhs) == (rhs))
+#define object_hash( obj ) (((struct hash_header *)((char *)(obj) - sizeof(struct hash_header)))->hash)
+
+#else
+
+char const * object_str ( OBJECT * );
+OBJECT * object_copy ( OBJECT * );
+void object_free ( OBJECT * );
+int object_equal( OBJECT *, OBJECT * );
+unsigned int object_hash ( OBJECT * );
+
+#endif
+
+#endif
diff --git a/tools/build/v2/engine/option.c b/tools/build/src/engine/option.c
index d25e5e8ad1..d25e5e8ad1 100644
--- a/tools/build/v2/engine/option.c
+++ b/tools/build/src/engine/option.c
diff --git a/tools/build/src/engine/option.h b/tools/build/src/engine/option.h
new file mode 100644
index 0000000000..7c9c7479b7
--- /dev/null
+++ b/tools/build/src/engine/option.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * option.h - command line option processing
+ *
+ * {o >o
+ * \ -) "Command line option."
+ */
+
+typedef struct bjam_option
+{
+ char flag; /* filled in by getoption() */
+ char * val; /* set to random address if true */
+} bjam_option;
+
+#define N_OPTS 256
+
+int getoptions( int argc, char * * argv, char * opts, bjam_option * optv );
+char * getoptval( bjam_option * optv, char opt, int subopt );
diff --git a/tools/build/src/engine/output.c b/tools/build/src/engine/output.c
new file mode 100644
index 0000000000..eaaee434bd
--- /dev/null
+++ b/tools/build/src/engine/output.c
@@ -0,0 +1,98 @@
+/*
+ Copyright 2007 Rene Rivera
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#include "jam.h"
+#include "output.h"
+
+#include <stdio.h>
+
+
+#define bjam_out (stdout)
+#define bjam_err (stderr)
+
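+/* Copy 'data' to 'io', skipping any carriage return characters. */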
+static void out_( char const * data, FILE * const io )
+{
+ while ( *data )
+ {
+ size_t const len = strcspn( data, "\r" );
+ data += fwrite( data, 1, len, io );
+ if ( *data == '\r' ) ++data;
+ }
+}
+
+
+void out_action
+(
+ char const * const action,
+ char const * const target,
+ char const * const command,
+ char const * const out_data,
+ char const * const err_data,
+ int const exit_reason
+)
+{
+ /* Print out the action + target line. If the action is quiet, the action
+ * should be null.
+ */
+ if ( action )
+ fprintf( bjam_out, "%s %s\n", action, target );
+
+ /* Print out the command executed if given -d+2. */
+ if ( DEBUG_EXEC )
+ {
+ fputs( command, bjam_out );
+ fputc( '\n', bjam_out );
+ }
+
+ /* Print out the command executed to the command stream. */
+ if ( globs.cmdout )
+ fputs( command, globs.cmdout );
+
+ /* If the process expired, make the user aware with an explicit message,
+ * but do this only for non-quiet actions.
+ */
+ if ( exit_reason == EXIT_TIMEOUT && action )
+ fprintf( bjam_out, "%ld second time limit exceeded\n", globs.timeout );
+
+ /* Print out the command output if requested, or if the program failed,
+ * but only for non-quiet actions.
+ */
+ if ( action || exit_reason != EXIT_OK )
+ {
+ if ( out_data &&
+ ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) ||
+ ( globs.pipe_action == 0 ) ) )
+ out_( out_data, bjam_out );
+ if ( err_data && ( globs.pipe_action & 2 /* STDERR_FILENO */ ) )
+ out_( err_data, bjam_err );
+ }
+
+ fflush( bjam_out );
+ fflush( bjam_err );
+ fflush( globs.cmdout );
+}
+
+
+OBJECT * outf_int( int const value )
+{
+ char buffer[ 50 ];
+ sprintf( buffer, "%i", value );
+ return object_new( buffer );
+}
+
+
+OBJECT * outf_double( double const value )
+{
+ char buffer[ 50 ];
+ sprintf( buffer, "%f", value );
+ return object_new( buffer );
+}
+
+
+OBJECT * outf_time( timestamp const * const time )
+{
+ return object_new( timestamp_str( time ) );
+}
diff --git a/tools/build/src/engine/output.h b/tools/build/src/engine/output.h
new file mode 100644
index 0000000000..186e867f69
--- /dev/null
+++ b/tools/build/src/engine/output.h
@@ -0,0 +1,30 @@
+/*
+ Copyright 2007 Rene Rivera
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+*/
+
+#ifndef BJAM_OUTPUT_H
+#define BJAM_OUTPUT_H
+
+#include "object.h"
+#include "timestamp.h"
+
+#define EXIT_OK 0
+#define EXIT_FAIL 1
+#define EXIT_TIMEOUT 2
+
+void out_action(
+ char const * const action,
+ char const * const target,
+ char const * const command,
+ char const * const out_data,
+ char const * const err_data,
+ int const exit_reason
+);
+
+OBJECT * outf_int( int const value );
+OBJECT * outf_double( double const value );
+OBJECT * outf_time( timestamp const * const value );
+
+#endif
diff --git a/tools/build/src/engine/parse.c b/tools/build/src/engine/parse.c
new file mode 100644
index 0000000000..02412e085e
--- /dev/null
+++ b/tools/build/src/engine/parse.c
@@ -0,0 +1,132 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "parse.h"
+#include "scan.h"
+#include "object.h"
+#include "modules.h"
+#include "frames.h"
+#include "function.h"
+
+/*
+ * parse.c - make and destroy parse trees as driven by the parser
+ *
+ * 09/07/00 (seiwald) - ref count on PARSE to avoid freeing when used,
+ * as per Matt Armstrong.
+ * 09/11/00 (seiwald) - structure reworked to reflect that (*func)()
+ * returns a LIST *.
+ */
+
+static PARSE * yypsave;
+
+void parse_file( OBJECT * f, FRAME * frame )
+{
+ /* Suspend scan of current file and push this new file in the stream. */
+ yyfparse( f );
+
+ /* Now parse each block of rules and execute it. Execute it outside of the
+ * parser so that recursive calls to yyrun() work (no recursive yyparse's).
+ */
+
+ for ( ; ; )
+ {
+ PARSE * p;
+ FUNCTION * func;
+
+ /* Filled by yyparse() calling parse_save(). */
+ yypsave = 0;
+
+ /* If parse error or empty parse, outta here. */
+ if ( yyparse() || !( p = yypsave ) )
+ break;
+
+ /* Run the parse tree. */
+ func = function_compile( p );
+ parse_free( p );
+ list_free( function_run( func, frame, stack_global() ) );
+ function_free( func );
+ }
+}
+
+
+void parse_save( PARSE * p )
+{
+ yypsave = p;
+}
+
+
+PARSE * parse_make(
+ int type,
+ PARSE * left,
+ PARSE * right,
+ PARSE * third,
+ OBJECT * string,
+ OBJECT * string1,
+ int num )
+{
+ PARSE * p = (PARSE *)BJAM_MALLOC( sizeof( PARSE ) );
+
+ p->type = type;
+ p->left = left;
+ p->right = right;
+ p->third = third;
+ p->string = string;
+ p->string1 = string1;
+ p->num = num;
+ p->refs = 1;
+ p->rulename = 0;
+
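+ /* Record the node's source position: inherit it from the left subtree when
+ * there is one, otherwise use the position of the last token read by the
+ * scanner.
+ */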
+ if ( left )
+ {
+ p->file = object_copy( left->file );
+ p->line = left->line;
+ }
+ else
+ {
+ yyinput_last_read_token( &p->file, &p->line );
+ p->file = object_copy( p->file );
+ }
+
+ return p;
+}
+
+
+void parse_refer( PARSE * p )
+{
+ ++p->refs;
+}
+
+
+void parse_free( PARSE * p )
+{
+ if ( --p->refs )
+ return;
+
+ if ( p->string )
+ object_free( p->string );
+ if ( p->string1 )
+ object_free( p->string1 );
+ if ( p->left )
+ parse_free( p->left );
+ if ( p->right )
+ parse_free( p->right );
+ if ( p->third )
+ parse_free( p->third );
+ if ( p->rulename )
+ object_free( p->rulename );
+ if ( p->file )
+ object_free( p->file );
+
+ BJAM_FREE( (char *)p );
+}
diff --git a/tools/build/src/engine/parse.h b/tools/build/src/engine/parse.h
new file mode 100644
index 0000000000..bb47af6d36
--- /dev/null
+++ b/tools/build/src/engine/parse.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * parse.h - make and destroy parse trees as driven by the parser.
+ */
+
+#ifndef PARSE_DWA20011020_H
+#define PARSE_DWA20011020_H
+
+#include "frames.h"
+#include "lists.h"
+#include "modules.h"
+
+
+#define PARSE_APPEND 0
+#define PARSE_FOREACH 1
+#define PARSE_IF 2
+#define PARSE_EVAL 3
+#define PARSE_INCLUDE 4
+#define PARSE_LIST 5
+#define PARSE_LOCAL 6
+#define PARSE_MODULE 7
+#define PARSE_CLASS 8
+#define PARSE_NULL 9
+#define PARSE_ON 10
+#define PARSE_RULE 11
+#define PARSE_RULES 12
+#define PARSE_SET 13
+#define PARSE_SETCOMP 14
+#define PARSE_SETEXEC 15
+#define PARSE_SETTINGS 16
+#define PARSE_SWITCH 17
+#define PARSE_WHILE 18
+
+
+/*
+ * Parse tree node.
+ */
+
+typedef struct _PARSE PARSE;
+
+struct _PARSE {
+ int type;
+ PARSE * left;
+ PARSE * right;
+ PARSE * third;
+ OBJECT * string;
+ OBJECT * string1;
+ int num;
+ int refs;
+ OBJECT * rulename;
+ OBJECT * file;
+ int line;
+};
+
+void parse_file( OBJECT *, FRAME * );
+void parse_save( PARSE * );
+
+PARSE * parse_make( int type, PARSE * left, PARSE * right, PARSE * third,
+ OBJECT * string, OBJECT * string1, int num );
+
+void parse_refer( PARSE * );
+void parse_free( PARSE * );
+LIST * parse_evaluate( PARSE *, FRAME * );
+
+#endif
diff --git a/tools/build/src/engine/patchlevel.h b/tools/build/src/engine/patchlevel.h
new file mode 100644
index 0000000000..4da43e1ff8
--- /dev/null
+++ b/tools/build/src/engine/patchlevel.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* Keep JAMVERSYM in sync with VERSION. */
+/* It can be accessed as $(JAMVERSION) in the Jamfile. */
+
+#define VERSION_MAJOR 2014
+#define VERSION_MINOR 3
+#define VERSION_PATCH 0
+#define VERSION_MAJOR_SYM "2014"
+#define VERSION_MINOR_SYM "03"
+#define VERSION_PATCH_SYM "00"
+#define VERSION "2014.03"
+#define JAMVERSYM "JAMVERSION=2014.03"
diff --git a/tools/build/src/engine/pathnt.c b/tools/build/src/engine/pathnt.c
new file mode 100644
index 0000000000..412f5f4b50
--- /dev/null
+++ b/tools/build/src/engine/pathnt.c
@@ -0,0 +1,308 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * pathnt.c - NT specific path manipulation support
+ */
+
+#include "pathsys.h"
+
+#include "hash.h"
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+
+#include <assert.h>
+#include <stdlib.h>
+
+
+/* The definition of this in winnt.h is not ANSI-C compatible. */
+#undef INVALID_FILE_ATTRIBUTES
+#define INVALID_FILE_ATTRIBUTES ((DWORD)-1)
+
+
+typedef struct path_key_entry
+{
+ OBJECT * path;
+ OBJECT * key;
+ int exists;
+} path_key_entry;
+
+static struct hash * path_key_cache;
+
+
+/*
+ * path_get_process_id_()
+ */
+
+unsigned long path_get_process_id_( void )
+{
+ return GetCurrentProcessId();
+}
+
+
+/*
+ * path_get_temp_path_()
+ */
+
+void path_get_temp_path_( string * buffer )
+{
+ DWORD pathLength = GetTempPathA( 0, NULL );
+ string_reserve( buffer, pathLength );
+ pathLength = GetTempPathA( pathLength, buffer->value );
+ buffer->value[ pathLength - 1 ] = '\0';
+ buffer->size = pathLength - 1;
+}
+
+
+/*
+ * canonicWindowsPath() - convert a given path into its canonic/long format
+ *
+ * Appends the canonic path to the end of the given 'string' object.
+ *
+ * FIXME: This function is still work-in-progress as it originally did not
+ * necessarily return the canonic path format (could return slightly different
+ * results for certain equivalent path strings) and could accept paths pointing
+ * to non-existing file system entities as well.
+ *
+ * Caches results internally, automatically caching any parent paths it has to
+ * convert to their canonic format in the process.
+ *
+ * Prerequisites:
+ * - path given in normalized form, i.e. all of its folder separators have
+ * already been converted into '\\'
+ * - path_key_cache path/key mapping cache object already initialized
+ */
+
+static int canonicWindowsPath( char const * const path, int const path_length,
+ string * const out )
+{
+ char const * last_element;
+ unsigned long saved_size;
+ char const * p;
+ int missing_parent;
+
+ /* This is only called via path_key(), which initializes the cache. */
+ assert( path_key_cache );
+
+ if ( !path_length )
+ return 1;
+
+ if ( path_length == 1 && path[ 0 ] == '\\' )
+ {
+ string_push_back( out, '\\' );
+ return 1;
+ }
+
+ if ( path[ 1 ] == ':' &&
+ ( path_length == 2 ||
+ ( path_length == 3 && path[ 2 ] == '\\' ) ) )
+ {
+ string_push_back( out, toupper( path[ 0 ] ) );
+ string_push_back( out, ':' );
+ string_push_back( out, '\\' );
+ return 1;
+ }
+
+ /* Find last '\\'. */
+ for ( p = path + path_length - 1; p >= path && *p != '\\'; --p );
+ last_element = p + 1;
+
+ /* Special case '\' && 'D:\' - include trailing '\'. */
+ if ( p == path ||
+ p == path + 2 && path[ 1 ] == ':' )
+ ++p;
+
+ missing_parent = 0;
+
+ if ( p >= path )
+ {
+ char const * const dir = path;
+ int const dir_length = p - path;
+ OBJECT * const dir_obj = object_new_range( dir, dir_length );
+ int found;
+ path_key_entry * const result = (path_key_entry *)hash_insert(
+ path_key_cache, dir_obj, &found );
+ if ( !found )
+ {
+ result->path = dir_obj;
+ if ( canonicWindowsPath( dir, dir_length, out ) )
+ result->exists = 1;
+ else
+ result->exists = 0;
+ result->key = object_new( out->value );
+ }
+ else
+ {
+ object_free( dir_obj );
+ string_append( out, object_str( result->key ) );
+ }
+ if ( !result->exists )
+ missing_parent = 1;
+ }
+
+ if ( out->size && out->value[ out->size - 1 ] != '\\' )
+ string_push_back( out, '\\' );
+
+ saved_size = out->size;
+ string_append_range( out, last_element, path + path_length );
+
+ if ( !missing_parent )
+ {
+ char const * const n = last_element;
+ int const n_length = path + path_length - n;
+ if ( !( n_length == 1 && n[ 0 ] == '.' )
+ && !( n_length == 2 && n[ 0 ] == '.' && n[ 1 ] == '.' ) )
+ {
+ WIN32_FIND_DATA fd;
+ HANDLE const hf = FindFirstFileA( out->value, &fd );
+ if ( hf != INVALID_HANDLE_VALUE )
+ {
+ string_truncate( out, saved_size );
+ string_append( out, fd.cFileName );
+ FindClose( hf );
+ return 1;
+ }
+ }
+ else
+ {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+
+/*
+ * normalize_path() - 'normalizes' the given path for the path-key mapping
+ *
+ * The resulting string has nothing to do with 'normalized paths' as used in
+ * Boost Jam build scripts and the built-in NORMALIZE_PATH rule. It is intended
+ * to be used solely as an intermediate step when mapping an arbitrary path to
+ * its canonical representation.
+ *
+ * When choosing the intermediate string, the important things are that it be
+ * inexpensive to calculate and that any two paths with different canonical
+ * representations also get different intermediate strings. Any additional
+ * rules implemented here serve only to simplify constructing the canonical
+ * path representation from the intermediate string.
+ *
+ * Implemented returned path rules:
+ * - use backslashes as path separators
+ * - lowercase only (since all Windows file systems are case insensitive)
+ * - trim trailing path separator except in case of a root path, i.e. 'X:\'
+ */
+
+static void normalize_path( string * path )
+{
+ char * s;
+ for ( s = path->value; s < path->value + path->size; ++s )
+ *s = *s == '/' ? '\\' : tolower( *s );
+ /* Strip trailing "/". */
+ if ( path->size && path->size != 3 && path->value[ path->size - 1 ] == '\\'
+ )
+ string_pop_back( path );
+}
+
+
+static path_key_entry * path_key( OBJECT * const path,
+ int const known_to_be_canonic )
+{
+ path_key_entry * result;
+ int found;
+
+ if ( !path_key_cache )
+ path_key_cache = hashinit( sizeof( path_key_entry ), "path to key" );
+
+ result = (path_key_entry *)hash_insert( path_key_cache, path, &found );
+ if ( !found )
+ {
+ OBJECT * normalized;
+ int normalized_size;
+ path_key_entry * nresult;
+ result->path = path;
+ {
+ string buf[ 1 ];
+ string_copy( buf, object_str( path ) );
+ normalize_path( buf );
+ normalized = object_new( buf->value );
+ normalized_size = buf->size;
+ string_free( buf );
+ }
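+ /* Cache the entry under its normalized spelling as well, so that other
+ * spellings of the same path can reuse the already computed key.
+ */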
+ nresult = (path_key_entry *)hash_insert( path_key_cache, normalized,
+ &found );
+ if ( !found || nresult == result )
+ {
+ nresult->path = normalized;
+ if ( known_to_be_canonic )
+ {
+ nresult->key = object_copy( path );
+ nresult->exists = 1;
+ }
+ else
+ {
+ string canonic_path[ 1 ];
+ string_new( canonic_path );
+ if ( canonicWindowsPath( object_str( normalized ), normalized_size,
+ canonic_path ) )
+ nresult->exists = 1;
+ else
+ nresult->exists = 0;
+ nresult->key = object_new( canonic_path->value );
+ string_free( canonic_path );
+ }
+ }
+ else
+ object_free( normalized );
+ if ( nresult != result )
+ {
+ result->path = object_copy( path );
+ result->key = object_copy( nresult->key );
+ result->exists = nresult->exists;
+ }
+ }
+
+ return result;
+}
+
+
+void path_register_key( OBJECT * canonic_path )
+{
+ path_key( canonic_path, 1 );
+}
+
+
+OBJECT * path_as_key( OBJECT * path )
+{
+ return object_copy( path_key( path, 0 )->key );
+}
+
+
+static void free_path_key_entry( void * xentry, void * const data )
+{
+ path_key_entry * const entry = (path_key_entry *)xentry;
+ object_free( entry->path );
+ object_free( entry->key );
+}
+
+
+void path_done( void )
+{
+ if ( path_key_cache )
+ {
+ hashenumerate( path_key_cache, &free_path_key_entry, 0 );
+ hashdone( path_key_cache );
+ }
+}
diff --git a/tools/build/src/engine/pathsys.c b/tools/build/src/engine/pathsys.c
new file mode 100644
index 0000000000..ae4e6e052a
--- /dev/null
+++ b/tools/build/src/engine/pathsys.c
@@ -0,0 +1,285 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * pathsys.c - platform independent path manipulation support
+ *
+ * External routines:
+ * path_build() - build a filename given dir/base/suffix/member
+ * path_parent() - make a PATHNAME point to its parent dir
+ * path_parse() - split a file name into dir/base/suffix/member
+ * path_tmpdir() - returns the system dependent temporary folder path
+ * path_tmpfile() - returns a new temporary path
+ * path_tmpnam() - returns a new temporary name
+ *
+ * path_parse() and path_build() just manipulate a string and a structure;
+ * they do not make system calls.
+ */
+
+#include "jam.h"
+#include "pathsys.h"
+
+#include "filesys.h"
+
+#include <stdlib.h>
+#include <time.h>
+
+
+/* Internal OS specific implementation details - have names ending with an
+ * underscore and are expected to be implemented in an OS specific pathXXX.c
+ * module.
+ */
+unsigned long path_get_process_id_( void );
+void path_get_temp_path_( string * buffer );
+
+
+/*
+ * path_parse() - split a file name into dir/base/suffix/member
+ */
+
+void path_parse( char const * file, PATHNAME * f )
+{
+ char const * p;
+ char const * q;
+ char const * end;
+
+ memset( (char *)f, 0, sizeof( *f ) );
+
+ /* Look for '<grist>'. */
+
+ if ( ( file[ 0 ] == '<' ) && ( p = strchr( file, '>' ) ) )
+ {
+ f->f_grist.ptr = file;
+ f->f_grist.len = p - file;
+ file = p + 1;
+ }
+
+ /* Look for 'dir/'. */
+
+ p = strrchr( file, '/' );
+
+#if PATH_DELIM == '\\'
+ /* On NT, look for dir\ as well */
+ {
+ char * const p1 = strrchr( p ? p + 1 : file, '\\' );
+ if ( p1 ) p = p1;
+ }
+#endif
+
+ if ( p )
+ {
+ f->f_dir.ptr = file;
+ f->f_dir.len = p - file;
+
+ /* Special case for / - dirname is /, not "" */
+ if ( !f->f_dir.len )
+ ++f->f_dir.len;
+
+#if PATH_DELIM == '\\'
+ /* Special case for D:/ - dirname is D:/, not "D:" */
+ if ( f->f_dir.len == 2 && file[ 1 ] == ':' )
+ ++f->f_dir.len;
+#endif
+
+ file = p + 1;
+ }
+
+ end = file + strlen( file );
+
+ /* Look for '(member)'. */
+ if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
+ {
+ f->f_member.ptr = p + 1;
+ f->f_member.len = end - p - 2;
+ end = p;
+ }
+
+ /* Look for '.suffix'. This would be memrchr(). */
+ p = 0;
+ for ( q = file; ( q = (char *)memchr( q, '.', end - q ) ); ++q )
+ p = q;
+ if ( p )
+ {
+ f->f_suffix.ptr = p;
+ f->f_suffix.len = end - p;
+ end = p;
+ }
+
+ /* Leaves base. */
+ f->f_base.ptr = file;
+ f->f_base.len = end - file;
+}
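As a minimal sketch of the splitting rules implemented above, assuming only the PATHNAME layout declared in pathsys.h (the input name is made up for illustration):

#include "pathsys.h"
#include <stdio.h>

int main( void )
{
    PATHNAME f;
    path_parse( "<pch>src/main.c(obj)", &f );
    /* f_grist  -> "<pch"   (the '>' itself is not counted in the length)
     * f_dir    -> "src"
     * f_base   -> "main"
     * f_suffix -> ".c"     (the dot belongs to the suffix)
     * f_member -> "obj"
     */
    printf( "dir=%.*s base=%.*s suffix=%.*s member=%.*s\n",
        f.f_dir.len, f.f_dir.ptr, f.f_base.len, f.f_base.ptr,
        f.f_suffix.len, f.f_suffix.ptr, f.f_member.len, f.f_member.ptr );
    return 0;
}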
+
+
+/*
+ * is_path_delim() - true iff c is a path delimiter
+ */
+
+static int is_path_delim( char const c )
+{
+ return c == PATH_DELIM
+#if PATH_DELIM == '\\'
+ || c == '/'
+#endif
+ ;
+}
+
+
+/*
+ * as_path_delim() - convert c to a path delimiter if it is not one already
+ */
+
+static char as_path_delim( char const c )
+{
+ return is_path_delim( c ) ? c : PATH_DELIM;
+}
+
+
+/*
+ * path_build() - build a filename given dir/base/suffix/member
+ *
+ * To avoid changing slash direction on NT when reconstituting paths, instead of
+ * unconditionally appending PATH_DELIM we check the past-the-end character of
+ * the previous path element. If it is a path delimiter, we append that, and
+ * only append PATH_DELIM as a last resort. This heuristic is based on the fact
+ * that PATHNAME objects are usually the result of calling path_parse, which
+ * leaves the original slashes in the past-the-end position. Correctness depends
+ * on the assumption that all strings are zero terminated, so a past-the-end
+ * character will always be available.
+ *
+ * As an attendant patch, we had to ensure that backslashes are used explicitly
+ * in 'timestamp.c'.
+ */
+
+void path_build( PATHNAME * f, string * file )
+{
+ file_build1( f, file );
+
+ /* Do not prepend root if it is '.' or the directory is rooted. */
+ if ( f->f_root.len
+ && !( f->f_root.len == 1 && f->f_root.ptr[ 0 ] == '.' )
+ && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '/' )
+#if PATH_DELIM == '\\'
+ && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '\\' )
+ && !( f->f_dir.len && f->f_dir.ptr[ 1 ] == ':' )
+#endif
+ )
+ {
+ string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len
+ );
+        /* If 'root' already ends with a path delimiter, do not add another one.
+ */
+ if ( !is_path_delim( f->f_root.ptr[ f->f_root.len - 1 ] ) )
+ string_push_back( file, as_path_delim( f->f_root.ptr[ f->f_root.len
+ ] ) );
+ }
+
+ if ( f->f_dir.len )
+ string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
+
+ /* Put path separator between dir and file. */
+ /* Special case for root dir: do not add another path separator. */
+ if ( f->f_dir.len && ( f->f_base.len || f->f_suffix.len )
+#if PATH_DELIM == '\\'
+ && !( f->f_dir.len == 3 && f->f_dir.ptr[ 1 ] == ':' )
+#endif
+ && !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[ 0 ] ) ) )
+ string_push_back( file, as_path_delim( f->f_dir.ptr[ f->f_dir.len ] ) );
+
+ if ( f->f_base.len )
+ string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len
+ );
+
+ if ( f->f_suffix.len )
+ string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr +
+ f->f_suffix.len );
+
+ if ( f->f_member.len )
+ {
+ string_push_back( file, '(' );
+ string_append_range( file, f->f_member.ptr, f->f_member.ptr +
+ f->f_member.len );
+ string_push_back( file, ')' );
+ }
+}
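A small sketch of the delimiter-preserving round trip described above, assuming the string helpers from strings.h; the file name is illustrative:

#include "pathsys.h"
#include "strings.h"

static void roundtrip_example( void )
{
    PATHNAME f;
    string out[ 1 ];
    path_parse( "src/lib/main.c", &f );
    string_new( out );
    path_build( &f, out );
    /* out->value is "src/lib/main.c" again; on NT an input spelled with
     * backslashes is rebuilt with backslashes, because the past-the-end
     * delimiter of each component is reused.
     */
    string_free( out );
}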
+
+
+/*
+ * path_parent() - make a PATHNAME point to its parent dir
+ */
+
+void path_parent( PATHNAME * f )
+{
+ f->f_base.ptr = f->f_suffix.ptr = f->f_member.ptr = "";
+ f->f_base.len = f->f_suffix.len = f->f_member.len = 0;
+}
+
+
+/*
+ * path_tmpdir() - returns the system dependent temporary folder path
+ *
+ * Returned value is stored inside a static buffer and should not be modified.
+ * Returned value does *not* include a trailing path separator.
+ */
+
+string const * path_tmpdir()
+{
+ static string buffer[ 1 ];
+ static int have_result;
+ if ( !have_result )
+ {
+ string_new( buffer );
+ path_get_temp_path_( buffer );
+ have_result = 1;
+ }
+ return buffer;
+}
+
+
+/*
+ * path_tmpnam() - returns a new temporary name
+ */
+
+OBJECT * path_tmpnam( void )
+{
+ char name_buffer[ 64 ];
+ unsigned long const pid = path_get_process_id_();
+ static unsigned long t;
+ if ( !t ) t = time( 0 ) & 0xffff;
+ t += 1;
+ sprintf( name_buffer, "jam%lx%lx.000", pid, t );
+ return object_new( name_buffer );
+}
+
+
+/*
+ * path_tmpfile() - returns a new temporary path
+ */
+
+OBJECT * path_tmpfile( void )
+{
+ OBJECT * result;
+ OBJECT * tmpnam;
+
+ string file_path[ 1 ];
+ string_copy( file_path, path_tmpdir()->value );
+ string_push_back( file_path, PATH_DELIM );
+ tmpnam = path_tmpnam();
+ string_append( file_path, object_str( tmpnam ) );
+ object_free( tmpnam );
+ result = object_new( file_path->value );
+ string_free( file_path );
+
+ return result;
+}
diff --git a/tools/build/src/engine/pathsys.h b/tools/build/src/engine/pathsys.h
new file mode 100644
index 0000000000..9b7a4caf1d
--- /dev/null
+++ b/tools/build/src/engine/pathsys.h
@@ -0,0 +1,85 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * pathsys.h - PATHNAME struct
+ */
+
+/*
+ * PATHNAME - a name of a file, broken into <grist>dir/base/suffix(member)
+ *
+ * <grist> - salt to distinguish between targets that would otherwise have the
+ * same name - it never appears in the bound name of a target.
+ *
+ * (member) - archive member name: the syntax is arbitrary, but must agree in
+ * path_parse(), path_build() and the Jambase.
+ */
+
+#ifndef PATHSYS_VP_20020211_H
+#define PATHSYS_VP_20020211_H
+
+#include "object.h"
+#include "strings.h"
+
+
+typedef struct _pathpart
+{
+ char const * ptr;
+ int len;
+} PATHPART;
+
+typedef struct _pathname
+{
+ PATHPART part[ 6 ];
+
+#define f_grist part[ 0 ]
+#define f_root part[ 1 ]
+#define f_dir part[ 2 ]
+#define f_base part[ 3 ]
+#define f_suffix part[ 4 ]
+#define f_member part[ 5 ]
+} PATHNAME;
+
+
+void path_build( PATHNAME *, string * file );
+void path_parse( char const * file, PATHNAME * );
+void path_parent( PATHNAME * );
+
+/* Given a path, returns an object containing an equivalent path in canonical
+ * format that can be used as a unique key for that path. Equivalent paths such
+ * as a/b, A\B, and a\B on NT all yield the same key.
+ */
+OBJECT * path_as_key( OBJECT * path );
+
+/* Called as an optimization when we know we have a path that is already in its
+ * canonical/long/key form. Avoids the need for some subsequent path_as_key()
+ * call to do a potentially expensive path conversion requiring access to the
+ * actual underlying file system.
+ */
+void path_register_key( OBJECT * canonic_path );
+
+/* Returns a static pointer to the system dependent path to the temporary
+ * directory. NOTE: Does *not* include a trailing path separator.
+ */
+string const * path_tmpdir( void );
+
+/* Returns a new temporary name. */
+OBJECT * path_tmpnam( void );
+
+/* Returns a new temporary path. */
+OBJECT * path_tmpfile( void );
+
+/* Given the first argument to 'main', returns a full path to our executable.
+ * Returns null in the unlikely case it cannot be determined. Caller is
+ * responsible for freeing the string.
+ *
+ * Implemented in jam.c
+ */
+char * executable_path( char const * argv0 );
+
+void path_done( void );
+
+#endif
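A brief usage sketch for the key API declared above, relying only on the NT behaviour documented in the comments (pathunix.c below simply returns a copy of the path):

#include "pathsys.h"
#include "object.h"

static void key_example( void )
{
    OBJECT * a = object_new( "a/b" );
    OBJECT * b = object_new( "A\\B" );
    OBJECT * ka = path_as_key( a );
    OBJECT * kb = path_as_key( b );
    /* On NT both spellings yield equal keys (compare with object_equal());
     * a path already known to be canonical can be pre-seeded with
     * path_register_key() to avoid the file system probe.
     */
    object_free( ka );
    object_free( kb );
    object_free( a );
    object_free( b );
}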
diff --git a/tools/build/src/engine/pathunix.c b/tools/build/src/engine/pathunix.c
new file mode 100644
index 0000000000..8ca0d185e1
--- /dev/null
+++ b/tools/build/src/engine/pathunix.c
@@ -0,0 +1,71 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * pathunix.c - UNIX specific path manipulation support
+ */
+
+#include "pathsys.h"
+
+#include <stdlib.h>
+#include <unistd.h> /* needed for getpid() */
+
+
+/*
+ * path_get_process_id_()
+ */
+
+unsigned long path_get_process_id_( void )
+{
+ return getpid();
+}
+
+
+/*
+ * path_get_temp_path_()
+ */
+
+void path_get_temp_path_( string * buffer )
+{
+ char const * t = getenv( "TMPDIR" );
+ string_append( buffer, t ? t : "/tmp" );
+}
+
+
+/*
+ * path_register_key()
+ */
+
+void path_register_key( OBJECT * path )
+{
+}
+
+
+/*
+ * path_as_key()
+ */
+
+OBJECT * path_as_key( OBJECT * path )
+{
+ return object_copy( path );
+}
+
+
+/*
+ * path_done()
+ */
+
+void path_done( void )
+{
+}
diff --git a/tools/build/src/engine/regexp.c b/tools/build/src/engine/regexp.c
new file mode 100644
index 0000000000..c64201b90b
--- /dev/null
+++ b/tools/build/src/engine/regexp.c
@@ -0,0 +1,1329 @@
+/*
+ * regcomp and regexec -- regsub and regerror are elsewhere
+ *
+ * Copyright (c) 1986 by University of Toronto.
+ * Written by Henry Spencer. Not derived from licensed software.
+ *
+ * Permission is granted to anyone to use this software for any
+ * purpose on any computer system, and to redistribute it freely,
+ * subject to the following restrictions:
+ *
+ * 1. The author is not responsible for the consequences of use of
+ * this software, no matter how awful, even if they arise
+ * from defects in it.
+ *
+ * 2. The origin of this software must not be misrepresented, either
+ * by explicit claim or by omission.
+ *
+ * 3. Altered versions must be plainly marked as such, and must not
+ * be misrepresented as being the original software.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 27 Dec 1986, to add \n as an alternative to |
+ *** to assist in implementing egrep.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 27 Dec 1986, to add \< and \> for word-matching
+ *** as in BSD grep and ex.
+ *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
+ *** hoptoad!gnu, on 28 Dec 1986, to optimize characters quoted with \.
+ *** THIS IS AN ALTERED VERSION. It was altered by James A. Woods,
+ *** ames!jaw, on 19 June 1987, to quash a regcomp() redundancy.
+ *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
+ *** seiwald@vix.com, on 28 August 1993, for use in jam. Regmagic.h
+ *** was moved into regexp.h, and the include of regexp.h now uses "'s
+ *** to avoid conflicting with the system regexp.h. Const, bless its
+ *** soul, was removed so it can compile everywhere. The declaration
+ *** of strchr() was in conflict on AIX, so it was removed (as it is
+ *** happily defined in string.h).
+ *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
+ *** seiwald@perforce.com, on 20 January 2000, to use function prototypes.
+ *
+ * Beware that some of this code is subtly aware of the way operator precedence
+ * is structured in regular expressions. Serious changes in regular-expression
+ * syntax might require a total rethink.
+ */
+
+
+#include "jam.h"
+#include "regexp.h"
+
+#include <stdio.h>
+#include <ctype.h>
+#ifndef ultrix
+# include <stdlib.h>
+#endif
+#include <string.h>
+
+
+/*
+ * The "internal use only" fields in regexp.h are present to pass info from
+ * compile to execute that permits the execute phase to run lots faster on
+ * simple cases. They are:
+ *
+ * regstart char that must begin a match; '\0' if none obvious.
+ * reganch is the match anchored (at beginning-of-line only)?
+ * regmust string (pointer into program) that match must include, or NULL.
+ * regmlen length of regmust string.
+ *
+ * Regstart and reganch permit very fast decisions on suitable starting points
+ * for a match, cutting down the work a lot. Regmust permits fast rejection of
+ * lines that cannot possibly match. The regmust tests are costly enough that
+ * regcomp() supplies a regmust only if the r.e. contains something potentially
+ * expensive (at present, the only such thing detected is * or + at the start of
+ * the r.e., which can involve a lot of backup). Regmlen is supplied because the
+ * test in regexec() needs it and regcomp() is computing it anyway.
+ */
+
+/*
+ * Structure for regexp "program". This is essentially a linear encoding of a
+ * nondeterministic finite-state machine (aka syntax charts or "railroad normal
+ * form" in parsing technology). Each node is an opcode plus a "next" pointer,
+ * possibly plus an operand. "Next" pointers of all nodes except BRANCH
+ * implement concatenation; a "next" pointer with a BRANCH on both ends of it is
+ * connecting two alternatives. [Here we have one of the subtle syntax
+ * dependencies: an individual BRANCH, as opposed to a collection of them, is
+ * never concatenated with anything because of operator precedence.] The operand
+ * of some types of node is a literal string; for others, it is a node leading
+ * into a sub-FSM. In particular, the operand of a BRANCH node is the first node
+ * of the branch. [NB this is *not* a tree structure: the tail of the branch
+ * connects to the thing following the set of BRANCHes.] The opcodes are:
+ */
+
+/* definition number opnd? meaning */
+#define END 0 /* no End of program. */
+#define BOL 1 /* no Match "" at beginning of line. */
+#define EOL 2 /* no Match "" at end of line. */
+#define ANY 3 /* no Match any one character. */
+#define ANYOF 4 /* str Match any character in this string. */
+#define ANYBUT 5 /* str Match any character not in this string. */
+#define BRANCH 6 /* node Match this alternative, or the next... */
+#define BACK 7 /* no Match "", "next" ptr points backward. */
+#define EXACTLY 8 /* str Match this string. */
+#define NOTHING 9 /* no Match empty string. */
+#define STAR 10 /* node Match this (simple) thing 0 or more times. */
+#define PLUS 11 /* node Match this (simple) thing 1 or more times. */
+#define WORDA 12 /* no Match "" at wordchar, where prev is nonword */
+#define WORDZ 13 /* no Match "" at nonwordchar, where prev is word */
+#define OPEN 20 /* no Mark this point in input as start of #n. */
+ /* OPEN+1 is number 1, etc. */
+#define CLOSE 30 /* no Analogous to OPEN. */
+
+
+/*
+ * Opcode notes:
+ *
+ * BRANCH The set of branches constituting a single choice are hooked
+ * together with their "next" pointers, since precedence prevents
+ * anything being concatenated to any individual branch. The
+ * "next" pointer of the last BRANCH in a choice points to the
+ * thing following the whole choice. This is also where the
+ * final "next" pointer of each individual branch points; each
+ * branch starts with the operand node of a BRANCH node.
+ *
+ * BACK Normal "next" pointers all implicitly point forward; BACK
+ * exists to make loop structures possible.
+ *
+ * STAR,PLUS '?', and complex '*' and '+', are implemented as circular
+ * BRANCH structures using BACK. Simple cases (one character
+ * per match) are implemented with STAR and PLUS for speed
+ * and to minimize recursive plunges.
+ *
+ * OPEN,CLOSE ...are numbered at compile time.
+ */
+
+/*
+ * A node is one char of opcode followed by two chars of "next" pointer.
+ * "Next" pointers are stored as two 8-bit pieces, high order first. The
+ * value is a positive offset from the opcode of the node containing it.
+ * An operand, if any, simply follows the node. (Note that much of the
+ * code generation knows about this implicit relationship.)
+ *
+ * Using two bytes for the "next" pointer is vast overkill for most things,
+ * but allows patterns to get big without disasters.
+ */
+#define OP(p) (*(p))
+#define NEXT(p) (((*((p)+1)&0377)<<8) + (*((p)+2)&0377))
+#define OPERAND(p) ((p) + 3)
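To illustrate the layout these macros decode, a BRANCH node whose next node lies 7 bytes ahead is stored as three bytes followed by its operand:

/* p[ 0 ] = BRANCH;   OP( p )      -> BRANCH
 * p[ 1 ] = 0;        NEXT( p )    -> ( 0 << 8 ) + 7 == 7  (an offset, not an
 * p[ 2 ] = 7;                        address; regnext() subtracts it for BACK)
 * p[ 3 ] ...         OPERAND( p ) -> first node of this branch's body
 */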
+
+/*
+ * See regmagic.h for one further detail of program structure.
+ */
+
+
+/*
+ * Utility definitions.
+ */
+#ifndef CHARBITS
+#define UCHARAT(p) ((int)*(const unsigned char *)(p))
+#else
+#define UCHARAT(p) ((int)*(p)&CHARBITS)
+#endif
+
+#define FAIL(m) { regerror(m); return(NULL); }
+#define ISMULT(c) ((c) == '*' || (c) == '+' || (c) == '?')
+
+/*
+ * Flags to be passed up and down.
+ */
+#define HASWIDTH 01 /* Known never to match null string. */
+#define SIMPLE 02 /* Simple enough to be STAR/PLUS operand. */
+#define SPSTART 04 /* Starts with * or +. */
+#define WORST 0 /* Worst case. */
+
+/*
+ * Global work variables for regcomp().
+ */
+static char *regparse; /* Input-scan pointer. */
+static int regnpar; /* () count. */
+static char regdummy;
+static char *regcode; /* Code-emit pointer; &regdummy = don't. */
+static long regsize; /* Code size. */
+
+/*
+ * Forward declarations for regcomp()'s friends.
+ */
+#ifndef STATIC
+#define STATIC static
+#endif
+STATIC char *reg( int paren, int *flagp );
+STATIC char *regbranch( int *flagp );
+STATIC char *regpiece( int *flagp );
+STATIC char *regatom( int *flagp );
+STATIC char *regnode( int op );
+STATIC char *regnext( register char *p );
+STATIC void regc( int b );
+STATIC void reginsert( char op, char *opnd );
+STATIC void regtail( char *p, char *val );
+STATIC void regoptail( char *p, char *val );
+#ifdef STRCSPN
+STATIC int strcspn();
+#endif
+
+/*
+ - regcomp - compile a regular expression into internal code
+ *
+ * We can't allocate space until we know how big the compiled form will be,
+ * but we can't compile it (and thus know how big it is) until we've got a
+ * place to put the code. So we cheat: we compile it twice, once with code
+ * generation turned off and size counting turned on, and once "for real".
+ * This also means that we don't allocate space until we are sure that the
+ * thing really will compile successfully, and we never have to move the
+ * code and thus invalidate pointers into it. (Note that it has to be in
+ * one piece because free() must be able to free it all.)
+ *
+ * Beware that the optimization-preparation code in here knows about some
+ * of the structure of the compiled regexp.
+ */
+regexp *
+regcomp( const char *exp )
+{
+ register regexp *r;
+ register char *scan;
+ register char *longest;
+ register unsigned len;
+ int flags;
+
+ if (exp == NULL)
+ FAIL("NULL argument");
+
+ /* First pass: determine size, legality. */
+#ifdef notdef
+ if (exp[0] == '.' && exp[1] == '*') exp += 2; /* aid grep */
+#endif
+ regparse = (char *)exp;
+ regnpar = 1;
+ regsize = 0L;
+ regcode = &regdummy;
+ regc(MAGIC);
+ if (reg(0, &flags) == NULL)
+ return(NULL);
+
+ /* Small enough for pointer-storage convention? */
+ if (regsize >= 32767L) /* Probably could be 65535L. */
+ FAIL("regexp too big");
+
+ /* Allocate space. */
+ r = (regexp *)BJAM_MALLOC(sizeof(regexp) + (unsigned)regsize);
+ if (r == NULL)
+ FAIL("out of space");
+
+ /* Second pass: emit code. */
+ regparse = (char *)exp;
+ regnpar = 1;
+ regcode = r->program;
+ regc(MAGIC);
+ if (reg(0, &flags) == NULL)
+ return(NULL);
+
+ /* Dig out information for optimizations. */
+ r->regstart = '\0'; /* Worst-case defaults. */
+ r->reganch = 0;
+ r->regmust = NULL;
+ r->regmlen = 0;
+ scan = r->program+1; /* First BRANCH. */
+ if (OP(regnext(scan)) == END) { /* Only one top-level choice. */
+ scan = OPERAND(scan);
+
+ /* Starting-point info. */
+ if (OP(scan) == EXACTLY)
+ r->regstart = *OPERAND(scan);
+ else if (OP(scan) == BOL)
+ r->reganch++;
+
+ /*
+ * If there's something expensive in the r.e., find the
+ * longest literal string that must appear and make it the
+ * regmust. Resolve ties in favor of later strings, since
+ * the regstart check works with the beginning of the r.e.
+ * and avoiding duplication strengthens checking. Not a
+ * strong reason, but sufficient in the absence of others.
+ */
+ if (flags&SPSTART) {
+ longest = NULL;
+ len = 0;
+ for (; scan != NULL; scan = regnext(scan))
+ if (OP(scan) == EXACTLY && strlen(OPERAND(scan)) >= len) {
+ longest = OPERAND(scan);
+ len = strlen(OPERAND(scan));
+ }
+ r->regmust = longest;
+ r->regmlen = len;
+ }
+ }
+
+ return(r);
+}
+
+/*
+ - reg - regular expression, i.e. main body or parenthesized thing
+ *
+ * Caller must absorb opening parenthesis.
+ *
+ * Combining parenthesis handling with the base level of regular expression
+ * is a trifle forced, but the need to tie the tails of the branches to what
+ * follows makes it hard to avoid.
+ */
+static char *
+reg(
+ int paren, /* Parenthesized? */
+ int *flagp )
+{
+ register char *ret;
+ register char *br;
+ register char *ender;
+ register int parno = 0;
+ int flags;
+
+ *flagp = HASWIDTH; /* Tentatively. */
+
+ /* Make an OPEN node, if parenthesized. */
+ if (paren) {
+ if (regnpar >= NSUBEXP)
+ FAIL("too many ()");
+ parno = regnpar;
+ regnpar++;
+ ret = regnode(OPEN+parno);
+ } else
+ ret = NULL;
+
+ /* Pick up the branches, linking them together. */
+ br = regbranch(&flags);
+ if (br == NULL)
+ return(NULL);
+ if (ret != NULL)
+ regtail(ret, br); /* OPEN -> first. */
+ else
+ ret = br;
+ if (!(flags&HASWIDTH))
+ *flagp &= ~HASWIDTH;
+ *flagp |= flags&SPSTART;
+ while (*regparse == '|' || *regparse == '\n') {
+ regparse++;
+ br = regbranch(&flags);
+ if (br == NULL)
+ return(NULL);
+ regtail(ret, br); /* BRANCH -> BRANCH. */
+ if (!(flags&HASWIDTH))
+ *flagp &= ~HASWIDTH;
+ *flagp |= flags&SPSTART;
+ }
+
+ /* Make a closing node, and hook it on the end. */
+ ender = regnode((paren) ? CLOSE+parno : END);
+ regtail(ret, ender);
+
+ /* Hook the tails of the branches to the closing node. */
+ for (br = ret; br != NULL; br = regnext(br))
+ regoptail(br, ender);
+
+ /* Check for proper termination. */
+ if (paren && *regparse++ != ')') {
+ FAIL("unmatched ()");
+ } else if (!paren && *regparse != '\0') {
+ if (*regparse == ')') {
+ FAIL("unmatched ()");
+ } else
+ FAIL("junk on end"); /* "Can't happen". */
+ /* NOTREACHED */
+ }
+
+ return(ret);
+}
+
+/*
+ - regbranch - one alternative of an | operator
+ *
+ * Implements the concatenation operator.
+ */
+static char *
+regbranch( int *flagp )
+{
+ register char *ret;
+ register char *chain;
+ register char *latest;
+ int flags;
+
+ *flagp = WORST; /* Tentatively. */
+
+ ret = regnode(BRANCH);
+ chain = NULL;
+ while (*regparse != '\0' && *regparse != ')' &&
+ *regparse != '\n' && *regparse != '|') {
+ latest = regpiece(&flags);
+ if (latest == NULL)
+ return(NULL);
+ *flagp |= flags&HASWIDTH;
+ if (chain == NULL) /* First piece. */
+ *flagp |= flags&SPSTART;
+ else
+ regtail(chain, latest);
+ chain = latest;
+ }
+ if (chain == NULL) /* Loop ran zero times. */
+ (void) regnode(NOTHING);
+
+ return(ret);
+}
+
+/*
+ - regpiece - something followed by possible [*+?]
+ *
+ * Note that the branching code sequences used for ? and the general cases
+ * of * and + are somewhat optimized: they use the same NOTHING node as
+ * both the endmarker for their branch list and the body of the last branch.
+ * It might seem that this node could be dispensed with entirely, but the
+ * endmarker role is not redundant.
+ */
+static char *
+regpiece( int *flagp )
+{
+ register char *ret;
+ register char op;
+ register char *next;
+ int flags;
+
+ ret = regatom(&flags);
+ if (ret == NULL)
+ return(NULL);
+
+ op = *regparse;
+ if (!ISMULT(op)) {
+ *flagp = flags;
+ return(ret);
+ }
+
+ if (!(flags&HASWIDTH) && op != '?')
+ FAIL("*+ operand could be empty");
+ *flagp = (op != '+') ? (WORST|SPSTART) : (WORST|HASWIDTH);
+
+ if (op == '*' && (flags&SIMPLE))
+ reginsert(STAR, ret);
+ else if (op == '*') {
+ /* Emit x* as (x&|), where & means "self". */
+ reginsert(BRANCH, ret); /* Either x */
+ regoptail(ret, regnode(BACK)); /* and loop */
+ regoptail(ret, ret); /* back */
+ regtail(ret, regnode(BRANCH)); /* or */
+ regtail(ret, regnode(NOTHING)); /* null. */
+ } else if (op == '+' && (flags&SIMPLE))
+ reginsert(PLUS, ret);
+ else if (op == '+') {
+ /* Emit x+ as x(&|), where & means "self". */
+ next = regnode(BRANCH); /* Either */
+ regtail(ret, next);
+ regtail(regnode(BACK), ret); /* loop back */
+ regtail(next, regnode(BRANCH)); /* or */
+ regtail(ret, regnode(NOTHING)); /* null. */
+ } else if (op == '?') {
+ /* Emit x? as (x|) */
+ reginsert(BRANCH, ret); /* Either x */
+ regtail(ret, regnode(BRANCH)); /* or */
+ next = regnode(NOTHING); /* null. */
+ regtail(ret, next);
+ regoptail(ret, next);
+ }
+ regparse++;
+ if (ISMULT(*regparse))
+ FAIL("nested *?+");
+
+ return(ret);
+}
+
+/*
+ - regatom - the lowest level
+ *
+ * Optimization: gobbles an entire sequence of ordinary characters so that
+ * it can turn them into a single node, which is smaller to store and
+ * faster to run. Backslashed characters are exceptions, each becoming a
+ * separate node; the code is simpler that way and it's not worth fixing.
+ */
+static char *
+regatom( int *flagp )
+{
+ register char *ret;
+ int flags;
+
+ *flagp = WORST; /* Tentatively. */
+
+ switch (*regparse++) {
+ /* FIXME: these chars only have meaning at beg/end of pat? */
+ case '^':
+ ret = regnode(BOL);
+ break;
+ case '$':
+ ret = regnode(EOL);
+ break;
+ case '.':
+ ret = regnode(ANY);
+ *flagp |= HASWIDTH|SIMPLE;
+ break;
+ case '[': {
+ register int classr;
+ register int classend;
+
+ if (*regparse == '^') { /* Complement of range. */
+ ret = regnode(ANYBUT);
+ regparse++;
+ } else
+ ret = regnode(ANYOF);
+ if (*regparse == ']' || *regparse == '-')
+ regc(*regparse++);
+ while (*regparse != '\0' && *regparse != ']') {
+ if (*regparse == '-') {
+ regparse++;
+ if (*regparse == ']' || *regparse == '\0')
+ regc('-');
+ else {
+ classr = UCHARAT(regparse-2)+1;
+ classend = UCHARAT(regparse);
+ if (classr > classend+1)
+ FAIL("invalid [] range");
+ for (; classr <= classend; classr++)
+ regc(classr);
+ regparse++;
+ }
+ } else
+ regc(*regparse++);
+ }
+ regc('\0');
+ if (*regparse != ']')
+ FAIL("unmatched []");
+ regparse++;
+ *flagp |= HASWIDTH|SIMPLE;
+ }
+ break;
+ case '(':
+ ret = reg(1, &flags);
+ if (ret == NULL)
+ return(NULL);
+ *flagp |= flags&(HASWIDTH|SPSTART);
+ break;
+ case '\0':
+ case '|':
+ case '\n':
+ case ')':
+ FAIL("internal urp"); /* Supposed to be caught earlier. */
+ break;
+ case '?':
+ case '+':
+ case '*':
+ FAIL("?+* follows nothing");
+ break;
+ case '\\':
+ switch (*regparse++) {
+ case '\0':
+ FAIL("trailing \\");
+ break;
+ case '<':
+ ret = regnode(WORDA);
+ break;
+ case '>':
+ ret = regnode(WORDZ);
+ break;
+ /* FIXME: Someday handle \1, \2, ... */
+ default:
+ /* Handle general quoted chars in exact-match routine */
+ goto de_fault;
+ }
+ break;
+ de_fault:
+ default:
+ /*
+ * Encode a string of characters to be matched exactly.
+ *
+ * This is a bit tricky due to quoted chars and due to
+ * '*', '+', and '?' taking the SINGLE char previous
+ * as their operand.
+ *
+ * On entry, the char at regparse[-1] is going to go
+ * into the string, no matter what it is. (It could be
+ * following a \ if we are entered from the '\' case.)
+ *
+ * Basic idea is to pick up a good char in ch and
+ * examine the next char. If it's *+? then we twiddle.
+ * If it's \ then we frozzle. If it's other magic char
+ * we push ch and terminate the string. If none of the
+ * above, we push ch on the string and go around again.
+ *
+ * regprev is used to remember where "the current char"
+ * starts in the string, if due to a *+? we need to back
+ * up and put the current char in a separate, 1-char, string.
+ * When regprev is NULL, ch is the only char in the
+ * string; this is used in *+? handling, and in setting
+ * flags |= SIMPLE at the end.
+ */
+ {
+ char *regprev;
+ register char ch;
+
+ regparse--; /* Look at cur char */
+ ret = regnode(EXACTLY);
+ for ( regprev = 0 ; ; ) {
+ ch = *regparse++; /* Get current char */
+ switch (*regparse) { /* look at next one */
+
+ default:
+ regc(ch); /* Add cur to string */
+ break;
+
+ case '.': case '[': case '(':
+ case ')': case '|': case '\n':
+ case '$': case '^':
+ case '\0':
+ /* FIXME, $ and ^ should not always be magic */
+ magic:
+ regc(ch); /* dump cur char */
+ goto done; /* and we are done */
+
+ case '?': case '+': case '*':
+ if (!regprev) /* If just ch in str, */
+ goto magic; /* use it */
+ /* End mult-char string one early */
+ regparse = regprev; /* Back up parse */
+ goto done;
+
+ case '\\':
+ regc(ch); /* Cur char OK */
+ switch (regparse[1]){ /* Look after \ */
+ case '\0':
+ case '<':
+ case '>':
+ /* FIXME: Someday handle \1, \2, ... */
+ goto done; /* Not quoted */
+ default:
+ /* Backup point is \, scan * point is after it. */
+ regprev = regparse;
+ regparse++;
+ continue; /* NOT break; */
+ }
+ }
+ regprev = regparse; /* Set backup point */
+ }
+ done:
+ regc('\0');
+ *flagp |= HASWIDTH;
+ if (!regprev) /* One char? */
+ *flagp |= SIMPLE;
+ }
+ break;
+ }
+
+ return(ret);
+}
+
+/*
+ - regnode - emit a node
+ */
+static char * /* Location. */
+regnode( int op )
+{
+ register char *ret;
+ register char *ptr;
+
+ ret = regcode;
+ if (ret == &regdummy) {
+ regsize += 3;
+ return(ret);
+ }
+
+ ptr = ret;
+ *ptr++ = op;
+ *ptr++ = '\0'; /* Null "next" pointer. */
+ *ptr++ = '\0';
+ regcode = ptr;
+
+ return(ret);
+}
+
+/*
+ - regc - emit (if appropriate) a byte of code
+ */
+static void
+regc( int b )
+{
+ if (regcode != &regdummy)
+ *regcode++ = b;
+ else
+ regsize++;
+}
+
+/*
+ - reginsert - insert an operator in front of already-emitted operand
+ *
+ * Means relocating the operand.
+ */
+static void
+reginsert(
+ char op,
+ char *opnd )
+{
+ register char *src;
+ register char *dst;
+ register char *place;
+
+ if (regcode == &regdummy) {
+ regsize += 3;
+ return;
+ }
+
+ src = regcode;
+ regcode += 3;
+ dst = regcode;
+ while (src > opnd)
+ *--dst = *--src;
+
+ place = opnd; /* Op node, where operand used to be. */
+ *place++ = op;
+ *place++ = '\0';
+ *place++ = '\0';
+}
+
+/*
+ - regtail - set the next-pointer at the end of a node chain
+ */
+static void
+regtail(
+ char *p,
+ char *val )
+{
+ register char *scan;
+ register char *temp;
+ register int offset;
+
+ if (p == &regdummy)
+ return;
+
+ /* Find last node. */
+ scan = p;
+ for (;;) {
+ temp = regnext(scan);
+ if (temp == NULL)
+ break;
+ scan = temp;
+ }
+
+ if (OP(scan) == BACK)
+ offset = scan - val;
+ else
+ offset = val - scan;
+ *(scan+1) = (offset>>8)&0377;
+ *(scan+2) = offset&0377;
+}
+
+/*
+ - regoptail - regtail on operand of first argument; nop if operandless
+ */
+
+static void
+regoptail(
+ char *p,
+ char *val )
+{
+ /* "Operandless" and "op != BRANCH" are synonymous in practice. */
+ if (p == NULL || p == &regdummy || OP(p) != BRANCH)
+ return;
+ regtail(OPERAND(p), val);
+}
+
+/*
+ * regexec and friends
+ */
+
+/*
+ * Global work variables for regexec().
+ */
+static const char *reginput; /* String-input pointer. */
+static const char *regbol; /* Beginning of input, for ^ check. */
+static const char **regstartp; /* Pointer to startp array. */
+static const char **regendp; /* Ditto for endp. */
+
+/*
+ * Forwards.
+ */
+STATIC int regtry( regexp *prog, const char *string );
+STATIC int regmatch( char *prog );
+STATIC int regrepeat( char *p );
+
+#ifdef DEBUG
+int regnarrate = 0;
+void regdump();
+STATIC char *regprop();
+#endif
+
+/*
+ - regexec - match a regexp against a string
+ */
+int
+regexec(
+ register regexp *prog,
+ register const char *string )
+{
+ register char *s;
+
+ /* Be paranoid... */
+ if (prog == NULL || string == NULL) {
+ regerror("NULL parameter");
+ return(0);
+ }
+
+ /* Check validity of program. */
+ if (UCHARAT(prog->program) != MAGIC) {
+ regerror("corrupted program");
+ return(0);
+ }
+
+ /* If there is a "must appear" string, look for it. */
+ if ( prog->regmust != NULL )
+ {
+ s = (char *)string;
+ while ( ( s = strchr( s, prog->regmust[ 0 ] ) ) != NULL )
+ {
+ if ( !strncmp( s, prog->regmust, prog->regmlen ) )
+ break; /* Found it. */
+ ++s;
+ }
+ if ( s == NULL ) /* Not present. */
+ return 0;
+ }
+
+ /* Mark beginning of line for ^ . */
+ regbol = (char *)string;
+
+ /* Simplest case: anchored match need be tried only once. */
+ if ( prog->reganch )
+ return regtry( prog, string );
+
+ /* Messy cases: unanchored match. */
+ s = (char *)string;
+ if (prog->regstart != '\0')
+ /* We know what char it must start with. */
+ while ((s = strchr(s, prog->regstart)) != NULL) {
+ if (regtry(prog, s))
+ return(1);
+ s++;
+ }
+ else
+ /* We do not -- general case. */
+ do {
+ if ( regtry( prog, s ) )
+ return( 1 );
+ } while ( *s++ != '\0' );
+
+ /* Failure. */
+ return 0;
+}
+
+
+/*
+ * regtry() - try match at specific point.
+ */
+
+static int /* 0 failure, 1 success */
+regtry(
+ regexp *prog,
+ const char *string )
+{
+ register int i;
+ register const char * * sp;
+ register const char * * ep;
+
+ reginput = string;
+ regstartp = prog->startp;
+ regendp = prog->endp;
+
+ sp = prog->startp;
+ ep = prog->endp;
+ for ( i = NSUBEXP; i > 0; --i )
+ {
+ *sp++ = NULL;
+ *ep++ = NULL;
+ }
+ if ( regmatch( prog->program + 1 ) )
+ {
+ prog->startp[ 0 ] = string;
+ prog->endp[ 0 ] = reginput;
+ return 1;
+ }
+ else
+ return 0;
+}
+
+
+/*
+ * regmatch() - main matching routine.
+ *
+ * Conceptually the strategy is simple: check to see whether the current node
+ * matches, call self recursively to see whether the rest matches, and then act
+ * accordingly. In practice we make some effort to avoid recursion, in
+ * particular by going through "ordinary" nodes (that do not need to know
+ * whether the rest of the match failed) by a loop instead of by recursion.
+ */
+
+static int /* 0 failure, 1 success */
+regmatch( char * prog )
+{
+ char * scan; /* Current node. */
+ char * next; /* Next node. */
+
+ scan = prog;
+#ifdef DEBUG
+ if (scan != NULL && regnarrate)
+ fprintf(stderr, "%s(\n", regprop(scan));
+#endif
+ while (scan != NULL) {
+#ifdef DEBUG
+ if (regnarrate)
+ fprintf(stderr, "%s...\n", regprop(scan));
+#endif
+ next = regnext(scan);
+
+ switch (OP(scan)) {
+ case BOL:
+ if (reginput != regbol)
+ return(0);
+ break;
+ case EOL:
+ if (*reginput != '\0')
+ return(0);
+ break;
+ case WORDA:
+ /* Must be looking at a letter, digit, or _ */
+ if ((!isalnum(*reginput)) && *reginput != '_')
+ return(0);
+ /* Prev must be BOL or nonword */
+ if (reginput > regbol &&
+ (isalnum(reginput[-1]) || reginput[-1] == '_'))
+ return(0);
+ break;
+ case WORDZ:
+ /* Must be looking at non letter, digit, or _ */
+ if (isalnum(*reginput) || *reginput == '_')
+ return(0);
+ /* We don't care what the previous char was */
+ break;
+ case ANY:
+ if (*reginput == '\0')
+ return(0);
+ reginput++;
+ break;
+ case EXACTLY: {
+ register int len;
+ register char *opnd;
+
+ opnd = OPERAND(scan);
+ /* Inline the first character, for speed. */
+ if (*opnd != *reginput)
+ return(0);
+ len = strlen(opnd);
+ if (len > 1 && strncmp(opnd, reginput, len) != 0)
+ return(0);
+ reginput += len;
+ }
+ break;
+ case ANYOF:
+ if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) == NULL)
+ return(0);
+ reginput++;
+ break;
+ case ANYBUT:
+ if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) != NULL)
+ return(0);
+ reginput++;
+ break;
+ case NOTHING:
+ break;
+ case BACK:
+ break;
+ case OPEN+1:
+ case OPEN+2:
+ case OPEN+3:
+ case OPEN+4:
+ case OPEN+5:
+ case OPEN+6:
+ case OPEN+7:
+ case OPEN+8:
+ case OPEN+9: {
+ register int no;
+ register const char *save;
+
+ no = OP(scan) - OPEN;
+ save = reginput;
+
+ if (regmatch(next)) {
+ /*
+ * Don't set startp if some later
+ * invocation of the same parentheses
+ * already has.
+ */
+ if (regstartp[no] == NULL)
+ regstartp[no] = save;
+ return(1);
+ } else
+ return(0);
+ }
+ break;
+ case CLOSE+1:
+ case CLOSE+2:
+ case CLOSE+3:
+ case CLOSE+4:
+ case CLOSE+5:
+ case CLOSE+6:
+ case CLOSE+7:
+ case CLOSE+8:
+ case CLOSE+9: {
+ register int no;
+ register const char *save;
+
+ no = OP(scan) - CLOSE;
+ save = reginput;
+
+ if (regmatch(next)) {
+ /*
+ * Don't set endp if some later
+ * invocation of the same parentheses
+ * already has.
+ */
+ if (regendp[no] == NULL)
+ regendp[no] = save;
+ return(1);
+ } else
+ return(0);
+ }
+ break;
+ case BRANCH: {
+ register const char *save;
+
+ if (OP(next) != BRANCH) /* No choice. */
+ next = OPERAND(scan); /* Avoid recursion. */
+ else {
+ do {
+ save = reginput;
+ if (regmatch(OPERAND(scan)))
+ return(1);
+ reginput = save;
+ scan = regnext(scan);
+ } while (scan != NULL && OP(scan) == BRANCH);
+ return(0);
+ /* NOTREACHED */
+ }
+ }
+ break;
+ case STAR:
+ case PLUS: {
+ register char nextch;
+ register int no;
+ register const char *save;
+ register int min;
+
+ /*
+ * Lookahead to avoid useless match attempts
+ * when we know what character comes next.
+ */
+ nextch = '\0';
+ if (OP(next) == EXACTLY)
+ nextch = *OPERAND(next);
+ min = (OP(scan) == STAR) ? 0 : 1;
+ save = reginput;
+ no = regrepeat(OPERAND(scan));
+ while (no >= min) {
+ /* If it could work, try it. */
+ if (nextch == '\0' || *reginput == nextch)
+ if (regmatch(next))
+ return(1);
+ /* Couldn't or didn't -- back up. */
+ no--;
+ reginput = save + no;
+ }
+ return(0);
+ }
+ break;
+ case END:
+ return(1); /* Success! */
+ break;
+ default:
+ regerror("memory corruption");
+ return(0);
+ break;
+ }
+
+ scan = next;
+ }
+
+ /*
+ * We get here only if there's trouble -- normally "case END" is
+ * the terminating point.
+ */
+ regerror("corrupted pointers");
+ return(0);
+}
+
+/*
+ - regrepeat - repeatedly match something simple, report how many
+ */
+static int
+regrepeat( char *p )
+{
+ register int count = 0;
+ register const char *scan;
+ register char *opnd;
+
+ scan = reginput;
+ opnd = OPERAND(p);
+ switch (OP(p)) {
+ case ANY:
+ count = strlen(scan);
+ scan += count;
+ break;
+ case EXACTLY:
+ while (*opnd == *scan) {
+ count++;
+ scan++;
+ }
+ break;
+ case ANYOF:
+ while (*scan != '\0' && strchr(opnd, *scan) != NULL) {
+ count++;
+ scan++;
+ }
+ break;
+ case ANYBUT:
+ while (*scan != '\0' && strchr(opnd, *scan) == NULL) {
+ count++;
+ scan++;
+ }
+ break;
+ default: /* Oh dear. Called inappropriately. */
+ regerror("internal foulup");
+ count = 0; /* Best compromise. */
+ break;
+ }
+ reginput = scan;
+
+ return(count);
+}
+
+/*
+ - regnext - dig the "next" pointer out of a node
+ */
+static char *
+regnext( register char *p )
+{
+ register int offset;
+
+ if (p == &regdummy)
+ return(NULL);
+
+ offset = NEXT(p);
+ if (offset == 0)
+ return(NULL);
+
+ if (OP(p) == BACK)
+ return(p-offset);
+ else
+ return(p+offset);
+}
+
+#ifdef DEBUG
+
+STATIC char *regprop();
+
+/*
+ - regdump - dump a regexp onto stdout in vaguely comprehensible form
+ */
+void
+regdump( regexp *r )
+{
+ register char *s;
+ register char op = EXACTLY; /* Arbitrary non-END op. */
+ register char *next;
+
+
+ s = r->program + 1;
+ while (op != END) { /* While that wasn't END last time... */
+ op = OP(s);
+ printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */
+ next = regnext(s);
+ if (next == NULL) /* Next ptr. */
+ printf("(0)");
+ else
+ printf("(%d)", (s-r->program)+(next-s));
+ s += 3;
+ if (op == ANYOF || op == ANYBUT || op == EXACTLY) {
+ /* Literal string, where present. */
+ while (*s != '\0') {
+ putchar(*s);
+ s++;
+ }
+ s++;
+ }
+ putchar('\n');
+ }
+
+ /* Header fields of interest. */
+ if (r->regstart != '\0')
+ printf("start `%c' ", r->regstart);
+ if (r->reganch)
+ printf("anchored ");
+ if (r->regmust != NULL)
+ printf("must have \"%s\"", r->regmust);
+ printf("\n");
+}
+
+/*
+ - regprop - printable representation of opcode
+ */
+static char *
+regprop( char *op )
+{
+ register char *p;
+ static char buf[50];
+
+ (void) strcpy(buf, ":");
+
+ switch (OP(op)) {
+ case BOL:
+ p = "BOL";
+ break;
+ case EOL:
+ p = "EOL";
+ break;
+ case ANY:
+ p = "ANY";
+ break;
+ case ANYOF:
+ p = "ANYOF";
+ break;
+ case ANYBUT:
+ p = "ANYBUT";
+ break;
+ case BRANCH:
+ p = "BRANCH";
+ break;
+ case EXACTLY:
+ p = "EXACTLY";
+ break;
+ case NOTHING:
+ p = "NOTHING";
+ break;
+ case BACK:
+ p = "BACK";
+ break;
+ case END:
+ p = "END";
+ break;
+ case OPEN+1:
+ case OPEN+2:
+ case OPEN+3:
+ case OPEN+4:
+ case OPEN+5:
+ case OPEN+6:
+ case OPEN+7:
+ case OPEN+8:
+ case OPEN+9:
+ sprintf(buf+strlen(buf), "OPEN%d", OP(op)-OPEN);
+ p = NULL;
+ break;
+ case CLOSE+1:
+ case CLOSE+2:
+ case CLOSE+3:
+ case CLOSE+4:
+ case CLOSE+5:
+ case CLOSE+6:
+ case CLOSE+7:
+ case CLOSE+8:
+ case CLOSE+9:
+ sprintf(buf+strlen(buf), "CLOSE%d", OP(op)-CLOSE);
+ p = NULL;
+ break;
+ case STAR:
+ p = "STAR";
+ break;
+ case PLUS:
+ p = "PLUS";
+ break;
+ case WORDA:
+ p = "WORDA";
+ break;
+ case WORDZ:
+ p = "WORDZ";
+ break;
+ default:
+ regerror("corrupted opcode");
+ break;
+ }
+ if (p != NULL)
+ (void) strcat(buf, p);
+ return(buf);
+}
+#endif
+
+/*
+ * The following is provided for those people who do not have strcspn() in
+ * their C libraries. They should get off their butts and do something
+ * about it; at least one public-domain implementation of those (highly
+ * useful) string routines has been published on Usenet.
+ */
+#ifdef STRCSPN
+/*
+ * strcspn - find length of initial segment of s1 consisting entirely
+ * of characters not from s2
+ */
+
+static int
+strcspn(
+ char *s1,
+ char *s2 )
+{
+ register char *scan1;
+ register char *scan2;
+ register int count;
+
+ count = 0;
+ for (scan1 = s1; *scan1 != '\0'; scan1++) {
+ for (scan2 = s2; *scan2 != '\0';) /* ++ moved down. */
+ if (*scan1 == *scan2++)
+ return(count);
+ count++;
+ }
+ return(count);
+}
+#endif
diff --git a/tools/build/src/engine/regexp.h b/tools/build/src/engine/regexp.h
new file mode 100644
index 0000000000..6898ccdceb
--- /dev/null
+++ b/tools/build/src/engine/regexp.h
@@ -0,0 +1,34 @@
+/*
+ * Definitions etc. for regexp(3) routines.
+ *
+ * Caveat: this is V8 regexp(3) [actually, a reimplementation thereof],
+ * not the System V one.
+ */
+#ifndef REGEXP_DWA20011023_H
+#define REGEXP_DWA20011023_H
+
+#define NSUBEXP 10
+typedef struct regexp {
+ char const * startp[ NSUBEXP ];
+ char const * endp[ NSUBEXP ];
+ char regstart; /* Internal use only. */
+ char reganch; /* Internal use only. */
+ char * regmust; /* Internal use only. */
+ int regmlen; /* Internal use only. */
+ char program[ 1 ]; /* Unwarranted chumminess with compiler. */
+} regexp;
+
+
+regexp * regcomp( char const * exp );
+int regexec( regexp * prog, char const * string );
+void regerror( char const * s );
+
+
+/*
+ * The first byte of the regexp internal "program" is actually this magic
+ * number; the start node begins in the second byte.
+ */
+#define MAGIC 0234
+
+#endif
+
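A minimal usage sketch for this interface, assuming the host program supplies regerror() (as jam does) and that storage is released with BJAM_FREE() to match the BJAM_MALLOC() call in regcomp():

#include "jam.h"
#include "regexp.h"
#include <stdio.h>

static void match_once( char const * pattern, char const * line )
{
    regexp * re = regcomp( pattern );
    if ( !re ) return;                   /* regerror() has already reported why */
    if ( regexec( re, line ) )
    {
        /* startp[ 0 ]/endp[ 0 ] bracket the whole match; slots 1..NSUBEXP-1
         * bracket the parenthesized subexpressions.
         */
        printf( "matched: %.*s\n",
            (int)( re->endp[ 0 ] - re->startp[ 0 ] ), re->startp[ 0 ] );
    }
    BJAM_FREE( re );
}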
diff --git a/tools/build/src/engine/rules.c b/tools/build/src/engine/rules.c
new file mode 100644
index 0000000000..7947c55071
--- /dev/null
+++ b/tools/build/src/engine/rules.c
@@ -0,0 +1,740 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * rules.c - access to RULEs, TARGETs, and ACTIONs
+ *
+ * External routines:
+ * bindrule() - return pointer to RULE, creating it if necessary.
+ * bindtarget() - return pointer to TARGET, creating it if necessary.
+ * touch_target() - mark a target to simulate being new.
+ * targetlist() - turn list of target names into a TARGET chain.
+ * targetentry() - add a TARGET to a chain of TARGETS.
+ * actionlist() - append to an ACTION chain.
+ * addsettings() - add a deferred "set" command to a target.
+ * pushsettings() - set all target specific variables.
+ * popsettings() - reset target specific variables to their pre-push values.
+ * freesettings() - delete a settings list.
+ * rules_done() - free RULE and TARGET tables.
+ */
+
+#include "jam.h"
+#include "rules.h"
+
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "search.h"
+#include "variable.h"
+
+
+static void set_rule_actions( RULE *, rule_actions * );
+static void set_rule_body ( RULE *, FUNCTION * );
+
+static struct hash * targethash = 0;
+
+
+/*
+ * get_target_includes() - lazy creates a target's internal includes node
+ *
+ * The newly created node is not entered into the hash table as there should
+ * never be a need to bind it directly by target name. If you want to
+ * access an internal includes node by name, first access the actual target and
+ * then read the internal includes node from there.
+ */
+
+static TARGET * get_target_includes( TARGET * const t )
+{
+ if ( !t->includes )
+ {
+ TARGET * const i = (TARGET *)BJAM_MALLOC( sizeof( *t ) );
+ memset( (char *)i, '\0', sizeof( *i ) );
+ i->name = object_copy( t->name );
+ i->boundname = object_copy( i->name );
+ i->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL;
+ i->original_target = t;
+ t->includes = i;
+ }
+ return t->includes;
+}
+
+
+/*
+ * target_include() - adds a target to the given target's 'included' list
+ * target_include_many() - adds targets to the given target's 'included' list
+ *
+ * Included targets are modeled as dependencies of the including target's
+ * internal include node.
+ */
+
+void target_include( TARGET * const including, TARGET * const included )
+{
+ TARGET * const internal = get_target_includes( including );
+ internal->depends = targetentry( internal->depends, included );
+}
+
+void target_include_many( TARGET * const including, LIST * const included_names
+ )
+{
+ TARGET * const internal = get_target_includes( including );
+ internal->depends = targetlist( internal->depends, included_names );
+}
+
+
+/*
+ * enter_rule() - return pointer to RULE, creating it if necessary in
+ * target_module.
+ */
+
+static RULE * enter_rule( OBJECT * rulename, module_t * target_module )
+{
+ int found;
+ RULE * const r = (RULE *)hash_insert( demand_rules( target_module ),
+ rulename, &found );
+ if ( !found )
+ {
+ r->name = object_copy( rulename );
+ r->procedure = 0;
+ r->module = 0;
+ r->actions = 0;
+ r->exported = 0;
+ r->module = target_module;
+ }
+ return r;
+}
+
+
+/*
+ * define_rule() - return pointer to RULE, creating it if necessary in
+ * target_module. Prepare it to accept a body or action originating in
+ * src_module.
+ */
+
+static RULE * define_rule( module_t * src_module, OBJECT * rulename,
+ module_t * target_module )
+{
+ RULE * const r = enter_rule( rulename, target_module );
+ if ( r->module != src_module )
+ {
+ /* If the rule was imported from elsewhere, clear it now. */
+ set_rule_body( r, 0 );
+ set_rule_actions( r, 0 );
+ /* r will be executed in the source module. */
+ r->module = src_module;
+ }
+ return r;
+}
+
+
+void rule_free( RULE * r )
+{
+ object_free( r->name );
+ r->name = 0;
+ if ( r->procedure )
+ function_free( r->procedure );
+ r->procedure = 0;
+ if ( r->actions )
+ actions_free( r->actions );
+ r->actions = 0;
+}
+
+
+/*
+ * bindtarget() - return pointer to TARGET, creating it if necessary.
+ */
+
+TARGET * bindtarget( OBJECT * const target_name )
+{
+ int found;
+ TARGET * t;
+
+ if ( !targethash )
+ targethash = hashinit( sizeof( TARGET ), "targets" );
+
+ t = (TARGET *)hash_insert( targethash, target_name, &found );
+ if ( !found )
+ {
+ memset( (char *)t, '\0', sizeof( *t ) );
+ t->name = object_copy( target_name );
+ t->boundname = object_copy( t->name ); /* default for T_FLAG_NOTFILE */
+ }
+
+ return t;
+}
+
+
+static void bind_explicitly_located_target( void * xtarget, void * data )
+{
+ TARGET * t = (TARGET *)xtarget;
+ if ( !( t->flags & T_FLAG_NOTFILE ) )
+ {
+ /* Check if there is a setting for LOCATE. */
+ SETTINGS * s = t->settings;
+ for ( ; s ; s = s->next )
+ {
+ if ( object_equal( s->symbol, constant_LOCATE ) && ! list_empty( s->value ) )
+ {
+ set_explicit_binding( t->name, list_front( s->value ) );
+ break;
+ }
+ }
+ }
+}
+
+
+void bind_explicitly_located_targets()
+{
+ if ( targethash )
+ hashenumerate( targethash, bind_explicitly_located_target, (void *)0 );
+}
+
+
+/*
+ * touch_target() - mark a target to simulate being new.
+ */
+
+void touch_target( OBJECT * const t )
+{
+ bindtarget( t )->flags |= T_FLAG_TOUCHED;
+}
+
+
+/*
+ * target_scc() - returns the root of a strongly connected component that this
+ * target is a part of.
+ */
+
+TARGET * target_scc( TARGET * t )
+{
+ TARGET * result = t;
+ while ( result->scc_root )
+ result = result->scc_root;
+ while ( t->scc_root )
+ {
+ TARGET * const tmp = t->scc_root;
+ t->scc_root = result;
+ t = tmp;
+ }
+ return result;
+}
+
+
+/*
+ * targetlist() - turn list of target names into a TARGET chain.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * targets list of target names
+ */
+
+TARGETS * targetlist( TARGETS * chain, LIST * target_names )
+{
+ LISTITER iter = list_begin( target_names );
+ LISTITER const end = list_end( target_names );
+ for ( ; iter != end; iter = list_next( iter ) )
+ chain = targetentry( chain, bindtarget( list_item( iter ) ) );
+ return chain;
+}
+
+
+/*
+ * targetentry() - add a TARGET to a chain of TARGETS.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * target new target to append
+ */
+
+TARGETS * targetentry( TARGETS * chain, TARGET * target )
+{
+ TARGETS * const c = (TARGETS *)BJAM_MALLOC( sizeof( TARGETS ) );
+ c->target = target;
+
+ if ( !chain ) chain = c;
+ else chain->tail->next = c;
+ chain->tail = c;
+ c->next = 0;
+
+ return chain;
+}
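A short sketch of growing a chain with the routine above (target names are made up): the head element carries the tail pointer, so each append is constant time:

static TARGETS * chain_example( void )
{
    TARGETS * chain = 0;                            /* empty chain */
    OBJECT * a = object_new( "a.o" );
    OBJECT * b = object_new( "b.o" );
    chain = targetentry( chain, bindtarget( a ) );  /* becomes the head */
    chain = targetentry( chain, bindtarget( b ) );  /* appended via chain->tail */
    object_free( a );                               /* bindtarget() copied the names */
    object_free( b );
    return chain;                                   /* release with freetargets() */
}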
+
+
+/*
+ * targetchain() - append two TARGET chains.
+ *
+ * Inputs:
+ * chain existing TARGETS to append to
+ * target new target to append
+ */
+
+TARGETS * targetchain( TARGETS * chain, TARGETS * targets )
+{
+ if ( !targets ) return chain;
+ if ( !chain ) return targets;
+
+ chain->tail->next = targets;
+ chain->tail = targets->tail;
+ return chain;
+}
+
+/*
+ * action_free() - decrement the ACTION's reference count and (maybe) free it.
+ */
+
+void action_free( ACTION * action )
+{
+ if ( --action->refs == 0 )
+ {
+ freetargets( action->targets );
+ freetargets( action->sources );
+ BJAM_FREE( action );
+ }
+}
+
+
+/*
+ * actionlist() - append to an ACTION chain.
+ */
+
+ACTIONS * actionlist( ACTIONS * chain, ACTION * action )
+{
+ ACTIONS * const actions = (ACTIONS *)BJAM_MALLOC( sizeof( ACTIONS ) );
+ actions->action = action;
+ ++action->refs;
+ if ( !chain ) chain = actions;
+ else chain->tail->next = actions;
+ chain->tail = actions;
+ actions->next = 0;
+ return chain;
+}
+
+static SETTINGS * settings_freelist;
+
+
+/*
+ * addsettings() - add a deferred "set" command to a target.
+ *
+ * Adds a variable setting (varname=list) onto a chain of settings for a
+ * particular target. 'flag' controls the relationship between new and old
+ * values in the same way as in the var_set() function (see variable.c). Returns
+ * the head of the settings chain.
+ */
+
+SETTINGS * addsettings( SETTINGS * head, int flag, OBJECT * symbol,
+ LIST * value )
+{
+ SETTINGS * v;
+
+ /* Look for previous settings. */
+ for ( v = head; v; v = v->next )
+ if ( object_equal( v->symbol, symbol ) )
+ break;
+
+ /* If not previously set, alloc a new. */
+ /* If appending, do so. */
+ /* Else free old and set new. */
+ if ( !v )
+ {
+ v = settings_freelist;
+ if ( v )
+ settings_freelist = v->next;
+ else
+ v = (SETTINGS *)BJAM_MALLOC( sizeof( *v ) );
+
+ v->symbol = object_copy( symbol );
+ v->value = value;
+ v->next = head;
+ head = v;
+ }
+ else if ( flag == VAR_APPEND )
+ {
+ v->value = list_append( v->value, value );
+ }
+ else if ( flag != VAR_DEFAULT )
+ {
+ list_free( v->value );
+ v->value = value;
+ }
+ else
+ list_free( value );
+
+ /* Return (new) head of list. */
+ return head;
+}
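A sketch of the flag semantics just described; list_new() is assumed to be the one-element list constructor from lists.h, and the variable name is illustrative:

static SETTINGS * settings_example( void )
{
    OBJECT * const var = object_new( "INCLUDES" );
    SETTINGS * s = 0;
    s = addsettings( s, VAR_SET, var, list_new( object_new( "a.h" ) ) );
    s = addsettings( s, VAR_APPEND, var, list_new( object_new( "b.h" ) ) );   /* now a.h b.h */
    s = addsettings( s, VAR_DEFAULT, var, list_new( object_new( "c.h" ) ) );  /* ignored: already set */
    object_free( var );
    return s;   /* attach to a target; release later with freesettings() */
}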
+
+
+/*
+ * pushsettings() - set all target specific variables.
+ */
+
+void pushsettings( struct module_t * module, SETTINGS * v )
+{
+ for ( ; v; v = v->next )
+ v->value = var_swap( module, v->symbol, v->value );
+}
+
+
+/*
+ * popsettings() - reset target specific variables to their pre-push values.
+ */
+
+void popsettings( struct module_t * module, SETTINGS * v )
+{
+ pushsettings( module, v ); /* just swap again */
+}
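A short sketch of the swap discipline (t is a hypothetical TARGET and the module choice is illustrative): the same swap that installs the target-specific values restores the originals when applied again:

static void settings_scope_example( TARGET * t )
{
    pushsettings( root_module(), t->settings );  /* per-target values now visible */
    /* ... expand variables / run actions that depend on them ... */
    popsettings( root_module(), t->settings );   /* pre-push values restored */
}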
+
+
+/*
+ * copysettings() - duplicate a settings list, returning the new copy.
+ */
+
+SETTINGS * copysettings( SETTINGS * head )
+{
+ SETTINGS * copy = 0;
+ SETTINGS * v;
+ for ( v = head; v; v = v->next )
+ copy = addsettings( copy, VAR_SET, v->symbol, list_copy( v->value ) );
+ return copy;
+}
+
+
+/*
+ * freetargets() - delete a targets list.
+ */
+
+void freetargets( TARGETS * chain )
+{
+ while ( chain )
+ {
+ TARGETS * const n = chain->next;
+ BJAM_FREE( chain );
+ chain = n;
+ }
+}
+
+
+/*
+ * freeactions() - delete an action list.
+ */
+
+void freeactions( ACTIONS * chain )
+{
+ while ( chain )
+ {
+ ACTIONS * const n = chain->next;
+ action_free( chain->action );
+ BJAM_FREE( chain );
+ chain = n;
+ }
+}
+
+
+/*
+ * freesettings() - delete a settings list.
+ */
+
+void freesettings( SETTINGS * v )
+{
+ while ( v )
+ {
+ SETTINGS * const n = v->next;
+ object_free( v->symbol );
+ list_free( v->value );
+ v->next = settings_freelist;
+ settings_freelist = v;
+ v = n;
+ }
+}
+
+
+static void freetarget( void * xt, void * data )
+{
+ TARGET * const t = (TARGET *)xt;
+ if ( t->name ) object_free ( t->name );
+ if ( t->boundname ) object_free ( t->boundname );
+ if ( t->settings ) freesettings( t->settings );
+ if ( t->depends ) freetargets ( t->depends );
+ if ( t->dependants ) freetargets ( t->dependants );
+ if ( t->parents ) freetargets ( t->parents );
+ if ( t->actions ) freeactions ( t->actions );
+ if ( t->includes )
+ {
+ freetarget( t->includes, (void *)0 );
+ BJAM_FREE( t->includes );
+ }
+}
+
+
+/*
+ * rules_done() - free RULE and TARGET tables.
+ */
+
+void rules_done()
+{
+ if ( targethash )
+ {
+ hashenumerate( targethash, freetarget, 0 );
+ hashdone( targethash );
+ }
+ while ( settings_freelist )
+ {
+ SETTINGS * const n = settings_freelist->next;
+ BJAM_FREE( settings_freelist );
+ settings_freelist = n;
+ }
+}
+
+
+/*
+ * actions_refer() - add a new reference to the given actions.
+ */
+
+void actions_refer( rule_actions * a )
+{
+ ++a->reference_count;
+}
+
+
+/*
+ * actions_free() - release a reference to given actions.
+ */
+
+void actions_free( rule_actions * a )
+{
+ if ( --a->reference_count <= 0 )
+ {
+ function_free( a->command );
+ list_free( a->bindlist );
+ BJAM_FREE( a );
+ }
+}
+
+
+/*
+ * set_rule_body() - set the argument list and procedure of the given rule.
+ */
+
+static void set_rule_body( RULE * rule, FUNCTION * procedure )
+{
+ if ( procedure )
+ function_refer( procedure );
+ if ( rule->procedure )
+ function_free( rule->procedure );
+ rule->procedure = procedure;
+}
+
+
+/*
+ * global_rule_name() - given a rule, return the name for a corresponding rule
+ * in the global module.
+ */
+
+static OBJECT * global_rule_name( RULE * r )
+{
+ if ( r->module == root_module() )
+ return object_copy( r->name );
+
+ {
+ char name[ 4096 ] = "";
+ if ( r->module->name )
+ {
+ strncat( name, object_str( r->module->name ), sizeof( name ) - 1 );
+ strncat( name, ".", sizeof( name ) - 1 );
+ }
+ strncat( name, object_str( r->name ), sizeof( name ) - 1 );
+ return object_new( name );
+ }
+}
+
+
+/*
+ * global_rule() - given a rule, produce a corresponding entry in the global
+ * module.
+ */
+
+static RULE * global_rule( RULE * r )
+{
+ if ( r->module == root_module() )
+ return r;
+
+ {
+ OBJECT * const name = global_rule_name( r );
+ RULE * const result = define_rule( r->module, name, root_module() );
+ object_free( name );
+ return result;
+ }
+}
+
+
+/*
+ * new_rule_body() - make a new rule named rulename in the given module, with
+ * the given argument list and procedure. If exported is true, the rule is
+ * exported to the global module as modulename.rulename.
+ */
+
+RULE * new_rule_body( module_t * m, OBJECT * rulename, FUNCTION * procedure,
+ int exported )
+{
+ RULE * const local = define_rule( m, rulename, m );
+ local->exported = exported;
+ set_rule_body( local, procedure );
+
+ /* Mark the procedure with the global rule name, regardless of whether the
+ * rule is exported. That gives us something reasonably identifiable that we
+ * can use, e.g. in profiling output. Only do this once, since this could be
+ * called multiple times with the same procedure.
+ */
+ if ( !function_rulename( procedure ) )
+ function_set_rulename( procedure, global_rule_name( local ) );
+
+ return local;
+}
+
+
+static void set_rule_actions( RULE * rule, rule_actions * actions )
+{
+ if ( actions )
+ actions_refer( actions );
+ if ( rule->actions )
+ actions_free( rule->actions );
+ rule->actions = actions;
+}
+
+
+static rule_actions * actions_new( FUNCTION * command, LIST * bindlist,
+ int flags )
+{
+ rule_actions * const result = (rule_actions *)BJAM_MALLOC( sizeof(
+ rule_actions ) );
+ function_refer( command );
+ result->command = command;
+ result->bindlist = bindlist;
+ result->flags = flags;
+ result->reference_count = 0;
+ return result;
+}
+
+
+RULE * new_rule_actions( module_t * m, OBJECT * rulename, FUNCTION * command,
+ LIST * bindlist, int flags )
+{
+ RULE * const local = define_rule( m, rulename, m );
+ RULE * const global = global_rule( local );
+ set_rule_actions( local, actions_new( command, bindlist, flags ) );
+ set_rule_actions( global, local->actions );
+ return local;
+}
+
+
+/*
+ * Looks for a rule in the specified module, and returns it, if found. First
+ * checks if the rule is present in the module's rule table. Second, if the
+ * rule's name is in the form name1.name2 and name1 is in the list of imported
+ * modules, look in module 'name1' for rule 'name2'.
+ */
+
+RULE * lookup_rule( OBJECT * rulename, module_t * m, int local_only )
+{
+ RULE * r;
+ RULE * result = 0;
+ module_t * original_module = m;
+
+ if ( m->class_module )
+ m = m->class_module;
+
+ if ( m->rules && ( r = (RULE *)hash_find( m->rules, rulename ) ) )
+ result = r;
+ else if ( !local_only && m->imported_modules )
+ {
+ /* Try splitting the name into module and rule. */
+ char * p = strchr( object_str( rulename ), '.' ) ;
+ if ( p )
+ {
+ /* The part of the name before 'p' is the module name, while p + 1
+ * points at the rule name.
+ */
+ OBJECT * rule_part = object_new( p + 1 );
+ OBJECT * module_part;
+ {
+ string buf[ 1 ];
+ string_new( buf );
+ string_append_range( buf, object_str( rulename ), p );
+ module_part = object_new( buf->value );
+ string_free( buf );
+ }
+ if ( hash_find( m->imported_modules, module_part ) )
+ result = lookup_rule( rule_part, bindmodule( module_part ), 1 );
+ object_free( module_part );
+ object_free( rule_part );
+ }
+ }
+
+ if ( result )
+ {
+ if ( local_only && !result->exported )
+ result = 0;
+ else if ( original_module != m )
+ {
+ /* The lookup started in an instance module but the rule was found
+ * in its class module, marked for execution either in the class
+ * module itself or in some instance. Mark it for execution in the
+ * instance where the lookup started.
+ */
+ int const execute_in_class = result->module == m;
+ int const execute_in_some_instance =
+ result->module->class_module == m;
+ if ( execute_in_class || execute_in_some_instance )
+ result->module = original_module;
+ }
+ }
+
+ return result;
+}
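+
+/* Illustrative sketch (editor's addition, not part of the original sources):
+ * assuming module m has imported a hypothetical module "utils", a lookup for
+ * "utils.run" first misses m's own rule table and is then resolved by
+ * splitting the name, provided "utils" exports the rule:
+ *
+ *     OBJECT * name = object_new( "utils.run" );
+ *     RULE * r = lookup_rule( name, m, 0 );   // finds rule "run" in "utils"
+ *     object_free( name );
+ */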
+
+
+RULE * bindrule( OBJECT * rulename, module_t * m )
+{
+ RULE * result = lookup_rule( rulename, m, 0 );
+ if ( !result )
+ result = lookup_rule( rulename, root_module(), 0 );
+ /* We have only one caller, 'evaluate_rule', which will complain about
+ * calling an undefined rule. We could issue the error here, but we do not
+ * have the necessary information, such as frame.
+ */
+ if ( !result )
+ result = enter_rule( rulename, m );
+ return result;
+}
+
+
+RULE * import_rule( RULE * source, module_t * m, OBJECT * name )
+{
+ RULE * const dest = define_rule( source->module, name, m );
+ set_rule_body( dest, source->procedure );
+ set_rule_actions( dest, source->actions );
+ return dest;
+}
+
+
+void rule_localize( RULE * rule, module_t * m )
+{
+ rule->module = m;
+ if ( rule->procedure )
+ {
+ FUNCTION * procedure = function_unbind_variables( rule->procedure );
+ function_refer( procedure );
+ function_free( rule->procedure );
+ rule->procedure = procedure;
+ }
+}
diff --git a/tools/build/src/engine/rules.h b/tools/build/src/engine/rules.h
new file mode 100644
index 0000000000..f3a020bb8c
--- /dev/null
+++ b/tools/build/src/engine/rules.h
@@ -0,0 +1,275 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * rules.h - targets, rules, and related information
+ *
+ * This file describes the structures holding the targets, rules, and related
+ * information accumulated by interpreting the statements of the jam files.
+ *
+ * The following are defined:
+ *
+ * RULE - a generic jam rule, the product of RULE and ACTIONS.
+ * ACTIONS - a chain of ACTIONs.
+ * ACTION - a RULE instance with targets and sources.
+ * SETTINGS - variables to set when executing a TARGET's ACTIONS.
+ * TARGETS - a chain of TARGETs.
+ * TARGET - an entity (e.g. a file) that can be built.
+ */
+
+#ifndef RULES_DWA_20011020_H
+#define RULES_DWA_20011020_H
+
+#include "function.h"
+#include "modules.h"
+#include "timestamp.h"
+
+
+typedef struct _rule RULE;
+typedef struct _target TARGET;
+typedef struct _targets TARGETS;
+typedef struct _action ACTION;
+typedef struct _actions ACTIONS;
+typedef struct _settings SETTINGS ;
+
+/* RULE - a generic jam rule, the product of RULE and ACTIONS. */
+
+/* Build actions corresponding to a rule. */
+struct rule_actions
+{
+ int reference_count;
+ FUNCTION * command; /* command string from ACTIONS */
+ LIST * bindlist;
+ int flags; /* modifiers on ACTIONS */
+
+#define RULE_NEWSRCS 0x01 /* $(>) is updated sources only */
+#define RULE_TOGETHER 0x02 /* combine actions on single target */
+#define RULE_IGNORE 0x04 /* ignore return status of executes */
+#define RULE_QUIETLY 0x08 /* do not mention it unless verbose */
+#define RULE_PIECEMEAL 0x10 /* split exec so each $(>) is small */
+#define RULE_EXISTING 0x20 /* $(>) is pre-existing sources only */
+};
+
+typedef struct rule_actions rule_actions;
+typedef struct argument_list argument_list;
+
+struct _rule
+{
+ OBJECT * name;
+ FUNCTION * procedure;
+ rule_actions * actions; /* build actions, or NULL for no actions */
+ module_t * module; /* module in which this rule is executed */
+ int exported; /* nonzero if this rule is supposed to appear in
+ * the global module and be automatically
+ * imported into other modules
+ */
+};
+
+/* ACTIONS - a chain of ACTIONs. */
+struct _actions
+{
+ ACTIONS * next;
+ ACTIONS * tail; /* valid only for head */
+ ACTION * action;
+};
+
+/* ACTION - a RULE instance with targets and sources. */
+struct _action
+{
+ RULE * rule;
+ TARGETS * targets;
+ TARGETS * sources; /* aka $(>) */
+ char running; /* has been started */
+#define A_INIT 0
+#define A_RUNNING_NOEXEC 1
+#define A_RUNNING 2
+ int refs;
+
+ /* WARNING: These variables are used to pass state required by make1cmds and
+ * are not valid anywhere else.
+ */
+ void * first_cmd; /* Pointer to the first CMD created by this action */
+ void * last_cmd; /* Pointer to the last CMD created by this action */
+};
+
+/* SETTINGS - variables to set when executing a TARGET's ACTIONS. */
+struct _settings
+{
+ SETTINGS * next;
+ OBJECT * symbol; /* symbol name for var_set() */
+ LIST * value; /* symbol value for var_set() */
+};
+
+/* TARGETS - a chain of TARGETs. */
+struct _targets
+{
+ TARGETS * next;
+ TARGETS * tail; /* valid only for head */
+ TARGET * target;
+};
+
+/* TARGET - an entity (e.g. a file) that can be built. */
+struct _target
+{
+ OBJECT * name;
+ OBJECT * boundname; /* if search() relocates target */
+ ACTIONS * actions; /* rules to execute, if any */
+ SETTINGS * settings; /* variables to define */
+
+ short flags; /* status info */
+
+#define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */
+#define T_FLAG_NOCARE 0x0002 /* NOCARE applied */
+#define T_FLAG_NOTFILE 0x0004 /* NOTFILE applied */
+#define T_FLAG_TOUCHED 0x0008 /* ALWAYS applied or -t target */
+#define T_FLAG_LEAVES 0x0010 /* LEAVES applied */
+#define T_FLAG_NOUPDATE 0x0020 /* NOUPDATE applied */
+#define T_FLAG_VISITED 0x0040 /* CWM: Used in debugging */
+
+/* This flag has been added to support a new built-in rule named "RMBAD". It is
+ * used to force removal of outdated targets whose dependencies fail to build.
+ */
+#define T_FLAG_RMOLD 0x0080 /* RMBAD applied */
+
+/* This flag was added to support a new built-in rule named "FAIL_EXPECTED" used
+ * to indicate that the result of running a given action should be inverted,
+ * i.e. ok <=> fail. Useful for launching certain test runs from a Jamfile.
+ */
+#define T_FLAG_FAIL_EXPECTED 0x0100 /* FAIL_EXPECTED applied */
+
+#define T_FLAG_INTERNAL 0x0200 /* internal INCLUDES node */
+
+/* Indicates that the target must be a file. Prevents matching non-files, like
+ * directories, when a target is searched.
+ */
+#define T_FLAG_ISFILE 0x0400
+
+#define T_FLAG_PRECIOUS 0x0800
+
+ char binding; /* how target relates to a real file or
+ * folder
+ */
+
+#define T_BIND_UNBOUND 0 /* a disembodied name */
+#define T_BIND_MISSING 1 /* could not find real file */
+#define T_BIND_PARENTS 2 /* using parent's timestamp */
+#define T_BIND_EXISTS 3 /* real file, timestamp valid */
+
+ TARGETS * depends; /* dependencies */
+ TARGETS * dependants; /* the inverse of dependencies */
+ TARGETS * rebuilds; /* targets that should be force-rebuilt
+ * whenever this one is
+ */
+ TARGET * includes; /* internal includes node */
+ TARGET * original_target; /* original_target->includes = this */
+ char rescanned;
+
+ timestamp time; /* update time */
+ timestamp leaf; /* update time of leaf sources */
+
+ char fate; /* make0()'s diagnosis */
+
+#define T_FATE_INIT 0 /* nothing done to target */
+#define T_FATE_MAKING 1 /* make0(target) on stack */
+
+#define T_FATE_STABLE 2 /* target did not need updating */
+#define T_FATE_NEWER 3 /* target newer than parent */
+
+#define T_FATE_SPOIL 4 /* >= SPOIL rebuilds parents */
+#define T_FATE_ISTMP 4 /* unneeded temp target oddly present */
+
+#define T_FATE_BUILD 5 /* >= BUILD rebuilds target */
+#define T_FATE_TOUCHED 5 /* manually touched with -t */
+#define T_FATE_REBUILD 6
+#define T_FATE_MISSING 7 /* is missing, needs updating */
+#define T_FATE_NEEDTMP 8 /* missing temp that must be rebuilt */
+#define T_FATE_OUTDATED 9 /* is out of date, needs updating */
+#define T_FATE_UPDATE 10 /* deps updated, needs updating */
+
+#define T_FATE_BROKEN 11 /* >= BROKEN ruins parents */
+#define T_FATE_CANTFIND 11 /* no rules to make missing target */
+#define T_FATE_CANTMAKE 12 /* can not find dependencies */
+
+ char progress; /* tracks make1() progress */
+
+#define T_MAKE_INIT 0 /* make1(target) not yet called */
+#define T_MAKE_ONSTACK 1 /* make1(target) on stack */
+#define T_MAKE_ACTIVE 2 /* make1(target) in make1b() */
+#define T_MAKE_RUNNING 3 /* make1(target) running commands */
+#define T_MAKE_DONE 4 /* make1(target) done */
+#define T_MAKE_NOEXEC_DONE 5 /* make1(target) done with -n in effect */
+
+#ifdef OPT_SEMAPHORE
+ #define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */
+#endif
+
+#ifdef OPT_SEMAPHORE
+ TARGET * semaphore; /* used in serialization */
+#endif
+
+ char status; /* exec_cmd() result */
+
+ int asynccnt; /* child deps outstanding */
+ TARGETS * parents; /* used by make1() for completion */
+ TARGET * scc_root; /* used by make to resolve cyclic includes
+ */
+ TARGET * rescanning; /* used by make0 to mark visited targets
+ * when rescanning
+ */
+ int depth; /* The depth of the target in the make0
+ * stack.
+ */
+ char * cmds; /* type-punned command list */
+
+ char const * failed;
+};
+
+
+/* Action related functions. */
+void action_free ( ACTION * );
+ACTIONS * actionlist ( ACTIONS *, ACTION * );
+void freeactions ( ACTIONS * );
+SETTINGS * addsettings ( SETTINGS *, int flag, OBJECT * symbol, LIST * value );
+void pushsettings ( module_t *, SETTINGS * );
+void popsettings ( module_t *, SETTINGS * );
+SETTINGS * copysettings ( SETTINGS * );
+void freesettings ( SETTINGS * );
+void actions_refer( rule_actions * );
+void actions_free ( rule_actions * );
+
+/* Rule related functions. */
+RULE * bindrule ( OBJECT * rulename, module_t * );
+RULE * import_rule ( RULE * source, module_t *, OBJECT * name );
+void rule_localize ( RULE * rule, module_t * module );
+RULE * new_rule_body ( module_t *, OBJECT * rulename, FUNCTION * func, int exprt );
+RULE * new_rule_actions( module_t *, OBJECT * rulename, FUNCTION * command, LIST * bindlist, int flags );
+void rule_free ( RULE * );
+
+/* Target related functions. */
+void bind_explicitly_located_targets();
+TARGET * bindtarget ( OBJECT * const );
+void freetargets ( TARGETS * );
+TARGETS * targetchain ( TARGETS *, TARGETS * );
+TARGETS * targetentry ( TARGETS *, TARGET * );
+void target_include ( TARGET * const including,
+ TARGET * const included );
+void target_include_many ( TARGET * const including,
+ LIST * const included_names );
+TARGETS * targetlist ( TARGETS *, LIST * target_names );
+void touch_target ( OBJECT * const );
+void clear_includes ( TARGET * );
+TARGET * target_scc ( TARGET * );
+
+/* Final module cleanup. */
+void rules_done();
+
+#endif
diff --git a/tools/build/src/engine/scan.c b/tools/build/src/engine/scan.c
new file mode 100644
index 0000000000..d92fdca145
--- /dev/null
+++ b/tools/build/src/engine/scan.c
@@ -0,0 +1,404 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * scan.c - the jam yacc scanner
+ *
+ */
+
+#include "jam.h"
+#include "scan.h"
+
+#include "constants.h"
+#include "jambase.h"
+#include "jamgram.h"
+
+
+struct keyword
+{
+ char * word;
+ int type;
+} keywords[] =
+{
+#include "jamgramtab.h"
+ { 0, 0 }
+};
+
+typedef struct include include;
+struct include
+{
+ include * next; /* next serial include file */
+ char * string; /* pointer into current line */
+ char * * strings; /* for yyfparse() -- text to parse */
+ FILE * file; /* for yyfparse() -- file being read */
+ OBJECT * fname; /* for yyfparse() -- file name */
+ int line; /* line counter for error messages */
+ char buf[ 512 ]; /* for yyfparse() -- line buffer */
+};
+
+static include * incp = 0; /* current file; head of chain */
+
+static int scanmode = SCAN_NORMAL;
+static int anyerrors = 0;
+
+
+static char * symdump( YYSTYPE * );
+
+#define BIGGEST_TOKEN 10240 /* no single token can be larger */
+
+
+/*
+ * Set parser mode: normal, string, or keyword.
+ */
+
+void yymode( int n )
+{
+ scanmode = n;
+}
+
+
+void yyerror( char const * s )
+{
+ /* We use yylval instead of incp to access the error location information as
+ * the incp pointer will already be reset to 0 in case the error occurred at
+ * EOF.
+ *
+ * The two may differ only if we ran into an unexpected EOF or got an error
+ * while reading a lexical token spanning multiple lines, e.g. a multi-line
+ * string literal or action body. In that case the yylval location information
+ * holds the position where the token started, while incp holds the position
+ * where reading it broke.
+ */
+ printf( "%s:%d: %s at %s\n", object_str( yylval.file ), yylval.line, s,
+ symdump( &yylval ) );
+ ++anyerrors;
+}
+
+
+int yyanyerrors()
+{
+ return anyerrors != 0;
+}
+
+
+void yyfparse( OBJECT * s )
+{
+ include * i = (include *)BJAM_MALLOC( sizeof( *i ) );
+
+ /* Push this onto the incp chain. */
+ i->string = "";
+ i->strings = 0;
+ i->file = 0;
+ i->fname = object_copy( s );
+ i->line = 0;
+ i->next = incp;
+ incp = i;
+
+ /* If the filename is "+", it means use the internal jambase. */
+ if ( !strcmp( object_str( s ), "+" ) )
+ i->strings = jambase;
+}
+
+
+/*
+ * yyline() - read new line and return first character.
+ *
+ * Fabricates a continuous stream of characters across include files, returning
+ * EOF at the bitter end.
+ */
+
+int yyline()
+{
+ include * const i = incp;
+
+ if ( !incp )
+ return EOF;
+
+ /* Once we start reading from the input stream, we reset the include
+ * insertion point so that the next include file becomes the head of the
+ * list.
+ */
+
+ /* If there is more data in this line, return it. */
+ if ( *i->string )
+ return *i->string++;
+
+ /* If we are reading from an internal string list, go to the next string. */
+ if ( i->strings )
+ {
+ if ( *i->strings )
+ {
+ ++i->line;
+ i->string = *(i->strings++);
+ return *i->string++;
+ }
+ }
+ else
+ {
+ /* If necessary, open the file. */
+ if ( !i->file )
+ {
+ FILE * f = stdin;
+ if ( strcmp( object_str( i->fname ), "-" ) && !( f = fopen( object_str( i->fname ), "r" ) ) )
+ perror( object_str( i->fname ) );
+ i->file = f;
+ }
+
+ /* If there is another line in this file, start it. */
+ if ( i->file && fgets( i->buf, sizeof( i->buf ), i->file ) )
+ {
+ ++i->line;
+ i->string = i->buf;
+ return *i->string++;
+ }
+ }
+
+ /* This include is done. Free it up and return EOF so yyparse() returns to
+ * parse_file().
+ */
+
+ incp = i->next;
+
+ /* Close file, free name. */
+ if ( i->file && ( i->file != stdin ) )
+ fclose( i->file );
+ object_free( i->fname );
+ BJAM_FREE( (char *)i );
+
+ return EOF;
+}
+
+
+/*
+ * yylex() - set yylval to current token; return its type.
+ *
+ * Macros to move things along:
+ *
+ * yychar() - return and advance character; invalid after EOF.
+ * yyprev() - back up one character; invalid before yychar().
+ *
+ * yychar() returns a continuous stream of characters, until it hits the EOF of
+ * the current include file.
+ */
+
+#define yychar() ( *incp->string ? *incp->string++ : yyline() )
+#define yyprev() ( incp->string-- )
+
+int yylex()
+{
+ int c;
+ char buf[ BIGGEST_TOKEN ];
+ char * b = buf;
+
+ if ( !incp )
+ goto eof;
+
+ /* Get first character (whitespace or of token). */
+ c = yychar();
+
+ if ( scanmode == SCAN_STRING )
+ {
+ /* If scanning for a string (an action's {} block), look for the closing
+ * brace, keeping track of nested braces so that only the brace matching
+ * the opening one terminates the string.
+ */
+
+ int nest = 1;
+
+ while ( ( c != EOF ) && ( b < buf + sizeof( buf ) ) )
+ {
+ if ( c == '{' )
+ ++nest;
+
+ if ( ( c == '}' ) && !--nest )
+ break;
+
+ *b++ = c;
+
+ c = yychar();
+
+ /* Turn trailing "\r\n" sequences into plain "\n" for Cygwin. */
+ if ( ( c == '\n' ) && ( b[ -1 ] == '\r' ) )
+ --b;
+ }
+
+ /* We ate the ending brace -- regurgitate it. */
+ if ( c != EOF )
+ yyprev();
+
+ /* Check for obvious errors. */
+ if ( b == buf + sizeof( buf ) )
+ {
+ yyerror( "action block too big" );
+ goto eof;
+ }
+
+ if ( nest )
+ {
+ yyerror( "unmatched {} in action block" );
+ goto eof;
+ }
+
+ *b = 0;
+ yylval.type = STRING;
+ yylval.string = object_new( buf );
+ yylval.file = incp->fname;
+ yylval.line = incp->line;
+ }
+ else
+ {
+ char * b = buf;
+ struct keyword * k;
+ int inquote = 0;
+ int notkeyword;
+
+ /* Eat white space. */
+ for ( ; ; )
+ {
+ /* Skip past white space. */
+ while ( ( c != EOF ) && isspace( c ) )
+ c = yychar();
+
+ /* Not a comment? */
+ if ( c != '#' )
+ break;
+
+ /* Swallow up comment line. */
+ while ( ( ( c = yychar() ) != EOF ) && ( c != '\n' ) ) ;
+ }
+
+ /* c now points to the first character of a token. */
+ if ( c == EOF )
+ goto eof;
+
+ yylval.file = incp->fname;
+ yylval.line = incp->line;
+
+ /* While scanning the word, disqualify it for (expensive) keyword lookup
+ * when we can: $anything, "anything", \anything
+ */
+ notkeyword = c == '$';
+
+ /* Look for white space to delimit word. "'s get stripped but preserve
+ * white space. \ protects next character.
+ */
+ while
+ (
+ ( c != EOF ) &&
+ ( b < buf + sizeof( buf ) ) &&
+ ( inquote || !isspace( c ) )
+ )
+ {
+ if ( c == '"' )
+ {
+ /* begin or end " */
+ inquote = !inquote;
+ notkeyword = 1;
+ }
+ else if ( c != '\\' )
+ {
+ /* normal char */
+ *b++ = c;
+ }
+ else if ( ( c = yychar() ) != EOF )
+ {
+ /* \c */
+ if (c == 'n')
+ c = '\n';
+ else if (c == 'r')
+ c = '\r';
+ else if (c == 't')
+ c = '\t';
+ *b++ = c;
+ notkeyword = 1;
+ }
+ else
+ {
+ /* \EOF */
+ break;
+ }
+
+ c = yychar();
+ }
+
+ /* Check obvious errors. */
+ if ( b == buf + sizeof( buf ) )
+ {
+ yyerror( "string too big" );
+ goto eof;
+ }
+
+ if ( inquote )
+ {
+ yyerror( "unmatched \" in string" );
+ goto eof;
+ }
+
+ /* We looked ahead a character - back up. */
+ if ( c != EOF )
+ yyprev();
+
+ /* Scan the keyword table. Skip the (expensive) lookup if the word is
+ * obviously not a keyword or if it starts with an alphabetic character
+ * while we are looking only for punctuation.
+ */
+
+ *b = 0;
+ yylval.type = ARG;
+
+ if ( !notkeyword && !( isalpha( *buf ) && ( scanmode == SCAN_PUNCT ) ) )
+ for ( k = keywords; k->word; ++k )
+ if ( ( *buf == *k->word ) && !strcmp( k->word, buf ) )
+ {
+ yylval.type = k->type;
+ yylval.keyword = k->word; /* used by symdump */
+ break;
+ }
+
+ if ( yylval.type == ARG )
+ yylval.string = object_new( buf );
+ }
+
+ if ( DEBUG_SCAN )
+ printf( "scan %s\n", symdump( &yylval ) );
+
+ return yylval.type;
+
+eof:
+ /* We do not reset yylval.file & yylval.line here so unexpected EOF error
+ * messages would include correct error location information.
+ */
+ yylval.type = EOF;
+ return yylval.type;
+}
+
+
+static char * symdump( YYSTYPE * s )
+{
+ static char buf[ BIGGEST_TOKEN + 20 ];
+ switch ( s->type )
+ {
+ case EOF : sprintf( buf, "EOF" ); break;
+ case 0 : sprintf( buf, "unknown symbol %s", object_str( s->string ) ); break;
+ case ARG : sprintf( buf, "argument %s" , object_str( s->string ) ); break;
+ case STRING: sprintf( buf, "string \"%s\"" , object_str( s->string ) ); break;
+ default : sprintf( buf, "keyword %s" , s->keyword ); break;
+ }
+ return buf;
+}
+
+
+/*
+ * yyinput_last_read_token() - get the file and line of the last read token,
+ * e.g. for grammar productions (epsilon transitions) that produce a parse
+ * node without reading a token of their own.
+ */
+
+void yyinput_last_read_token( OBJECT * * name, int * line )
+{
+ /* TODO: Consider whether and when we might want to report where the last
+ * read token ended, e.g. EOF errors inside string literals.
+ */
+ *name = yylval.file;
+ *line = yylval.line;
+}
diff --git a/tools/build/src/engine/scan.h b/tools/build/src/engine/scan.h
new file mode 100644
index 0000000000..745477fc1a
--- /dev/null
+++ b/tools/build/src/engine/scan.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * scan.h - the jam yacc scanner
+ *
+ * External functions:
+ * yyerror( char const * s ) - print a parsing error message.
+ * yyfparse( OBJECT * s ) - scan include file s.
+ * yylex() - parse the next token, returning its type.
+ * yymode() - adjust the lexicon of the scanner.
+ * yyparse() - declaration for the yacc parser.
+ * yyanyerrors() - indicate whether any parsing errors occurred.
+ *
+ * The yymode() function is for the parser to adjust the lexicon of the scanner.
+ * Aside from normal keyword scanning, there is a mode to handle action strings
+ * (look only for the closing }) and a mode to ignore most keywords when looking
+ * for a punctuation keyword. This allows non-punctuation keywords to be used in
+ * lists without quoting.
+ */
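+
+/* Illustrative sketch (editor's addition, not part of the original header):
+ * the parser switches the scanner mode around an action body and restores it
+ * afterwards, roughly like so:
+ *
+ *     yymode( SCAN_STRING );   // collect everything up to the matching '}'
+ *     ...                      // yylex() now returns a single STRING token
+ *     yymode( SCAN_NORMAL );   // back to ordinary keyword scanning
+ */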
+
+#include "lists.h"
+#include "object.h"
+#include "parse.h"
+
+
+/*
+ * YYSTYPE - value of a lexical token
+ */
+
+#define YYSTYPE YYSYMBOL
+
+typedef struct _YYSTYPE
+{
+ int type;
+ OBJECT * string;
+ PARSE * parse;
+ LIST * list;
+ int number;
+ OBJECT * file;
+ int line;
+ char const * keyword;
+} YYSTYPE;
+
+extern YYSTYPE yylval;
+
+void yymode( int n );
+void yyerror( char const * s );
+int yyanyerrors();
+void yyfparse( OBJECT * s );
+int yyline();
+int yylex();
+int yyparse();
+void yyinput_last_read_token( OBJECT * * name, int * line );
+
+#define SCAN_NORMAL 0 /* normal parsing */
+#define SCAN_STRING 1 /* look only for matching } */
+#define SCAN_PUNCT 2 /* only punctuation keywords */
diff --git a/tools/build/src/engine/search.c b/tools/build/src/engine/search.c
new file mode 100644
index 0000000000..b2beadaaa4
--- /dev/null
+++ b/tools/build/src/engine/search.c
@@ -0,0 +1,274 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#include "jam.h"
+#include "search.h"
+
+#include "compile.h"
+#include "filesys.h"
+#include "hash.h"
+#include "lists.h"
+#include "object.h"
+#include "pathsys.h"
+#include "strings.h"
+#include "timestamp.h"
+#include "variable.h"
+
+#include <string.h>
+
+
+typedef struct _binding
+{
+ OBJECT * binding;
+ OBJECT * target;
+} BINDING;
+
+static struct hash * explicit_bindings = 0;
+
+
+void call_bind_rule( OBJECT * target_, OBJECT * boundname_ )
+{
+ LIST * const bind_rule = var_get( root_module(), constant_BINDRULE );
+ if ( !list_empty( bind_rule ) )
+ {
+ OBJECT * target = object_copy( target_ );
+ OBJECT * boundname = object_copy( boundname_ );
+ if ( boundname && target )
+ {
+ /* Prepare the argument list. */
+ FRAME frame[ 1 ];
+ frame_init( frame );
+
+ /* First argument is the target name. */
+ lol_add( frame->args, list_new( target ) );
+
+ lol_add( frame->args, list_new( boundname ) );
+ if ( lol_get( frame->args, 1 ) )
+ {
+ OBJECT * rulename = list_front( bind_rule );
+ list_free( evaluate_rule( bindrule( rulename, root_module() ), rulename, frame ) );
+ }
+
+ /* Clean up */
+ frame_free( frame );
+ }
+ else
+ {
+ if ( boundname )
+ object_free( boundname );
+ if ( target )
+ object_free( target );
+ }
+ }
+}
+
+/* Records the binding of a target with an explicit LOCATE. */
+void set_explicit_binding( OBJECT * target, OBJECT * locate )
+{
+ OBJECT * boundname;
+ OBJECT * key;
+ PATHNAME f[ 1 ];
+ string buf[ 1 ];
+ int found;
+ BINDING * ba;
+
+ if ( !explicit_bindings )
+ explicit_bindings = hashinit( sizeof( BINDING ), "explicitly specified "
+ "locations" );
+
+ string_new( buf );
+
+ /* Parse the filename. */
+ path_parse( object_str( target ), f );
+
+ /* Ignore the grist. */
+ f->f_grist.ptr = 0;
+ f->f_grist.len = 0;
+
+ /* Root the target path at the given location. */
+ f->f_root.ptr = object_str( locate );
+ f->f_root.len = strlen( object_str( locate ) );
+
+ path_build( f, buf );
+ boundname = object_new( buf->value );
+ if ( DEBUG_SEARCH )
+ printf( "explicit locate %s: %s\n", object_str( target ), buf->value );
+ string_free( buf );
+ key = path_as_key( boundname );
+ object_free( boundname );
+
+ ba = (BINDING *)hash_insert( explicit_bindings, key, &found );
+ if ( !found )
+ {
+ ba->binding = key;
+ ba->target = target;
+ }
+ else
+ object_free( key );
+}
+
+/*
+ * search.c - find a target along $(SEARCH) or $(LOCATE).
+ *
+ * First, check if LOCATE is set. If so, use it to determine the location of
+ * target and return, regardless of whether anything exists at that location.
+ *
+ * Second, examine all directories in SEARCH. If the file exists there or there
+ * is another target with the same name already placed at this location via the
+ * LOCATE setting, stop and return the location. In case of a previous target,
+ * return its name via the 'another_target' argument.
+ *
+ * This behaviour allows handling dependencies on generated files.
+ *
+ * If the caller does not expect the target to be generated, 0 may be passed
+ * as 'another_target'.
+ */
+
+OBJECT * search( OBJECT * target, timestamp * const time,
+ OBJECT * * another_target, int const file )
+{
+ PATHNAME f[ 1 ];
+ LIST * varlist;
+ string buf[ 1 ];
+ int found = 0;
+ OBJECT * boundname = 0;
+
+ if ( another_target )
+ *another_target = 0;
+
+ if ( !explicit_bindings )
+ explicit_bindings = hashinit( sizeof( BINDING ), "explicitly specified "
+ "locations" );
+
+ string_new( buf );
+
+ /* Parse the filename. */
+ path_parse( object_str( target ), f );
+
+ f->f_grist.ptr = 0;
+ f->f_grist.len = 0;
+
+ varlist = var_get( root_module(), constant_LOCATE );
+ if ( !list_empty( varlist ) )
+ {
+ OBJECT * key;
+ f->f_root.ptr = object_str( list_front( varlist ) );
+ f->f_root.len = strlen( object_str( list_front( varlist ) ) );
+
+ path_build( f, buf );
+
+ if ( DEBUG_SEARCH )
+ printf( "locate %s: %s\n", object_str( target ), buf->value );
+
+ key = object_new( buf->value );
+ timestamp_from_path( time, key );
+ object_free( key );
+ found = 1;
+ }
+ else if ( varlist = var_get( root_module(), constant_SEARCH ),
+ !list_empty( varlist ) )
+ {
+ LISTITER iter = list_begin( varlist );
+ LISTITER const end = list_end( varlist );
+ for ( ; iter != end; iter = list_next( iter ) )
+ {
+ BINDING * ba;
+ file_info_t * ff;
+ OBJECT * key;
+ OBJECT * test_path;
+
+ f->f_root.ptr = object_str( list_item( iter ) );
+ f->f_root.len = strlen( object_str( list_item( iter ) ) );
+
+ string_truncate( buf, 0 );
+ path_build( f, buf );
+
+ if ( DEBUG_SEARCH )
+ printf( "search %s: %s\n", object_str( target ), buf->value );
+
+ test_path = object_new( buf->value );
+ key = path_as_key( test_path );
+ object_free( test_path );
+ ff = file_query( key );
+ timestamp_from_path( time, key );
+
+ if ( ( ba = (BINDING *)hash_find( explicit_bindings, key ) ) )
+ {
+ if ( DEBUG_SEARCH )
+ printf(" search %s: found explicitly located target %s\n",
+ object_str( target ), object_str( ba->target ) );
+ if ( another_target )
+ *another_target = ba->target;
+ found = 1;
+ object_free( key );
+ break;
+ }
+ else if ( ff )
+ {
+ if ( !file || ff->is_file )
+ {
+ found = 1;
+ object_free( key );
+ break;
+ }
+ }
+ object_free( key );
+ }
+ }
+
+ if ( !found )
+ {
+ /* Look for the obvious. */
+ /* This is a questionable move. Should we look in the obvious place if
+ * SEARCH is set?
+ */
+ OBJECT * key;
+
+ f->f_root.ptr = 0;
+ f->f_root.len = 0;
+
+ string_truncate( buf, 0 );
+ path_build( f, buf );
+
+ if ( DEBUG_SEARCH )
+ printf( "search %s: %s\n", object_str( target ), buf->value );
+
+ key = object_new( buf->value );
+ timestamp_from_path( time, key );
+ object_free( key );
+ }
+
+ boundname = object_new( buf->value );
+ string_free( buf );
+
+ /* Prepare a call to BINDRULE if the variable is set. */
+ call_bind_rule( target, boundname );
+
+ return boundname;
+}
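+
+/* Illustrative sketch (editor's addition, not part of the original sources):
+ * a typical caller binds a target name, records its timestamp and checks for
+ * an explicitly located target at the same spot; the names are made up:
+ *
+ *     OBJECT * target = object_new( "foo.cpp" );
+ *     timestamp t;
+ *     OBJECT * other = 0;
+ *     OBJECT * bound = search( target, &t, &other, 1 );   // must be a file
+ *     if ( other )
+ *         ;   // an explicitly LOCATEd target already claims this location
+ *     object_free( bound );
+ *     object_free( target );
+ */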
+
+
+static void free_binding( void * xbinding, void * data )
+{
+ object_free( ( (BINDING *)xbinding )->binding );
+}
+
+
+void search_done( void )
+{
+ if ( explicit_bindings )
+ {
+ hashenumerate( explicit_bindings, free_binding, 0 );
+ hashdone( explicit_bindings );
+ }
+}
diff --git a/tools/build/src/engine/search.h b/tools/build/src/engine/search.h
new file mode 100644
index 0000000000..7e74f79728
--- /dev/null
+++ b/tools/build/src/engine/search.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * search.h - find a target along $(SEARCH) or $(LOCATE)
+ */
+
+#ifndef SEARCH_SW20111118_H
+#define SEARCH_SW20111118_H
+
+#include "object.h"
+#include "timestamp.h"
+
+void set_explicit_binding( OBJECT * target, OBJECT * locate );
+OBJECT * search( OBJECT * target, timestamp * const time,
+ OBJECT * * another_target, int const file );
+void search_done( void );
+
+#endif
diff --git a/tools/build/src/engine/strings.c b/tools/build/src/engine/strings.c
new file mode 100644
index 0000000000..3d3e19b3e9
--- /dev/null
+++ b/tools/build/src/engine/strings.c
@@ -0,0 +1,223 @@
+/* Copyright David Abrahams 2004. Distributed under the Boost */
+/* Software License, Version 1.0. (See accompanying */
+/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
+
+#include "jam.h"
+#include "strings.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+#ifndef NDEBUG
+# define JAM_STRING_MAGIC ((char)0xcf)
+# define JAM_STRING_MAGIC_SIZE 4
+static void assert_invariants( string * self )
+{
+ int i;
+
+ if ( self->value == 0 )
+ {
+ assert( self->size == 0 );
+ assert( self->capacity == 0 );
+ assert( self->opt[ 0 ] == 0 );
+ return;
+ }
+
+ assert( self->size < self->capacity );
+ assert( ( self->capacity <= sizeof( self->opt ) ) == ( self->value == self->opt ) );
+ assert( self->value[ self->size ] == 0 );
+ /* String objects modified manually after construction to contain embedded
+ * '\0' characters are considered structurally valid.
+ */
+ assert( strlen( self->value ) <= self->size );
+
+ for ( i = 0; i < 4; ++i )
+ {
+ assert( self->magic[ i ] == JAM_STRING_MAGIC );
+ assert( self->value[ self->capacity + i ] == JAM_STRING_MAGIC );
+ }
+}
+#else
+# define JAM_STRING_MAGIC_SIZE 0
+# define assert_invariants(x) do {} while (0)
+#endif
+
+
+void string_new( string * s )
+{
+ s->value = s->opt;
+ s->size = 0;
+ s->capacity = sizeof( s->opt );
+ s->opt[ 0 ] = 0;
+#ifndef NDEBUG
+ memset( s->magic, JAM_STRING_MAGIC, sizeof( s->magic ) );
+#endif
+ assert_invariants( s );
+}
+
+
+void string_free( string * s )
+{
+ assert_invariants( s );
+ if ( s->value != s->opt )
+ BJAM_FREE( s->value );
+ string_new( s );
+}
+
+
+static void string_reserve_internal( string * self, size_t capacity )
+{
+ if ( self->value == self->opt )
+ {
+ self->value = (char *)BJAM_MALLOC_ATOMIC( capacity +
+ JAM_STRING_MAGIC_SIZE );
+ self->value[ 0 ] = 0;
+ strncat( self->value, self->opt, sizeof(self->opt) );
+ assert( strlen( self->value ) <= self->capacity && "Regression test" );
+ }
+ else
+ {
+ self->value = (char *)BJAM_REALLOC( self->value, capacity +
+ JAM_STRING_MAGIC_SIZE );
+ }
+#ifndef NDEBUG
+ memcpy( self->value + capacity, self->magic, JAM_STRING_MAGIC_SIZE );
+#endif
+ self->capacity = capacity;
+}
+
+
+void string_reserve( string * self, size_t capacity )
+{
+ assert_invariants( self );
+ if ( capacity <= self->capacity )
+ return;
+ string_reserve_internal( self, capacity );
+ assert_invariants( self );
+}
+
+
+static void extend_full( string * self, char const * start, char const * finish )
+{
+ size_t new_size = self->capacity + ( finish - start );
+ size_t new_capacity = self->capacity;
+ size_t old_size = self->capacity;
+ while ( new_capacity < new_size + 1)
+ new_capacity <<= 1;
+ string_reserve_internal( self, new_capacity );
+ memcpy( self->value + old_size, start, new_size - old_size );
+ self->value[ new_size ] = 0;
+ self->size = new_size;
+}
+
+static void maybe_reserve( string * self, size_t new_size )
+{
+ size_t capacity = self->capacity;
+ if ( capacity <= new_size )
+ {
+ size_t new_capacity = capacity;
+ while ( new_capacity <= new_size )
+ new_capacity <<= 1;
+ string_reserve_internal( self, new_capacity );
+ }
+}
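+
+/* Worked example (editor's addition, not part of the original sources):
+ * appending 100 bytes to a fresh string ( size 0, capacity 32 from the
+ * embedded opt buffer ) doubles the capacity 32 -> 64 -> 128 and then makes a
+ * single allocation of 128 bytes plus the debug magic tail.
+ */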
+
+
+void string_append( string * self, char const * rhs )
+{
+ size_t rhs_size = strlen( rhs );
+ size_t new_size = self->size + rhs_size;
+ assert_invariants( self );
+
+ maybe_reserve( self, new_size );
+
+ memcpy( self->value + self->size, rhs, rhs_size + 1 );
+ self->size = new_size;
+
+ assert_invariants( self );
+}
+
+
+void string_append_range( string * self, char const * start, char const * finish )
+{
+ size_t rhs_size = finish - start;
+ size_t new_size = self->size + rhs_size;
+ assert_invariants( self );
+
+ maybe_reserve( self, new_size );
+
+ memcpy( self->value + self->size, start, rhs_size );
+ self->size = new_size;
+ self->value[ new_size ] = 0;
+
+ assert_invariants( self );
+}
+
+
+void string_copy( string * s, char const * rhs )
+{
+ string_new( s );
+ string_append( s, rhs );
+}
+
+void string_truncate( string * self, size_t n )
+{
+ assert_invariants( self );
+ assert( n <= self->capacity );
+ self->value[ self->size = n ] = 0;
+ assert_invariants( self );
+}
+
+
+void string_pop_back( string * self )
+{
+ string_truncate( self, self->size - 1 );
+}
+
+
+void string_push_back( string * self, char x )
+{
+ string_append_range( self, &x, &x + 1 );
+}
+
+
+char string_back( string * self )
+{
+ assert_invariants( self );
+ return self->value[ self->size - 1 ];
+}
+
+
+#ifndef NDEBUG
+void string_unit_test()
+{
+ {
+ string s[ 1 ];
+ int i;
+ int const limit = sizeof( s->opt ) * 2 + 2;
+ string_new( s );
+ assert( s->value == s->opt );
+ for ( i = 0; i < limit; ++i )
+ {
+ string_push_back( s, (char)( i + 1 ) );
+ assert( s->size == i + 1 );
+ }
+ assert( s->size == limit );
+ assert( s->value != s->opt );
+ for ( i = 0; i < limit; ++i )
+ assert( s->value[ i ] == (char)( i + 1 ) );
+ string_free( s );
+ }
+
+ {
+ char * const original = " \n\t\v Foo \r\n\v \tBar\n\n\r\r\t\n\v\t \t";
+ string copy[ 1 ];
+ string_copy( copy, original );
+ assert( !strcmp( copy->value, original ) );
+ assert( copy->size == strlen( original ) );
+ string_free( copy );
+ }
+}
+#endif
diff --git a/tools/build/src/engine/strings.h b/tools/build/src/engine/strings.h
new file mode 100644
index 0000000000..749f287834
--- /dev/null
+++ b/tools/build/src/engine/strings.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2004. David Abrahams
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef STRINGS_DWA20011024_H
+#define STRINGS_DWA20011024_H
+
+#include <stddef.h>
+
+typedef struct string
+{
+ char * value;
+ unsigned long size;
+ unsigned long capacity;
+ char opt[ 32 ];
+#ifndef NDEBUG
+ char magic[ 4 ];
+#endif
+} string;
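+
+/* Illustrative sketch (editor's addition, not part of the original header):
+ * short values live in the embedded opt[] buffer and only spill to the heap
+ * once they outgrow it, so the usual pattern is stack allocation plus an
+ * explicit string_free():
+ *
+ *     string buf[ 1 ];
+ *     string_new( buf );
+ *     string_append( buf, "hello " );
+ *     string_append( buf, "world" );   // buf->value == "hello world"
+ *     string_free( buf );
+ */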
+
+void string_new( string * );
+void string_copy( string *, char const * );
+void string_free( string * );
+void string_append( string *, char const * );
+void string_append_range( string *, char const *, char const * );
+void string_push_back( string * s, char x );
+void string_reserve( string *, size_t );
+void string_truncate( string *, size_t );
+void string_pop_back( string * );
+char string_back( string * );
+void string_unit_test();
+
+#endif
diff --git a/tools/build/src/engine/subst.c b/tools/build/src/engine/subst.c
new file mode 100644
index 0000000000..a5fcee08cb
--- /dev/null
+++ b/tools/build/src/engine/subst.c
@@ -0,0 +1,116 @@
+#include "jam.h"
+#include "subst.h"
+
+#include "builtins.h"
+#include "frames.h"
+#include "hash.h"
+#include "lists.h"
+
+#include <stddef.h>
+
+
+typedef struct regex_entry
+{
+ OBJECT * pattern;
+ regexp * regex;
+} regex_entry;
+
+static struct hash * regex_hash;
+
+
+regexp * regex_compile( OBJECT * pattern )
+{
+ int found;
+ regex_entry * e ;
+
+ if ( !regex_hash )
+ regex_hash = hashinit( sizeof( regex_entry ), "regex" );
+
+ e = (regex_entry *)hash_insert( regex_hash, pattern, &found );
+ if ( !found )
+ {
+ e->pattern = object_copy( pattern );
+ e->regex = regcomp( (char *)pattern );
+ }
+
+ return e->regex;
+}
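+
+/* Illustrative sketch (editor's addition, not part of the original sources):
+ * compiled patterns are cached per pattern object, so repeated calls with the
+ * same pattern reuse one compiled regexp; the pattern text is made up:
+ *
+ *     OBJECT * pat = object_new( "([0-9]+)[.]([0-9]+)" );
+ *     regexp * re1 = regex_compile( pat );   // compiled and cached
+ *     regexp * re2 = regex_compile( pat );   // re2 == re1, served from cache
+ *     object_free( pat );
+ */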
+
+
+LIST * builtin_subst( FRAME * frame, int flags )
+{
+ LIST * result = L0;
+ LIST * const arg1 = lol_get( frame->args, 0 );
+ LISTITER iter = list_begin( arg1 );
+ LISTITER const end = list_end( arg1 );
+
+ if ( iter != end && list_next( iter ) != end && list_next( list_next( iter )
+ ) != end )
+ {
+ char const * const source = object_str( list_item( iter ) );
+ OBJECT * const pattern = list_item( list_next( iter ) );
+ regexp * const repat = regex_compile( pattern );
+
+ if ( regexec( repat, (char *)source) )
+ {
+ LISTITER subst = list_next( iter );
+
+ while ( ( subst = list_next( subst ) ) != end )
+ {
+#define BUFLEN 4096
+ char buf[ BUFLEN + 1 ];
+ char const * in = object_str( list_item( subst ) );
+ char * out = buf;
+
+ for ( ; *in && out < buf + BUFLEN; ++in )
+ {
+ if ( *in == '\\' || *in == '$' )
+ {
+ ++in;
+ if ( *in == 0 )
+ break;
+ if ( *in >= '0' && *in <= '9' )
+ {
+ unsigned int const n = *in - '0';
+ size_t const srclen = repat->endp[ n ] -
+ repat->startp[ n ];
+ size_t const remaining = buf + BUFLEN - out;
+ size_t const len = srclen < remaining
+ ? srclen
+ : remaining;
+ memcpy( out, repat->startp[ n ], len );
+ out += len;
+ continue;
+ }
+ /* fall through and copy the next character */
+ }
+ *out++ = *in;
+ }
+ *out = 0;
+
+ result = list_push_back( result, object_new( buf ) );
+#undef BUFLEN
+ }
+ }
+ }
+
+ return result;
+}
+
+
+static void free_regex( void * xregex, void * data )
+{
+ regex_entry * const regex = (regex_entry *)xregex;
+ object_free( regex->pattern );
+ BJAM_FREE( regex->regex );
+}
+
+
+void regex_done()
+{
+ if ( regex_hash )
+ {
+ hashenumerate( regex_hash, free_regex, (void *)0 );
+ hashdone( regex_hash );
+ }
+}
diff --git a/tools/build/src/engine/subst.h b/tools/build/src/engine/subst.h
new file mode 100644
index 0000000000..7dc09a614a
--- /dev/null
+++ b/tools/build/src/engine/subst.h
@@ -0,0 +1,14 @@
+/* Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+#ifndef SUBST_JG20120722_H
+#define SUBST_JG20120722_H
+
+#include "object.h"
+#include "regexp.h"
+
+regexp * regex_compile( OBJECT * pattern );
+
+#endif
diff --git a/tools/build/src/engine/timestamp.c b/tools/build/src/engine/timestamp.c
new file mode 100644
index 0000000000..0d016985e0
--- /dev/null
+++ b/tools/build/src/engine/timestamp.c
@@ -0,0 +1,262 @@
+/*
+ * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * timestamp.c - get the timestamp of a file or archive member
+ *
+ * External routines:
+ * timestamp_from_path() - return timestamp for a path, if present
+ * timestamp_done() - free timestamp tables
+ *
+ * Internal routines:
+ * time_enter() - internal worker callback for scanning archives &
+ * directories
+ * free_timestamps() - worker function for freeing timestamp table contents
+ */
+
+#include "jam.h"
+#include "timestamp.h"
+
+#include "filesys.h"
+#include "hash.h"
+#include "object.h"
+#include "pathsys.h"
+#include "strings.h"
+
+
+/*
+ * BINDING - all known files
+ */
+
+typedef struct _binding
+{
+ OBJECT * name;
+ short flags;
+
+#define BIND_SCANNED 0x01 /* if directory or arch, has been scanned */
+
+ short progress;
+
+#define BIND_INIT 0 /* never seen */
+#define BIND_NOENTRY 1 /* timestamp requested but file never found */
+#define BIND_SPOTTED 2 /* file found but not timed yet */
+#define BIND_MISSING 3 /* file found but can not get timestamp */
+#define BIND_FOUND 4 /* file found and time stamped */
+
+ /* update time - cleared if there is nothing to bind */
+ timestamp time;
+} BINDING;
+
+static struct hash * bindhash = 0;
+
+static void time_enter( void *, OBJECT *, int const found,
+ timestamp const * const );
+
+static char * time_progress[] =
+{
+ "INIT",
+ "NOENTRY",
+ "SPOTTED",
+ "MISSING",
+ "FOUND"
+};
+
+
+#ifdef OS_NT
+/*
+ * timestamp_from_filetime() - Windows FILETIME --> timestamp conversion
+ *
+ * Lifted shamelessly from the CPython implementation.
+ */
+
+void timestamp_from_filetime( timestamp * const t, FILETIME const * const ft )
+{
+ /* Seconds between 1.1.1601 and 1.1.1970 */
+ static __int64 const secs_between_epochs = 11644473600;
+
+ /* We can not simply cast and dereference a FILETIME, since it might not be
+ * aligned properly. __int64 type variables are expected to be aligned to an
+ * 8 byte boundary while FILETIME structures may be aligned to any 4 byte
+ * boundary. Using an incorrectly aligned __int64 variable may cause a
+ * performance penalty on some platforms or even exceptions on others
+ * (documented on MSDN).
+ */
+ __int64 in;
+ memcpy( &in, ft, sizeof( in ) );
+
+ /* FILETIME resolution: 100ns. */
+ timestamp_init( t, (time_t)( ( in / 10000000 ) - secs_between_epochs ),
+ (int)( in % 10000000 ) * 100 );
+}
+#endif /* OS_NT */
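+
+/* Worked example (editor's addition, not part of the original sources): a
+ * FILETIME counts 100ns units since 1601-01-01, so the raw value
+ * 116444736000000000 corresponds to 1970-01-01 00:00:00 UTC:
+ *
+ *     116444736000000000 / 10000000 = 11644473600 seconds since 1601
+ *     11644473600 - secs_between_epochs = 0 seconds since the Unix epoch
+ *     ( 116444736000000000 % 10000000 ) * 100 = 0 nanoseconds
+ */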
+
+
+void timestamp_clear( timestamp * const time )
+{
+ time->secs = time->nsecs = 0;
+}
+
+
+int timestamp_cmp( timestamp const * const lhs, timestamp const * const rhs )
+{
+ return lhs->secs == rhs->secs
+ ? lhs->nsecs - rhs->nsecs
+ : lhs->secs - rhs->secs;
+}
+
+
+void timestamp_copy( timestamp * const target, timestamp const * const source )
+{
+ target->secs = source->secs;
+ target->nsecs = source->nsecs;
+}
+
+
+void timestamp_current( timestamp * const t )
+{
+#ifdef OS_NT
+ /* GetSystemTimeAsFileTime()'s resolution seems to be about 15 ms on Windows
+ * XP and under a millisecond on Windows 7.
+ */
+ FILETIME ft;
+ GetSystemTimeAsFileTime( &ft );
+ timestamp_from_filetime( t, &ft );
+#else /* OS_NT */
+ timestamp_init( t, time( 0 ), 0 );
+#endif /* OS_NT */
+}
+
+
+int timestamp_empty( timestamp const * const time )
+{
+ return !time->secs && !time->nsecs;
+}
+
+
+/*
+ * timestamp_from_path() - return timestamp for a path, if present
+ */
+
+void timestamp_from_path( timestamp * const time, OBJECT * const path )
+{
+ PROFILE_ENTER( timestamp );
+
+ if ( file_time( path, time ) < 0 )
+ timestamp_clear( time );
+
+ PROFILE_EXIT( timestamp );
+}
+
+
+void timestamp_init( timestamp * const time, time_t const secs, int const nsecs
+ )
+{
+ time->secs = secs;
+ time->nsecs = nsecs;
+}
+
+
+void timestamp_max( timestamp * const max, timestamp const * const lhs,
+ timestamp const * const rhs )
+{
+ if ( timestamp_cmp( lhs, rhs ) > 0 )
+ timestamp_copy( max, lhs );
+ else
+ timestamp_copy( max, rhs );
+}
+
+
+static char const * timestamp_formatstr( timestamp const * const time,
+ char const * const format )
+{
+ static char result1[ 500 ];
+ static char result2[ 500 ];
+ strftime( result1, sizeof( result1 ) / sizeof( *result1 ), format, gmtime(
+ &time->secs ) );
+ sprintf( result2, result1, time->nsecs );
+ return result2;
+}
+
+
+char const * timestamp_str( timestamp const * const time )
+{
+ return timestamp_formatstr( time, "%Y-%m-%d %H:%M:%S.%%09d +0000" );
+}
+
+
+char const * timestamp_timestr( timestamp const * const time )
+{
+ return timestamp_formatstr( time, "%H:%M:%S.%%09d" );
+}
+
+
+/*
+ * time_enter() - internal worker callback for scanning archives & directories
+ */
+
+static void time_enter( void * closure, OBJECT * target, int const found,
+ timestamp const * const time )
+{
+ int item_found;
+ BINDING * b;
+ struct hash * const bindhash = (struct hash *)closure;
+
+ target = path_as_key( target );
+
+ b = (BINDING *)hash_insert( bindhash, target, &item_found );
+ if ( !item_found )
+ {
+ b->name = object_copy( target );
+ b->flags = 0;
+ }
+
+ timestamp_copy( &b->time, time );
+ b->progress = found ? BIND_FOUND : BIND_SPOTTED;
+
+ if ( DEBUG_BINDSCAN )
+ printf( "time ( %s ) : %s\n", object_str( target ), time_progress[
+ b->progress ] );
+
+ object_free( target );
+}
+
+
+/*
+ * free_timestamps() - worker function for freeing timestamp table contents
+ */
+
+static void free_timestamps( void * xbinding, void * data )
+{
+ object_free( ( (BINDING *)xbinding )->name );
+}
+
+
+/*
+ * timestamp_done() - free timestamp tables
+ */
+
+void timestamp_done()
+{
+ if ( bindhash )
+ {
+ hashenumerate( bindhash, free_timestamps, 0 );
+ hashdone( bindhash );
+ }
+}
diff --git a/tools/build/src/engine/timestamp.h b/tools/build/src/engine/timestamp.h
new file mode 100644
index 0000000000..ecedb5f925
--- /dev/null
+++ b/tools/build/src/engine/timestamp.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 1993, 1995 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * timestamp.h - get the timestamp of a file or archive member
+ */
+
+#ifndef TIMESTAMP_H_SW_2011_11_18
+#define TIMESTAMP_H_SW_2011_11_18
+
+#include "object.h"
+
+#ifdef OS_NT
+# include <windows.h>
+#endif
+
+#include <time.h>
+
+typedef struct timestamp
+{
+ time_t secs;
+ int nsecs;
+} timestamp;
+
+void timestamp_clear( timestamp * const );
+int timestamp_cmp( timestamp const * const lhs, timestamp const * const rhs );
+void timestamp_copy( timestamp * const target, timestamp const * const source );
+void timestamp_current( timestamp * const );
+int timestamp_empty( timestamp const * const );
+void timestamp_from_path( timestamp * const, OBJECT * const path );
+void timestamp_init( timestamp * const, time_t const secs, int const nsecs );
+void timestamp_max( timestamp * const max, timestamp const * const lhs,
+ timestamp const * const rhs );
+char const * timestamp_str( timestamp const * const );
+char const * timestamp_timestr( timestamp const * const );
+
+#ifdef OS_NT
+void timestamp_from_filetime( timestamp * const, FILETIME const * const );
+#endif
+
+void timestamp_done();
+
+#endif
diff --git a/tools/build/src/engine/variable.c b/tools/build/src/engine/variable.c
new file mode 100644
index 0000000000..2c292fbc8b
--- /dev/null
+++ b/tools/build/src/engine/variable.c
@@ -0,0 +1,345 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/* This file is ALSO:
+ * Copyright 2001-2004 David Abrahams.
+ * Copyright 2005 Reece H. Dunn.
+ * Copyright 2005 Rene Rivera.
+ * Distributed under the Boost Software License, Version 1.0.
+ * (See accompanying file LICENSE_1_0.txt or copy at
+ * http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+/*
+ * variable.c - handle Jam multi-element variables.
+ *
+ * External routines:
+ *
+ * var_defines() - load a bunch of variable=value settings
+ * var_get() - get value of a user defined symbol
+ * var_set() - set a variable in jam's user defined symbol table.
+ * var_swap() - swap a variable's value with the given one
+ * var_done() - free variable tables
+ *
+ * Internal routines:
+ *
+ * var_enter() - make new var symbol table entry, returning var ptr
+ * var_dump() - dump a variable to stdout
+ */
+
+#include "jam.h"
+#include "variable.h"
+
+#include "filesys.h"
+#include "hash.h"
+#include "modules.h"
+#include "parse.h"
+#include "pathsys.h"
+#include "strings.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+
+
+/*
+ * VARIABLE - a user defined multi-value variable
+ */
+
+typedef struct _variable VARIABLE ;
+
+struct _variable
+{
+ OBJECT * symbol;
+ LIST * value;
+};
+
+static LIST * * var_enter( struct module_t *, OBJECT * symbol );
+static void var_dump( OBJECT * symbol, LIST * value, char * what );
+
+
+/*
+ * var_defines() - load a bunch of variable=value settings
+ *
+ * If preprocess is false, take the value verbatim.
+ *
+ * Otherwise, if the variable value is enclosed in quotes, strip the quotes.
+ * Otherwise, if variable name ends in PATH, split value at :'s.
+ * Otherwise, split the value at blanks.
+ */
+
+void var_defines( struct module_t * module, char * const * e, int preprocess )
+{
+ string buf[ 1 ];
+
+ string_new( buf );
+
+ for ( ; *e; ++e )
+ {
+ char * val;
+
+ if ( ( val = strchr( *e, '=' ) )
+#if defined( OS_MAC )
+ /* On the mac (MPW), the var=val is actually var\0val */
+ /* Think different. */
+ || ( val = *e + strlen( *e ) )
+#endif
+ )
+ {
+ LIST * l = L0;
+ size_t const len = strlen( val + 1 );
+ int const quoted = ( val[ 1 ] == '"' ) && ( val[ len ] == '"' ) &&
+ ( len > 1 );
+
+ if ( quoted && preprocess )
+ {
+ string_append_range( buf, val + 2, val + len );
+ l = list_push_back( l, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+ else
+ {
+ char * p;
+ char * pp;
+ char split =
+#if defined( OPT_NO_EXTERNAL_VARIABLE_SPLIT )
+ '\0'
+#elif defined( OS_MAC )
+ ','
+#else
+ ' '
+#endif
+ ;
+
+ /* Split *PATH at :'s, not spaces. */
+ if ( val - 4 >= *e )
+ {
+ if ( !strncmp( val - 4, "PATH", 4 ) ||
+ !strncmp( val - 4, "Path", 4 ) ||
+ !strncmp( val - 4, "path", 4 ) )
+ split = SPLITPATH;
+ }
+
+ /* Do the split. */
+ for
+ (
+ pp = val + 1;
+ preprocess && ( ( p = strchr( pp, split ) ) != 0 );
+ pp = p + 1
+ )
+ {
+ string_append_range( buf, pp, p );
+ l = list_push_back( l, object_new( buf->value ) );
+ string_truncate( buf, 0 );
+ }
+
+ l = list_push_back( l, object_new( pp ) );
+ }
+
+ /* Get name. */
+ string_append_range( buf, *e, val );
+ {
+ OBJECT * const varname = object_new( buf->value );
+ var_set( module, varname, l, VAR_SET );
+ object_free( varname );
+ }
+ string_truncate( buf, 0 );
+ }
+ }
+ string_free( buf );
+}
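+
+/* Illustrative sketch (editor's addition, not part of the original sources):
+ * with preprocessing enabled, values are split as described above; the
+ * variable names are made up and SPLITPATH is ':' on Unix-like systems:
+ *
+ *     char * env[] = { "CFLAGS=-O2 -g", "MYPATH=/usr/bin:/bin", 0 };
+ *     var_defines( root_module(), env, 1 );
+ *     // CFLAGS -> [ "-O2", "-g" ]          (split at blanks)
+ *     // MYPATH -> [ "/usr/bin", "/bin" ]   (name ends in PATH, split at ':')
+ */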
+
+
+/* Last returned variable value saved so we may clear it in var_done(). */
+static LIST * saved_var = L0;
+
+
+/*
+ * var_get() - get value of a user defined symbol
+ *
+ * Returns NULL if symbol unset.
+ */
+
+LIST * var_get( struct module_t * module, OBJECT * symbol )
+{
+ LIST * result = L0;
+#ifdef OPT_AT_FILES
+ /* Some "fixed" variables... */
+ if ( object_equal( symbol, constant_TMPDIR ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( object_new( path_tmpdir()->value ) );
+ }
+ else if ( object_equal( symbol, constant_TMPNAME ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( path_tmpnam() );
+ }
+ else if ( object_equal( symbol, constant_TMPFILE ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( path_tmpfile() );
+ }
+ else if ( object_equal( symbol, constant_STDOUT ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( object_copy( constant_STDOUT ) );
+ }
+ else if ( object_equal( symbol, constant_STDERR ) )
+ {
+ list_free( saved_var );
+ result = saved_var = list_new( object_copy( constant_STDERR ) );
+ }
+ else
+#endif
+ {
+ VARIABLE * v;
+ int n;
+
+ if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 )
+ {
+ if ( DEBUG_VARGET )
+ var_dump( symbol, module->fixed_variables[ n ], "get" );
+ result = module->fixed_variables[ n ];
+ }
+ else if ( module->variables && ( v = (VARIABLE *)hash_find(
+ module->variables, symbol ) ) )
+ {
+ if ( DEBUG_VARGET )
+ var_dump( v->symbol, v->value, "get" );
+ result = v->value;
+ }
+ }
+ return result;
+}
+
+
+LIST * var_get_and_clear_raw( module_t * module, OBJECT * symbol )
+{
+ LIST * result = L0;
+ VARIABLE * v;
+
+ if ( module->variables && ( v = (VARIABLE *)hash_find( module->variables,
+ symbol ) ) )
+ {
+ result = v->value;
+ v->value = L0;
+ }
+
+ return result;
+}
+
+
+/*
+ * var_set() - set a variable in Jam's user defined symbol table
+ *
+ * 'flag' controls the relationship between new and old values of the variable:
+ * SET replaces the old with the new; APPEND appends the new to the old; DEFAULT
+ * only uses the new if the variable was previously unset.
+ *
+ * Copies symbol. Takes ownership of value.
+ */
+
+void var_set( struct module_t * module, OBJECT * symbol, LIST * value, int flag
+ )
+{
+ LIST * * v = var_enter( module, symbol );
+
+ if ( DEBUG_VARSET )
+ var_dump( symbol, value, "set" );
+
+ switch ( flag )
+ {
+ case VAR_SET: /* Replace value */
+ list_free( *v );
+ *v = value;
+ break;
+
+ case VAR_APPEND: /* Append value */
+ *v = list_append( *v, value );
+ break;
+
+ case VAR_DEFAULT: /* Set only if unset */
+ if ( list_empty( *v ) )
+ *v = value;
+ else
+ list_free( value );
+ break;
+ }
+}
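+
+/* Illustrative sketch (editor's addition, not part of the original sources):
+ * the three flags combine as follows; the variable name is made up and m is
+ * some module:
+ *
+ *     OBJECT * sym = object_new( "FLAGS" );
+ *     var_set( m, sym, list_new( object_new( "a" ) ), VAR_SET );      // [ a ]
+ *     var_set( m, sym, list_new( object_new( "b" ) ), VAR_APPEND );   // [ a b ]
+ *     var_set( m, sym, list_new( object_new( "c" ) ), VAR_DEFAULT );  // [ a b ]
+ *     object_free( sym );
+ */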
+
+
+/*
+ * var_swap() - swap a variable's value with the given one
+ */
+
+LIST * var_swap( struct module_t * module, OBJECT * symbol, LIST * value )
+{
+ LIST * * v = var_enter( module, symbol );
+ LIST * oldvalue = *v;
+ if ( DEBUG_VARSET )
+ var_dump( symbol, value, "set" );
+ *v = value;
+ return oldvalue;
+}
+
+
+/*
+ * var_enter() - make new var symbol table entry, returning var ptr
+ */
+
+static LIST * * var_enter( struct module_t * module, OBJECT * symbol )
+{
+ int found;
+ VARIABLE * v;
+ int n;
+
+ if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 )
+ return &module->fixed_variables[ n ];
+
+ if ( !module->variables )
+ module->variables = hashinit( sizeof( VARIABLE ), "variables" );
+
+ v = (VARIABLE *)hash_insert( module->variables, symbol, &found );
+ if ( !found )
+ {
+ v->symbol = object_copy( symbol );
+ v->value = L0;
+ }
+
+ return &v->value;
+}
+
+
+/*
+ * var_dump() - dump a variable to stdout
+ */
+
+static void var_dump( OBJECT * symbol, LIST * value, char * what )
+{
+ printf( "%s %s = ", what, object_str( symbol ) );
+ list_print( value );
+ printf( "\n" );
+}
+
+
+/*
+ * var_done() - free variable tables
+ */
+
+static void delete_var_( void * xvar, void * data )
+{
+ VARIABLE * const v = (VARIABLE *)xvar;
+ object_free( v->symbol );
+ list_free( v->value );
+}
+
+void var_done( struct module_t * module )
+{
+ list_free( saved_var );
+ saved_var = L0;
+ hashenumerate( module->variables, delete_var_, 0 );
+ hash_free( module->variables );
+}
diff --git a/tools/build/src/engine/variable.h b/tools/build/src/engine/variable.h
new file mode 100644
index 0000000000..ddb452bc1e
--- /dev/null
+++ b/tools/build/src/engine/variable.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright 1993, 2000 Christopher Seiwald.
+ *
+ * This file is part of Jam - see jam.c for Copyright information.
+ */
+
+/*
+ * variable.h - handle jam multi-element variables
+ */
+
+#ifndef VARIABLE_SW20111119_H
+#define VARIABLE_SW20111119_H
+
+#include "lists.h"
+#include "object.h"
+
+
+struct module_t;
+
+void var_defines( struct module_t *, char * const * e, int preprocess );
+LIST * var_get( struct module_t *, OBJECT * symbol );
+void var_set( struct module_t *, OBJECT * symbol, LIST * value, int flag );
+LIST * var_swap( struct module_t *, OBJECT * symbol, LIST * value );
+void var_done( struct module_t * );
+
+/*
+ * Defines for var_set().
+ */
+
+#define VAR_SET 0 /* override previous value */
+#define VAR_APPEND 1 /* append to previous value */
+#define VAR_DEFAULT 2 /* set only if no previous value */
+
+#endif
diff --git a/tools/build/v2/engine/w32_getreg.c b/tools/build/src/engine/w32_getreg.c
index dd2d0fc70c..dd2d0fc70c 100644
--- a/tools/build/v2/engine/w32_getreg.c
+++ b/tools/build/src/engine/w32_getreg.c
diff --git a/tools/build/v2/engine/yyacc.c b/tools/build/src/engine/yyacc.c
index b5efc96b55..b5efc96b55 100644
--- a/tools/build/v2/engine/yyacc.c
+++ b/tools/build/src/engine/yyacc.c
diff --git a/tools/build/v2/exceptions.py b/tools/build/src/exceptions.py
index 5750abfe3f..5750abfe3f 100644
--- a/tools/build/v2/exceptions.py
+++ b/tools/build/src/exceptions.py
diff --git a/tools/build/v2/kernel/boost-build.jam b/tools/build/src/kernel/boost-build.jam
index 377f6ec023..377f6ec023 100644..100755
--- a/tools/build/v2/kernel/boost-build.jam
+++ b/tools/build/src/kernel/boost-build.jam
diff --git a/tools/build/src/kernel/bootstrap.jam b/tools/build/src/kernel/bootstrap.jam
new file mode 100644
index 0000000000..c4320dc299
--- /dev/null
+++ b/tools/build/src/kernel/bootstrap.jam
@@ -0,0 +1,266 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005, 2006 Rene Rivera
+# Copyright 2003, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# First of all, check the jam version.
+if $(JAM_VERSION:J="") < 030112
+{
+ ECHO "error: Boost.Jam version 3.1.12 or later required" ;
+ EXIT ;
+}
+
+local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
+for local r in $(required-rules)
+{
+ if ! $(r) in [ RULENAMES ]
+ {
+ ECHO "error: builtin rule '$(r)' is not present" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+local native = regex transform 2 ;
+while $(native)
+{
+ if ! [ HAS_NATIVE_RULE $(native[1]) : $(native[2]) : $(native[3]) ]
+ {
+ ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
+ ECHO "error: or interface version of that rule is too low" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+ native = $(native[4-]) ;
+}
+
+
+# Check that the builtin .ENVIRON module is present. We do not have a builtin to
+# check that a module is present, so we assume that the PATH environment
+# variable is always set and verify that the .ENVIRON module has a non-empty
+# value of that variable.
+module .ENVIRON
+{
+ local p = $(PATH) $(Path) $(path) ;
+ if ! $(p)
+ {
+ ECHO "error: no builtin module .ENVIRON is found" ;
+ ECHO "error: your version of bjam is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Check that @() functionality is present. Similarly to modules, we do not have
+# a way to test this directly. Instead we check that $(TMPNAME) functionality is
+# present which was added at roughly the same time (more precisely, it was added
+# just before).
+{
+ if ! $(TMPNAME)
+ {
+ ECHO "error: no @() functionality found" ;
+ ECHO "error: your version of b2 is likely out of date" ;
+ ECHO "error: please get a fresh version from SVN." ;
+ EXIT ;
+ }
+}
+
+# Make sure that the \n escape is available.
+if "\n" = "n"
+{
+ if $(OS) = CYGWIN
+ {
+ ECHO "warning: escape sequences are not supported" ;
+ ECHO "warning: this will cause major misbehaviour on cygwin" ;
+ ECHO "warning: your version of b2 is likely out of date" ;
+ ECHO "warning: please get a fresh version from SVN." ;
+ }
+}
+
+
+# Bootstrap the module system. Then bring the import rule into the global module.
+#
+SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
+module modules { include <module@>modules.jam ; }
+IMPORT modules : import : : import ;
+
+{
+ # Add module subdirectories to the BOOST_BUILD_PATH, which allows us to make
+ # incremental refactoring steps by moving modules to appropriate
+ # subdirectories, thereby achieving some physical separation of different
+ # layers without changing all of our code to specify subdirectories in
+ # import statements or use an extra level of qualification on imported
+ # names.
+
+ local subdirs =
+ kernel # only the most-intrinsic modules: modules, errors
+ util # low-level substrate: string/number handling, etc.
+ build # essential elements of the build system architecture
+ tools # toolsets for handling specific build jobs and targets.
+ contrib # user contributed (unreviewed) modules
+ . # build-system.jam lives here
+ ;
+ local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
+ BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
+
+ modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
+
+ modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
+}
+
+# Reload the modules, to clean up things. The modules module can tolerate being
+# imported twice.
+#
+import modules ;
+
+# Process option plugins first to allow them to prevent loading the rest of the
+# build system.
+#
+import option ;
+local dont-build = [ option.process ] ;
+
+# Should we skip building, i.e. loading the build system, according to the
+# options processed?
+#
+if ! $(dont-build)
+{
+ if ! --python in $(ARGV)
+ {
+ # Allow users to override the build system file from the command-line
+ # (mostly for testing).
+ local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
+ build-system ?= build-system ;
+
+ # Use last element in case of multiple command-line options.
+ import $(build-system[-1]) ;
+ }
+ else
+ {
+ ECHO "Boost.Build V2 Python port (experimental)" ;
+
+ # Define additional interface exposed to Python code. Python code will
+ # also have access to select bjam builtins in the 'bjam' module, but
+ # some things are easier to define outside C.
+ module python_interface
+ {
+ rule load ( module-name : location )
+ {
+ USER_MODULE $(module-name) ;
+ # Make all rules in the loaded module available in the global
+ # namespace, so that we do not have to bother specifying the
+ # "correct" module when calling from Python.
+ module $(module-name)
+ {
+ __name__ = $(1) ;
+ include $(2) ;
+ local rules = [ RULENAMES $(1) ] ;
+ IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
+ }
+ }
+
+ rule peek ( module-name ? : variables + )
+ {
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+ }
+
+ rule set-variable ( module-name : name : value * )
+ {
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+ }
+
+ rule set-top-level-targets ( targets * )
+ {
+ DEPENDS all : $(targets) ;
+ }
+
+ rule call-in-module ( m : rulename : * )
+ {
+ module $(m)
+ {
+ return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
+ : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
+ : $(17) : $(18) : $(19) ] ;
+ }
+ }
+
+
+ rule set-update-action ( action : targets * : sources * :
+ properties * )
+ {
+ $(action) $(targets) : $(sources) : $(properties) ;
+ }
+
+ rule set-update-action-in-module ( m : action : targets * :
+ sources * : properties * )
+ {
+ module $(m)
+ {
+ $(2) $(3) : $(4) : $(5) ;
+ }
+ }
+
+ rule set-target-variable ( targets + : variable : value * : append ?
+ )
+ {
+ if $(append)
+ {
+ $(variable) on $(targets) += $(value) ;
+ }
+ else
+ {
+ $(variable) on $(targets) = $(value) ;
+ }
+ }
+
+ rule get-target-variable ( targets + : variable )
+ {
+ return [ on $(targets) return $($(variable)) ] ;
+ }
+
+ rule import-rules-from-parent ( parent-module : this-module :
+ user-rules * )
+ {
+ IMPORT $(parent-module) : $(user-rules) : $(this-module) :
+ $(user-rules) ;
+ EXPORT $(this-module) : $(user-rules) ;
+ }
+
+ rule mark-included ( targets * : includes * )
+ {
+ NOCARE $(includes) ;
+ INCLUDES $(targets) : $(includes) ;
+ ISFILE $(includes) ;
+ }
+ }
+
+ PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
+ modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
+
+ module PyBB
+ {
+ local ok = [ bootstrap $(root) ] ;
+ if ! $(ok)
+ {
+ EXIT ;
+ }
+ }
+
+
+ #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
+
+ #module PyBB
+ #{
+ # main ;
+ #}
+ }
+}
diff --git a/tools/build/v2/kernel/bootstrap.py b/tools/build/src/kernel/bootstrap.py
index 2e8dd37b7b..2e8dd37b7b 100644
--- a/tools/build/v2/kernel/bootstrap.py
+++ b/tools/build/src/kernel/bootstrap.py
diff --git a/tools/build/src/kernel/class.jam b/tools/build/src/kernel/class.jam
new file mode 100644
index 0000000000..e48ab6ddbe
--- /dev/null
+++ b/tools/build/src/kernel/class.jam
@@ -0,0 +1,420 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2005 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Polymorphic class system built on top of core Jam facilities.
+#
+# Classes are defined by 'class' keywords:
+#
+# class myclass
+# {
+# rule __init__ ( arg1 ) # constructor
+# {
+# self.attribute = $(arg1) ;
+# }
+#
+# rule method1 ( ) # method
+# {
+# return [ method2 ] ;
+# }
+#
+# rule method2 ( ) # method
+# {
+# return $(self.attribute) ;
+# }
+# }
+#
+# The __init__ rule is the constructor, and sets member variables.
+#
+# New instances are created by invoking [ new <class> <args...> ]:
+#
+# local x = [ new myclass foo ] ; # x is a new myclass object
+# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
+#
+# Derived classes are created by mentioning base classes in the declaration:
+#
+# class derived : myclass
+# {
+# rule __init__ ( arg )
+# {
+# myclass.__init__ $(arg) ; # call base __init__
+#
+# }
+#
+# rule method2 ( ) # method override
+# {
+# return $(self.attribute)XXX ;
+# }
+# }
+#
+# All methods operate virtually, replacing behavior in the base classes. For
+# example:
+#
+# local y = [ new derived foo ] ; # y is a new derived object
+#   assert.result fooXXX : [ $(y).method1 ] ;   # $(y).method1 returns "fooXXX"
+#
+# Each class instance is its own core Jam module. All instance attributes and
+# methods are accessible without additional qualification from within the class
+# instance. All rules imported in the class declaration, or visible in base
+# classes, are also visible. Base methods are available in qualified form:
+# base-name.method-name. By convention, attribute names are prefixed with
+# "self.".
+
+import modules ;
+import numbers ;
+
+
+rule xinit ( instance : class )
+{
+ module $(instance)
+ {
+ __class__ = $(2) ;
+ __name__ = $(1) ;
+ }
+}
+
+
+rule new ( class args * : * )
+{
+ .next-instance ?= 1 ;
+ local id = object($(class))@$(.next-instance) ;
+
+ INSTANCE $(id) : class@$(class) ;
+ xinit $(id) : $(class) ;
+ IMPORT_MODULE $(id) ;
+ $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) :
+ $(18) : $(19) ;
+
+ # Bump the next unique object name.
+ .next-instance = [ numbers.increment $(.next-instance) ] ;
+
+ # Return the name of the new instance.
+ return $(id) ;
+}
+
+
+rule bases ( class )
+{
+ module class@$(class)
+ {
+ return $(__bases__) ;
+ }
+}
+
+
+rule is-derived ( class : bases + )
+{
+ local stack = $(class) ;
+ local visited found ;
+ while ! $(found) && $(stack)
+ {
+ local top = $(stack[1]) ;
+ stack = $(stack[2-]) ;
+ if ! ( $(top) in $(visited) )
+ {
+ visited += $(top) ;
+ stack += [ bases $(top) ] ;
+
+ if $(bases) in $(visited)
+ {
+ found = true ;
+ }
+ }
+ }
+ return $(found) ;
+}
+
+
+# Returns true if the 'value' is a class instance.
+#
+rule is-instance ( value )
+{
+ return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
+}
+
+
+# Check if the given value is of the given type.
+#
+rule is-a (
+ instance # The value to check.
+ : type # The type to test for.
+)
+{
+ if [ is-instance $(instance) ]
+ {
+ return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
+ }
+}
+
+
+local rule typecheck ( x )
+{
+ local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
+ if ! [ is-a $(x) : $(class-name) ]
+ {
+ return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+
+ # This will be the construction function for a class called 'myclass'.
+ #
+ class myclass
+ {
+ import assert ;
+
+ rule __init__ ( x_ * : y_ * )
+ {
+ # Set some instance variables.
+ x = $(x_) ;
+ y = $(y_) ;
+ foo += 10 ;
+ }
+
+ rule set-x ( newx * )
+ {
+ x = $(newx) ;
+ }
+
+ rule get-x ( )
+ {
+ return $(x) ;
+ }
+
+ rule set-y ( newy * )
+ {
+ y = $(newy) ;
+ }
+
+ rule get-y ( )
+ {
+ return $(y) ;
+ }
+
+ rule f ( )
+ {
+ return [ g $(x) ] ;
+ }
+
+ rule g ( args * )
+ {
+ if $(x) in $(y)
+ {
+ return $(x) ;
+ }
+ else if $(y) in $(x)
+ {
+ return $(y) ;
+ }
+ else
+ {
+ return ;
+ }
+ }
+
+ rule get-class ( )
+ {
+ return $(__class__) ;
+ }
+
+ rule get-instance ( )
+ {
+ return $(__name__) ;
+ }
+
+ rule invariant ( )
+ {
+ assert.equal 1 : 1 ;
+ }
+
+ rule get-foo ( )
+ {
+ return $(foo) ;
+ }
+ } # class myclass ;
+
+ class derived1 : myclass
+ {
+ rule __init__ ( z_ )
+ {
+ myclass.__init__ $(z_) : X ;
+ z = $(z_) ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived1.g ;
+ }
+
+ rule h ( )
+ {
+ return derived1.h ;
+ }
+
+ rule get-z ( )
+ {
+ return $(z) ;
+ }
+
+ # Check that 'assert.equal' visible in base class is visible here.
+ #
+ rule invariant2 ( )
+ {
+ assert.equal 2 : 2 ;
+ }
+
+ # Check that 'assert.variable-not-empty' visible in base class is
+ # visible here.
+ #
+ rule invariant3 ( )
+ {
+ local v = 10 ;
+ assert.variable-not-empty v ;
+ }
+ } # class derived1 : myclass ;
+
+ class derived2 : myclass
+ {
+ rule __init__ ( )
+ {
+ myclass.__init__ 1 : 2 ;
+ }
+
+ # Override g.
+ #
+ rule g ( args * )
+ {
+ return derived2.g ;
+ }
+
+ # Test the ability to call base class functions with qualification.
+ #
+ rule get-x ( )
+ {
+ return [ myclass.get-x ] ;
+ }
+ } # class derived2 : myclass ;
+
+ class derived2a : derived2
+ {
+ rule __init__
+ {
+ derived2.__init__ ;
+ }
+ } # class derived2a : derived2 ;
+
+ local rule expect_derived2 ( [derived2] x ) { }
+
+ local a = [ new myclass 3 4 5 : 4 5 ] ;
+ local b = [ new derived1 4 ] ;
+ local b2 = [ new derived1 4 ] ;
+ local c = [ new derived2 ] ;
+ local d = [ new derived2 ] ;
+ local e = [ new derived2a ] ;
+
+ expect_derived2 $(d) ;
+ expect_derived2 $(e) ;
+
+ # Argument checking is set up to call exit(1) directly on failure, and we
+    # cannot hijack that with try, so we had better not run this test by
+ # default. We could fix this by having errors look up and invoke the EXIT
+ # rule instead; EXIT can be hijacked (;-)
+ if --fail-typecheck in [ modules.peek : ARGV ]
+ {
+ try ;
+ {
+ expect_derived2 $(a) ;
+ }
+ catch
+ "Expected an instance of derived2 but got" instead
+ ;
+ }
+
+ #try ;
+ #{
+ # new bad_subclass ;
+ #}
+ #catch
+ # bad_subclass.bad_subclass failed to call base class constructor
+ # myclass.__init__
+ # ;
+
+ #try ;
+ #{
+ # class bad_subclass ;
+ #}
+ #catch bad_subclass has already been declared ;
+
+ assert.result 3 4 5 : $(a).get-x ;
+ assert.result 4 5 : $(a).get-y ;
+ assert.result 4 : $(b).get-x ;
+ assert.result X : $(b).get-y ;
+ assert.result 4 : $(b).get-z ;
+ assert.result 1 : $(c).get-x ;
+ assert.result 2 : $(c).get-y ;
+ assert.result 4 5 : $(a).f ;
+ assert.result derived1.g : $(b).f ;
+ assert.result derived2.g : $(c).f ;
+ assert.result derived2.g : $(d).f ;
+
+ assert.result 10 : $(b).get-foo ;
+
+ $(a).invariant ;
+ $(b).invariant2 ;
+ $(b).invariant3 ;
+
+ # Check that the __class__ attribute is getting properly set.
+ assert.result myclass : $(a).get-class ;
+ assert.result derived1 : $(b).get-class ;
+ assert.result $(a) : $(a).get-instance ;
+
+ $(a).set-x a.x ;
+ $(b).set-x b.x ;
+ $(c).set-x c.x ;
+ $(d).set-x d.x ;
+ assert.result a.x : $(a).get-x ;
+ assert.result b.x : $(b).get-x ;
+ assert.result c.x : $(c).get-x ;
+ assert.result d.x : $(d).get-x ;
+
+ class derived3 : derived1 derived2
+ {
+ rule __init__ ( )
+ {
+ }
+ }
+
+ assert.result : bases myclass ;
+ assert.result myclass : bases derived1 ;
+ assert.result myclass : bases derived2 ;
+ assert.result derived1 derived2 : bases derived3 ;
+
+ assert.true is-derived derived1 : myclass ;
+ assert.true is-derived derived2 : myclass ;
+ assert.true is-derived derived3 : derived1 ;
+ assert.true is-derived derived3 : derived2 ;
+ assert.true is-derived derived3 : derived1 derived2 myclass ;
+ assert.true is-derived derived3 : myclass ;
+
+ assert.false is-derived myclass : derived1 ;
+
+ assert.true is-instance $(a) ;
+ assert.false is-instance bar ;
+
+ assert.true is-a $(a) : myclass ;
+ assert.true is-a $(c) : derived2 ;
+ assert.true is-a $(d) : myclass ;
+ assert.false is-a literal : myclass ;
+}
diff --git a/tools/build/src/kernel/errors.jam b/tools/build/src/kernel/errors.jam
new file mode 100644
index 0000000000..9563396eca
--- /dev/null
+++ b/tools/build/src/kernel/errors.jam
@@ -0,0 +1,287 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Print a stack backtrace leading to this rule's caller. Each argument
+# represents a line of output to be printed after the first line of the
+# backtrace.
+#
+rule backtrace ( skip-frames prefix messages * : * )
+{
+ local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
+ local drop-elements = $(frame-skips[$(skip-frames)]) ;
+ if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
+ {
+ ECHO "warning: backtrace doesn't support skipping $(skip-frames) "
+ "frames; using 1 instead." ;
+ drop-elements = 5 ;
+ }
+
+ local args = $(.args) ;
+ if $(.user-modules-only)
+ {
+ local bt = [ nearest-user-location ] ;
+ if $(bt)
+ {
+ ECHO $(prefix) at $(bt) ;
+ }
+ for local n in $(args)
+ {
+ if $($(n))-is-defined
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ }
+ else
+ {
+ # Get the whole backtrace, then drop the initial quadruples
+ # corresponding to the frames that must be skipped.
+ local bt = [ BACKTRACE ] ;
+ bt = $(bt[$(drop-elements)-]) ;
+
+ while $(bt)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
+
+ # The first time through, print each argument on a separate line.
+ for local n in $(args)
+ {
+ if $($(n))-is-defined
+ {
+ ECHO $(prefix) $($(n)) ;
+ }
+ }
+ args = ; # Kill args so that this never happens again.
+
+ # Move on to the next quadruple.
+ bt = $(bt[5-]) ;
+ }
+ }
+}
+
+.args ?= messages 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+.disabled ?= ;
+.last-error-$(.args) ?= ;
+
+
+# try-catch --
+#
+# This is not really an exception-handling mechanism, but it does allow us to
+# perform some error-checking on our error-checking. Errors are suppressed after
+# a try, and the first one is recorded. Use catch to check that the error
+# message matched expectations.
+
+# Begin looking for error messages.
+#
+rule try ( )
+{
+ .disabled += true ;
+ .last-error-$(.args) = ;
+}
+
+
+# Stop looking for error messages; generate an error if an argument of messages
+# is not found in the corresponding argument in the error call.
+#
+rule catch ( messages * : * )
+{
+ .disabled = $(.disabled[2-]) ; # Pop the stack.
+
+ import sequence ;
+
+ if ! $(.last-error-$(.args))-is-defined
+ {
+ error-skip-frames 3 expected an error, but none occurred ;
+ }
+ else
+ {
+ for local n in $(.args)
+ {
+ if ! $($(n)) in $(.last-error-$(n))
+ {
+ local v = [ sequence.join $($(n)) : " " ] ;
+ v ?= "" ;
+ local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
+
+ .last-error-$(.args) = ;
+ error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
+ : got \"$(joined)\" instead ;
+ }
+ }
+ }
+}
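+
+# A minimal illustration of the try/catch pairing above (the same pattern is
+# exercised by this module's __test__ rule below):
+#
+#   try ;
+#   {
+#       error an error occurred : somewhere ;
+#   }
+#   catch an error occurred : somewhere ;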
+
+
+rule error-skip-frames ( skip-frames messages * : * )
+{
+ if ! $(.disabled)
+ {
+ backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5)
+ : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14)
+ : $(15) : $(16) : $(17) : $(18) : $(19) ;
+ EXIT ;
+ }
+ else if ! $(.last-error-$(.args))
+ {
+ for local n in $(.args)
+ {
+ # Add an extra empty string so that we always have something in the
+ # event of an error.
+ .last-error-$(n) = $($(n)) "" ;
+ }
+ }
+}
+
+if --no-error-backtrace in [ modules.peek : ARGV ]
+{
+ .no-error-backtrace = true ;
+}
+
+
+# Print an error message with a stack backtrace and exit.
+#
+rule error ( messages * : * )
+{
+ if $(.no-error-backtrace)
+ {
+ local first-printed ;
+ # Print each argument on a separate line.
+ for local n in $(.args)
+ {
+ if $($(n))-is-defined
+ {
+ if ! $(first-printed)
+ {
+ ECHO error: $($(n)) ;
+ first-printed = true ;
+ }
+ else
+ {
+ ECHO $($(n)) ;
+ }
+ }
+ }
+ EXIT ;
+ }
+ else
+ {
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) :
+ $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
+ : $(17) : $(18) : $(19) ;
+ }
+}
+
+
+# Same as 'error', but the generated backtrace will include only user files.
+#
+rule user-error ( messages * : * )
+{
+ .user-modules-only = 1 ;
+ error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) :
+ $(18) : $(19) ;
+}
+
+
+# Print a warning message with a stack backtrace and exit.
+#
+rule warning
+{
+ backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) :
+ $(18) : $(19) ;
+}
+
+
+# Convert an arbitrary argument list into a list with ":" separators and quoted
+# elements representing the same information. This is mostly useful for
+# formatting descriptions of arguments with which a rule was called when
+# reporting an error.
+#
+rule lol->list ( * )
+{
+ local result ;
+ local remaining = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
+ while $($(remaining))
+ {
+ local n = $(remaining[1]) ;
+ remaining = $(remaining[2-]) ;
+
+ if $(n) != 1
+ {
+ result += ":" ;
+ }
+ result += \"$($(n))\" ;
+ }
+ return $(result) ;
+}
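+
+# For instance (values are illustrative), [ lol->list a b : c ] produces the
+# list  "a" "b" ":" "c" , i.e. every element is quoted and each argument
+# position is separated from the next by a ":" element.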
+
+
+# Return the file:line for the nearest entry in the backtrace that corresponds to a
+# user module.
+#
+rule nearest-user-location ( )
+{
+ local bt = [ BACKTRACE ] ;
+
+ local result ;
+ while $(bt) && ! $(result)
+ {
+ local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
+ local user-modules = ([Jj]amroot(.jam|.v2|)|([Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-config.jam|project-root.jam) ;
+
+ if [ MATCH $(user-modules) : $(bt[1]:D=) ]
+ {
+ result = $(bt[1]):$(bt[2]) ;
+ }
+ bt = $(bt[5-]) ;
+ }
+ return $(result) ;
+}
+
+
+# If optimized rule is available in Jam, use it.
+if NEAREST_USER_LOCATION in [ RULENAMES ]
+{
+ rule nearest-user-location ( )
+ {
+ local r = [ NEAREST_USER_LOCATION ] ;
+ return $(r[1]):$(r[2]) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Show that we can correctly catch an expected error.
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : somewhere ;
+
+ # Show that unexpected errors generate real errors.
+ try ;
+ {
+ try ;
+ {
+ error an error occurred : somewhere ;
+ }
+ catch an error occurred : nowhere ;
+ }
+ catch expected \"nowhere\" in argument 2 ;
+
+ # Show that not catching an error where one was expected is an error.
+ try ;
+ {
+ try ;
+ {
+ }
+ catch ;
+ }
+ catch expected an error, but none occurred ;
+}
diff --git a/tools/build/src/kernel/modules.jam b/tools/build/src/kernel/modules.jam
new file mode 100644
index 0000000000..4258225320
--- /dev/null
+++ b/tools/build/src/kernel/modules.jam
@@ -0,0 +1,359 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Essentially an include guard; ensures that no module is loaded multiple times.
+.loaded ?= ;
+
+# A list of modules currently being loaded for error reporting of circular
+# dependencies.
+.loading ?= ;
+
+# A list of modules needing to be tested using their __test__ rule.
+.untested ?= ;
+
+# A list of modules which have been tested using their __test__ rule.
+.tested ?= ;
+
+
+# Runs internal Boost Build unit tests for the specified module. The module's
+# __test__ rule is executed in its own module to eliminate any inadvertent
+# effects of testing module dependencies (such as assert) on the module itself.
+#
+local rule run-module-test ( m )
+{
+ local tested-modules = [ modules.peek modules : .tested ] ;
+
+ if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
+ && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
+ {
+ modules.poke modules : .tested : $(tested-modules) $(m) ;
+
+ if ! ( __test__ in [ RULENAMES $(m) ] )
+ {
+ local argv = [ peek : ARGV ] ;
+ if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
+ {
+ ECHO warning: no __test__ rule defined in module $(m) ;
+ }
+ }
+ else
+ {
+ if ! ( --quiet in $(argv) )
+ {
+ ECHO testing module $(m)... ;
+ }
+
+ local test-module = __test-$(m)__ ;
+ IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m)
+ ] ;
+ IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
+ module $(test-module)
+ {
+ __test__ ;
+ }
+ }
+ }
+}
+
+
+# Return the binding of the given module.
+#
+rule binding ( module )
+{
+ return $($(module).__binding__) ;
+}
+
+
+# Sets the module-local value of a variable. This is the most reliable way to
+# set a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule poke ( module-name ? : variables + : value * )
+{
+ module $(<)
+ {
+ $(>) = $(3) ;
+ }
+}
+
+
+# Returns the module-local value of a variable. This is the most reliable way to
+# examine a module-local variable in a different module; it eliminates issues of
+# name shadowing due to dynamic scoping.
+#
+rule peek ( module-name ? : variables + )
+{
+ module $(<)
+ {
+ return $($(>)) ;
+ }
+}
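+
+# For example (module and variable names are illustrative), the pair of calls
+#
+#   modules.poke myproject : OPTIONS : debug trace ;
+#   local opts = [ modules.peek myproject : OPTIONS ] ;
+#
+# stores "debug trace" in the myproject module and then reads it back; this is
+# the same pattern used by this module's __test__ rule below.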
+
+
+# Call the given rule locally in the given module. Use this for rules accepting
+# rule names as arguments, so that the passed rule may be invoked in the context
+# of the rule's caller (for example, if the rule accesses module globals or is a
+# local rule). Note that rules called this way may accept at most 18 parameters.
+#
+rule call-in ( module-name ? : rule-name args * : * )
+{
+ module $(module-name)
+ {
+ return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) :
+ $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) :
+ $(19) ] ;
+ }
+}
+
+
+# Given a possibly qualified rule name and arguments, remove any initial module
+# qualification from the rule and invoke it in that module. If there is no
+# module qualification, the rule is invoked in the global module. Note that
+# rules called this way may accept at most 18 parameters.
+#
+rule call-locally ( qualified-rule-name args * : * )
+{
+ local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
+ local rule-name = $(module-rule[2]) ;
+ rule-name ?= $(qualified-rule-name) ;
+ # We pass only 18 parameters here since Boost Jam allows at most 19 rule
+ # parameter positions and the call-in rule already uses up the initial
+ # position for the module name.
+ return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
+ $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13)
+ $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] ;
+}
+
+
+# Load the indicated module if it is not already loaded.
+#
+rule load (
+ module-name # Name of module to load. Rules will be defined in this
+ # module.
+ : filename ? # (partial) path to file; Defaults to $(module-name).jam.
+ : search * # Directories in which to search for filename. Defaults to
+ # $(BOOST_BUILD_PATH).
+)
+{
+ # Avoid loading modules twice.
+ if ! ( $(module-name) in $(.loaded) )
+ {
+ filename ?= $(module-name).jam ;
+
+ # Mark the module loaded so we do not try to load it recursively.
+ .loaded += $(module-name) ;
+
+ # Suppress tests if any module loads are already in progress.
+ local suppress-test = $(.loading[1]) ;
+
+ # Push this module on the loading stack.
+ .loading += $(module-name) ;
+
+ # Remember that it is untested.
+ .untested += $(module-name) ;
+
+ # Insert the new module's __name__ and __file__ globals.
+ poke $(module-name) : __name__ : $(module-name) ;
+ poke $(module-name) : __file__ : $(filename) ;
+
+ module $(module-name)
+ {
+ # Add some grist so that the module will have a unique target name.
+ local module-target = $(__file__:G=module@) ;
+
+ local search = $(3) ;
+ search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
+ SEARCH on $(module-target) = $(search) ;
+ BINDRULE on $(module-target) = modules.record-binding ;
+
+ include $(module-target) ;
+
+ # Allow the module to see its own names with full qualification.
+ local rules = [ RULENAMES $(__name__) ] ;
+ IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
+ }
+
+ if $(module-name) != modules && ! [ binding $(module-name) ]
+ {
+ import errors ;
+ errors.error "Could not find module" $(module-name) in $(search) ;
+ }
+
+ # Pop the loading stack. Must happen before testing or we will run into
+ # a circular loading dependency.
+ .loading = $(.loading[1--2]) ;
+
+ # Run any pending tests if this is an outer load.
+ if ! $(suppress-test)
+ {
+ local argv = [ peek : ARGV ] ;
+ for local m in $(.untested)
+ {
+ run-module-test $(m) ;
+ }
+ .untested = ;
+ }
+ }
+ else if $(module-name) in $(.loading)
+ {
+ import errors ;
+ errors.error loading \"$(module-name)\"
+ : circular module loading dependency:
+ : $(.loading)" ->" $(module-name) ;
+ }
+}
+
+
+# This helper is used by load (above) to record the binding (path) of each
+# loaded module.
+#
+rule record-binding ( module-target : binding )
+{
+ $(.loading[-1]).__binding__ = $(binding) ;
+}
+
+
+# Transform each path in the list, with all backslashes converted to forward
+# slashes and all detectable redundancy removed. Something like this is probably
+# needed in path.jam, but I am not sure of that, I do not understand it, and I
+# am not ready to move all of path.jam into the kernel.
+#
+local rule normalize-raw-paths ( paths * )
+{
+ local result ;
+ for p in $(paths:T)
+ {
+ result += [ NORMALIZE_PATH $(p) ] ;
+ }
+ return $(result) ;
+}
+
+
+.cwd = [ PWD ] ;
+
+
+# Load the indicated module and import rule names into the current module. Any
+# members of rules-opt will be available without qualification in the caller's
+# module. Any members of rename-opt will be taken as the names of the rules in
+# the caller's module, in place of the names they have in the imported module.
+# If rules-opt = '*', all rules from the indicated module are imported into the
+# caller's module. If rename-opt is supplied, it must have the same number of
+# elements as rules-opt.
+#
+rule import ( module-names + : rules-opt * : rename-opt * )
+{
+ if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
+ {
+ import errors ;
+ errors.error "Rule aliasing is only available for explicit imports." ;
+ }
+
+ if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
+ {
+ import errors ;
+ errors.error "When loading multiple modules, no specific rules or"
+ "renaming is allowed" ;
+ }
+
+ local caller = [ CALLER_MODULE ] ;
+
+ # Import each specified module
+ for local m in $(module-names)
+ {
+ if ! $(m) in $(.loaded)
+ {
+ # If the importing module is not already in the BOOST_BUILD_PATH,
+ # prepend it to the path. We do not want to invert the search order
+ # of modules that are already there.
+
+ local caller-location ;
+ if $(caller)
+ {
+ caller-location = [ binding $(caller) ] ;
+ caller-location = $(caller-location:D) ;
+ caller-location = [ normalize-raw-paths
+ $(caller-location:R=$(.cwd)) ] ;
+ }
+
+ local search = [ peek : BOOST_BUILD_PATH ] ;
+ search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
+
+ if $(caller-location) && ! $(caller-location) in $(search)
+ {
+ search = $(caller-location) $(search) ;
+ }
+
+ load $(m) : : $(search) ;
+ }
+
+ IMPORT_MODULE $(m) : $(caller) ;
+
+ if $(rules-opt)
+ {
+ local source-names ;
+ if $(rules-opt) = *
+ {
+ local all-rules = [ RULENAMES $(m) ] ;
+ source-names = $(all-rules) ;
+ }
+ else
+ {
+ source-names = $(rules-opt) ;
+ }
+ local target-names = $(rename-opt) ;
+ target-names ?= $(source-names) ;
+ IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ;
+ }
+ }
+}
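+
+# A few illustrative import forms (module and rule names are only examples):
+#
+#   import path ;                      # qualified access: [ path.glob ... ]
+#   import assert : result : check ;   # import 'result' under the name 'check'
+#   import sequence : * ;              # import every rule, unqualified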
+
+
+# Define exported copies in $(target-module) of all rules exported from
+# $(source-module). Also make them available in the global module with
+# qualification, so that it is just as though the rules were defined originally
+# in $(target-module).
+#
+rule clone-rules ( source-module target-module )
+{
+ local r = [ RULENAMES $(source-module) ] ;
+ IMPORT $(source-module) : $(r) : $(target-module) : $(r) : LOCALIZE ;
+ EXPORT $(target-module) : $(r) ;
+ IMPORT $(target-module) : $(r) : : $(target-module).$(r) ;
+}
+
+
+# These rules need to be available in all modules to implement module loading
+# itself and other fundamental operations.
+local globalize = peek poke record-binding ;
+IMPORT modules : $(globalize) : : modules.$(globalize) ;
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import modules : normalize-raw-paths ;
+
+ module modules.__test__
+ {
+ foo = bar ;
+ }
+
+ assert.result bar : peek modules.__test__ : foo ;
+
+ poke modules.__test__ : foo : bar baz ;
+ assert.result bar baz : peek modules.__test__ : foo ;
+
+ assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
+ assert.result . : normalize-raw-paths . ;
+ assert.result .. : normalize-raw-paths .. ;
+ assert.result ../.. : normalize-raw-paths ../.. ;
+ assert.result .. : normalize-raw-paths ./.. ;
+ assert.result / / : normalize-raw-paths / \\ ;
+ assert.result a : normalize-raw-paths a ;
+ assert.result a : normalize-raw-paths a/ ;
+ assert.result /a : normalize-raw-paths /a/ ;
+ assert.result / : normalize-raw-paths /a/.. ;
+}
diff --git a/tools/build/v2/manager.py b/tools/build/src/manager.py
index 473857fc76..473857fc76 100644
--- a/tools/build/v2/manager.py
+++ b/tools/build/src/manager.py
diff --git a/tools/build/src/options/help.jam b/tools/build/src/options/help.jam
new file mode 100755
index 0000000000..abab3770e1
--- /dev/null
+++ b/tools/build/src/options/help.jam
@@ -0,0 +1,222 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Rene Rivera
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module is the plug-in handler for the --help and --help-.*
+# command-line options.
+import modules ;
+import assert ;
+import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
+import sequence ;
+import set ;
+import project ;
+import print ;
+import os ;
+import version ;
+import path ;
+
+# Names that would appear to be modules, but really are not.
+#
+.not-modules =
+ boost-build bootstrap site-config test user-config
+ -tools allyourbase boost-base features python stlport testing unit-tests ;
+
+# The help system options are parsed here and handed off to the doc
+# module to translate into documentation requests and actions. The
+# understood options are:
+#
+# --help-disable-<option>
+# --help-doc-options
+# --help-enable-<option>
+# --help-internal
+# --help-options
+# --help-usage
+# --help-output <type>
+# --help-output-file <file>
+# --help [<module-or-class>]
+#
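+# For instance (command lines are illustrative): "b2 --help" prints the top
+# level documentation, while "b2 --help <module>" documents a single module.
+#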
+rule process (
+ command # The option.
+ : values * # The values, starting after the "=".
+ )
+{
+ assert.result --help : MATCH ^(--help).* : $(command) ;
+ local did-help = ;
+ switch $(command)
+ {
+ case --help-internal :
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules ?= . ;
+ local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
+ local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
+ local modules-to-list =
+ [ sequence.insertion-sort
+ [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
+ local modules-to-scan ;
+ for local m in $(modules-to-list)
+ {
+ local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
+ modules-to-scan += $(module-files[1]) ;
+ }
+ do-scan $(modules-to-scan) : print-help-all ;
+ did-help = true ;
+
+ case --help-enable-* :
+ local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) : enabled ;
+ did-help = true ;
+
+ case --help-disable-* :
+ local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
+ set-option $(option) ;
+ did-help = true ;
+
+ case --help-output :
+ set-output $(values[1]) ;
+ did-help = true ;
+
+ case --help-output-file :
+ set-output-file $(values[1]) ;
+ did-help = true ;
+
+ case --help-doc-options :
+ local doc-module-spec = [ split-symbol doc ] ;
+ do-scan $(doc-module-spec[1]) : print-help-options ;
+ did-help = true ;
+
+ case --help-options :
+ print-help-usage ;
+ local BOOST_BUILD_PATH = [ modules.peek : BOOST_BUILD_PATH ] ;
+ local plugin-dir = options ;
+ local option-files = [ GLOB $(plugin-dir:D=$(BOOST_BUILD_PATH)) : *.jam ] ;
+ if $(option-files)
+ {
+ for local file in $(option-files)
+ {
+ do-scan $(file) : print-help-options ;
+ }
+ }
+ did-help = true ;
+
+ case --help :
+ local spec = $(values[1]) ;
+ if $(spec)
+ {
+ local spec-parts = [ split-symbol $(spec) ] ;
+ if $(spec-parts)
+ {
+ if $(spec-parts[2])
+ {
+ do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
+ do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
+ }
+ else
+ {
+ do-scan $(spec-parts[1]) : print-help-module ;
+ }
+ }
+ else
+ {
+ EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
+ }
+ }
+ else
+ {
+ version.print ;
+ ECHO ;
+ # First print documentation from the current Jamfile, if any.
+ # FIXME: Generally, this duplication of project.jam logic is bad.
+ local names = [ modules.peek project : JAMROOT ]
+ [ modules.peek project : JAMFILE ] ;
+ local project-file = [ path.glob . : $(names) ] ;
+ if ! $(project-file)
+ {
+ project-file = [ path.glob-in-parents . : $(names) ] ;
+ }
+
+ for local p in $(project-file)
+ {
+ do-scan $(p) : print-help-project $(p) ;
+ }
+
+ # Next any user-config help.
+ local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
+ local user-config = [ GLOB $(user-path) : user-config.jam ] ;
+ if $(user-config)
+ {
+ do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
+ }
+
+ # Next any site-config help.
+ local site-config = [ GLOB $(user-path) : site-config.jam ] ;
+ if $(site-config)
+ {
+ do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
+ }
+
+ # Then the overall help.
+ print-help-top ;
+ }
+ did-help = true ;
+ }
+ if $(did-help)
+ {
+ UPDATE all ;
+ NOCARE all ;
+ }
+ return $(did-help) ;
+}
+
+# Split a reference to a symbol into module and symbol parts.
+#
+local rule split-symbol (
+ symbol # The symbol to split.
+ )
+{
+ local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
+ path-to-modules ?= . ;
+ local module-name = $(symbol) ;
+ local symbol-name = ;
+ local result = ;
+ while ! $(result)
+ {
+ local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
+ if $(module-path)
+ {
+            # The 'module-name' in fact refers to a module. Return the full
+            # module path and a symbol within it. If the 'symbol' passed to
+            # this rule is already a module, 'symbol-name' will be empty.
+            # Otherwise, it was initialized on a previous loop iteration.
+ # In case there are several modules by this name,
+ # use the first one.
+ result = $(module-path[1]) $(symbol-name) ;
+ }
+ else
+ {
+ if ! $(module-name:S)
+ {
+ result = - ;
+ }
+ else
+ {
+ local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
+ if $(symbol-name)
+ {
+ symbol-name = $(next-symbol-part).$(symbol-name) ;
+ }
+ else
+ {
+ symbol-name = $(next-symbol-part) ;
+ }
+ module-name = $(module-name:B) ;
+ }
+ }
+ }
+ if $(result) != -
+ {
+ return $(result) ;
+ }
+}
diff --git a/tools/build/v2/debian/copyright b/tools/build/src/tools/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/debian/copyright
+++ b/tools/build/src/tools/__init__.py
diff --git a/tools/build/v2/tools/acc.jam b/tools/build/src/tools/acc.jam
index f04c9dc879..f04c9dc879 100644
--- a/tools/build/v2/tools/acc.jam
+++ b/tools/build/src/tools/acc.jam
diff --git a/tools/build/src/tools/auto-index.jam b/tools/build/src/tools/auto-index.jam
new file mode 100644
index 0000000000..41d04828a7
--- /dev/null
+++ b/tools/build/src/tools/auto-index.jam
@@ -0,0 +1,204 @@
+
+import feature ;
+import generators ;
+import "class" ;
+import toolset ;
+import targets ;
+import "class" : new ;
+import project ;
+
+feature.feature auto-index : off "on" ;
+feature.feature auto-index-internal : off "on" ;
+feature.feature auto-index-verbose : off "on" ;
+feature.feature auto-index-no-duplicates : off "on" ;
+feature.feature auto-index-script : : free path ;
+feature.feature auto-index-prefix : : free path ;
+feature.feature auto-index-type : : free ;
+feature.feature auto-index-section-names : "on" off ;
+
+toolset.flags auto-index.auto-index FLAGS <auto-index-internal>on : --internal-index ;
+toolset.flags auto-index.auto-index SCRIPT <auto-index-script> ;
+toolset.flags auto-index.auto-index PREFIX <auto-index-prefix> ;
+toolset.flags auto-index.auto-index INDEX_TYPE <auto-index-type> ;
+toolset.flags auto-index.auto-index FLAGS <auto-index-verbose>on : --verbose ;
+toolset.flags auto-index.auto-index FLAGS <auto-index-no-duplicates>on : --no-duplicates ;
+toolset.flags auto-index.auto-index FLAGS <auto-index-section-names>off : --no-section-names ;
+
+# <auto-index-binary> shell command to run AutoIndex
+# <auto-index-binary-dependencies> targets to build AutoIndex from sources.
+feature.feature <auto-index-binary> : : free ;
+feature.feature <auto-index-binary-dependencies> : : free dependency ;
+
+class auto-index-generator : generator
+{
+ import common modules path targets build-system ;
+ rule run ( project name ? : property-set : sources * )
+ {
+ # AutoIndex invocation command and dependencies.
+ local auto-index-binary = [ modules.peek auto-index : .command ] ;
+ local auto-index-binary-dependencies ;
+
+ if $(auto-index-binary)
+ {
+ # Use user-supplied command.
+ auto-index-binary = [ common.get-invocation-command auto-index : auto-index : $(auto-index-binary) ] ;
+ }
+ else
+ {
+ # Search for AutoIndex sources in sensible places, like
+ # $(BOOST_ROOT)/tools/auto_index
+ # $(BOOST_BUILD_PATH)/../../auto_index
+
+ # And build auto-index executable from sources.
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ local boost-build-path = [ build-system.location ] ;
+ local boost-build-path2 = [ modules.peek : BOOST_BUILD_PATH ] ;
+
+ local auto-index-dir ;
+
+ if $(boost-root)
+ {
+ auto-index-dir += [ path.join $(boost-root) tools ] ;
+ }
+
+ if $(boost-build-path)
+ {
+ auto-index-dir += $(boost-build-path)/../.. ;
+ }
+ if $(boost-build-path2)
+ {
+ auto-index-dir += $(boost-build-path2)/.. ;
+ }
+
+ #ECHO $(auto-index-dir) ;
+ auto-index-dir = [ path.glob $(auto-index-dir) : auto_index ] ;
+ #ECHO $(auto-index-dir) ;
+
+ # If the AutoIndex source directory was found, mark its main target
+ # as a dependency for the current project. Otherwise, try to find
+            # 'auto-index' on the user's PATH.
+ if $(auto-index-dir)
+ {
+ auto-index-dir = [ path.make $(auto-index-dir[1]) ] ;
+ auto-index-dir = $(auto-index-dir)/build ;
+
+ #ECHO $(auto-index-dir) ;
+
+ # Get the main-target in AutoIndex directory.
+ local auto-index-main-target = [ targets.resolve-reference $(auto-index-dir) : $(project) ] ;
+
+ #ECHO $(auto-index-main-target) ;
+
+                # The first element is the actual targets; the second is the
+                # properties found in the target-id. We do not care about the
+                # latter since we have passed the id ourselves.
+ auto-index-main-target =
+ [ $(auto-index-main-target[1]).main-target auto_index ] ;
+
+ #ECHO $(auto-index-main-target) ;
+
+ auto-index-binary-dependencies =
+ [ $(auto-index-main-target).generate [ $(property-set).propagated ] ] ;
+
+ # Ignore usage-requirements returned as first element.
+ auto-index-binary-dependencies = $(auto-index-binary-dependencies[2-]) ;
+
+ # Some toolsets generate extra targets (e.g. RSP). We must mark
+ # all targets as dependencies for the project, but we will only
+ # use the EXE target for auto-index-to-boostbook translation.
+ for local target in $(auto-index-binary-dependencies)
+ {
+ if [ $(target).type ] = EXE
+ {
+ auto-index-binary =
+ [ path.native
+ [ path.join
+ [ $(target).path ]
+ [ $(target).name ]
+ ]
+ ] ;
+ }
+ }
+ }
+ else
+ {
+ ECHO "AutoIndex warning: The path to the auto-index executable was" ;
+ ECHO " not provided. Additionally, couldn't find AutoIndex" ;
+ ECHO " sources searching in" ;
+                ECHO "    * BOOST_ROOT/tools/auto_index" ;
+                ECHO "    * BOOST_BUILD_PATH/../../auto_index" ;
+ ECHO " Will now try to find a precompiled executable by searching" ;
+ ECHO " the PATH for 'auto-index'." ;
+ ECHO " To disable this warning in the future, or to completely" ;
+ ECHO " avoid compilation of auto-index, you can explicitly set the" ;
+                ECHO "  path to an auto-index executable command in user-config.jam" ;
+ ECHO " or site-config.jam with the call" ;
+ ECHO " using auto-index : /path/to/auto-index ;" ;
+
+ # As a last resort, search for 'auto-index' command in path. Note
+ # that even if the 'auto-index' command is not found,
+ # get-invocation-command will still return 'auto-index' and might
+ # generate an error while generating the virtual-target.
+
+ auto-index-binary = [ common.get-invocation-command auto-index : auto-index ] ;
+ }
+ }
+
+ # Add $(auto-index-binary-dependencies) as a dependency of the current
+ # project and set it as the <auto-index-binary> feature for the
+ # auto-index-to-boostbook rule, below.
+ property-set = [ $(property-set).add-raw
+ <dependency>$(auto-index-binary-dependencies)
+ <auto-index-binary>$(auto-index-binary)
+ <auto-index-binary-dependencies>$(auto-index-binary-dependencies)
+ ] ;
+
+ #ECHO "binary = " $(auto-index-binary) ;
+ #ECHO "dependencies = " $(auto-index-binary-dependencies) ;
+
+ return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ;
+ }
+}
+
+# Initialization of toolset.
+#
+# Parameters:
+# command ? -> path to AutoIndex executable.
+#
+# When the command is not supplied, the toolset will search for the AutoIndex
+# sources and compile the executable from them. If that fails we still search
+# the PATH for an 'auto-index' command.
+#
+rule init (
+ command ? # path to the AutoIndex executable.
+ )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ .command = $(command) ;
+ }
+}
+
+toolset.flags auto-index.auto-index AI-COMMAND <auto-index-binary> ;
+toolset.flags auto-index.auto-index AI-DEPENDENCIES <auto-index-binary-dependencies> ;
+
+generators.register [ class.new auto-index-generator auto-index.auto-index : DOCBOOK : DOCBOOK(%.auto_index) : <auto-index>on ] ;
+generators.override auto-index.auto-index : boostbook.boostbook-to-docbook ;
+
+rule auto-index ( target : source : properties * )
+{
+ # Signal dependency of auto-index sources on <auto-index-binary-dependencies>
+ # upon invocation of auto-index-to-boostbook.
+ #ECHO "AI-COMMAND= " $(AI-COMMAND) ;
+ DEPENDS $(target) : [ on $(target) return $(AI-DEPENDENCIES) ] ;
+ #DEPENDS $(target) : [ on $(target) return $(SCRIPT) ] ;
+}
+
+actions auto-index
+{
+ $(AI-COMMAND) $(FLAGS) "--prefix="$(PREFIX) "--script="$(SCRIPT) "--index-type="$(INDEX_TYPE) "--in="$(>) "--out="$(<)
+}
+
+
diff --git a/tools/build/v2/tools/bison.jam b/tools/build/src/tools/bison.jam
index 0689d4bd89..0689d4bd89 100644
--- a/tools/build/v2/tools/bison.jam
+++ b/tools/build/src/tools/bison.jam
diff --git a/tools/build/v2/tools/boostbook-config.jam b/tools/build/src/tools/boostbook-config.jam
index 6e3f3ddc10..6e3f3ddc10 100644
--- a/tools/build/v2/tools/boostbook-config.jam
+++ b/tools/build/src/tools/boostbook-config.jam
diff --git a/tools/build/src/tools/boostbook.jam b/tools/build/src/tools/boostbook.jam
new file mode 100644
index 0000000000..42342d9bb1
--- /dev/null
+++ b/tools/build/src/tools/boostbook.jam
@@ -0,0 +1,771 @@
+# Copyright 2003, 2004, 2005 Dave Abrahams
+# Copyright 2003, 2004, 2005 Douglas Gregor
+# Copyright 2005, 2006, 2007 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of documentation from BoostBook
+# sources.
+#
+# The type of output is controlled by the <format> feature which can have the
+# following values:
+# * html: Generates html documentation. This is the default.
+# * xhtml: Generates xhtml documentation.
+# * htmlhelp: Generates html help output.
+# * onehtml: Generates a single html page.
+# * man: Generates man pages.
+# * pdf: Generates pdf documentation.
+# * ps: Generates postscript output.
+# * docbook: Generates docbook XML.
+#   * fo: Generates XSL formatting objects.
+# * tests: Extracts test cases from the boostbook XML.
+#
+# <format> is an implicit feature, so for example, typing pdf on the command
+# line is a short-cut for format=pdf.
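+#
+# An illustrative Jamfile use (target and source names are examples only):
+#
+#   using boostbook ;
+#   boostbook mydoc : mydoc.xml ;
+#
+# after which, e.g., "b2 pdf" would request the pdf format for that target.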
+
+import build-system ;
+import "class" : new ;
+import common ;
+import feature ;
+import generators ;
+import make ;
+import modules ;
+import os ;
+import path ;
+import print ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import scanner ;
+import sequence ;
+import targets ;
+import type ;
+import virtual-target ;
+import xsltproc ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project boostbook ;
+
+.debug-configuration = [ MATCH ^(--debug-configuration)$ : [ modules.peek : ARGV
+ ] ] ;
+
+feature.feature format
+ : html xhtml htmlhelp onehtml man pdf ps docbook fo tests
+ : incidental implicit composite propagated ;
+
+type.register DTDXML : dtdxml ;
+type.register XML : xml ;
+type.register BOOSTBOOK : boostbook : XML ;
+type.register DOCBOOK : docbook : XML ;
+type.register FO : fo : XML ;
+type.register PDF : pdf ;
+type.register PS : ps ;
+type.register XSLT : xsl : XML ;
+type.register HTMLDIR ;
+type.register XHTMLDIR ;
+type.register HTMLHELP ;
+type.register MANPAGES ;
+type.register TESTS : tests ;
+
+
+# Initialize BoostBook support.
+#
+rule init (
+ docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not provided,
+ # we use DOCBOOK_XSL_DIR from the environment (if
+ # available) or look in standard locations. Otherwise,
+ # we let the XML processor load the stylesheets
+ # remotely.
+
+ : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
+ # DOCBOOK_DTD_DIR From the environment (if available) or
+ # look in standard locations. Otherwise, we let the XML
+ # processor load the DTD remotely.
+
+ : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
+)
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ check-boostbook-dir $(boostbook-dir) ;
+ find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
+
+ # Register generators only if we were called via "using boostbook ;"
+ local reg-gen = generators.register-standard ;
+ $(reg-gen) boostbook.dtdxml-to-boostbook : DTDXML : XML ;
+ $(reg-gen) boostbook.boostbook-to-docbook : XML : DOCBOOK ;
+ $(reg-gen) boostbook.boostbook-to-tests : XML : TESTS ;
+ $(reg-gen) boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
+ $(reg-gen) boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
+ $(reg-gen) boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
+ $(reg-gen) boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
+ $(reg-gen) boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
+ $(reg-gen) boostbook.docbook-to-fo : DOCBOOK : FO ;
+
+        # Likewise for the Jamfile main target rules.
+ IMPORT $(__name__) : boostbook : : boostbook ;
+ }
+ else
+ {
+ if $(docbook-xsl-dir)
+ {
+ modify-config ;
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
+ check-docbook-xsl-dir ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ modify-config ;
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
+ check-docbook-dtd-dir ;
+ }
+ if $(boostbook-dir)
+ {
+ modify-config ;
+ check-boostbook-dir $(boostbook-dir) ;
+ local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
+ local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+ }
+ }
+}
+
+
+local rule lock-config ( )
+{
+ if ! $(.initialized)
+ {
+ import errors ;
+ errors.user-error BoostBook has not been configured. ;
+ }
+ if ! $(.config-locked)
+ {
+ .config-locked = true ;
+
+ if $(.error-message)
+ {
+ print-error $(.error-message) ;
+ }
+ }
+}
+
+
+local rule modify-config ( )
+{
+ if $(.config-locked)
+ {
+ import errors ;
+ errors.user-error BoostBook configuration cannot be changed after it has
+ been used. ;
+ }
+}
+
+rule print-error ( location message * )
+{
+ ECHO error: at $(location) ;
+ ECHO error: $(message) ;
+ EXIT ;
+}
+
+rule make-error ( message * )
+{
+ import errors ;
+ return [ errors.nearest-user-location ] $(message) ;
+}
+
+
+rule find-boost-in-registry ( keys * )
+{
+ local boost-root ;
+ for local R in $(keys)
+ {
+ local installed-boost = [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)" : "InstallRoot" ] ;
+ if $(installed-boost)
+ {
+ boost-root += [ path.make $(installed-boost) ] ;
+ }
+ }
+ return $(boost-root) ;
+}
+
+
+rule check-docbook-xsl-dir ( )
+{
+ if $(.docbook-xsl-dir)
+ {
+ if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
+ {
+ .error-message = [ make-error BoostBook: could not find docbook XSL stylesheets
+ in: [ path.native $(.docbook-xsl-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO notice: BoostBook: found docbook XSL stylesheets in: [
+ path.native $(.docbook-xsl-dir) ] ;
+ }
+ }
+}
+
+
+rule check-docbook-dtd-dir ( )
+{
+ if $(.docbook-dtd-dir)
+ {
+ if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
+ {
+ .error-message = [ make-error BoostBook: could not find docbook DTD in: [
+ path.native $(.docbook-dtd-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO notice: BoostBook: found docbook DTD in: [ path.native
+ $(.docbook-dtd-dir) ] ;
+ }
+ }
+}
+
+
+rule check-boostbook-xsl-dir ( )
+{
+ if ! $(.boostbook-xsl-dir)
+ {
+ .error-message = [ make-error BoostBook: could not find boostbook XSL stylesheets. ] ;
+ }
+ else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
+ {
+ .error-message = [ make-error BoostBook: could not find docbook XSL stylesheets in:
+ [ path.native $(.boostbook-xsl-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO notice: BoostBook: found boostbook XSL stylesheets in: [
+ path.native $(.boostbook-xsl-dir) ] ;
+ }
+}
+
+
+rule check-boostbook-dtd-dir ( )
+{
+ if ! $(.boostbook-dtd-dir)
+ {
+ .error-message = [ make-error BoostBook: could not find boostbook DTD. ] ;
+ }
+ else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
+ {
+ .error-message = [ make-error BoostBook: could not find boostbook DTD in: [
+ path.native $(.boostbook-dtd-dir) ] ] ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO notice: BoostBook: found boostbook DTD in: [ path.native
+ $(.boostbook-dtd-dir) ] ;
+ }
+}
+
+
+rule check-boostbook-dir ( boostbook-dir ? )
+{
+ if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
+ {
+ .error-message = [ make-error BoostBook: could not find boostbook in: [ path.native
+ $(boostbook-dir) ] ] ;
+ }
+}
+
+
+rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
+{
+ docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
+ docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
+ boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
+
+ # Look for the boostbook stylesheets relative to BOOST_ROOT and Boost.Build.
+ local boost-build-root = [ path.make [ build-system.location ] ] ;
+ local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
+
+ local boost-root = [ modules.peek : BOOST_ROOT ] ;
+ if $(boost-root)
+ {
+ boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ]
+ ;
+ }
+ boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
+
+ # Try to find the tools in platform specific locations.
+ if [ os.name ] = NT
+ {
+ # If installed by the Boost installer.
+ local boost-root = ;
+
+ local boost-installer-versions = snapshot cvs 1.33.0 ;
+ local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
+ local boostpro-installer-versions =
+ 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
+ 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
+
+ local old-installer-root = [ find-boost-in-registry
+ Boost.org\\$(boost-installer-versions) ] ;
+
+ # Make sure that the most recent version is searched for first.
+ boost-root += [ sequence.reverse [ find-boost-in-registry
+ Boost-Consulting.com\\$(boost-consulting-installer-versions)
+ boostpro.com\\$(boostpro-installer-versions) ] ] ;
+
+ # Plausible locations.
+ local root = [ PWD ] ;
+ while $(root) != $(root:D) { root = $(root:D) ; }
+ root = [ path.make $(root) ] ;
+ local search-dirs ;
+ local docbook-search-dirs ;
+ for local p in $(boost-root)
+ {
+ search-dirs += [ path.join $(p) tools ] ;
+ }
+ for local p in $(old-installer-root)
+ {
+ search-dirs += [ path.join $(p) share ] ;
+ docbook-search-dirs += [ path.join $(p) share ] ;
+ }
+ search-dirs += [ path.join $(root) Boost tools ] ;
+ search-dirs += [ path.join $(root) Boost share ] ;
+ docbook-search-dirs += [ path.join $(root) Boost share ] ;
+
+ docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
+ docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
+ boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
+ }
+ else
+ {
+ # Plausible locations.
+
+ local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
+ local dtd-versions = 4.2 ;
+
+ docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
+ docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ]
+ ;
+ docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
+
+ docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook :
+ xml-dtd-$(dtd-versions)* ] ;
+ docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ]
+ ;
+
+ boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
+
+ # Ubuntu Linux.
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet :
+ nwalsh ] ;
+ docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd :
+ $(dtd-versions) ] ;
+
+ # SUSE.
+ docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet/nwalsh
+ : current ] ;
+ }
+
+ if $(docbook-xsl-dir)
+ {
+ .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
+ }
+ if $(docbook-dtd-dir)
+ {
+ .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
+ }
+
+ if $(.debug-configuration)
+ {
+ ECHO notice: Boost.Book: searching XSL/DTD "in" ;
+ ECHO notice: [ sequence.transform path.native : $(boostbook-dir) ] ;
+ }
+ local boostbook-xsl-dir ;
+ for local dir in $(boostbook-dir)
+ {
+ boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
+ }
+ local boostbook-dtd-dir ;
+ for local dir in $(boostbook-dir)
+ {
+ boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
+ }
+ .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
+ .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
+
+ check-docbook-xsl-dir ;
+ check-docbook-dtd-dir ;
+ check-boostbook-xsl-dir ;
+ check-boostbook-dtd-dir ;
+}
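+
+# For illustration only (the paths below are hypothetical examples): the search
+# performed by find-tools can be bypassed by setting the variables it peeks at,
+# either in a Boost.Jam configuration file or on the command line, e.g.:
+#
+#   bjam -sDOCBOOK_XSL_DIR=/usr/share/xml/docbook/stylesheet/nwalsh
+#   bjam -sDOCBOOK_DTD_DIR=/usr/share/xml/docbook/schema/dtd/4.2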
+
+
+rule xsl-dir
+{
+ lock-config ;
+ return $(.boostbook-xsl-dir) ;
+}
+
+
+rule dtd-dir
+{
+ lock-config ;
+ return $(.boostbook-dtd-dir) ;
+}
+
+
+rule docbook-xsl-dir
+{
+ lock-config ;
+ return $(.docbook-xsl-dir) ;
+}
+
+
+rule docbook-dtd-dir
+{
+ lock-config ;
+ return $(.docbook-dtd-dir) ;
+}
+
+
+rule dtdxml-to-boostbook ( target : source : properties * )
+{
+ lock-config ;
+ xsltproc.xslt $(target) : $(source)
+ "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl" : $(properties) ;
+}
+
+
+rule boostbook-to-docbook ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule docbook-to-onehtml ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule docbook-to-htmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html
+ ;
+}
+
+
+rule docbook-to-xhtmldir ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) :
+ xhtml ;
+}
+
+
+rule docbook-to-htmlhelp ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) :
+ htmlhelp ;
+}
+
+
+rule docbook-to-manpages ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
+ xsltproc.xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man
+ ;
+}
+
+
+rule docbook-to-fo ( target : source : properties * )
+{
+ lock-config ;
+ local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties) ;
+}
+
+
+rule format-catalog-path ( path )
+{
+ local result = $(path) ;
+ if [ xsltproc.is-cygwin ]
+ {
+ if [ os.name ] = NT
+ {
+ drive = [ MATCH ^/(.):(.*)$ : $(path) ] ;
+ result = /cygdrive/$(drive[1])$(drive[2]) ;
+ }
+ }
+ else
+ {
+ if [ os.name ] = CYGWIN
+ {
+ local native-path = [ path.native $(path) ] ;
+ result = [ path.make $(native-path:W) ] ;
+ }
+ }
+ return [ regex.replace $(result) " " "%20" ] ;
+}
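+
+# For example (illustrative): when xsltproc is a Cygwin build running on NT, a
+# path such as "/c:/Boost/share" is rewritten to "/cygdrive/c/Boost/share"; in
+# every case spaces are encoded as "%20" for use in the generated catalog URIs.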
+
+
+rule generate-xml-catalog ( target : sources * : properties * )
+{
+ print.output $(target) ;
+
+ # BoostBook DTD catalog entry.
+ local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
+ if $(boostbook-dtd-dir)
+ {
+ boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
+ }
+
+ print.text
+ "<?xml version=\"1.0\"?>"
+ "<!DOCTYPE catalog "
+ " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
+ " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
+ "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
+ " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
+ : true ;
+
+ local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
+ if ! $(docbook-xsl-dir)
+ {
+ ECHO "BoostBook warning: no DocBook XSL directory specified." ;
+ ECHO " If you have the DocBook XSL stylesheets installed, please " ;
+ ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
+ ECHO " are available here: http://docbook.sourceforge.net/ " ;
+ ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
+ }
+
+ local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
+ if ! $(docbook-dtd-dir)
+ {
+ ECHO "BoostBook warning: no DocBook DTD directory specified." ;
+ ECHO " If you have the DocBook DTD installed, please set " ;
+ ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
+ ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
+ ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
+ ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
+ ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
+ }
+ else
+ {
+ docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
+ print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
+ }
+
+ print.text "</catalog>" ;
+}
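+
+# For illustration only (the rewritePrefix directories below are hypothetical),
+# with all three directories configured the generated catalog looks roughly
+# like:
+#
+#   <?xml version="1.0"?>
+#   <catalog xmlns="urn:oasis:names:tc:entity:xmlns:xml:catalog">
+#     <rewriteURI uriStartString="http://www.boost.org/tools/boostbook/dtd/"
+#                 rewritePrefix="file:///usr/share/boostbook/dtd/"/>
+#     <rewriteURI uriStartString="http://docbook.sourceforge.net/release/xsl/current/"
+#                 rewritePrefix="file:///usr/share/docbook-xsl/"/>
+#     <rewriteURI uriStartString="http://www.oasis-open.org/docbook/xml/4.2/"
+#                 rewritePrefix="file:///usr/share/docbook-xml/"/>
+#   </catalog>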
+
+
+# Returns information about the global XML catalog target, creating it lazily if
+# needed. To get the global catalog generated only once we do not create it in
+# every project that requests it but instead only create it based on the first
+# project requesting it and then reuse it from there for any later requests.
+#
+# To get 'as close as possible' to having the global catalog stored in the same
+# location independent of which folder our build was run from, we assign its
+# target to the given project's base Jamroot project. This works correctly as
+# long as we know the passed project is not standalone or one of Boost Build's
+# configuration module projects, as those do not have a Jamroot project in their
+# parent chain. Note also that we can still get our targets generated in
+# different folders in case when one build project references a target from
+# another build project with its own separate Jamroot.
+#
+# FIXME: Ideally the catalog target should be created as part of the boostbook
+# project and stored in some central location for all used standalone projects,
+# shared between all builds made on that system. This, however, would require
+# much more thought to add the necessary changes to Boost Build's internal design.
+#
+local rule xml-catalog ( project )
+{
+ if ! $(.xml-catalog)
+ {
+ local project-module = [ $(project).project-module ] ;
+ local root-module = [ project.get-jamroot-module $(project-module) ] ;
+ if ! $(root-module)
+ {
+ import errors ;
+ if [ project.is-config-module $(project-module) ]
+ {
+ errors.user-error boostbook targets can not be declared in Boost
+ Build's configuration modules. ;
+ }
+ else
+ {
+ errors.user-error boostbook targets can not be declared in
+ standalone projects. : use a Jamfile/Jamroot project
+ instead. ;
+ }
+ }
+ local root-project = [ project.target $(root-module) ] ;
+
+ .xml-catalog = [ virtual-target.register [ new file-target
+ boostbook_catalog : XML : $(root-project) : [ new action :
+ boostbook.generate-xml-catalog ] ] ] ;
+ .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
+ .xml-catalog-file = $(.xml-catalog-file:J=/) ;
+ }
+ return $(.xml-catalog) $(.xml-catalog-file) ;
+}
+
+
+class boostbook-target-class : basic-target
+{
+ import generators ;
+ import property-set ;
+ import virtual-target ;
+
+ rule construct ( name : sources * : property-set )
+ {
+ # Generate the catalog, but only once.
+ IMPORT boostbook : xml-catalog : $(__name__) : boostbook.xml-catalog ;
+ local global-catalog = [ boostbook.xml-catalog [ project ] ] ;
+ local catalog = $(global-catalog[1]) ;
+ local catalog-file = $(global-catalog[2]) ;
+ local targets ;
+
+ # Add the catalog to the property set.
+ property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
+
+ local type = none ;
+ local manifest ;
+ local format = [ $(property-set).get <format> ] ;
+ switch $(format)
+ {
+ case html : type = HTMLDIR ; manifest = HTML.manifest ;
+ case xhtml : type = XHTMLDIR ; manifest = HTML.manifest ;
+ case htmlhelp : type = HTMLHELP ; manifest = HTML.manifest ;
+ case onehtml : type = HTML ;
+ case man : type = MANPAGES ; manifest = man.manifest ;
+ case docbook : type = DOCBOOK ;
+ case fo : type = FO ;
+ case pdf : type = PDF ;
+ case ps : type = PS ;
+ case tests : type = TESTS ;
+ }
+
+ local target ;
+ if $(manifest)
+ {
+ # Sources --> DOCBOOK.
+ local docbook-target = [ generators.construct [ project ] : DOCBOOK
+ : $(property-set) : $(sources) ] ;
+ docbook-target = $(docbook-target[2]) ;
+ $(docbook-target).depends $(catalog) ;
+
+ # DOCBOOK --> type.
+ target = [ generators.construct [ project ] $(name)_$(manifest) :
+ $(type) : [ $(property-set).add-raw
+ <xsl:param>manifest=$(name)_$(manifest) ] : $(docbook-target) ]
+ ;
+ target = $(target[2]) ;
+ local name = [ $(property-set).get <name> ] ;
+ name ?= $(format) ;
+ $(target).set-path $(name) ;
+ }
+ else
+ {
+ # Sources --> type.
+ target = [ generators.construct [ project ] : $(type) :
+ $(property-set) : $(sources) ] ;
+ target = $(target[2]) ;
+ if ! $(target)
+ {
+ import errors ;
+ errors.error Cannot build documentation type '$(format)'. ;
+ }
+ }
+ $(target).depends $(catalog) ;
+
+ return [ property-set.empty ] $(target) ;
+ }
+}
+
+
+# Declare a boostbook target.
+#
+rule boostbook ( target-name : sources * : requirements * : default-build * )
+{
+ return [ targets.create-metatarget boostbook-target-class :
+ [ project.current ] : $(target-name) : $(sources) : $(requirements) :
+ $(default-build) ] ;
+}
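+
+# Illustrative usage only (target and source names are hypothetical): declare a
+# BoostBook target and select one of the formats handled by
+# boostbook-target-class above, e.g.
+#
+#   boostbook mydoc : mydoc.xml : <format>html ;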
+
+
+rule boostbook-to-tests ( target : source : properties * )
+{
+ lock-config ;
+ local boost_root = [ modules.peek : BOOST_ROOT ] ;
+ local native-path = [ path.native [ path.join $(.boostbook-xsl-dir) testing
+ Jamfile ] ] ;
+ local stylesheet = $(native-path:S=.xsl) ;
+ xsltproc.xslt $(target) : $(source) $(stylesheet) : $(properties)
+ <xsl:param>boost.root=$(boost_root) ;
+}
+
+
+#############################################################################
+# Dependency scanners
+#############################################################################
+# XInclude scanner. Mostly stolen from c-scanner. :)
+# Note that this assumes an "xi" prefix for XIncludes. This is not always the
+# case for XML documents, but we assume it is true for anything we encounter.
+#
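+# For example (illustrative), a line such as
+#   <xi:include href="section1.xml"/>
+# is matched by the pattern below, and section1.xml is then tracked as a
+# dependency of the including document.
+#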
+class xinclude-scanner : scanner
+{
+ import scanner ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "xi:include[ ]*href=\"([^\"]*)\"" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ NOCARE $(matches) ;
+ INCLUDES $(target) : $(matches) ;
+ SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
+
+ scanner.propagate $(__name__) : $(matches) : $(target) ;
+ }
+}
+
+scanner.register xinclude-scanner : xsl:path ;
+type.set-scanner XML : xinclude-scanner ;
diff --git a/tools/build/src/tools/borland.jam b/tools/build/src/tools/borland.jam
new file mode 100644
index 0000000000..3fc215da95
--- /dev/null
+++ b/tools/build/src/tools/borland.jam
@@ -0,0 +1,221 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2003 Rene Rivera
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Borland's command-line compiler.
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature : get-values ;
+import type ;
+import common ;
+
+feature.extend toolset : borland ;
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters borland :
+ version $(version) ] ;
+
+ local command = [ common.get-invocation-command borland : bcc32.exe
+ : $(command) ] ;
+
+ common.handle-options borland : $(condition) : $(command) : $(options) ;
+
+ if $(command)
+ {
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+ root = $(command:D) ;
+
+ flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
+ flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
+ flags borland.link RUN_PATH $(condition) : $(root)/bin ;
+ flags borland .root $(condition) : $(root)/bin/ ;
+}
+
+
+# A borland-specific target type
+type.register BORLAND.TDS : tds ;
+
+# Declare generators
+
+generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
+generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
+
+generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
+generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
+generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
+generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
+
+# Declare flags
+
+flags borland.compile OPTIONS <debug-symbols>on : -v ;
+flags borland.link OPTIONS <debug-symbols>on : -v ;
+
+flags borland.compile OPTIONS <optimization>off : -Od ;
+flags borland.compile OPTIONS <optimization>speed : -O2 ;
+flags borland.compile OPTIONS <optimization>space : -O1 ;
+
+if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
+{
+ flags borland CFLAGS <inlining>off : -vi- ;
+ flags borland CFLAGS <inlining>on : -vi -w-inl ;
+ flags borland CFLAGS <inlining>full : -vi -w-inl ;
+}
+else
+{
+ flags borland CFLAGS : -vi- ;
+}
+
+flags borland.compile OPTIONS <warnings>off : -w- ;
+flags borland.compile OPTIONS <warnings>all : -w ;
+flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
+
+
+# Deal with various runtime configs...
+
+# This should not be used for DLLs.
+flags borland OPTIONS <user-interface>console : -tWC ;
+
+# -tWR sets -tW as well, so we turn it off here and then turn it
+# on again later if we need it:
+flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
+flags borland OPTIONS <user-interface>gui : -tW ;
+
+flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
+# Hmm.. not sure what's going on here.
+flags borland OPTIONS : -WM- ;
+flags borland OPTIONS <threading>multi : -tWM ;
+
+
+
+flags borland.compile OPTIONS <cflags> ;
+flags borland.compile.c++ OPTIONS <cxxflags> ;
+flags borland.compile DEFINES <define> ;
+flags borland.compile INCLUDES <include> ;
+
+flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
+
+#
+# For C++ compiles, the following options are turned on by default:
+#
+# -j5 stops after 5 errors
+# -g255 allow an unlimited number of warnings
+# -q no banner
+# -c compile to object
+# -P C++ code regardless of file extension
+# -a8 8 byte alignment, this option is on in the IDE by default
+# and affects binary compatibility.
+#
+
+# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+# For C, we don't pass the -P flag.
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
+}
+
+
+# Declare flags and action for linking
+toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
+toolset.flags borland.link LIBRARY_PATH <library-path> ;
+toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
+toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
+toolset.flags borland.link LIBRARIES <library-file> ;
+
+flags borland.link OPTIONS <linkflags> ;
+flags borland.link OPTIONS <link>shared : -tWD ;
+
+flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
+flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
+
+
+
+# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
+# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
+# $(BCC_TOOL_PATH) to the path
+# The NEED_IMPLIB variable controls whether we need to invoke implib.
+
+flags borland.archive AROPTIONS <archiveflags> ;
+
+# Declare action for archives. We don't use a response file
+# since it's hard to get "+-" there.
+# The /P256 increases 'page' size -- with too low
+# values tlib fails when building large applications.
+# CONSIDER: don't know what 'together' is for...
+actions updated together piecemeal archive
+{
+ $(.set-path)$(.root:W)$(.old-path)
+ tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
+}
+
+
+if [ os.name ] = CYGWIN
+{
+ .set-path = "cmd /S /C set \"PATH=" ;
+ .old-path = ";%PATH%\" \"&&\"" ;
+
+
+ # Couldn't get TLIB to stop being confused about pathnames
+ # containing dashes (it seemed to treat them as option separators
+ # when passed through from bash), so we explicitly write the
+ # command into a .bat file and execute that. TLIB is also finicky
+ # about pathname style! Forward slashes, too, are treated as
+ # options.
+ actions updated together piecemeal archive
+ {
+ chdir $(<:D)
+ echo +-$(>:BS) > $(<:BS).rsp
+ $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
+ }
+}
+else if [ os.name ] = NT
+{
+ .set-path = "set \"PATH=" ;
+ .old-path = ";%PATH%\"
+ " ;
+}
+else
+{
+ .set-path = "PATH=\"" ;
+ .old-path = "\":$PATH
+ export PATH
+ " ;
+}
+
+RM = [ common.rm-command ] ;
+
+nl = "
+" ;
+
+actions link
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
+}
+
+
+actions link.dll bind LIBRARIES RSP
+{
+ $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
+}
+
+# It seems impossible to specify an output file with a directory when compiling
+# asm files using bcc32, so use tasm32 directly.
+# /ml makes all symbol names case-sensitive.
+actions asm
+{
+ $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
+}
+
diff --git a/tools/build/src/tools/builtin.jam b/tools/build/src/tools/builtin.jam
new file mode 100644
index 0000000000..d62680afd9
--- /dev/null
+++ b/tools/build/src/tools/builtin.jam
@@ -0,0 +1,974 @@
+# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
+# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
+# Copyright 2006 Juergen Hunold
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines standard features and rules.
+
+import alias ;
+import "class" : new ;
+import errors ;
+import feature ;
+import generators ;
+import numbers ;
+import os ;
+import path ;
+import print ;
+import project ;
+import property ;
+import regex ;
+import scanner ;
+import sequence ;
+import stage ;
+import symlink ;
+import toolset ;
+import type ;
+import targets ;
+import types/register ;
+import utility ;
+import virtual-target ;
+import message ;
+import convert ;
+
+# FIXME: the following generate module import is not needed here but removing it
+# too hastily will break using code (e.g. the main Boost library Jamroot file)
+# that forgot to import the generate module before calling the generate rule.
+import generate ;
+
+
+.os-names = aix android bsd cygwin darwin freebsd hpux iphone linux netbsd openbsd osf
+ qnx qnxnto sgi solaris unix unixware windows
+ elf # Not actually an OS -- used for targeting bare metal where object
+                      # format is ELF. This catches both -elf and -eabi gcc targets as well
+ # as other compilers targeting ELF. It is not clear how often we need
+ # the 'elf' key as opposed to other bare metal targets, but let us
+ # stick with gcc naming.
+ ;
+
+# Feature used to determine which OS we're on. New <target-os> and <host-os>
+# features should be used instead.
+local os = [ modules.peek : OS ] ;
+feature.feature os : $(os) : propagated link-incompatible ;
+
+
+# Translates from bjam current OS to the os tags used in host-os and target-os,
+# i.e. returns the running host-os.
+#
+local rule default-host-os ( )
+{
+ local host-os ;
+ if [ os.name ] in $(.os-names:U)
+ {
+ host-os = [ os.name ] ;
+ }
+ else
+ {
+ switch [ os.name ]
+ {
+ case NT : host-os = windows ;
+ case AS400 : host-os = unix ;
+ case MINGW : host-os = windows ;
+ case BSDI : host-os = bsd ;
+ case COHERENT : host-os = unix ;
+ case DRAGONFLYBSD : host-os = bsd ;
+ case IRIX : host-os = sgi ;
+ case MACOSX : host-os = darwin ;
+ case KFREEBSD : host-os = freebsd ;
+ case LINUX : host-os = linux ;
+ case SUNOS :
+ ECHO "SunOS is not a supported operating system." ;
+ ECHO "We believe last version of SunOS was released in 1992, " ;
+ ECHO "so if you get this message, something is very wrong with configuration logic. " ;
+ ECHO "Please report this as a bug. " ;
+ EXIT ;
+ case * : host-os = unix ;
+ }
+ }
+ return $(host-os:L) ;
+}
+
+
+# The two OS features define a known set of abstract OS names. The host-os is
+# the OS under which bjam is running. Even though this should really be a fixed
+# property we need to list all the values to prevent unknown value errors. Both
+# set the default value to the current OS to account for the default use case of
+# building on the target OS.
+feature.feature host-os : $(.os-names) ;
+feature.set-default host-os : [ default-host-os ] ;
+
+feature.feature target-os : $(.os-names) : propagated link-incompatible ;
+feature.set-default target-os : [ default-host-os ] ;
+
+
+feature.feature toolset : : implicit propagated symmetric ;
+feature.feature stdlib : native : propagated composite ;
+feature.feature link : shared static : propagated ;
+feature.feature runtime-link : shared static : propagated ;
+feature.feature runtime-debugging : on off : propagated ;
+feature.feature optimization : off speed space : propagated ;
+feature.feature profiling : off on : propagated ;
+feature.feature inlining : off on full : propagated ;
+feature.feature threading : single multi : propagated ;
+feature.feature rtti : on off : propagated ;
+feature.feature exception-handling : on off : propagated ;
+
+# Whether there is support for asynchronous EH (e.g. catching SEGVs).
+feature.feature asynch-exceptions : off on : propagated ;
+
+# Whether all extern "C" functions are considered nothrow by default.
+feature.feature extern-c-nothrow : off on : propagated ;
+
+feature.feature debug-symbols : on off : propagated ;
+# Controls whether the binary should be stripped -- that is, have
+# everything not necessary for running it removed. This option should
+# not be very often needed. Also, this feature will show up in
+# target paths of everything, not just binaries. Should fix that
+# when implementing feature relevance.
+feature.feature strip : off on : propagated ;
+feature.feature define : : free ;
+feature.feature undef : : free ;
+feature.feature "include" : : free path ; #order-sensitive ;
+feature.feature cflags : : free ;
+feature.feature cxxflags : : free ;
+feature.feature fflags : : free ;
+feature.feature asmflags : : free ;
+feature.feature linkflags : : free ;
+feature.feature archiveflags : : free ;
+feature.feature version : : free ;
+
+# Generic, i.e. non-language specific, flags for tools.
+feature.feature flags : : free ;
+feature.feature location-prefix : : free ;
+
+
+# The following features are incidental since they have no effect on built
+# products. Not making them incidental will result in problems in corner cases,
+# e.g.:
+#
+# unit-test a : a.cpp : <use>b ;
+# lib b : a.cpp b ;
+#
+# Here, if <use> is not incidental, we would decide we have two targets for
+# a.obj with different properties and complain about it.
+#
+# Note that making a feature incidental does not mean it is ignored. It may be
+# ignored when creating a virtual target, but the rest of the build process
+# will use them.
+feature.feature use : : free dependency incidental ;
+feature.feature dependency : : free dependency incidental ;
+feature.feature implicit-dependency : : free dependency incidental ;
+
+feature.feature warnings :
+ on # Enable default/"reasonable" warning level for the tool.
+ all # Enable all possible warnings issued by the tool.
+ off # Disable all warnings issued by the tool.
+ : incidental propagated ;
+
+feature.feature warnings-as-errors :
+ off # Do not fail the compilation if there are warnings.
+ on # Fail the compilation if there are warnings.
+ : incidental propagated ;
+
+# Feature that allows us to configure the maximal template instantiation depth
+# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers
+# actually support this configuration setting.
+#
+# Note that Boost Build currently does not allow defining features that take any
+# positive integral value as a parameter, which is what we need here, so we just
+# define some of the values here and leave it up to the user to extend this set
+# as needed using the feature.extend rule.
+#
+# TODO: This should be upgraded as soon as Boost Build adds support for custom
+# validated feature values or at least features allowing any positive integral
+# value. See the related Boost Build trac ticket #194.
+#
+feature.feature c++-template-depth
+ :
+ [ numbers.range 64 1024 : 64 ]
+ [ numbers.range 20 1000 : 10 ]
+ # Maximum template instantiation depth guaranteed for ANSI/ISO C++
+ # conforming programs.
+ 17
+ :
+ incidental optional propagated ;
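+
+# Illustrative usage (hypothetical values): a target can request e.g.
+# <c++-template-depth>512 in its requirements, and values outside the ranges
+# generated above can be added by the user with, say,
+# "feature.extend c++-template-depth : 2048 ;".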
+
+feature.feature source : : free dependency incidental ;
+feature.feature library : : free dependency incidental ;
+feature.feature file : : free dependency incidental ;
+feature.feature find-shared-library : : free ; #order-sensitive ;
+feature.feature find-static-library : : free ; #order-sensitive ;
+feature.feature library-path : : free path ; #order-sensitive ;
+
+# Internal feature.
+feature.feature library-file : : free dependency ;
+
+feature.feature name : : free ;
+feature.feature tag : : free ;
+feature.feature search : : free path ; #order-sensitive ;
+feature.feature location : : free path ;
+feature.feature dll-path : : free path ;
+feature.feature hardcode-dll-paths : true false : incidental ;
+
+
+# An internal feature that holds the paths of all dependency shared libraries.
+# On Windows, it is needed so that we can add all those paths to PATH when
+# running applications. On Linux, it is needed to add proper -rpath-link command
+# line options.
+feature.feature xdll-path : : free path ;
+
+# Provides means to specify a def-file for Windows DLLs.
+feature.feature def-file : : free dependency ;
+
+feature.feature suppress-import-lib : false true : incidental ;
+
+# Internal feature used to store the name of a bjam action to call when building
+# a target.
+feature.feature action : : free ;
+
+# This feature is used to allow specific generators to run. For example, QT
+# tools can only be invoked when QT library is used. In that case, <allow>qt
+# will be in usage requirement of the library.
+feature.feature allow : : free ;
+
+# The addressing model to generate code for. Currently a limited set only
+# specifying the bit size of pointers.
+feature.feature address-model : 16 32 64 32_64 : propagated optional ;
+
+# Type of CPU architecture to compile for.
+feature.feature architecture :
+ # x86 and x86-64
+ x86
+
+ # ia64
+ ia64
+
+ # Sparc
+ sparc
+
+ # RS/6000 & PowerPC
+ power
+
+ # MIPS/SGI
+ mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
+
+ # HP/PA-RISC
+ parisc
+
+ # Advanced RISC Machines
+ arm
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ combined
+ combined-x86-power
+
+ : propagated optional ;
+
+# The specific instruction set in an architecture to compile.
+feature.feature instruction-set :
+ # x86 and x86-64
+ native i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
+ pentium3m pentium-m pentium4 pentium4m prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3 athlon64-sse3 amdfam10 barcelona
+ bdver1 bdver2 bdver3 btver1 btver2 winchip-c6 winchip2 c3 c3-2 atom
+
+ # ia64
+ itanium itanium1 merced itanium2 mckinley
+
+ # Sparc
+ v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
+ sparclet tsc701 v9 ultrasparc ultrasparc3
+
+ # RS/6000 & PowerPC
+ 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
+ 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
+ power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
+
+ # MIPS
+ 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
+ r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
+ vr5000 vr5400 vr5500
+
+ # HP/PA-RISC
+ 700 7100 7100lc 7200 7300 8000
+
+ # Advanced RISC Machines
+ armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
+ armv7 armv7s
+
+ : propagated optional ;
+
+# Used to select a specific variant of C++ ABI if the compiler supports several.
+feature.feature c++abi : : propagated optional ;
+
+feature.feature conditional : : incidental free ;
+
+# The value of 'no' prevents building of a target.
+feature.feature build : yes no : optional ;
+
+# Windows-specific features
+
+feature.feature user-interface : console gui wince native auto ;
+
+feature.feature variant : : implicit composite propagated symmetric ;
+
+
+# Declares a new variant.
+#
+# First determines explicit properties for this variant, by refining parents'
+# explicit properties with the passed explicit properties. The result is
+# remembered and will be used if this variant is used as parent.
+#
+# Second, determines the full property set for this variant by adding to the
+# explicit properties default values for all missing non-symmetric properties.
+#
+# Lastly, makes appropriate value of 'variant' property expand to the full
+# property set.
+#
+rule variant ( name # Name of the variant
+ : parents-or-properties * # Specifies parent variants, if
+ # 'explicit-properties' are given, and
+ # explicit-properties or parents otherwise.
+ : explicit-properties * # Explicit properties.
+ )
+{
+ local parents ;
+ if ! $(explicit-properties)
+ {
+ if $(parents-or-properties[1]:G)
+ {
+ explicit-properties = $(parents-or-properties) ;
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+ }
+ else
+ {
+ parents = $(parents-or-properties) ;
+ }
+
+ # The problem is that we have to check for conflicts between base variants.
+ if $(parents[2])
+ {
+ errors.error "multiple base variants are not yet supported" ;
+ }
+
+ local inherited ;
+ # Add explicitly specified properties for parents.
+ for local p in $(parents)
+ {
+ # TODO: This check may be made stricter.
+ if ! [ feature.is-implicit-value $(p) ]
+ {
+ errors.error "Invalid base variant" $(p) ;
+ }
+
+ inherited += $(.explicit-properties.$(p)) ;
+ }
+ property.validate $(explicit-properties) ;
+ explicit-properties = [ property.refine $(inherited)
+ : $(explicit-properties) ] ;
+
+ # Record explicitly specified properties for this variant. We do this after
+ # inheriting parents' properties so they affect other variants derived from
+ # this one.
+ .explicit-properties.$(name) = $(explicit-properties) ;
+
+ feature.extend variant : $(name) ;
+ feature.compose <variant>$(name) : $(explicit-properties) ;
+}
+IMPORT $(__name__) : variant : : variant ;
+
+
+variant debug : <optimization>off <debug-symbols>on <inlining>off
+ <runtime-debugging>on ;
+variant release : <optimization>speed <debug-symbols>off <inlining>full
+ <runtime-debugging>off <define>NDEBUG ;
+variant profile : release : <profiling>on <debug-symbols>on ;
+
+
+class searched-lib-target : abstract-file-target
+{
+ rule __init__ ( name
+ : project
+ : shared ?
+ : search *
+ : action
+ )
+ {
+ abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
+ : $(action) : ;
+
+ self.shared = $(shared) ;
+ self.search = $(search) ;
+ }
+
+ rule shared ( )
+ {
+ return $(self.shared) ;
+ }
+
+ rule search ( )
+ {
+ return $(self.search) ;
+ }
+
+ rule actualize-location ( target )
+ {
+ NOTFILE $(target) ;
+ }
+
+ rule path ( )
+ {
+ }
+}
+
+
+# The generator class for libraries (target type LIB). Depending on properties
+# it will request building of the appropriate specific library type --
+# SEARCHED_LIB, SHARED_LIB or STATIC_LIB.
+#
+class lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8)
+ : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) :
+ $(17) : $(18) : $(19) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ # The lib generator is composing, and can be only invoked with an
+ # explicit name. This check is present in generator.run (and so in
+ # builtin.linking-generator) but duplicated here to avoid doing extra
+ # work.
+ if $(name)
+ {
+ local properties = [ $(property-set).raw ] ;
+ # Determine the needed target type.
+ local actual-type ;
+ # <source>files can be generated by <conditional>@rule feature
+ # in which case we do not consider it a SEARCHED_LIB type.
+ if ! <source> in $(properties:G) &&
+ ( <search> in $(properties:G) || <name> in $(properties:G) )
+ {
+ actual-type = SEARCHED_LIB ;
+ }
+ else if <file> in $(properties:G)
+ {
+ actual-type = LIB ;
+ }
+ else if <link>shared in $(properties)
+ {
+ actual-type = SHARED_LIB ;
+ }
+ else
+ {
+ actual-type = STATIC_LIB ;
+ }
+ property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
+ # Construct the target.
+ return [ generators.construct $(project) $(name) : $(actual-type)
+ : $(property-set) : $(sources) ] ;
+ }
+ }
+
+ rule viable-source-types ( )
+ {
+ return * ;
+ }
+}
+
+
+generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
+
+
+# The implementation of the 'lib' rule. Beyond standard syntax that rule allows
+# simplified: "lib a b c ;".
+#
+rule lib ( names + : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if $(names[2])
+ {
+ if <name> in $(requirements:G)
+ {
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify the <name> feature." ;
+ }
+ if $(sources)
+ {
+ errors.user-error "When several names are given to the 'lib' rule" :
+ "it is not allowed to specify sources." ;
+ }
+ }
+
+ # This is a circular module dependency so it must be imported here.
+ import targets ;
+
+ local project = [ project.current ] ;
+ local result ;
+
+ for local name in $(names)
+ {
+ local r = $(requirements) ;
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if ! $(sources) && ! <name> in $(requirements:G)
+ && ! <file> in $(requirements:G)
+ {
+ r += <name>$(name) ;
+ }
+ result += [ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : LIB
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(r) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+ }
+ return $(result) ;
+}
+IMPORT $(__name__) : lib : : lib ;
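+
+# Illustrative usage only (library names are hypothetical): besides the full
+# syntax, 'lib' supports the shorthand and pre-built forms described above,
+# e.g.
+#
+#   lib z png jpeg ;                        # several searched libraries at once
+#   lib z : : <name>z <search>/usr/lib ;    # pre-installed library found by name
+#   lib util : util.cpp : <link>static ;    # static library built from source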
+
+
+class searched-lib-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( )
+ {
+ # The requirements cause the generators to be tried *only* when we are
+ # building a lib target with a 'search' feature. This seems ugly --- all
+ # we want is to make sure searched-lib-generator is not invoked deep
+ # inside transformation search to produce intermediate targets.
+ generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if $(name)
+ {
+ # If 'name' is empty, it means we have not been called to build a
+ # top-level target. In this case, we just fail immediately, because
+ # searched-lib-generator cannot be used to produce intermediate
+ # targets.
+
+ local properties = [ $(property-set).raw ] ;
+ local shared ;
+ if <link>shared in $(properties)
+ {
+ shared = true ;
+ }
+
+ local search = [ feature.get-values <search> : $(properties) ] ;
+
+ local a = [ new null-action $(property-set) ] ;
+ local lib-name = [ feature.get-values <name> : $(properties) ] ;
+ lib-name ?= $(name) ;
+ local t = [ new searched-lib-target $(lib-name) : $(project)
+ : $(shared) : $(search) : $(a) ] ;
+ # We return sources for a simple reason. If there is
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to 'png'
+ # will link to 'z', too.
+ return [ property-set.create <xdll-path>$(search) ]
+ [ virtual-target.register $(t) ] $(sources) ;
+ }
+ }
+}
+
+generators.register [ new searched-lib-generator ] ;
+
+
+class prebuilt-lib-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8)
+ : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) :
+ $(17) : $(18) : $(19) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ local f = [ $(property-set).get <file> ] ;
+ return $(f) $(sources) ;
+ }
+}
+
+generators.register
+ [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
+
+generators.override builtin.prebuilt : builtin.lib-generator ;
+
+class preprocessed-target-class : basic-target
+{
+ import generators ;
+ rule construct ( name : sources * : property-set )
+ {
+ local result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
+ if ! $(result)
+ {
+ result = [ generators.construct [ project ]
+ $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
+ }
+ if ! $(result)
+ {
+ local s ;
+ for x in $(sources)
+ {
+ s += [ $(x).name ] ;
+ }
+ local p = [ project ] ;
+ errors.user-error
+ "In project" [ $(p).name ] :
+ "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
+ }
+ return $(result) ;
+ }
+}
+
+rule preprocessed ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+ return [ targets.main-target-alternative
+ [ new preprocessed-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
+ ] ] ;
+}
+
+IMPORT $(__name__) : preprocessed : : preprocessed ;
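+
+# Illustrative usage (hypothetical names): request the preprocessor output for
+# a single translation unit, e.g.
+#
+#   preprocessed foo : foo.cpp ;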
+
+class compile-action : action
+{
+ import sequence ;
+
+ rule __init__ ( targets * : sources * : action-name : properties * )
+ {
+ action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
+ }
+
+    # For all virtual targets in the same dependency graph as self, i.e. which
+ # belong to the same main target, add their directories to the include path.
+ #
+ rule adjust-properties ( property-set )
+ {
+ local s = [ $(self.targets[1]).creating-subvariant ] ;
+ if $(s)
+ {
+ return [ $(property-set).add-raw
+ [ $(s).implicit-includes "include" : H ] ] ;
+ }
+ else
+ {
+ return $(property-set) ;
+ }
+ }
+}
+
+
+# Declare a special compiler generator. The only thing it does is changing the
+# type used to represent 'action' in the constructed dependency graph to
+# 'compile-action'. That class in turn adds additional include paths to handle
+# cases when a source file includes headers which are generated themselves.
+#
+class C-compiling-generator : generator
+{
+ rule __init__ ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+ {
+ generator.__init__ $(id) : $(source-types) : $(target-types) :
+ $(requirements) : $(optional-properties) ;
+ }
+
+ rule action-class ( )
+ {
+ return compile-action ;
+ }
+}
+
+
+rule register-c-compiler ( id : source-types + : target-types + : requirements *
+ : optional-properties * )
+{
+ generators.register [ new C-compiling-generator $(id) : $(source-types) :
+ $(target-types) : $(requirements) : $(optional-properties) ] ;
+}
+
+# FIXME: this is ugly, should find a better way (we would like client code to
+# register all generators as "generators.some-rule" instead of
+# "some-module.some-rule".)
+#
+IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
+
+
+# The generator class for handling EXE and SHARED_LIB creation.
+#
+class linking-generator : generator
+{
+ import path ;
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule __init__ ( id
+ composing ? : # The generator will be composing if a non-empty
+ # string is passed or the parameter is not given. To
+ # make the generator non-composing, pass an empty
+ # string ("").
+ source-types + :
+ target-types + :
+ requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ # Add <library-path> properties for all searched libraries.
+ local extra ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = SEARCHED_LIB
+ {
+ local search = [ $(s).search ] ;
+ extra += <library-path>$(search) ;
+ }
+ }
+
+        # It is possible that sources include shared libraries that did not come
+ # from 'lib' targets, e.g. .so files specified as sources. In this case
+ # we have to add extra dll-path properties and propagate extra xdll-path
+ # properties so that application linking to us will get xdll-path to
+ # those libraries.
+ local extra-xdll-paths ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
+ {
+ # Unfortunately, we do not have a good way to find the path to a
+ # file, so use this nasty approach.
+ #
+ # TODO: This needs to be done better. One thing that is really
+ # broken with this is that it does not work correctly with
+ # projects having multiple source locations.
+ local p = [ $(s).project ] ;
+ local location = [ path.root [ $(s).name ]
+ [ $(p).get source-location ] ] ;
+ extra-xdll-paths += [ path.parent $(location) ] ;
+ }
+ }
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: do not need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, python extensions) can not
+ # hardcode paths to dependent libraries.
+ if [ $(property-set).get <hardcode-dll-paths> ] = true
+ && [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ local xdll-path = [ $(property-set).get <xdll-path> ] ;
+ extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
+ }
+
+ if $(extra)
+ {
+ property-set = [ $(property-set).add-raw $(extra) ] ;
+ }
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ local ur ;
+ if $(result)
+ {
+ ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
+ ur = [ $(ur).add
+ [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
+ }
+ return $(ur) $(result) ;
+ }
+
+ rule extra-usage-requirements ( created-targets * : property-set )
+ {
+ local result = [ property-set.empty ] ;
+ local extra ;
+
+        # Add appropriate <xdll-path> usage requirements.
+ local raw = [ $(property-set).raw ] ;
+ if <link>shared in $(raw)
+ {
+ local paths ;
+ local pwd = [ path.pwd ] ;
+ for local t in $(created-targets)
+ {
+ if [ type.is-derived [ $(t).type ] SHARED_LIB ]
+ {
+ paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
+ }
+ }
+ extra += $(paths:G=<xdll-path>) ;
+ }
+
+ # We need to pass <xdll-path> features that we've got from sources,
+        # because if a shared library is built, an exe using it needs to know paths
+ # to other shared libraries this one depends on in order to be able to
+ # find them all at runtime.
+
+        # Just pass all features in property-set; it is theoretically possible
+        # that we will propagate <xdll-path> features explicitly specified by
+        # the user, but then the user is to blame for using an internal feature.
+ local values = [ $(property-set).get <xdll-path> ] ;
+ extra += $(values:G=<xdll-path>) ;
+
+ if $(extra)
+ {
+ result = [ property-set.create $(extra) ] ;
+ }
+ return $(result) ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local sources2 ; # Sources to pass to inherited rule.
+ local properties2 ; # Properties to pass to inherited rule.
+ local libraries ; # Library sources.
+
+ # Searched libraries are not passed as arguments to the linker but via
+ # some option. So, we pass them to the action using a property.
+ properties2 = [ $(property-set).raw ] ;
+ local fsa ;
+ local fst ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
+ {
+ local name = [ $(s).name ] ;
+ if [ $(s).shared ]
+ {
+ fsa += $(name) ;
+ }
+ else
+ {
+ fst += $(name) ;
+ }
+ }
+ else
+ {
+ sources2 += $(s) ;
+ }
+ }
+ properties2 += <find-shared-library>$(fsa:J=&&)
+ <find-static-library>$(fst:J=&&) ;
+
+ return [ generator.generated-targets $(sources2)
+ : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
+ }
+}
+
+
+rule register-linker ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new linking-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
+# The generator class for handling STATIC_LIB creation.
+#
+class archive-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( id composing ? : source-types + : target-types +
+ : requirements * )
+ {
+ composing ?= true ;
+ generator.__init__ $(id) $(composing) : $(source-types)
+ : $(target-types) : $(requirements) ;
+ }
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ sources += [ $(property-set).get <library> ] ;
+
+ local result = [ generator.run $(project) $(name) : $(property-set)
+ : $(sources) ] ;
+
+ # For static linking, if we get a library in source, we can not directly
+ # link to it so we need to cause our dependencies to link to that
+ # library. There are two approaches:
+ # - adding the library to the list of returned targets.
+ # - using the <library> usage requirements.
+ # The problem with the first is:
+ #
+ # lib a1 : : <file>liba1.a ;
+ # lib a2 : a2.cpp a1 : <link>static ;
+ # install dist : a2 ;
+ #
+ # here we will try to install 'a1', even though it is not necessary in
+ # the general case. With the second approach, even indirect dependants
+ # will link to the library, but it should not cause any harm. So, return
+ # all LIB sources together with created targets, so that dependants link
+ # to them.
+ local usage-requirements ;
+ if [ $(property-set).get <link> ] = static
+ {
+ for local t in $(sources)
+ {
+ if [ type.is-derived [ $(t).type ] LIB ]
+ {
+ usage-requirements += <library>$(t) ;
+ }
+ }
+ }
+
+ usage-requirements = [ property-set.create $(usage-requirements) ] ;
+
+ return $(usage-requirements) $(result) ;
+ }
+}
+
+
+rule register-archiver ( id composing ? : source-types + : target-types +
+ : requirements * )
+{
+ generators.register [ new archive-generator $(id) $(composing)
+ : $(source-types) : $(target-types) : $(requirements) ] ;
+}
+
+
+# Generator that accepts everything and produces nothing. Useful as a general
+# fallback for toolset-specific actions like PCH generation.
+#
+class dummy-generator : generator
+{
+ import property-set ;
+
+ rule run ( project name ? : property-set : sources + )
+ {
+ return [ property-set.empty ] ;
+ }
+}
+
+IMPORT $(__name__) : register-linker register-archiver
+ : : generators.register-linker generators.register-archiver ;
diff --git a/tools/build/src/tools/builtin.py b/tools/build/src/tools/builtin.py
new file mode 100644
index 0000000000..68ddfed4b1
--- /dev/null
+++ b/tools/build/src/tools/builtin.py
@@ -0,0 +1,728 @@
+# Status: minor updates by Steven Watanabe to make gcc work
+#
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+""" Defines standard features and rules.
+"""
+
+import b2.build.targets as targets
+
+import sys
+from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
+from b2.util.utility import *
+from b2.util import path, regex, bjam_signature
+import b2.tools.types
+from b2.manager import get_manager
+
+
+# Records explicit properties for a variant.
+# The key is the variant name.
+__variant_explicit_properties = {}
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ """
+ global __variant_explicit_properties
+
+ __variant_explicit_properties = {}
+
+@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"]))
+def variant (name, parents_or_properties, explicit_properties = []):
+ """ Declares a new variant.
+ First determines explicit properties for this variant, by
+ refining parents' explicit properties with the passed explicit
+ properties. The result is remembered and will be used if
+ this variant is used as parent.
+
+ Second, determines the full property set for this variant by
+ adding to the explicit properties default values for all properties
+        which are neither present nor symmetric.
+
+ Lastly, makes appropriate value of 'variant' property expand
+ to the full property set.
+ name: Name of the variant
+ parents_or_properties: Specifies parent variants, if
+ 'explicit_properties' are given,
+ and explicit_properties otherwise.
+ explicit_properties: Explicit properties.
+ """
+ parents = []
+ if not explicit_properties:
+ explicit_properties = parents_or_properties
+ else:
+ parents = parents_or_properties
+
+ inherited = property_set.empty()
+ if parents:
+
+        # If we allow multiple parents, we'd have to check for conflicts between
+        # base variants, and there has been no demand for that, so we don't bother.
+ if len (parents) > 1:
+ raise BaseException ("Multiple base variants are not yet supported")
+
+ p = parents[0]
+ # TODO: the check may be stricter
+ if not feature.is_implicit_value (p):
+ raise BaseException ("Invalid base variant '%s'" % p)
+
+ inherited = __variant_explicit_properties[p]
+
+ explicit_properties = property_set.create_with_validation(explicit_properties)
+ explicit_properties = inherited.refine(explicit_properties)
+
+ # Record explicitly specified properties for this variant
+ # We do this after inheriting parents' properties, so that
+ # they affect other variants, derived from this one.
+ __variant_explicit_properties[name] = explicit_properties
+
+ feature.extend('variant', [name])
+ feature.compose ("<variant>" + name, explicit_properties.all())
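+
+# Illustrative usage only, mirroring the standard variants declared on the jam
+# side (the property strings are examples):
+#
+#   variant('debug', [], ['<optimization>off', '<debug-symbols>on'])
+#   variant('release', [], ['<optimization>speed', '<define>NDEBUG'])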
+
+__os_names = """
+ amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
+ openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
+ vms windows
+""".split()
+
+# Translates from bjam current OS to the os tags used in host-os and target-os,
+# i.e. returns the running host-os.
+#
+def default_host_os():
+ host_os = os_name()
+ if host_os not in (x.upper() for x in __os_names):
+ if host_os == 'NT': host_os = 'windows'
+ elif host_os == 'AS400': host_os = 'unix'
+ elif host_os == 'MINGW': host_os = 'windows'
+ elif host_os == 'BSDI': host_os = 'bsd'
+ elif host_os == 'COHERENT': host_os = 'unix'
+ elif host_os == 'DRAGONFLYBSD': host_os = 'bsd'
+ elif host_os == 'IRIX': host_os = 'sgi'
+ elif host_os == 'MACOSX': host_os = 'darwin'
+ elif host_os == 'KFREEBSD': host_os = 'freebsd'
+ elif host_os == 'LINUX': host_os = 'linux'
+ else: host_os = 'unix'
+ return host_os.lower()
+
+def register_globals ():
+ """ Registers all features and variants declared by this module.
+ """
+
+ # This feature is used to determine which OS we're on.
+ # In future, this may become <target-os> and <host-os>
+ # TODO: check this. Compatibility with bjam names? Subfeature for version?
+ os = sys.platform
+ feature.feature ('os', [os], ['propagated', 'link-incompatible'])
+
+
+ # The two OS features define a known set of abstract OS names. The host-os is
+ # the OS under which bjam is running. Even though this should really be a fixed
+ # property we need to list all the values to prevent unknown value errors. Both
+ # set the default value to the current OS to account for the default use case of
+ # building on the target OS.
+ feature.feature('host-os', __os_names)
+ feature.set_default('host-os', default_host_os())
+
+ feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
+ feature.set_default('target-os', default_host_os())
+
+ feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
+
+ feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
+
+ feature.feature ('link', ['shared', 'static'], ['propagated'])
+ feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
+ feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
+
+
+ feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
+ feature.feature ('profiling', ['off', 'on'], ['propagated'])
+ feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])
+
+ feature.feature ('threading', ['single', 'multi'], ['propagated'])
+ feature.feature ('rtti', ['on', 'off'], ['propagated'])
+ feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
+
+ # Whether there is support for asynchronous EH (e.g. catching SEGVs).
+ feature.feature ('asynch-exceptions', ['on', 'off'], ['propagated'])
+
+ # Whether all extern "C" functions are considered nothrow by default.
+ feature.feature ('extern-c-nothrow', ['off', 'on'], ['propagated'])
+
+ feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
+ feature.feature ('define', [], ['free'])
+ feature.feature ('undef', [], ['free'])
+ feature.feature ('include', [], ['free', 'path']) #order-sensitive
+ feature.feature ('cflags', [], ['free'])
+ feature.feature ('cxxflags', [], ['free'])
+ feature.feature ('asmflags', [], ['free'])
+ feature.feature ('linkflags', [], ['free'])
+ feature.feature ('archiveflags', [], ['free'])
+ feature.feature ('version', [], ['free'])
+
+ feature.feature ('location-prefix', [], ['free'])
+
+ feature.feature ('action', [], ['free'])
+
+
+ # The following features are incidental, since in themselves they have
+ # no effect on build products. Not making them incidental will result in
+ # problems in corner cases, for example:
+ #
+ #    unit-test a : a.cpp : <use>b ;
+ #    lib b : a.cpp b ;
+ #
+ # Here, if <use> is not incidental, we'll decide we have two targets for
+ # a.obj with different properties and will complain.
+ #
+ # Note that making a feature incidental does not mean it is ignored. It may
+ # be ignored when creating the virtual target, but the rest of the build
+ # process will still use it.
+ feature.feature ('use', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])
+
+ feature.feature('warnings', [
+ 'on', # Enable default/"reasonable" warning level for the tool.
+ 'all', # Enable all possible warnings issued by the tool.
+ 'off'], # Disable all warnings issued by the tool.
+ ['incidental', 'propagated'])
+
+ feature.feature('warnings-as-errors', [
+ 'off', # Do not fail the compilation if there are warnings.
+ 'on'], # Fail the compilation if there are warnings.
+ ['incidental', 'propagated'])
+
+ feature.feature('c++-template-depth',
+ [str(i) for i in range(64,1024+1,64)] +
+ [str(i) for i in range(20,1000+1,10)] +
+ # Maximum template instantiation depth guaranteed for ANSI/ISO C++
+ # conforming programs.
+ ['17'],
+ ['incidental', 'optional', 'propagated'])
+
+ feature.feature ('source', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('library', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('file', [], ['free', 'dependency', 'incidental'])
+ feature.feature ('find-shared-library', [], ['free']) #order-sensitive ;
+ feature.feature ('find-static-library', [], ['free']) #order-sensitive ;
+ feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
+ # Internal feature.
+ feature.feature ('library-file', [], ['free', 'dependency'])
+
+ feature.feature ('name', [], ['free'])
+ feature.feature ('tag', [], ['free'])
+ feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
+ feature.feature ('location', [], ['free', 'path'])
+
+ feature.feature ('dll-path', [], ['free', 'path'])
+ feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])
+
+
+ # This is an internal feature which holds the paths of all dependency
+ # dynamic libraries. On Windows, it is needed so that we can add all
+ # those paths to PATH when running applications. On Linux, it is needed
+ # to add the proper -rpath-link command line options.
+ feature.feature ('xdll-path', [], ['free', 'path'])
+
+ # Provides a means to specify a def-file for Windows DLLs.
+ feature.feature ('def-file', [], ['free', 'dependency'])
+
+ # This feature is used to allow specific generators to run. For example,
+ # the Qt tools can only be invoked when the Qt library is used. In that
+ # case, <allow>qt will be in the usage requirements of the library.
+ feature.feature ('allow', [], ['free'])
+
+ # The addressing model to generate code for. Currently a limited set only
+ # specifying the bit size of pointers.
+ feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
+
+ # Type of CPU architecture to compile for.
+ feature.feature('architecture', [
+ # x86 and x86-64
+ 'x86',
+
+ # ia64
+ 'ia64',
+
+ # Sparc
+ 'sparc',
+
+ # RS/6000 & PowerPC
+ 'power',
+
+ # MIPS/SGI
+ 'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',
+
+ # HP/PA-RISC
+ 'parisc',
+
+ # Advanced RISC Machines
+ 'arm',
+
+ # Combined architectures for platforms/toolsets that support building for
+ # multiple architectures at once. "combined" would be the default multi-arch
+ # for the toolset.
+ 'combined',
+ 'combined-x86-power'],
+
+ ['propagated', 'optional'])
+
+ # The specific instruction set in an architecture to compile.
+ feature.feature('instruction-set', [
+ # x86 and x86-64
+ 'native', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
+ 'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'core2', 'corei7', 'corei7-avx', 'core-avx-i',
+ 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'merom', 'merom-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
+ 'yorksfield', 'nehalem', 'sandy-bridge', 'ivy-bridge', 'haswell', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
+ 'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'k8-sse3', 'opteron-sse3', 'athlon64-sse3', 'amdfam10', 'barcelona',
+ 'bdver1', 'bdver2', 'bdver3', 'btver1', 'btver2', 'winchip-c6', 'winchip2', 'c3', 'c3-2', 'atom',
+
+ # ia64
+ 'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',
+
+ # Sparc
+ 'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
+ 'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',
+
+ # RS/6000 & PowerPC
+ '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
+ '603', '603e', '604', '604e', '620', '630', '740', '7400',
+ '7450', '750', '801', '821', '823', '860', '970', '8540',
+ 'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
+ 'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
+ 'rios1', 'rsc', 'rios2', 'rs64a',
+
+ # MIPS
+ '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
+ 'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
+ 'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
+ 'vr4111', 'vr4120', 'vr4130', 'vr4300',
+ 'vr5000', 'vr5400', 'vr5500',
+
+ # HP/PA-RISC
+ '700', '7100', '7100lc', '7200', '7300', '8000',
+
+ # Advanced RISC Machines
+ 'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
+ 'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],
+
+ ['propagated', 'optional'])
+
+ feature.feature('conditional', [], ['incidental', 'free'])
+
+ # The value of 'no' prevents building of a target.
+ feature.feature('build', ['yes', 'no'], ['optional'])
+
+ # Windows-specific features
+ feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
+ feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
+
+
+ variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
+ variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
+ '<runtime-debugging>off', '<define>NDEBUG'])
+ variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
+
+
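+ # Illustrative usage sketch (not executed here): the variants and features
+ # declared above are what a user selects on the bjam command line or in a
+ # Jamfile, for example:
+ #
+ #   bjam variant=release link=static threading=multi
+ #
+ # or, in a Jamfile, as target requirements:
+ #
+ #   exe hello : hello.cpp : <optimization>speed <debug-symbols>off ;
+ #
+ # A project can also compose its own variant from an existing one via the
+ # 'variant' rule registered below, e.g. (hypothetical variant name):
+ #
+ #   variant my-release : release : <define>EXTRA ;
+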
+reset ()
+register_globals ()
+
+class SearchedLibTarget (virtual_target.AbstractFileTarget):
+ def __init__ (self, name, project, shared, search, action):
+ virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
+
+ self.shared_ = shared
+ self.search_ = search
+
+ def shared (self):
+ return self.shared_
+
+ def search (self):
+ return self.search_
+
+ def actualize_location (self, target):
+ bjam.call("NOTFILE", target)
+
+ def path (self):
+ #FIXME: several functions rely on this not being None
+ return ""
+
+
+class CScanner (scanner.Scanner):
+ def __init__ (self, includes):
+ scanner.Scanner.__init__ (self)
+
+ self.includes_ = []
+
+ for i in includes:
+ self.includes_.extend(i.split("&&"))
+
+ def pattern (self):
+ return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
+
+ def process (self, target, matches, binding):
+
+ angle = regex.transform (matches, "<(.*)>")
+ quoted = regex.transform (matches, '"(.*)"')
+
+ g = str(id(self))
+ b = os.path.normpath(os.path.dirname(binding[0]))
+
+ # Attach the binding of the including file to the included targets.
+ # When a target is created directly from a virtual target this extra
+ # information is unnecessary, but in other cases it allows us to
+ # distinguish between two headers of the same name included from
+ # different places. We don't need this extra information for angle
+ # includes, since they should not depend on the including file (we
+ # can't get a literal "." in the include path).
+ g2 = g + "#" + b
+
+ g = "<" + g + ">"
+ g2 = "<" + g2 + ">"
+ angle = [g + x for x in angle]
+ quoted = [g2 + x for x in quoted]
+
+ all = angle + quoted
+ bjam.call("mark-included", target, all)
+
+ engine = get_manager().engine()
+ engine.set_target_variable(angle, "SEARCH", get_value(self.includes_))
+ engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_))
+
+ # Just propagate the current scanner to the includes, in the hope
+ # that the includes do not change scanners.
+ get_manager().scanners().propagate(self, angle + quoted)
+
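+# Illustrative walk-through of CScanner.process (made-up file names): if an
+# instance with id(self) == 42 scans src/main.cpp containing
+#
+#   #include <vector>
+#   #include "util.h"
+#
+# then process() produces the targets '<42>vector' and '<42#src>util.h'.
+# SEARCH for the angle include is just the <include> paths given to the
+# scanner, while SEARCH for the quoted include is the including file's
+# directory ('src') followed by those paths, matching the usual compiler
+# lookup order for "..." includes.
+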
+scanner.register (CScanner, 'include')
+type.set_scanner ('CPP', CScanner)
+type.set_scanner ('C', CScanner)
+
+# Ported to trunk@47077
+class LibGenerator (generators.Generator):
+ """ The generator class for libraries (target type LIB). Depending on the
+ properties it will request building of the appropriate specific type --
+ SEARCHED_LIB, SHARED_LIB or STATIC_LIB.
+ """
+
+ def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
+ generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, prop_set, sources):
+
+ # The lib generator is composing, and can be only invoked with
+ # explicit name. This check is present in generator.run (and so in
+ # builtin.LinkingGenerator), but duplicate it here to avoid doing
+ # extra work.
+ if name:
+ properties = prop_set.raw()
+ # Determine the needed target type
+ actual_type = None
+ properties_grist = get_grist(properties)
+ if '<source>' not in properties_grist and \
+ ('<search>' in properties_grist or '<name>' in properties_grist):
+ actual_type = 'SEARCHED_LIB'
+ elif '<file>' in properties_grist:
+ # A prebuilt library specified via <file>; keep the LIB type so that
+ # the prebuilt-library generator (builtin.prebuilt) handles it.
+ actual_type = 'LIB'
+ elif '<link>shared' in properties:
+ actual_type = 'SHARED_LIB'
+ else:
+ actual_type = 'STATIC_LIB'
+
+ prop_set = prop_set.add_raw(['<main-target-type>LIB'])
+
+ # Construct the target.
+ return generators.construct(project, name, actual_type, prop_set, sources)
+
+ def viable_source_types(self):
+ return ['*']
+
+generators.register(LibGenerator("builtin.lib-generator"))
+
+generators.override("builtin.prebuilt", "builtin.lib-generator")
+
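+# For reference, LibGenerator.run above selects the concrete target type from
+# the properties roughly as follows (illustrative examples, not exhaustive):
+#
+#   <name>png or <search>..., no <source>  ->  SEARCHED_LIB
+#   <file>... (prebuilt library file)      ->  LIB
+#   <link>shared                           ->  SHARED_LIB
+#   otherwise                              ->  STATIC_LIB
+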
+def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+ """The implementation of the 'lib' rule. Beyond the standard syntax, this rule
+ allows a simplified form: 'lib a b c ;'."""
+
+ if len(names) > 1:
+ if any(r.startswith('<name>') for r in requirements):
+ get_manager().errors()("When several names are given to the 'lib' rule\n" +
+ "it is not allowed to specify the <name> feature.")
+
+ if sources:
+ get_manager().errors()("When several names are given to the 'lib' rule\n" +
+ "it is not allowed to specify sources.")
+
+ project = get_manager().projects().current()
+ result = []
+
+ for name in names:
+ r = requirements[:]
+
+ # Support " lib a ; " and " lib a b c ; " syntax.
+ if not sources and not any(r.startswith("<name>") for r in requirements) \
+ and not any(r.startswith("<file") for r in requirements):
+ r.append("<name>" + name)
+
+ result.append(targets.create_typed_metatarget(name, "LIB", sources,
+ r,
+ default_build,
+ usage_requirements))
+ return result
+
+get_manager().projects().add_rule("lib", lib)
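+
+# Jamfile usage examples for the rule registered above (the first two appear
+# in comments in this module; the <file> path in the last one is hypothetical):
+#
+#   lib a b c ;                          # several searched libraries at once
+#   lib png : z : <name>png ;            # searched library depending on z
+#   lib z : : <file>/usr/lib/libz.a ;    # prebuilt library given by file path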
+
+
+# Updated to trunk@47077
+class SearchedLibGenerator (generators.Generator):
+ def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
+ # TODO: the comment below looks strange. There are no requirements!
+ # The requirements cause the generators to be tried *only* when we're building
+ # a lib target and there's a 'search' feature. This seems ugly --- all we want
+ # is to make sure SearchedLibGenerator is not invoked deep in a transformation
+ # search.
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, prop_set, sources):
+
+ # If name is empty, it means we are not being called from the top level.
+ # In that case we fail immediately, because SearchedLibGenerator cannot
+ # be used to produce intermediate targets.
+ if not name:
+ return None
+
+ properties = prop_set.raw ()
+ shared = '<link>shared' in properties
+
+ a = virtual_target.NullAction (project.manager(), prop_set)
+
+ real_name = feature.get_values ('<name>', properties)
+ if real_name:
+ real_name = real_name[0]
+ else:
+ real_name = name
+ search = feature.get_values('<search>', properties)
+ usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
+ t = SearchedLibTarget(real_name, project, shared, search, a)
+
+ # We return sources for a simple reason. If there's
+ # lib png : z : <name>png ;
+ # the 'z' target should be returned, so that apps linking to
+ # 'png' will link to 'z', too.
+ return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
+
+generators.register (SearchedLibGenerator ())
+
+class PrebuiltLibGenerator(generators.Generator):
+
+ def __init__(self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run(self, project, name, properties, sources):
+ f = properties.get("file")
+ return f + sources
+
+generators.register(PrebuiltLibGenerator("builtin.prebuilt", False, [],
+ ["LIB"], ["<file>"]))
+
+generators.override("builtin.prebuilt", "builtin.lib-generator")
+
+
+class CompileAction (virtual_target.Action):
+ def __init__ (self, manager, sources, action_name, prop_set):
+ virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
+
+ def adjust_properties (self, prop_set):
+ """ For all virtual targets in the same dependency graph as self,
+ i.e. those which belong to the same main target, add their directories
+ to the include path.
+ """
+ s = self.targets () [0].creating_subvariant ()
+
+ return prop_set.add_raw (s.implicit_includes ('include', 'H'))
+
+class CCompilingGenerator (generators.Generator):
+ """ Declares a special compiler generator.
+ The only thing it does is change the type used to represent
+ 'action' in the constructed dependency graph to 'CompileAction'.
+ That class in turn adds additional include paths to handle the case
+ when a source file includes headers which are themselves generated.
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def action_class (self):
+ return CompileAction
+
+def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []):
+ g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties)
+ return generators.register (g)
+
+
+class LinkingGenerator (generators.Generator):
+ """ The generator class for handling EXE and SHARED_LIB creation.
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run (self, project, name, prop_set, sources):
+
+ sources.extend(prop_set.get('<library>'))
+
+ # Add <library-path> properties for all searched libraries
+ extra = []
+ for s in sources:
+ if s.type () == 'SEARCHED_LIB':
+ search = s.search()
+ extra.extend(property.Property('<library-path>', sp) for sp in search)
+
+ # It's possible that we have libraries in sources which did not come
+ # from a 'lib' target, for example libraries specified just as filenames
+ # in sources. We don't have xdll-path properties for such targets, but
+ # still need to add the proper dll-path properties.
+ extra_xdll_path = []
+ for s in sources:
+ if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
+ # Unfortunately, we don't have a good way to find the path
+ # to a file, so use this nasty approach.
+ p = s.project()
+ location = path.root(s.name(), p.get('source-location')[0])
+ extra_xdll_path.append(os.path.dirname(location))
+
+ # Hardcode DLL paths only when linking executables.
+ # Pros: do not need to relink libraries when installing.
+ # Cons: "standalone" libraries (plugins, python extensions) can not
+ # hardcode paths to dependent libraries.
+ if prop_set.get('<hardcode-dll-paths>') == ['true'] \
+ and type.is_derived(self.target_types_ [0], 'EXE'):
+ xdll_path = prop_set.get('<xdll-path>')
+ extra.extend(property.Property('<dll-path>', sp) \
+ for sp in extra_xdll_path)
+ extra.extend(property.Property('<dll-path>', sp) \
+ for sp in xdll_path)
+
+ if extra:
+ prop_set = prop_set.add_raw (extra)
+ result = generators.Generator.run(self, project, name, prop_set, sources)
+
+ if result:
+ ur = self.extra_usage_requirements(result, prop_set)
+ ur = ur.add(property_set.create(['<xdll-path>' + p for p in extra_xdll_path]))
+ else:
+ return None
+ return (ur, result)
+
+ def extra_usage_requirements (self, created_targets, prop_set):
+
+ result = property_set.empty ()
+ extra = []
+
+ # Add appropriate <xdll-path> usage requirements.
+ raw = prop_set.raw ()
+ if '<link>shared' in raw:
+ paths = []
+
+ # TODO: is it safe to use the current directory? I think we should use
+ # another mechanism to allow this to be run from anywhere.
+ pwd = os.getcwd()
+
+ for t in created_targets:
+ if type.is_derived(t.type(), 'SHARED_LIB'):
+ paths.append(path.root(path.make(t.path()), pwd))
+
+ extra += replace_grist(paths, '<xdll-path>')
+
+ # We need to pass on the <xdll-path> features that we've got from sources,
+ # because if a shared library is built, an exe which uses it must know the
+ # paths to the other shared libraries this one depends on in order to find
+ # them all at runtime.
+
+ # Just pass all such features in the property_set. It's theoretically
+ # possible that we'll propagate <xdll-path> features explicitly specified
+ # by the user, but then the user is to blame for using an internal feature.
+ values = prop_set.get('<xdll-path>')
+ extra += replace_grist(values, '<xdll-path>')
+
+ if extra:
+ result = property_set.create(extra)
+
+ return result
+
+ def generated_targets (self, sources, prop_set, project, name):
+
+ # sources to pass to inherited rule
+ sources2 = []
+ # sources which are libraries
+ libraries = []
+
+ # Searched libraries are not passed as arguments to the linker
+ # but via an option, so we pass them to the action via a property.
+ fsa = []
+ fst = []
+ for s in sources:
+ if type.is_derived(s.type(), 'SEARCHED_LIB'):
+ n = s.name()
+ if s.shared():
+ fsa.append(n)
+
+ else:
+ fst.append(n)
+
+ else:
+ sources2.append(s)
+
+ add = []
+ if fsa:
+ add.append("<find-shared-library>" + '&&'.join(fsa))
+ if fst:
+ add.append("<find-static-library>" + '&&'.join(fst))
+
+ spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
+ return spawn
+
+
+def register_linker(id, source_types, target_types, requirements):
+ g = LinkingGenerator(id, True, source_types, target_types, requirements)
+ generators.register(g)
+
+class ArchiveGenerator (generators.Generator):
+ """ The generator class for handling STATIC_LIB creation.
+ """
+ def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
+ generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
+
+ def run (self, project, name, prop_set, sources):
+ sources += prop_set.get ('<library>')
+
+ result = generators.Generator.run (self, project, name, prop_set, sources)
+
+ return result
+
+
+def register_archiver(id, source_types, target_types, requirements):
+ g = ArchiveGenerator(id, True, source_types, target_types, requirements)
+ generators.register(g)
+
+class DummyGenerator(generators.Generator):
+ """Generator that accepts everything and produces nothing. Useful as a general
+ fallback for toolset-specific actions like PCH generation.
+ """
+ def run (self, project, name, prop_set, sources):
+ return (property_set.empty(), [])
+
+
+get_manager().projects().add_rule("variant", variant)
+
+import stage
+import symlink
+import message
diff --git a/tools/build/src/tools/cast.jam b/tools/build/src/tools/cast.jam
new file mode 100644
index 0000000000..41b0ac338e
--- /dev/null
+++ b/tools/build/src/tools/cast.jam
@@ -0,0 +1,91 @@
+# Copyright 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the main target 'cast', used to change the type of a target. For
+# example, in the Qt library one wants two kinds of CPP files -- those that are
+# just compiled and those that are passed through the MOC tool.
+#
+# This is done with:
+#
+# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
+#
+# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
+# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
+# support will run the MOC tool as part of the build process.
+#
+# At the moment, the 'cast' rule only works for non-derived (source) targets.
+#
+# TODO: The following comment is unclear or incorrect. Clean it up.
+# > Another solution would be to add a separate main target 'moc-them' that
+# > would moc all the passed sources, no matter what their type is, but I prefer
+# > cast, as defining a new target type + generator for that type is somewhat
+# > simpler than defining a main target rule.
+
+import "class" : new ;
+import project ;
+import property-set ;
+import targets ;
+import type ;
+
+
+class cast-target-class : typed-target
+{
+ import type ;
+
+ rule __init__ ( name : project : type : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+ typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local result ;
+ for local s in $(source-targets)
+ {
+ if ! [ class.is-a $(s) : file-target ]
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error Source to the 'cast' rule is not a file! ;
+ }
+ if [ $(s).action ]
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error Only non-derived targets are allowed for
+ 'cast'. : when building [ full-name ] ;
+ }
+ local r = [ $(s).clone-with-different-type $(self.type) ] ;
+ result += [ virtual-target.register $(r) ] ;
+ }
+ return [ property-set.empty ] $(result) ;
+ }
+}
+
+
+rule cast ( name type : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ local project = [ project.current ] ;
+
+ local real-type = [ type.type-from-rule-name $(type) ] ;
+ if ! $(real-type)
+ {
+ import errors ;
+ errors.user-error No type corresponds to the main target rule name
+ '$(type)' : "Hint: try a lowercase name" ;
+ }
+
+ targets.main-target-alternative [ new cast-target-class $(name) : $(project)
+ : $(real-type)
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ;
+}
+
+
+IMPORT $(__name__) : cast : : cast ;
diff --git a/tools/build/v2/tools/cast.py b/tools/build/src/tools/cast.py
index 8f053f110c..8f053f110c 100644
--- a/tools/build/v2/tools/cast.py
+++ b/tools/build/src/tools/cast.py
diff --git a/tools/build/src/tools/clang-darwin.jam b/tools/build/src/tools/clang-darwin.jam
new file mode 100644
index 0000000000..51e5fad754
--- /dev/null
+++ b/tools/build/src/tools/clang-darwin.jam
@@ -0,0 +1,170 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import clang ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset clang : platform : darwin ;
+
+toolset.inherit-generators clang-darwin
+ <toolset>clang <toolset-clang:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override clang-darwin.prebuilt : builtin.lib-generator ;
+generators.override clang-darwin.prebuilt : builtin.prebuilt ;
+generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules clang-darwin : gcc ;
+toolset.inherit-flags clang-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# vectorization diagnostics
+feature vectorize : off on full ;
+
+# Initializes the clang-darwin toolset.
+# version is optional
+# name (default clang++) is used to invoke the specified clang compiler
+# compile and link options allow you to specify additional command line options for each version
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command clang-darwin : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-darwin
+ : version $(version) ] ;
+
+ common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags clang-darwin darwin $(condition) ;
+
+}
+
+SPACE = " " ;
+
+flags clang-darwin.compile OPTIONS <cflags> ;
+flags clang-darwin.compile.c++ OPTIONS <cxxflags> ;
+# flags clang-darwin.compile INCLUDES <include> ;
+
+# Declare flags and action for compilation.
+toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
+toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags clang-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of clang-linux, which inherits from gcc
+# but does not have the same logic to set the .AR variable. We could put the
+# same logic in clang-linux, but that's hardly worth the trouble as, on Linux,
+# 'ar' is always available.
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here's rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "ranlib" -cs "$(<)"
+}
+
+flags clang-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS)
+}
diff --git a/tools/build/src/tools/clang-linux.jam b/tools/build/src/tools/clang-linux.jam
new file mode 100644
index 0000000000..0aa29d519c
--- /dev/null
+++ b/tools/build/src/tools/clang-linux.jam
@@ -0,0 +1,213 @@
+# Copyright (c) 2003 Michael Stevens
+# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import toolset ;
+import feature ;
+import toolset : flags ;
+
+import clang ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+import type ;
+import numbers ;
+
+feature.extend-subfeature toolset clang : platform : linux ;
+
+toolset.inherit-generators clang-linux
+ <toolset>clang <toolset-clang:platform>linux : gcc
+ : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
+generators.override clang-linux.prebuilt : builtin.lib-generator ;
+generators.override clang-linux.prebuilt : builtin.prebuilt ;
+generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
+generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH
+ : <toolset>clang <toolset-clang:platform>linux : pth ;
+
+toolset.inherit-rules clang-linux : gcc ;
+toolset.inherit-flags clang-linux : gcc
+ : <inlining>off <inlining>on <inlining>full
+ <optimization>space <optimization>speed
+ <warnings>off <warnings>all <warnings>on ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
+ .debug-configuration = true ;
+}
+
+rule init ( version ? : command * : options * ) {
+ command = [ common.get-invocation-command clang-linux : clang++
+ : $(command) ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+
+ if $(command) {
+ version ?= [ MATCH "version ([0-9.]+)"
+ : [ SHELL "$(command-string) --version" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters clang-linux
+ : version $(version) ] ;
+
+ common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags clang-linux gnu $(condition) ;
+}
+
+###############################################################################
+# Flags
+
+toolset.flags clang-linux.compile OPTIONS <cflags> ;
+toolset.flags clang-linux.compile.c++ OPTIONS <cxxflags> ;
+
+toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
+toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
+
+# note: clang silently ignores some of these inlining options
+toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
+toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
+toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
+
+###############################################################################
+# C and C++ compilation
+
+rule compile.c++ ( targets * : sources * : properties * ) {
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ local pth-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pth-file) {
+ DEPENDS $(<) : $(pth-file) ;
+ clang-linux.compile.c++.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ clang-linux.compile.c++.without-pth $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c++.without-pth {
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+actions compile.c++.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ local pth-file = [ on $(<) return $(PCH_FILE) ] ;
+
+ if $(pth-file) {
+ DEPENDS $(<) : $(pth-file) ;
+ clang-linux.compile.c.with-pch $(targets) : $(sources) ;
+ }
+ else {
+ clang-linux.compile.c.without-pth $(targets) : $(sources) ;
+ }
+}
+
+actions compile.c.without-pth
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c.with-pch bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# PCH emission
+
+rule compile.c++.pch ( targets * : sources * : properties * ) {
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++.pch {
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * ) {
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
+}
+
+###############################################################################
+# Linking
+
+SPACE = " " ;
+
+rule link ( targets * : sources * : properties * ) {
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+actions link bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule link.dll ( targets * : sources * : properties * ) {
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+
+ local target = [ feature.get-values target-os : $(properties) ] ;
+
+ switch $(target)
+ {
+ case windows :
+ local threading = [ feature.get-values threading : $(properties) ] ;
+ if $(threading) = multi
+ {
+ OPTIONS on $(targets) += -pthread ;
+ }
+ case * : gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+ }
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES {
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
diff --git a/tools/build/src/tools/clang-win.jam b/tools/build/src/tools/clang-win.jam
new file mode 100644
index 0000000000..25a6d006da
--- /dev/null
+++ b/tools/build/src/tools/clang-win.jam
@@ -0,0 +1,175 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Importing common is needed because the rules we inherit here depend on it.
+# That is nasty.
+import common ;
+import errors ;
+import feature ;
+import clang ;
+import msvc ;
+import os ;
+import toolset ;
+import generators ;
+import type ;
+import path ;
+import set ;
+
+feature.extend-subfeature toolset clang : platform : win ;
+
+toolset.inherit-generators clang-win <toolset>clang <toolset-clang:platform>win : msvc ;
+toolset.inherit-flags clang-win : msvc : <debug-symbols>on/<debug-store>object <asynch-exceptions>off <asynch-exceptions>on : YLOPTION ;
+toolset.inherit-rules clang-win : msvc ;
+
+# Override default do-nothing generators.
+generators.override clang-win.compile.c.pch : pch.default-c-pch-generator ;
+generators.override clang-win.compile.c++.pch : pch.default-cpp-pch-generator ;
+generators.override clang-win.compile.rc : rc.compile.resource ;
+generators.override clang-win.compile.mc : mc.compile ;
+
+toolset.flags clang-win.compile PCH_SOURCE <pch>on : <pch-source> ;
+
+toolset.flags clang-win.compile CFLAGS <debug-symbols>on/<debug-store>object : "" ;
+
+# Initializes the clang toolset for Windows.
+rule init ( version ? : # the compiler version
+ command * : # the command to invoke the compiler itself
+ options * # Additional option: <compatibility>
+ # naming the Visual Studio version to be
+ # compatible with, e.g. 'vc10', 'vc11' or 'vc12'.
+ )
+{
+ local compatibility =
+ [ feature.get-values <compatibility> : $(options) ] ;
+ local condition = [ common.check-init-parameters clang-win
+ : version $(version) : compatibility $(compatibility) ] ;
+
+ if ! $(compatibility)
+ {
+ import errors ;
+ errors.error "Please set <compatibility> property for visual studio version!" ;
+ }
+ local vc_version = [ MATCH vc([0-9]+) : $(compatibility) ] ;
+ if ! $(vc_version)
+ {
+ errors.user-error "Invalid value for compatibility option:"
+ $(compatibility) ;
+ }
+
+ local m = [ MATCH ([0-9]+).* : $(version) ] ;
+ local major = $(m[1]) ;
+
+ command = [ common.get-invocation-command clang-win : clang-cl.exe :
+ $(command) ] ;
+
+ common.handle-options clang-win : $(condition) : $(command) : $(options) ;
+
+ local setup ;
+ setup = [ get-visual-studio-vcvars $(vc_version) ] ; # Get visual studio vcvars bat file path
+
+ local target_types ;
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
+ {
+ target_types = x86 amd64 ;
+ }
+ else
+ {
+ target_types = x86 x86_amd64 ;
+ }
+
+ for local c in $(target_types)
+ {
+ local cpu-conditions ;
+ local setup-call ;
+ setup-call = "call \""$(setup)"\" $(c) > nul " ;
+ cpu-conditions = $(condition)/$(.cpu-arch-$(c)) ;
+
+ if [ os.name ] = NT
+ {
+ setup-call = $(setup-call)"
+ " ;
+ }
+ else
+ {
+ setup-call = "cmd /S /C "$(setup-call)" \"&&\" " ;
+ }
+
+ if $(.debug-configuration)
+ {
+ for local cpu-condition in $(cpu-conditions)
+ {
+ ECHO "notice: [clang-cfg] condition: '$(cpu-condition)', setup: '$(setup-call)'" ;
+ }
+ }
+
+ local compiler ;
+ compiler = [ path.native $(command) ] ;
+ compiler = "\"$(compiler)\"" ;
+
+ toolset.flags clang-win.compile .CC $(cpu-conditions) : $(setup-call)$(compiler) ;
+ toolset.flags clang-win.link .LD $(cpu-conditions) : $(setup-call)link /nologo ;
+ toolset.flags clang-win.archive .LD $(cpu-conditions) : $(setup-call)link /lib /nologo ;
+ toolset.flags clang-win.link .MT $(cpu-conditions) : $(setup-call)mt -nologo ;
+ toolset.flags clang-win.compile .MC $(cpu-conditions) : $(setup-call)mc ;
+ toolset.flags clang-win.compile .RC $(cpu-conditions) : $(setup-call)rc ;
+ }
+
+
+ local C++FLAGS ;
+
+ if $(vc_version) = 10
+ {
+ C++FLAGS += -fmsc-version=1600 ;
+ }
+ else if $(vc_version) = 11
+ {
+ C++FLAGS += -fmsc-version=1700 ;
+ }
+ else if $(vc_version) = 12
+ {
+ C++FLAGS += -fmsc-version=1800 ;
+ }
+
+ toolset.flags clang-win CFLAGS $(condition) : $(C++FLAGS) ;
+
+ msvc.configure-version-specific clang-win : $(vc_version) : $(condition) ;
+}
+
+local rule get-visual-studio-vcvars ( version )
+{
+ local env_variable_name ;
+ env_variable_name = "VS"$(version:U)"0COMNTOOLS" ;
+
+ local vc-path = [ os.environ $(env_variable_name) ] ;
+ vc-path = [ path.join $(vc-path) "../../VC/vcvarsall.bat" ] ;
+ path.native $(vc-path) ;
+}
+
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Copied from msvc.jam
+# Supported CPU architectures.
+.cpu-arch-x86 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-amd64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-x86_amd64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+# toolset.flags clang-win.link LIBRARY_OPTION <toolset>clang : "" ;
+
+toolset.flags clang-win YLOPTION ;
+
diff --git a/tools/build/v2/tools/clang.jam b/tools/build/src/tools/clang.jam
index e0ac9a553c..e0ac9a553c 100644
--- a/tools/build/v2/tools/clang.jam
+++ b/tools/build/src/tools/clang.jam
diff --git a/tools/build/src/tools/common.jam b/tools/build/src/tools/common.jam
new file mode 100644
index 0000000000..8404febf9a
--- /dev/null
+++ b/tools/build/src/tools/common.jam
@@ -0,0 +1,980 @@
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides actions common to all toolsets, such as creating directories and
+# removing files.
+
+import os ;
+import modules ;
+import utility ;
+import print ;
+import type ;
+import feature ;
+import errors ;
+import path ;
+import sequence ;
+import toolset ;
+import virtual-target ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
+{
+ .show-configuration = true ;
+}
+
+# Configurations
+#
+# The following class helps to manage toolset configurations. Each configuration
+# has a unique ID and one or more parameters. A typical example of a unique ID
+# is a condition generated by 'common.check-init-parameters' rule. Other kinds
+# of IDs can be used. Parameters may include any details about the configuration
+# like 'command', 'path', etc.
+#
+# A toolset configuration may be in one of the following states:
+#
+# - registered
+# The configuration has been registered (e.g. explicitly or by the
+# auto-detection code) but has not yet been marked as used, i.e. the
+# 'toolset.using' rule has not yet been called for it.
+# - used
+# Calling the 'toolset.using' rule marks the configuration as 'used'.
+#
+# The main difference between the states above is that while a configuration is
+# 'registered' its options can be freely changed. This is useful in particular
+# for autodetection code - all detected configurations may be safely overwritten
+# by user code.
+
+class configurations
+{
+ import errors ;
+
+ rule __init__ ( )
+ {
+ }
+
+ # Registers a configuration.
+ #
+ # Returns 'true' if the configuration has been added and an empty value if
+ # it already exists. Reports an error if the configuration is 'used'.
+ #
+ rule register ( id )
+ {
+ if $(id) in $(self.used)
+ {
+ errors.error "common: the configuration '$(id)' is in use" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.all)
+ {
+ self.all += $(id) ;
+
+ # Indicate that a new configuration has been added.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Mark a configuration as 'used'.
+ #
+ # Returns 'true' if the state of the configuration has been changed to
+ # 'used' and an empty value if the state has not been changed. Reports an
+ # error if the configuration is not known.
+ #
+ rule use ( id )
+ {
+ if ! $(id) in $(self.all)
+ {
+ errors.error "common: the configuration '$(id)' is not known" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.used)
+ {
+ self.used += $(id) ;
+
+ # Indicate that the configuration has been marked as 'used'.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Return all registered configurations.
+ #
+ rule all ( )
+ {
+ return $(self.all) ;
+ }
+
+ # Return all used configurations.
+ #
+ rule used ( )
+ {
+ return $(self.used) ;
+ }
+
+ # Returns the value of a configuration parameter.
+ #
+ rule get ( id : param )
+ {
+ return $(self.$(param).$(id)) ;
+ }
+
+ # Sets the value of a configuration parameter.
+ #
+ rule set ( id : param : value * )
+ {
+ self.$(param).$(id) = $(value) ;
+ }
+}
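+
+# A minimal usage sketch for the class above (hypothetical id and parameter
+# names), showing the register -> set -> use flow; it assumes 'new' has been
+# imported via: import "class" : new ;
+#
+#   .cfgs = [ new configurations ] ;
+#   $(.cfgs).register some-toolset-1.0 ;
+#   $(.cfgs).set some-toolset-1.0 : command : some-cc ;
+#   ...
+#   $(.cfgs).use some-toolset-1.0 ;
+#   local cmd = [ $(.cfgs).get some-toolset-1.0 : command ] ;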
+
+
+# The rule for checking toolset parameters. Trailing parameters should all be
+# parameter name/value pairs. The rule will check that each parameter either has
+# a value in each invocation or has no value in each invocation. Also, the rule
+# will check that the combination of all parameter values is unique in all
+# invocations.
+#
+# Each parameter name corresponds to a subfeature. This rule will declare a
+# subfeature the first time a non-empty parameter value is passed and will
+# extend it with all the values.
+#
+# The return value from this rule is a condition to be used for flags settings.
+#
+rule check-init-parameters ( toolset requirement * : * )
+{
+ local sig = $(toolset) ;
+ local condition = <toolset>$(toolset) ;
+ local subcondition ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local name = $($(index)[1]) ;
+ local value = $($(index)[2]) ;
+
+ if $(value)-is-not-empty
+ {
+ condition = $(condition)-$(value) ;
+ if $(.had-unspecified-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "no value was specified in earlier"
+ "initialization" : "an explicit value is specified now" ;
+ }
+ # The logic below is for the Intel compiler. It calls this rule with
+ # 'intel-linux' and 'intel-win' as the toolset, so we need to get the
+ # base part of the toolset name. We can not pass 'intel' as the toolset
+ # because in that case it would be impossible to register the versionless
+ # intel-linux and intel-win toolsets for a specific version.
+ local t = $(toolset) ;
+ local m = [ MATCH ([^-]*)- : $(toolset) ] ;
+ if $(m)
+ {
+ t = $(m[1]) ;
+ }
+ if ! $(.had-value.$(toolset).$(name))
+ {
+ if ! $(.declared-subfeature.$(t).$(name))
+ {
+ feature.subfeature toolset $(t) : $(name) : : propagated ;
+ .declared-subfeature.$(t).$(name) = true ;
+ }
+ .had-value.$(toolset).$(name) = true ;
+ }
+ feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
+ subcondition += <toolset-$(t):$(name)>$(value) ;
+ }
+ else
+ {
+ if $(.had-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "an explicit value was specified in an"
+ "earlier initialization" : "no value is specified now" ;
+ }
+ .had-unspecified-value.$(toolset).$(name) = true ;
+ }
+ sig = $(sig)$(value:E="")- ;
+ }
+ if $(sig) in $(.all-signatures)
+ {
+ local message =
+ "duplicate initialization of $(toolset) with the following parameters: " ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local p = $($(index)) ;
+ if $(p)
+ {
+ message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
+ }
+ }
+ message += "previous initialization at $(.init-loc.$(sig))" ;
+ errors.user-error
+ $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
+ $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
+ }
+ .all-signatures += $(sig) ;
+ .init-loc.$(sig) = [ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if $(requirement)
+ {
+ local r = <toolset>$(toolset) $(requirement) ;
+ r = $(r:J=,) ;
+ toolset.add-requirements $(r):$(subcondition) ;
+ }
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition += $(requirement) ;
+
+ if $(.show-configuration)
+ {
+ ECHO notice: $(condition) ;
+ }
+ return $(condition:J=/) ;
+}
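+
+# Typical usage from a toolset's init rule (see e.g. clang-linux.jam earlier
+# in this patch):
+#
+#   local condition = [ common.check-init-parameters clang-linux
+#       : version $(version) ] ;
+#
+# The returned condition (e.g. <toolset>clang-linux-3.5 for a hypothetical
+# version 3.5) is then passed to 'toolset.flags' and 'common.handle-options'
+# so that flags and options are scoped to this particular configuration.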
+
+
+# A helper rule to get the command to invoke some tool. If
+# 'user-provided-command' is not given, tries to find a binary named 'tool' in
+# PATH and in the passed 'additional-paths'. Otherwise, verifies that the first
+# element of 'user-provided-command' is an existing program.
+#
+# This rule returns the command to be used when invoking the tool. If we can not
+# find the tool, a warning is issued. If 'path-last' is specified, PATH is
+# checked after 'additional-paths' when searching for 'tool'.
+#
+rule get-invocation-command-nodefault ( toolset : tool :
+ user-provided-command * : additional-paths * : path-last ? )
+{
+ local command ;
+ if ! $(user-provided-command)
+ {
+ command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO warning: toolset $(toolset) initialization: can not find tool
+ $(tool) ;
+ ECHO warning: initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ else
+ {
+ command = [ check-tool $(user-provided-command) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO warning: toolset $(toolset) initialization: ;
+ ECHO warning: can not find user-provided command
+ '$(user-provided-command)' ;
+ ECHO warning: initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+
+ return $(command) ;
+}
+
+
+# Same as get-invocation-command-nodefault, except that if no tool is found,
+# returns either the user-provided-command, if present, or the 'tool' parameter.
+#
+rule get-invocation-command ( toolset : tool : user-provided-command * :
+ additional-paths * : path-last ? )
+{
+ local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
+ $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
+
+ if ! $(result)
+ {
+ if $(user-provided-command)
+ {
+ result = $(user-provided-command) ;
+ }
+ else
+ {
+ result = $(tool) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Given an invocation command return the absolute path to the command. This
+# works even if command has no path element and was found on the PATH.
+#
+rule get-absolute-tool-path ( command )
+{
+ if $(command:D)
+ {
+ return $(command:D) ;
+ }
+ else
+ {
+ local m = [ GLOB [ modules.peek : PATH Path path ] : $(command)
+ $(command).exe ] ;
+ return $(m[1]:D) ;
+ }
+}
+
+
+# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
+# If found in PATH, returns 'name' and if found in additional paths, returns
+# absolute name. If the tool is found in several directories, returns the first
+# path found. Otherwise, returns an empty string. If 'path-last' is specified,
+# PATH is searched after 'additional-paths'.
+#
+rule find-tool ( name : additional-paths * : path-last ? )
+{
+ local path = [ path.programs-path ] ;
+ local match = [ path.glob $(path) : $(name) $(name).exe ] ;
+ local additional-match = [ path.glob $(additional-paths) : $(name)
+ $(name).exe ] ;
+
+ local result ;
+ if $(path-last)
+ {
+ result = $(additional-match) ;
+ if ! $(result) && $(match)
+ {
+ result = $(name) ;
+ }
+ }
+ else
+ {
+ if $(match)
+ {
+ result = $(name) ;
+ }
+ else
+ {
+ result = $(additional-match) ;
+ }
+ }
+ if $(result)
+ {
+ return [ path.native $(result[1]) ] ;
+ }
+}
+
+# Checks if 'command' can be found either in PATH or is a full path to an
+# existing file.
+#
+local rule check-tool-aux ( command )
+{
+ if $(command:D)
+ {
+ if [ path.exists $(command) ]
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
+ # Only NT will run .bat & .cmd files by their unqualified names.
+ || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
+ [ path.exists $(command).cmd ] ) )
+ {
+ return $(command) ;
+ }
+ }
+ else
+ {
+ if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
+ {
+ return $(command) ;
+ }
+ }
+}
+
+
+# Checks that a tool can be invoked by 'command'. If command is not an absolute
+# path, checks if it can be found in PATH. If command is an absolute path,
+# checks that it exists. Returns 'command' if ok or an empty string otherwise.
+#
+local rule check-tool ( xcommand + )
+{
+ if [ check-tool-aux $(xcommand[1]) ] ||
+ [ check-tool-aux $(xcommand[-1]) ]
+ {
+ return $(xcommand) ;
+ }
+}
+
+
+# Handle common options for toolset, specifically sets the following flag
+# variables:
+# - CONFIG_COMMAND to $(command)
+# - OPTIONS for compile to the value of <compileflags> in $(options)
+# - OPTIONS for compile.c to the value of <cflags> in $(options)
+# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
+# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
+# - OPTIONS for link to the value of <linkflags> in $(options)
+#
+rule handle-options ( toolset : condition * : command * : options * )
+{
+ if $(.debug-configuration)
+ {
+ ECHO notice: will use '$(command)' for $(toolset), condition
+ $(condition:E=(empty)) ;
+ }
+
+ # The last parameter ('unchecked') says it is OK to set flags for another
+ # module.
+ toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
+ : unchecked ;
+
+ toolset.flags $(toolset).compile OPTIONS $(condition) :
+ [ feature.get-values <compileflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c OPTIONS $(condition) :
+ [ feature.get-values <cflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
+ [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).link OPTIONS $(condition) :
+ [ feature.get-values <linkflags> : $(options) ] : unchecked ;
+}
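+
+# For example (hypothetical values), a toolset initialized with
+#
+#   options = <cxxflags>-std=c++11 <linkflags>-lrt ;
+#
+# ends up with -std=c++11 in the OPTIONS flag variable of the toolset's
+# compile.c++ actions and -lrt in the OPTIONS variable of its link actions,
+# as per the feature.get-values calls above.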
+
+
+# Returns the location of the "program files" directory on a Windows platform.
+#
+rule get-program-files-dir ( )
+{
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ ProgramFiles = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ ProgramFiles = "c:\\Program Files" ;
+ }
+ return $(ProgramFiles) ;
+}
+
+
+if [ os.name ] = NT
+{
+ RM = del /f /q ;
+ CP = copy /b ;
+ IGNORE = "2>nul >nul & setlocal" ;
+ LN ?= $(CP) ;
+ # Ugly hack to convince copy to set the timestamp of the destination to the
+ # current time by concatenating the source with a nonexistent file. Note
+    # that this requires /b (binary), as the default when concatenating files
+    # is /a (ascii).
+ WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
+}
+else
+{
+ RM = rm -f ;
+ CP = cp ;
+ LN = ln ;
+}
+
+
+rule rm-command ( )
+{
+ return $(RM) ;
+}
+
+
+rule copy-command ( )
+{
+ return $(CP) ;
+}
+
+
+if "\n" = "n"
+{
+ # Escape characters not supported so use ugly hacks. Will not work on Cygwin
+ # - see below.
+ nl = "
+" ;
+ q = "" ;
+}
+else
+{
+ nl = "\n" ;
+ q = "\"" ;
+}
+
+# Returns the command needed to set an environment variable on the current
+# platform. The variable setting persists through all following commands and is
+# visible in the environment seen by subsequently executed commands. In other
+# words, on Unix systems, the variable is exported, which is consistent with the
+# only possible behavior on Windows systems.
+#
+rule variable-setting-command ( variable : value )
+{
+ if [ os.name ] = NT
+ {
+ return "set $(variable)=$(value)$(nl)" ;
+ }
+ else
+ {
+        # If we do not have escape character support in bjam, the code below
+ # blows up on CYGWIN, since the $(nl) variable holds a Windows new-line
+ # \r\n sequence that messes up the executed export command which then
+ # reports that the passed variable name is incorrect.
+ # But we have a check for cygwin in kernel/bootstrap.jam already.
+ return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
+ }
+}
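+
+# For example, on a Unix-like system with escape-character support,
+#   [ common.variable-setting-command FOO : bar ]
+# would produce roughly: FOO="bar"<newline>export FOO<newline>
+# (FOO and bar are placeholder names; the __test__ rule at the end of this
+# file shows the exact expected output for path variables).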
+
+
+# Returns a command that sets a named shell path variable to the given NATIVE
+# paths on the current platform.
+#
+rule path-variable-setting-command ( variable : paths * )
+{
+ local sep = [ os.path-separator ] ;
+ return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
+}
+
+
+# Returns a command that prepends the given paths to the named path variable on
+# the current platform.
+#
+rule prepend-path-variable-command ( variable : paths * )
+{
+ return [ path-variable-setting-command $(variable)
+ : $(paths) [ os.expand-variable $(variable) ] ] ;
+}
+
+
+# Returns a command which can create a file. If 'r' is the result of the
+# invocation, then 'r foobar' will create foobar with unspecified content. What
+# happens if the file already exists is unspecified.
+#
+rule file-creation-command ( )
+{
+ if [ os.name ] = NT
+ {
+ # A few alternative implementations on Windows:
+ #
+ # 'type NUL >> '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also not change
+ # the target's timestamp in case the file already exists.
+ #
+ # 'type NUL > '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also destroy an
+ # already existing file by overwriting it with an empty one.
+ #
+ # I guess the best solution would be to allow Boost Jam to define
+ # built-in functions such as 'create a file', 'touch a file' or 'copy a
+ # file' which could be used from inside action code. That would allow
+ # completely portable operations without this kind of kludge.
+ # (22.02.2009.) (Jurko)
+ return "echo. > " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+# Returns a command that may be used for 'touching' files. It is not a real
+# 'touch' command on NT because it adds an empty line at the end of file but it
+# works with source files.
+#
+rule file-touch-command ( )
+{
+ if [ os.name ] = NT
+ {
+ return "echo. >> " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+rule MkDir
+{
+ # If dir exists, do not update it. Do this even for $(DOT).
+ NOUPDATE $(<) ;
+
+ if $(<) != $(DOT) && ! $($(<)-mkdir)
+ {
+ # Cheesy gate to prevent multiple invocations on same dir.
+ $(<)-mkdir = true ;
+
+ # Schedule the mkdir build action.
+ common.mkdir $(<) ;
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ DEPENDS dirs : $(<) ;
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ local s = $(<:P) ;
+ if [ os.name ] = NT
+ {
+ switch $(s)
+ {
+ case *: : s = ;
+ case *:\\ : s = ;
+ }
+ }
+
+ if $(s)
+ {
+ if $(s) != $(<)
+ {
+ DEPENDS $(<) : $(s) ;
+ MkDir $(s) ;
+ }
+ else
+ {
+ NOTFILE $(s) ;
+ }
+ }
+ }
+}
+
+
+#actions MkDir1
+#{
+# mkdir "$(<)"
+#}
+
+# The following quick-fix actions should be replaced using the original MkDir1
+# action once Boost Jam gets updated to correctly detect different paths leading
+# up to the same filesystem target and triggers their build action only once.
+# (todo) (04.07.2008.) (Jurko)
+
+if [ os.name ] = NT
+{
+ actions mkdir
+ {
+ if not exist "$(<)\\" mkdir "$(<)"
+ }
+}
+else
+{
+ actions mkdir
+ {
+ mkdir -p "$(<)"
+ }
+}
+
+actions piecemeal together existing Clean
+{
+ $(RM) "$(>)"
+}
+
+
+rule copy
+{
+}
+
+
+actions copy
+{
+ $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
+}
+
+
+rule RmTemps
+{
+}
+
+
+actions quietly updated piecemeal together RmTemps
+{
+ $(RM) "$(>)" $(IGNORE)
+}
+
+
+actions hard-link
+{
+ $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
+ $(LN) "$(>)" "$(<)" $(NULL_OUT)
+}
+
+
+# Given a target, as given to a custom tag rule, returns a string formatted
+# according to the passed format. Format is a list of the properties to be
+# represented in the result. For each element of format the corresponding target
+# information is obtained and added to the result string. For all but the
+# literal, the format value is taken as the string prepended to the output in
+# order to join the item to the rest of the result. If no joiner is given, "-"
+# is used.
+#
+# The format options can be:
+#
+# <base>[joiner]
+# :: The basename of the target name.
+# <toolset>[joiner]
+# :: The abbreviated toolset tag being used to build the target.
+# <threading>[joiner]
+# :: Indication of a multi-threaded build.
+# <runtime>[joiner]
+# :: Collective tag of the build runtime.
+# <version:/version-feature | X.Y[.Z]/>[joiner]
+# :: Short version tag taken from the given "version-feature" in the
+# build properties. Or if not present, the literal value as the
+# version number.
+# <property:/property-name/>[joiner]
+# :: Direct lookup of the given property-name value in the build
+# properties. /property-name/ is a regular expression. E.g.
+# <property:toolset-.*:flavor> will match every toolset.
+# /otherwise/
+# :: The literal value of the format argument.
+#
+# For example this format:
+#
+# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+#
+# Might return:
+#
+# boost_thread-vc80-mt-gd-1_33.dll, or
+# boost_regex-vc80-gd-1_33.dll
+#
+# The returned name also has the target type specific prefix and suffix which
+# puts it in a ready form to use as the value from a custom tag rule.
+#
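+# As an illustrative sketch (not part of the original sources), a custom tag
+# rule forwarding to this rule might look like:
+#
+#   rule tag ( name : type ? : property-set )
+#   {
+#       return [ common.format-name <base> <toolset> <threading> <runtime>
+#           : $(name) : $(type) : $(property-set) ] ;
+#   }
+#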
+rule format-name ( format * : name : type ? : property-set )
+{
+ local result = "" ;
+ for local f in $(format)
+ {
+ switch $(f:G)
+ {
+ case <base> :
+ result += $(name:B) ;
+
+ case <toolset> :
+ result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <threading> :
+ result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
+ : $(property-set) ] ] ;
+
+ case <runtime> :
+ result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <qt> :
+ result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <address-model> :
+ result += [ join-tag $(f:G=) : [ address-model-tag $(name) :
+ $(type) : $(property-set) ] ] ;
+
+ case <version:*> :
+ local key = [ MATCH <version:(.*)> : $(f:G) ] ;
+ local version = [ $(property-set).get <$(key)> ] ;
+ version ?= $(key) ;
+ version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
+ result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
+
+ case <property:*> :
+ local key = [ MATCH <property:(.*)> : $(f:G) ] ;
+ local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
+ if $(p0)
+ {
+ local p = [ $(property-set).get <$(p0)> ] ;
+ if $(p)
+ {
+ result += [ join-tag $(f:G=) : $(p) ] ;
+ }
+ }
+
+ case * :
+ result += $(f:G=) ;
+ }
+ }
+ return [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
+ $(property-set) ] ;
+}
+
+
+local rule join-tag ( joiner ? : tag ? )
+{
+ if ! $(joiner) { joiner = - ; }
+ return $(joiner)$(tag) ;
+}
+
+
+local rule toolset-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ switch [ $(property-set).get <toolset> ]
+ {
+ case borland* : tag += bcb ;
+ case clang* :
+ {
+ switch [ $(property-set).get <toolset-clang:platform> ]
+ {
+ case darwin : tag += clang-darwin ;
+ case linux : tag += clang ;
+ }
+ }
+ case como* : tag += como ;
+ case cw : tag += cw ;
+ case darwin* : tag += xgcc ;
+ case edg* : tag += edg ;
+ case gcc* :
+ {
+ switch [ $(property-set).get <toolset-gcc:flavor> ]
+ {
+ case *mingw* : tag += mgw ;
+ case * : tag += gcc ;
+ }
+ }
+ case intel :
+ if [ $(property-set).get <toolset-intel:platform> ] = win
+ {
+ tag += iw ;
+ }
+ else
+ {
+ tag += il ;
+ }
+ case kcc* : tag += kcc ;
+ case kylix* : tag += bck ;
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ case mipspro* : tag += mp ;
+ case msvc* : tag += vc ;
+ case qcc* : tag += qcc ;
+ case sun* : tag += sw ;
+ case tru64cxx* : tag += tru ;
+ case vacpp* : tag += xlc ;
+ }
+ local version = [ MATCH <toolset.*version>([0123456789]+)[.]([0123456789]*)
+ : $(properties) ] ;
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if $(tag) = vc
+ {
+ if $(version[1]) = 6
+ {
+ # Cancel minor version.
+ version = 6 ;
+ }
+ else if $(version[1]) = 7 && $(version[2]) = 0
+ {
+ version = 7 ;
+ }
+ }
+ # On intel, version is not added, because it does not matter and it is the
+ # version of vc used as backend that matters. Ideally, we should encode the
+ # backend version but that would break compatibility with V1.
+ if $(tag) = iw
+ {
+ version = ;
+ }
+
+ # On borland, version is not added for compatibility with V1.
+ if $(tag) = bcb
+ {
+ version = ;
+ }
+
+ tag += $(version) ;
+
+ return $(tag:J=) ;
+}
+
+
+local rule threading-tag ( name : type ? : property-set )
+{
+ if <threading>multi in [ $(property-set).raw ]
+ {
+ return mt ;
+ }
+}
+
+
+local rule runtime-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ if <runtime-link>static in $(properties) { tag += s ; }
+
+ # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect gcc
+ # toolset, the tag rules will not even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # known to care about runtime debugging.
+ if ( <toolset>msvc in $(properties) ) ||
+ ( <stdlib>stlport in $(properties) ) ||
+ ( <toolset-intel:platform>win in $(properties) )
+ {
+ if <runtime-debugging>on in $(properties) { tag += g ; }
+ }
+
+ if <python-debugging>on in $(properties) { tag += y ; }
+ if <variant>debug in $(properties) { tag += d ; }
+ if <stdlib>stlport in $(properties) { tag += p ; }
+ if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
+
+ return $(tag:J=) ;
+}
+
+
+# Create a tag for the Qt library version
+# "<qt>4.6.0" will result in tag "qt460"
+local rule qt-tag ( name : type ? : property-set )
+{
+ local v = [ MATCH ([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*) :
+ [ $(property-set).get <qt> ] ] ;
+ return qt$(v:J=) ;
+}
+
+
+# Create a tag for the address-model
+# <address-model>64 will simply generate "64"
+local rule address-model-tag ( name : type ? : property-set )
+{
+ return [ $(property-set).get <address-model> ] ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local save-os = [ modules.peek os : .name ] ;
+
+ modules.poke os : .name : LINUX ;
+ assert.result "PATH=\"foo:bar:baz\"\nexport PATH\n"
+ : path-variable-setting-command PATH : foo bar baz ;
+ assert.result "PATH=\"foo:bar:$PATH\"\nexport PATH\n"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : NT ;
+ assert.result "set PATH=foo;bar;baz\n"
+ : path-variable-setting-command PATH : foo bar baz ;
+ assert.result "set PATH=foo;bar;%PATH%\n"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : $(save-os) ;
+}
diff --git a/tools/build/src/tools/common.py b/tools/build/src/tools/common.py
new file mode 100644
index 0000000000..63c65e4c01
--- /dev/null
+++ b/tools/build/src/tools/common.py
@@ -0,0 +1,858 @@
+# Status: being ported by Steven Watanabe
+# Base revision: 47174
+#
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+""" Provides actions common to all toolsets, such as creating directories and
+ removing files.
+"""
+
+import re
+import bjam
+import os
+import os.path
+import sys
+
+# for some reason this fails on Python 2.7(r27:82525)
+# from b2.build import virtual_target
+import b2.build.virtual_target
+from b2.build import feature, type
+from b2.util.utility import *
+from b2.util import path
+
+__re__before_first_dash = re.compile ('([^-]*)-')
+
+def reset ():
+ """ Clear the module state. This is mainly for testing purposes.
+ Note that this must be called _after_ resetting the module 'feature'.
+ """
+ global __had_unspecified_value, __had_value, __declared_subfeature
+ global __init_loc
+ global __all_signatures, __debug_configuration, __show_configuration
+
+ # Stores toolsets without specified initialization values.
+ __had_unspecified_value = {}
+
+ # Stores toolsets with specified initialization values.
+ __had_value = {}
+
+ # Stores toolsets with declared subfeatures.
+ __declared_subfeature = {}
+
+ # Stores all signatures of the toolsets.
+ __all_signatures = {}
+
+ # Stores the initialization locations of each toolset
+ __init_loc = {}
+
+ __debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
+ __show_configuration = '--show-configuration' in bjam.variable('ARGV')
+
+ global __executable_path_variable
+ OS = bjam.call("peek", [], "OS")[0]
+ if OS == "NT":
+ # On Windows the case and capitalization of PATH is not always predictable, so
+ # let's find out what variable name was really set.
+ for n in os.environ:
+ if n.lower() == "path":
+ __executable_path_variable = n
+ break
+ else:
+ __executable_path_variable = "PATH"
+
+ m = {"NT": __executable_path_variable,
+ "CYGWIN": "PATH",
+ "MACOSX": "DYLD_LIBRARY_PATH",
+ "AIX": "LIBPATH"}
+ global __shared_library_path_variable
+ __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH")
+
+reset()
+
+def shared_library_path_variable():
+ return __shared_library_path_variable
+
+# ported from trunk@47174
+class Configurations(object):
+ """
+ This class helps to manage toolset configurations. Each configuration
+ has a unique ID and one or more parameters. A typical example of a unique ID
+ is a condition generated by 'common.check-init-parameters' rule. Other kinds
+ of IDs can be used. Parameters may include any details about the configuration
+ like 'command', 'path', etc.
+
+ A toolset configuration may be in one of the following states:
+
+ - registered
+ Configuration has been registered (e.g. by autodetection code) but has
+ not yet been marked as used, i.e. 'toolset.using' rule has not yet been
+ called for it.
+ - used
+          Once called, the 'toolset.using' rule marks the configuration as 'used'.
+
+ The main difference between the states above is that while a configuration is
+ 'registered' its options can be freely changed. This is useful in particular
+ for autodetection code - all detected configurations may be safely overwritten
+ by user code.
+ """
+
+ def __init__(self):
+ self.used_ = set()
+ self.all_ = set()
+ self.params_ = {}
+
+ def register(self, id):
+ """
+ Registers a configuration.
+
+ Returns True if the configuration has been added and False if
+ it already exists. Reports an error if the configuration is 'used'.
+ """
+ if id in self.used_:
+ #FIXME
+ errors.error("common: the configuration '$(id)' is in use")
+
+ if id not in self.all_:
+ self.all_.add(id)
+
+ # Indicate that a new configuration has been added.
+ return True
+ else:
+ return False
+
+ def use(self, id):
+ """
+ Mark a configuration as 'used'.
+
+ Returns True if the state of the configuration has been changed to
+        'used' and False if the state was not changed. Reports an error
+        if the configuration is not known.
+ """
+ if id not in self.all_:
+ #FIXME:
+ errors.error("common: the configuration '$(id)' is not known")
+
+ if id not in self.used_:
+ self.used_.add(id)
+
+ # indicate that the configuration has been marked as 'used'
+ return True
+ else:
+ return False
+
+ def all(self):
+ """ Return all registered configurations. """
+ return self.all_
+
+ def used(self):
+ """ Return all used configurations. """
+ return self.used_
+
+ def get(self, id, param):
+ """ Returns the value of a configuration parameter. """
+ return self.params_.get(param, {}).get(id)
+
+ def set (self, id, param, value):
+ """ Sets the value of a configuration parameter. """
+ self.params_.setdefault(param, {})[id] = value
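+
+# Illustrative usage sketch (hypothetical names): a toolset module might keep a
+# module-level Configurations instance and drive it from its init code:
+#
+#   _configs = Configurations()
+#   if _configs.register(condition):
+#       # Newly seen configuration; its parameters may still be overwritten.
+#       _configs.set(condition, 'command', command)
+#   _configs.use(condition)   # once toolset.using processes this configuration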
+
+# Ported from trunk@47174
+def check_init_parameters(toolset, requirement, *args):
+ """ The rule for checking toolset parameters. Trailing parameters should all be
+ parameter name/value pairs. The rule will check that each parameter either has
+ a value in each invocation or has no value in each invocation. Also, the rule
+ will check that the combination of all parameter values is unique in all
+ invocations.
+
+ Each parameter name corresponds to a subfeature. This rule will declare a
+ subfeature the first time a non-empty parameter value is passed and will
+ extend it with all the values.
+
+ The return value from this rule is a condition to be used for flags settings.
+ """
+ from b2.build import toolset as b2_toolset
+ if requirement is None:
+ requirement = []
+ # The type checking here is my best guess about
+ # what the types should be.
+ assert(isinstance(toolset, str))
+ # iterable and not a string, allows for future support of sets
+ assert(not isinstance(requirement, basestring) and hasattr(requirement, '__contains__'))
+ sig = toolset
+ condition = replace_grist(toolset, '<toolset>')
+ subcondition = []
+
+ for arg in args:
+ assert(isinstance(arg, tuple))
+ assert(len(arg) == 2)
+ name = arg[0]
+ value = arg[1]
+ assert(isinstance(name, str))
+ assert(isinstance(value, str) or value is None)
+
+ str_toolset_name = str((toolset, name))
+
+ # FIXME: is this the correct translation?
+ ### if $(value)-is-not-empty
+ if value is not None:
+ condition = condition + '-' + value
+ if __had_unspecified_value.has_key(str_toolset_name):
+ raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \
+ "no value was specified in earlier initialization\n" \
+ "an explicit value is specified now" % (toolset, name))
+
+ # The logic below is for intel compiler. It calls this rule
+ # with 'intel-linux' and 'intel-win' as toolset, so we need to
+ # get the base part of toolset name.
+            # We can not pass 'intel' as toolset, because in that case it will
+            # be impossible to register versionless intel-linux and
+            # intel-win toolsets of a specific version.
+ t = toolset
+ m = __re__before_first_dash.match(toolset)
+ if m:
+ t = m.group(1)
+
+ if not __had_value.has_key(str_toolset_name):
+ if not __declared_subfeature.has_key(str((t, name))):
+ feature.subfeature('toolset', t, name, [], ['propagated'])
+ __declared_subfeature[str((t, name))] = True
+
+ __had_value[str_toolset_name] = True
+
+ feature.extend_subfeature('toolset', t, name, [value])
+ subcondition += ['<toolset-' + t + ':' + name + '>' + value ]
+
+ else:
+ if __had_value.has_key(str_toolset_name):
+ raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \
+ "an explicit value was specified in an earlier initialization\n" \
+ "no value is specified now" % (toolset, name))
+
+ __had_unspecified_value[str_toolset_name] = True
+
+ if value == None: value = ''
+
+ sig = sig + value + '-'
+
+ # if a requirement is specified, the signature should be unique
+ # with that requirement
+ if requirement:
+ sig += '-' + '-'.join(requirement)
+
+ if __all_signatures.has_key(sig):
+ message = "duplicate initialization of '%s' with the following parameters: " % toolset
+
+ for arg in args:
+ name = arg[0]
+ value = arg[1]
+ if value == None: value = '<unspecified>'
+
+ message += "'%s' = '%s'\n" % (name, value)
+
+ raise BaseException(message)
+
+ __all_signatures[sig] = True
+ # FIXME
+ __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if requirement:
+ r = ['<toolset>' + toolset] + requirement
+ r = ','.join(r)
+ b2_toolset.add_requirements([r + ':' + c for c in subcondition])
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition = [condition]
+ if requirement:
+ condition += requirement
+
+ if __show_configuration:
+ print "notice:", condition
+ return ['/'.join(condition)]
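+
+# Sketch of a typical call (values are hypothetical): a toolset's init code can
+# obtain the flags condition with
+#   condition = check_init_parameters('my-toolset', None,
+#                                     ('version', version), ('command', command))
+# and later pass it to handle_options() below.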
+
+# Ported from trunk@47077
+def get_invocation_command_nodefault(
+ toolset, tool, user_provided_command=[], additional_paths=[], path_last=False):
+ """
+ A helper rule to get the command to invoke some tool. If
+ 'user-provided-command' is not given, tries to find binary named 'tool' in
+ PATH and in the passed 'additional-path'. Otherwise, verifies that the first
+ element of 'user-provided-command' is an existing program.
+
+ This rule returns the command to be used when invoking the tool. If we can't
+ find the tool, a warning is issued. If 'path-last' is specified, PATH is
+ checked after 'additional-paths' when searching for 'tool'.
+ """
+ assert(isinstance(toolset, str))
+ assert(isinstance(tool, str))
+ assert(isinstance(user_provided_command, list))
+ if additional_paths is not None:
+ assert(isinstance(additional_paths, list))
+        assert(all(isinstance(p, str) for p in additional_paths))
+ assert(isinstance(path_last, bool))
+
+ if not user_provided_command:
+ command = find_tool(tool, additional_paths, path_last)
+ if not command and __debug_configuration:
+            print "warning: toolset", toolset, "initialization: can not find tool", tool
+ #FIXME
+ #print "warning: initialized from" [ errors.nearest-user-location ] ;
+ else:
+ command = check_tool(user_provided_command)
+ assert(isinstance(command, list))
+ command=' '.join(command)
+ if not command and __debug_configuration:
+ print "warning: toolset", toolset, "initialization:"
+ print "warning: can't find user-provided command", user_provided_command
+ #FIXME
+ #ECHO "warning: initialized from" [ errors.nearest-user-location ]
+
+ assert(isinstance(command, str))
+
+ return command
+
+# ported from trunk@47174
+def get_invocation_command(toolset, tool, user_provided_command = [],
+ additional_paths = [], path_last = False):
+ """ Same as get_invocation_command_nodefault, except that if no tool is found,
+ returns either the user-provided-command, if present, or the 'tool' parameter.
+ """
+
+ assert(isinstance(toolset, str))
+ assert(isinstance(tool, str))
+ assert(isinstance(user_provided_command, list))
+ if additional_paths is not None:
+ assert(isinstance(additional_paths, list))
+ assert(all([isinstance(path, str) for path in additional_paths]))
+ assert(isinstance(path_last, bool))
+
+ result = get_invocation_command_nodefault(toolset, tool,
+ user_provided_command,
+ additional_paths,
+ path_last)
+
+ if not result:
+ if user_provided_command:
+ result = user_provided_command[0]
+ else:
+ result = tool
+
+ assert(isinstance(result, str))
+
+ return result
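+
+# For instance (hypothetical names), a toolset init might resolve its compiler
+# with:
+#   command = get_invocation_command('my-toolset', 'my-cc', user_command, [], False)
+# where user_command is the (possibly empty) list supplied by the user.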
+
+# ported from trunk@47281
+def get_absolute_tool_path(command):
+ """
+    Given an invocation command, returns the absolute path to the command.
+    This works even if the command has no path element and is found on the
+    PATH.
+ """
+ if os.path.dirname(command):
+ return os.path.dirname(command)
+ else:
+ programs = path.programs_path()
+ m = path.glob(programs, [command, command + '.exe' ])
+ if not len(m):
+ if __debug_configuration:
+ print "Could not find:", command, "in", programs
+ return None
+ return os.path.dirname(m[0])
+
+# ported from trunk@47174
+def find_tool(name, additional_paths = [], path_last = False):
+ """ Attempts to find tool (binary) named 'name' in PATH and in
+ 'additional-paths'. If found in path, returns 'name'. If
+ found in additional paths, returns full name. If the tool
+ is found in several directories, returns the first path found.
+ Otherwise, returns the empty string. If 'path_last' is specified,
+ path is checked after 'additional_paths'.
+ """
+ assert(isinstance(name, str))
+ assert(isinstance(additional_paths, list))
+ assert(isinstance(path_last, bool))
+
+ programs = path.programs_path()
+ match = path.glob(programs, [name, name + '.exe'])
+ additional_match = path.glob(additional_paths, [name, name + '.exe'])
+
+ result = []
+ if path_last:
+ result = additional_match
+ if not result and match:
+ result = match
+
+ else:
+ if match:
+ result = match
+
+ elif additional_match:
+ result = additional_match
+
+ if result:
+ return path.native(result[0])
+ else:
+ return ''
+
+#ported from trunk@47281
+def check_tool_aux(command):
+    """ Checks if 'command' can be found either in PATH
+        or is a full path to an existing file.
+ """
+ assert(isinstance(command, str))
+ dirname = os.path.dirname(command)
+ if dirname:
+ if os.path.exists(command):
+ return command
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ elif on_windows() and os.path.exists(command + '.exe'):
+ return command
+ # Only NT will run .bat files by their unqualified names.
+ elif os_name() == 'NT' and os.path.exists(command + '.bat'):
+ return command
+ else:
+ paths = path.programs_path()
+ if path.glob(paths, [command]):
+ return command
+
+# ported from trunk@47281
+def check_tool(command):
+ """ Checks that a tool can be invoked by 'command'.
+ If command is not an absolute path, checks if it can be found in 'path'.
+        If command is an absolute path, checks that it exists. Returns 'command'
+        if ok and None otherwise.
+ """
+ assert(isinstance(command, list))
+ assert(all(isinstance(c, str) for c in command))
+ #FIXME: why do we check the first and last elements????
+ if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
+ return command
+
+# ported from trunk@47281
+def handle_options(tool, condition, command, options):
+ """ Handle common options for toolset, specifically sets the following
+ flag variables:
+ - CONFIG_COMMAND to 'command'
+        - OPTIONS for compile to the value of <compileflags> in options
+ - OPTIONS for compile.c to the value of <cflags> in options
+ - OPTIONS for compile.c++ to the value of <cxxflags> in options
+ - OPTIONS for compile.fortran to the value of <fflags> in options
+        - OPTIONS for link to the value of <linkflags> in options
+ """
+ from b2.build import toolset
+
+ assert(isinstance(tool, str))
+ assert(isinstance(condition, list))
+ assert(isinstance(command, str))
+ assert(isinstance(options, list))
+ assert(command)
+ toolset.flags(tool, 'CONFIG_COMMAND', condition, [command])
+ toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options))
+ toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options))
+ toolset.flags(tool + '.compile.c++', 'OPTIONS', condition, feature.get_values('<cxxflags>', options))
+ toolset.flags(tool + '.compile.fortran', 'OPTIONS', condition, feature.get_values('<fflags>', options))
+ toolset.flags(tool + '.link', 'OPTIONS', condition, feature.get_values('<linkflags>', options))
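+
+# A caller would typically pass the condition obtained from
+# check_init_parameters() together with the detected command, e.g.
+# (hypothetical variables):
+#   handle_options('my-toolset', condition, command, options)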
+
+# ported from trunk@47281
+def get_program_files_dir():
+    """ Returns the location of the "Program Files" directory on a Windows
+        platform.
+ """
+ ProgramFiles = bjam.variable("ProgramFiles")
+ if ProgramFiles:
+ ProgramFiles = ' '.join(ProgramFiles)
+ else:
+ ProgramFiles = "c:\\Program Files"
+ return ProgramFiles
+
+# ported from trunk@47281
+def rm_command():
+ return __RM
+
+# ported from trunk@47281
+def copy_command():
+ return __CP
+
+# ported from trunk@47281
+def variable_setting_command(variable, value):
+ """
+ Returns the command needed to set an environment variable on the current
+ platform. The variable setting persists through all following commands and is
+ visible in the environment seen by subsequently executed commands. In other
+ words, on Unix systems, the variable is exported, which is consistent with the
+ only possible behavior on Windows systems.
+ """
+ assert(isinstance(variable, str))
+ assert(isinstance(value, str))
+
+ if os_name() == 'NT':
+ return "set " + variable + "=" + value + os.linesep
+ else:
+ # (todo)
+ # The following does not work on CYGWIN and needs to be fixed. On
+ # CYGWIN the $(nl) variable holds a Windows new-line \r\n sequence that
+ # messes up the executed export command which then reports that the
+ # passed variable name is incorrect. This is most likely due to the
+ # extra \r character getting interpreted as a part of the variable name.
+ #
+ # Several ideas pop to mind on how to fix this:
+ # * One way would be to separate the commands using the ; shell
+ # command separator. This seems like the quickest possible
+ # solution but I do not know whether this would break code on any
+        #     platforms I have no access to.
+ # * Another would be to not use the terminating $(nl) but that would
+ # require updating all the using code so it does not simply
+ # prepend this variable to its own commands.
+ # * I guess the cleanest solution would be to update Boost Jam to
+ # allow explicitly specifying \n & \r characters in its scripts
+ # instead of always relying only on the 'current OS native newline
+ # sequence'.
+ #
+ # Some code found to depend on this behaviour:
+ # * This Boost Build module.
+ # * __test__ rule.
+ # * path-variable-setting-command rule.
+ # * python.jam toolset.
+ # * xsltproc.jam toolset.
+ # * fop.jam toolset.
+ # (todo) (07.07.2008.) (Jurko)
+ #
+ # I think that this works correctly in python -- Steven Watanabe
+ return variable + "=" + value + os.linesep + "export " + variable + os.linesep
+
+def path_variable_setting_command(variable, paths):
+ """
+    Returns a command that sets a named shell path variable to the given NATIVE
+ paths on the current platform.
+ """
+ assert(isinstance(variable, str))
+ assert(isinstance(paths, list))
+ sep = os.path.pathsep
+ return variable_setting_command(variable, sep.join(paths))
+
+def prepend_path_variable_command(variable, paths):
+ """
+ Returns a command that prepends the given paths to the named path variable on
+ the current platform.
+ """
+ return path_variable_setting_command(variable,
+ paths + os.environ.get(variable, "").split(os.pathsep))
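+
+# For example, on a Unix-like host
+#   prepend_path_variable_command('PATH', ['/opt/mytool/bin'])
+# returns roughly "PATH=/opt/mytool/bin:<existing PATH>" followed by
+# "export PATH" ('/opt/mytool/bin' is just an illustrative path).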
+
+def file_creation_command():
+ """
+    Returns a command which can create a file. If 'r' is the result of the
+    invocation, then 'r foobar' will create foobar with unspecified content.
+    What happens if the file already exists is unspecified.
+ """
+ if os_name() == 'NT':
+ return "echo. > "
+ else:
+ return "touch "
+
+#FIXME: global variable
+__mkdir_set = set()
+__re_windows_drive = re.compile(r'^.*:\$')
+
+def mkdir(engine, target):
+ # If dir exists, do not update it. Do this even for $(DOT).
+ bjam.call('NOUPDATE', target)
+
+ global __mkdir_set
+
+ # FIXME: Where is DOT defined?
+ #if $(<) != $(DOT) && ! $($(<)-mkdir):
+ if target != '.' and target not in __mkdir_set:
+ # Cheesy gate to prevent multiple invocations on same dir.
+ __mkdir_set.add(target)
+
+ # Schedule the mkdir build action.
+ if os_name() == 'NT':
+ engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, [])
+ else:
+ engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, [])
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ engine.add_dependency('dirs', target)
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ s = os.path.dirname(target)
+ if os_name() == 'NT':
+ if(__re_windows_drive.match(s)):
+ s = ''
+
+ if s:
+ if s != target:
+ engine.add_dependency(target, s)
+ mkdir(engine, s)
+ else:
+ bjam.call('NOTFILE', s)
+
+__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)')
+
+def format_name(format, name, target_type, prop_set):
+ """ Given a target, as given to a custom tag rule, returns a string formatted
+        according to the passed format. Format is a list of the properties to be
+        represented in the result. For each element of format the corresponding
+        target information is obtained and added to the result string. For all but
+        the literal, the format value is taken as the string prepended to the
+        output in order to join the item to the rest of the result. If no joiner is
+        given, "-" is used.
+
+ The format options can be:
+
+ <base>[joiner]
+ :: The basename of the target name.
+ <toolset>[joiner]
+ :: The abbreviated toolset tag being used to build the target.
+ <threading>[joiner]
+ :: Indication of a multi-threaded build.
+ <runtime>[joiner]
+ :: Collective tag of the build runtime.
+ <version:/version-feature | X.Y[.Z]/>[joiner]
+ :: Short version tag taken from the given "version-feature"
+ in the build properties. Or if not present, the literal
+ value as the version number.
+ <property:/property-name/>[joiner]
+ :: Direct lookup of the given property-name value in the
+ build properties. /property-name/ is a regular expression.
+ e.g. <property:toolset-.*:flavor> will match every toolset.
+ /otherwise/
+ :: The literal value of the format argument.
+
+ For example this format:
+
+ boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+
+ Might return:
+
+ boost_thread-vc80-mt-gd-1_33.dll, or
+ boost_regex-vc80-gd-1_33.dll
+
+ The returned name also has the target type specific prefix and suffix which
+ puts it in a ready form to use as the value from a custom tag rule.
+ """
+ assert(isinstance(format, list))
+ assert(isinstance(name, str))
+    assert(isinstance(target_type, str) or target_type is None)
+ # assert(isinstance(prop_set, property_set.PropertySet))
+ if type.is_derived(target_type, 'LIB'):
+        result = ""
+ for f in format:
+ grist = get_grist(f)
+ if grist == '<base>':
+ result += os.path.basename(name)
+ elif grist == '<toolset>':
+ result += join_tag(get_value(f),
+ toolset_tag(name, target_type, prop_set))
+ elif grist == '<threading>':
+ result += join_tag(get_value(f),
+ threading_tag(name, target_type, prop_set))
+ elif grist == '<runtime>':
+ result += join_tag(get_value(f),
+ runtime_tag(name, target_type, prop_set))
+ elif grist.startswith('<version:'):
+ key = grist[len('<version:'):-1]
+            version = prop_set.get('<' + key + '>')
+            # prop_set.get() returns a list of values; fall back to the literal key.
+            version = version[0] if version else key
+            version = __re_version.match(version)
+            result += join_tag(get_value(f), version.group(1) + '_' + version.group(2))
+ elif grist.startswith('<property:'):
+ key = grist[len('<property:'):-1]
+ property_re = re.compile('<(' + key + ')>')
+ p0 = None
+ for prop in prop_set.raw():
+ match = property_re.match(prop)
+ if match:
+                    p0 = match.group(1)
+ break
+ if p0:
+ p = prop_set.get('<' + p0 + '>')
+ if p:
+ assert(len(p) == 1)
+                    result += join_tag(get_value(f), p[0])
+ else:
+ result += f
+
+ result = b2.build.virtual_target.add_prefix_and_suffix(
+ ''.join(result), target_type, prop_set)
+ return result
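+
+# A hypothetical tag callback could forward to format_name, for example:
+#   def tag(name, target_type, prop_set):
+#       return format_name(['<base>', '<toolset>', '<threading>', '<runtime>'],
+#                          name, target_type, prop_set)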
+
+def join_tag(joiner, tag):
+ if tag:
+ if not joiner: joiner = '-'
+ return joiner + tag
+ return ''
+
+__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)")
+
+def toolset_tag(name, target_type, prop_set):
+ tag = ''
+
+ properties = prop_set.raw()
+ tools = prop_set.get('<toolset>')
+ assert(len(tools) == 1)
+ tools = tools[0]
+ if tools.startswith('borland'): tag += 'bcb'
+ elif tools.startswith('como'): tag += 'como'
+ elif tools.startswith('cw'): tag += 'cw'
+ elif tools.startswith('darwin'): tag += 'xgcc'
+    elif tools.startswith('edg'): tag += 'edg'
+ elif tools.startswith('gcc'):
+        flavor = prop_set.get('<toolset-gcc:flavor>')
+        # prop_set.get() returns a list of values; use the first one (if any).
+        flavor = flavor[0] if flavor else ''
+        if flavor.find('mingw') != -1:
+ tag += 'mgw'
+ else:
+ tag += 'gcc'
+ elif tools == 'intel':
+ if prop_set.get('<toolset-intel:platform>') == ['win']:
+ tag += 'iw'
+ else:
+ tag += 'il'
+ elif tools.startswith('kcc'): tag += 'kcc'
+ elif tools.startswith('kylix'): tag += 'bck'
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ elif tools.startswith('mipspro'): tag += 'mp'
+ elif tools.startswith('msvc'): tag += 'vc'
+ elif tools.startswith('sun'): tag += 'sw'
+ elif tools.startswith('tru64cxx'): tag += 'tru'
+ elif tools.startswith('vacpp'): tag += 'xlc'
+
+ for prop in properties:
+ match = __re_toolset_version.match(prop)
+ if(match):
+ version = match
+ break
+ version_string = None
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if tag == 'vc':
+ if version.group(1) == '6':
+ # Cancel minor version.
+ version_string = '6'
+ elif version.group(1) == '7' and version.group(2) == '0':
+ version_string = '7'
+
+ # On intel, version is not added, because it does not matter and it's the
+ # version of vc used as backend that matters. Ideally, we'd encode the
+ # backend version but that would break compatibility with V1.
+ elif tag == 'iw':
+ version_string = ''
+
+ # On borland, version is not added for compatibility with V1.
+ elif tag == 'bcb':
+ version_string = ''
+
+    if version_string is None:
+        version_string = version.group(1) + version.group(2)
+
+    tag += version_string
+
+ return tag
+
+
+def threading_tag(name, target_type, prop_set):
+ tag = ''
+ properties = prop_set.raw()
+ if '<threading>multi' in properties: tag = 'mt'
+
+ return tag
+
+
+def runtime_tag(name, target_type, prop_set ):
+ tag = ''
+
+ properties = prop_set.raw()
+ if '<runtime-link>static' in properties: tag += 's'
+
+    # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect gcc
+ # toolset, the tag rules won't even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # which are known to care about runtime debug.
+ if '<toolset>msvc' in properties \
+ or '<stdlib>stlport' in properties \
+ or '<toolset-intel:platform>win' in properties:
+ if '<runtime-debugging>on' in properties: tag += 'g'
+
+ if '<python-debugging>on' in properties: tag += 'y'
+ if '<variant>debug' in properties: tag += 'd'
+ if '<stdlib>stlport' in properties: tag += 'p'
+ if '<stdlib-stlport:iostream>hostios' in properties: tag += 'n'
+
+ return tag
+
+
+## TODO:
+##rule __test__ ( )
+##{
+## import assert ;
+##
+## local nl = "
+##" ;
+##
+## local save-os = [ modules.peek os : .name ] ;
+##
+## modules.poke os : .name : LINUX ;
+##
+## assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)"
+## : path-variable-setting-command PATH : foo bar baz ;
+##
+## assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)"
+## : prepend-path-variable-command PATH : foo bar ;
+##
+## modules.poke os : .name : NT ;
+##
+## assert.result "set PATH=foo;bar;baz$(nl)"
+## : path-variable-setting-command PATH : foo bar baz ;
+##
+## assert.result "set PATH=foo;bar;%PATH%$(nl)"
+## : prepend-path-variable-command PATH : foo bar ;
+##
+## modules.poke os : .name : $(save-os) ;
+##}
+
+def init(manager):
+ engine = manager.engine()
+
+ engine.register_action("common.MkDir1-quick-fix-for-unix", 'mkdir -p "$(<)"')
+ engine.register_action("common.MkDir1-quick-fix-for-windows", 'if not exist "$(<)\\" mkdir "$(<)"')
+
+ import b2.tools.make
+ import b2.build.alias
+
+ global __RM, __CP, __IGNORE, __LN
+ # ported from trunk@47281
+ if os_name() == 'NT':
+ __RM = 'del /f /q'
+ __CP = 'copy'
+ __IGNORE = '2>nul >nul & setlocal'
+ __LN = __CP
+ #if not __LN:
+ # __LN = CP
+ else:
+ __RM = 'rm -f'
+ __CP = 'cp'
+ __IGNORE = ''
+ __LN = 'ln'
+
+ engine.register_action("common.Clean", __RM + ' "$(>)"',
+ flags=['piecemeal', 'together', 'existing'])
+ engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"')
+ engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE,
+ flags=['quietly', 'updated', 'piecemeal', 'together'])
+
+ engine.register_action("common.hard-link",
+ __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep +
+ __LN + ' "$(>)" "$(<)" $(NULL_OUT)')
diff --git a/tools/build/src/tools/common_clang_vc.jam b/tools/build/src/tools/common_clang_vc.jam
new file mode 100644
index 0000000000..bdf1ca8747
--- /dev/null
+++ b/tools/build/src/tools/common_clang_vc.jam
@@ -0,0 +1,987 @@
+# Copyright 2003, 2005 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2005 Toon Knapen
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Provides actions common to all toolsets, such as creating directories and
+# removing files.
+
+import os ;
+import modules ;
+import utility ;
+import print ;
+import type ;
+import feature ;
+import errors ;
+import path ;
+import sequence ;
+import toolset ;
+import virtual-target ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
+{
+ .show-configuration = true ;
+}
+
+# Configurations
+#
+# The following class helps to manage toolset configurations. Each configuration
+# has a unique ID and one or more parameters. A typical example of a unique ID
+# is a condition generated by 'common.check-init-parameters' rule. Other kinds
+# of IDs can be used. Parameters may include any details about the configuration
+# like 'command', 'path', etc.
+#
+# A toolset configuration may be in one of the following states:
+#
+# - registered
+# Configuration has been registered (e.g. explicitly or by auto-detection
+# code) but has not yet been marked as used, i.e. 'toolset.using' rule has
+# not yet been called for it.
+# - used
+#     Once called, the 'toolset.using' rule marks the configuration as 'used'.
+#
+# The main difference between the states above is that while a configuration is
+# 'registered' its options can be freely changed. This is useful in particular
+# for autodetection code - all detected configurations may be safely overwritten
+# by user code.
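+#
+# A rough usage sketch (illustrative only, hypothetical variable names):
+#
+#   .configs = [ new configurations ] ;
+#   if [ $(.configs).register $(condition) ]
+#   {
+#       $(.configs).set $(condition) : command : $(command) ;
+#   }
+#   $(.configs).use $(condition) ;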
+
+class configurations
+{
+ import errors ;
+
+ rule __init__ ( )
+ {
+ }
+
+ # Registers a configuration.
+ #
+ # Returns 'true' if the configuration has been added and an empty value if
+ # it already exists. Reports an error if the configuration is 'used'.
+ #
+ rule register ( id )
+ {
+ if $(id) in $(self.used)
+ {
+ errors.error "common: the configuration '$(id)' is in use" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.all)
+ {
+ self.all += $(id) ;
+
+ # Indicate that a new configuration has been added.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Mark a configuration as 'used'.
+ #
+ # Returns 'true' if the state of the configuration has been changed to
+    # 'used' and an empty value if the state has not been changed. Reports an
+ # error if the configuration is not known.
+ #
+ rule use ( id )
+ {
+ if ! $(id) in $(self.all)
+ {
+ errors.error "common: the configuration '$(id)' is not known" ;
+ }
+
+ local retval ;
+
+ if ! $(id) in $(self.used)
+ {
+ self.used += $(id) ;
+
+ # Indicate that the configuration has been marked as 'used'.
+ retval = true ;
+ }
+
+ return $(retval) ;
+ }
+
+ # Return all registered configurations.
+ #
+ rule all ( )
+ {
+ return $(self.all) ;
+ }
+
+ # Return all used configurations.
+ #
+ rule used ( )
+ {
+ return $(self.used) ;
+ }
+
+ # Returns the value of a configuration parameter.
+ #
+ rule get ( id : param )
+ {
+ return $(self.$(param).$(id)) ;
+ }
+
+ # Sets the value of a configuration parameter.
+ #
+ rule set ( id : param : value * )
+ {
+ self.$(param).$(id) = $(value) ;
+ }
+}
+
+
+# The rule for checking toolset parameters. Trailing parameters should all be
+# parameter name/value pairs. The rule will check that each parameter either has
+# a value in each invocation or has no value in each invocation. Also, the rule
+# will check that the combination of all parameter values is unique in all
+# invocations.
+#
+# Each parameter name corresponds to a subfeature. This rule will declare a
+# subfeature the first time a non-empty parameter value is passed and will
+# extend it with all the values.
+#
+# The return value from this rule is a condition to be used for flags settings.
+#
+rule check-init-parameters ( toolset requirement * : * )
+{
+ local sig = $(toolset) ;
+ local condition = <toolset>$(toolset) ;
+ local subcondition ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local name = $($(index)[1]) ;
+ local value = $($(index)[2]) ;
+
+ if $(value)-is-not-empty
+ {
+ condition = $(condition)-$(value) ;
+ if $(.had-unspecified-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "no value was specified in earlier"
+ "initialization" : "an explicit value is specified now" ;
+ }
+ # The below logic is for intel compiler. It calls this rule with
+ # 'intel-linux' and 'intel-win' as toolset, so we need to get the
+ # base part of toolset name. We can not pass 'intel' as toolset
+ # because in that case it will be impossible to register versionless
+ # intel-linux and intel-win toolsets of a specific version.
+ local t = $(toolset) ;
+ local m = [ MATCH ([^-]*)- : $(toolset) ] ;
+ if $(m)
+ {
+ t = $(m[1]) ;
+ }
+ if ! $(.had-value.$(toolset).$(name))
+ {
+ if ! $(.declared-subfeature.$(t).$(name))
+ {
+ feature.subfeature toolset $(t) : $(name) : : propagated ;
+ .declared-subfeature.$(t).$(name) = true ;
+ }
+ .had-value.$(toolset).$(name) = true ;
+ }
+ feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
+ subcondition += <toolset-$(t):$(name)>$(value) ;
+ }
+ else
+ {
+ if $(.had-value.$(toolset).$(name))
+ {
+ errors.user-error
+ "$(toolset) initialization: parameter '$(name)'"
+ "inconsistent" : "an explicit value was specified in an"
+ "earlier initialization" : "no value is specified now" ;
+ }
+ .had-unspecified-value.$(toolset).$(name) = true ;
+ }
+ sig = $(sig)$(value:E="")- ;
+ }
+ if $(sig) in $(.all-signatures)
+ {
+ local message =
+ "duplicate initialization of $(toolset) with the following parameters: " ;
+ for local index in 2 3 4 5 6 7 8 9
+ {
+ local p = $($(index)) ;
+ if $(p)
+ {
+ message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
+ }
+ }
+ message += "previous initialization at $(.init-loc.$(sig))" ;
+ errors.user-error
+ $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
+ $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
+ }
+ .all-signatures += $(sig) ;
+ .init-loc.$(sig) = [ errors.nearest-user-location ] ;
+
+ # If we have a requirement, this version should only be applied under that
+ # condition. To accomplish this we add a toolset requirement that imposes
+ # the toolset subcondition, which encodes the version.
+ if $(requirement)
+ {
+ local r = <toolset>$(toolset) $(requirement) ;
+ r = $(r:J=,) ;
+ toolset.add-requirements $(r):$(subcondition) ;
+ }
+
+ # We add the requirements, if any, to the condition to scope the toolset
+ # variables and options to this specific version.
+ condition += $(requirement) ;
+
+ if $(.show-configuration)
+ {
+ ECHO notice: $(condition) ;
+ }
+ return $(condition:J=/) ;
+}
+
+
+# A helper rule to get the command to invoke some tool. If
+# 'user-provided-command' is not given, tries to find binary named 'tool' in
+# PATH and in the passed 'additional-path'. Otherwise, verifies that the first
+# element of 'user-provided-command' is an existing program.
+#
+# This rule returns the command to be used when invoking the tool. If we can not
+# find the tool, a warning is issued. If 'path-last' is specified, PATH is
+# checked after 'additional-paths' when searching for 'tool'.
+#
+rule get-invocation-command-nodefault ( toolset : tool :
+ user-provided-command * : additional-paths * : path-last ? )
+{
+ local command ;
+ if ! $(user-provided-command)
+ {
+ command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO warning: toolset $(toolset) initialization: can not find tool
+ $(tool) ;
+ ECHO warning: initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ else
+ {
+ command = [ check-tool $(user-provided-command) ] ;
+ if ! $(command) && $(.debug-configuration)
+ {
+ ECHO warning: toolset $(toolset) initialization: ;
+ ECHO warning: can not find user-provided command
+ '$(user-provided-command)' ;
+ ECHO warning: initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+
+ return $(command) ;
+}
+
+
+# Same as get-invocation-command-nodefault, except that if no tool is found,
+# returns either the user-provided-command, if present, or the 'tool' parameter.
+#
+rule get-invocation-command ( toolset : tool : user-provided-command * :
+ additional-paths * : path-last ? )
+{
+ local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
+ $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
+
+ if ! $(result)
+ {
+ if $(user-provided-command)
+ {
+ result = $(user-provided-command) ;
+ }
+ else
+ {
+ result = $(tool) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Given an invocation command return the absolute path to the command. This
+# works even if command has no path element and was found on the PATH.
+#
+rule get-absolute-tool-path ( command )
+{
+ if $(command:D)
+ {
+ return $(command:D) ;
+ }
+ else
+ {
+ local m = [ GLOB [ modules.peek : PATH Path path ] : $(command)
+ $(command).exe ] ;
+ return $(m[1]:D) ;
+ }
+}
+
+
+# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
+# If found in PATH, returns 'name' and if found in additional paths, returns
+# absolute name. If the tool is found in several directories, returns the first
+# path found. Otherwise, returns an empty string. If 'path-last' is specified,
+# PATH is searched after 'additional-paths'.
+#
+rule find-tool ( name : additional-paths * : path-last ? )
+{
+ local path = [ path.programs-path ] ;
+ local match = [ path.glob $(path) : $(name) $(name).exe ] ;
+ local additional-match = [ path.glob $(additional-paths) : $(name)
+ $(name).exe ] ;
+
+ local result ;
+ if $(path-last)
+ {
+ result = $(additional-match) ;
+ if ! $(result) && $(match)
+ {
+ result = $(name) ;
+ }
+ }
+ else
+ {
+ if $(match)
+ {
+ result = $(name) ;
+ }
+ else
+ {
+ result = $(additional-match) ;
+ }
+ }
+ if $(result)
+ {
+ return [ path.native $(result[1]) ] ;
+ }
+}
+
+
+# Checks if 'command' can be found either in PATH or is a full path to an
+# existing file.
+#
+local rule check-tool-aux ( command )
+{
+ if $(command:D)
+ {
+ if [ path.exists $(command) ]
+ # Both NT and Cygwin will run .exe files by their unqualified names.
+ || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
+ # Only NT will run .bat & .cmd files by their unqualified names.
+ || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
+ [ path.exists $(command).cmd ] ) )
+ {
+ return $(command) ;
+ }
+ }
+ else
+ {
+ if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
+ {
+ return $(command) ;
+ }
+ }
+}
+
+
+# Checks that a tool can be invoked by 'command'. If command is not an absolute
+# path, checks if it can be found in 'path'. If command is an absolute path,
+# checks that it exists. Returns 'command' if ok or an empty string otherwise.
+#
+local rule check-tool ( xcommand + )
+{
+ if [ check-tool-aux $(xcommand[1]) ] ||
+ [ check-tool-aux $(xcommand[-1]) ]
+ {
+ return $(xcommand) ;
+ }
+}
+
+
+# Handle common options for toolset, specifically sets the following flag
+# variables:
+# - CONFIG_COMMAND to $(command)
+# - OPTIONS for compile to the value of <compileflags> in $(options)
+# - OPTIONS for compile.c to the value of <cflags> in $(options)
+# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
+# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
+# - OPTIONS for link to the value of <linkflags> in $(options)
+#
+rule handle-options ( toolset : condition * : command * : options * )
+{
+ if $(.debug-configuration)
+ {
+ ECHO notice: will use '$(command)' for $(toolset), condition
+ $(condition:E=(empty)) ;
+ }
+
+ # The last parameter ('unchecked') says it is OK to set flags for another
+ # module.
+ toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
+ : unchecked ;
+
+ toolset.flags $(toolset).compile OPTIONS $(condition) :
+ [ feature.get-values <compileflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c OPTIONS $(condition) :
+ [ feature.get-values <cflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
+ [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ toolset.flags $(toolset).link OPTIONS $(condition) :
+ [ feature.get-values <linkflags> : $(options) ] : unchecked ;
+}
+
+
+# Returns the location of the "program files" directory on a Windows platform.
+#
+rule get-program-files-dir ( )
+{
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ ProgramFiles = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ ProgramFiles = "c:\\Program Files" ;
+ }
+ return $(ProgramFiles) ;
+}
+
+
+if [ os.name ] = NT
+{
+ RM = del /f /q ;
+ CP = copy /b ;
+ IGNORE = "2>nul >nul & setlocal" ;
+ LN ?= $(CP) ;
+ # Ugly hack to convince copy to set the timestamp of the destination to the
+ # current time by concatenating the source with a nonexistent file. Note
+    # that this requires /b (binary), as the default when concatenating files
+    # is /a (ascii).
+ WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
+}
+else
+{
+ RM = rm -f ;
+ CP = cp ;
+ LN = ln ;
+}
+
+
+rule rm-command ( )
+{
+ return $(RM) ;
+}
+
+
+rule copy-command ( )
+{
+ return $(CP) ;
+}
+
+
+if "\n" = "n"
+{
+ # Escape characters not supported so use ugly hacks. Will not work on Cygwin
+ # - see below.
+ nl = "
+" ;
+ q = "" ;
+}
+else
+{
+ nl = "\n" ;
+ q = "\"" ;
+}
+
+# Returns the command needed to set an environment variable on the current
+# platform. The variable setting persists through all following commands and is
+# visible in the environment seen by subsequently executed commands. In other
+# words, on Unix systems, the variable is exported, which is consistent with the
+# only possible behavior on Windows systems.
+#
+rule variable-setting-command ( variable : value )
+{
+ if [ os.name ] = NT
+ {
+ return "set $(variable)=$(value)$(nl)" ;
+ }
+ else
+ {
+        # If we do not have escape character support in bjam, the code below
+ # blows up on CYGWIN, since the $(nl) variable holds a Windows new-line
+ # \r\n sequence that messes up the executed export command which then
+ # reports that the passed variable name is incorrect.
+ # But we have a check for cygwin in kernel/bootstrap.jam already.
+ return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
+ }
+}
+
+
+# Returns a command that sets a named shell path variable to the given NATIVE
+# paths on the current platform.
+#
+rule path-variable-setting-command ( variable : paths * )
+{
+ local sep = [ os.path-separator ] ;
+ return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
+}
+
+
+# Returns a command that prepends the given paths to the named path variable on
+# the current platform.
+#
+rule prepend-path-variable-command ( variable : paths * )
+{
+ return [ path-variable-setting-command $(variable)
+ : $(paths) [ os.expand-variable $(variable) ] ] ;
+}
+
+
+# Returns a command which can create a file. If 'r' is the result of the
+# invocation, then 'r foobar' will create foobar with unspecified content. What
+# happens if the file already exists is unspecified.
+#
+rule file-creation-command ( )
+{
+ if [ os.name ] = NT
+ {
+ # A few alternative implementations on Windows:
+ #
+ # 'type NUL >> '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also not change
+ # the target's timestamp in case the file already exists.
+ #
+ # 'type NUL > '
+ # That would construct an empty file instead of a file containing
+ # a space and an end-of-line marker but it would also destroy an
+ # already existing file by overwriting it with an empty one.
+ #
+ # I guess the best solution would be to allow Boost Jam to define
+ # built-in functions such as 'create a file', 'touch a file' or 'copy a
+ # file' which could be used from inside action code. That would allow
+ # completely portable operations without this kind of kludge.
+ # (22.02.2009.) (Jurko)
+ return "echo. > " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
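+
+# Illustrative note (not part of the original module): a typical caller stores
+# the result, e.g.
+#   local create = [ common.file-creation-command ] ;
+# and then writes "$(create)some-file" in an action body, which expands to
+# "echo. > some-file" on NT and "touch some-file" elsewhere.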
+
+
+# Returns a command that may be used for 'touching' files. It is not a real
+# 'touch' command on NT because it adds an empty line at the end of the file,
+# but it works with source files.
+#
+rule file-touch-command ( )
+{
+ if [ os.name ] = NT
+ {
+ return "echo. >> " ;
+ }
+ else
+ {
+ return "touch " ;
+ }
+}
+
+
+rule MkDir
+{
+ # If dir exists, do not update it. Do this even for $(DOT).
+ NOUPDATE $(<) ;
+
+ if $(<) != $(DOT) && ! $($(<)-mkdir)
+ {
+ # Cheesy gate to prevent multiple invocations on same dir.
+ $(<)-mkdir = true ;
+
+ # Schedule the mkdir build action.
+ common.mkdir $(<) ;
+
+ # Prepare a Jam 'dirs' target that can be used to make the build only
+ # construct all the target directories.
+ DEPENDS dirs : $(<) ;
+
+ # Recursively create parent directories. $(<:P) = $(<)'s parent & we
+ # recurse until root.
+
+ local s = $(<:P) ;
+ if [ os.name ] = NT
+ {
+ switch $(s)
+ {
+ case *: : s = ;
+ case *:\\ : s = ;
+ }
+ }
+
+ if $(s)
+ {
+ if $(s) != $(<)
+ {
+ DEPENDS $(<) : $(s) ;
+ MkDir $(s) ;
+ }
+ else
+ {
+ NOTFILE $(s) ;
+ }
+ }
+ }
+}
+
+
+#actions MkDir1
+#{
+# mkdir "$(<)"
+#}
+
+# The following quick-fix actions should be replaced with the original MkDir1
+# action once Boost Jam gets updated to correctly detect different paths leading
+# up to the same filesystem target and to trigger their build action only once.
+# (todo) (04.07.2008.) (Jurko)
+
+if [ os.name ] = NT
+{
+ actions mkdir
+ {
+ if not exist "$(<)\\" mkdir "$(<)"
+ }
+}
+else
+{
+ actions mkdir
+ {
+ mkdir -p "$(<)"
+ }
+}
+
+actions piecemeal together existing Clean
+{
+ $(RM) "$(>)"
+}
+
+
+rule copy
+{
+}
+
+
+actions copy
+{
+ $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
+}
+
+
+rule RmTemps
+{
+}
+
+
+actions quietly updated piecemeal together RmTemps
+{
+ $(RM) "$(>)" $(IGNORE)
+}
+
+
+actions hard-link
+{
+ $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
+ $(LN) "$(>)" "$(<)" $(NULL_OUT)
+}
+
+
+# Given a target, as given to a custom tag rule, returns a string formatted
+# according to the passed format. Format is a list of properties that is
+# represented in the result. For each element of format the corresponding target
+# information is obtained and added to the result string. For all but the
+# literal, the format value is taken as the string to prepend to the output
+# to join the item to the rest of the result. If not given, "-" is used as the
+# joiner.
+#
+# The format options can be:
+#
+# <base>[joiner]
+# :: The basename of the target name.
+# <toolset>[joiner]
+# :: The abbreviated toolset tag being used to build the target.
+# <threading>[joiner]
+# :: Indication of a multi-threaded build.
+# <runtime>[joiner]
+# :: Collective tag of the build runtime.
+# <version:/version-feature | X.Y[.Z]/>[joiner]
+# :: Short version tag taken from the given "version-feature" in the
+# build properties. Or if not present, the literal value as the
+# version number.
+# <property:/property-name/>[joiner]
+# :: Direct lookup of the given property-name value in the build
+# properties. /property-name/ is a regular expression. E.g.
+# <property:toolset-.*:flavor> will match every toolset.
+# /otherwise/
+# :: The literal value of the format argument.
+#
+# For example this format:
+#
+# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
+#
+# Might return:
+#
+# boost_thread-vc80-mt-gd-1_33.dll, or
+# boost_regex-vc80-gd-1_33.dll
+#
+# The returned name also has the target type specific prefix and suffix which
+# puts it in a ready form to use as the value from a custom tag rule.
+#
+rule format-name ( format * : name : type ? : property-set )
+{
+ local result = "" ;
+ for local f in $(format)
+ {
+ switch $(f:G)
+ {
+ case <base> :
+ result += $(name:B) ;
+
+ case <toolset> :
+ result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <threading> :
+ result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
+ : $(property-set) ] ] ;
+
+ case <runtime> :
+ result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <qt> :
+ result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
+ $(property-set) ] ] ;
+
+ case <address-model> :
+ result += [ join-tag $(f:G=) : [ address-model-tag $(name) :
+ $(type) : $(property-set) ] ] ;
+
+ case <version:*> :
+ local key = [ MATCH <version:(.*)> : $(f:G) ] ;
+ local version = [ $(property-set).get <$(key)> ] ;
+ version ?= $(key) ;
+ version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
+ result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
+
+ case <property:*> :
+ local key = [ MATCH <property:(.*)> : $(f:G) ] ;
+ local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
+ if $(p0)
+ {
+ local p = [ $(property-set).get <$(p0)> ] ;
+ if $(p)
+ {
+ result += [ join-tag $(f:G=) : $(p) ] ;
+ }
+ }
+
+ case * :
+ result += $(f:G=) ;
+ }
+ }
+ return [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
+ $(property-set) ] ;
+}
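+
+# Illustrative usage sketch (not part of the original module): a project's
+# custom tag rule might forward to the rule above, e.g.
+#
+#   rule tag ( name : type ? : property-set )
+#   {
+#       return [ common.format-name <base> <toolset> <threading> <runtime>
+#           : $(name) : $(type) : $(property-set) ] ;
+#   }
+#
+# which yields decorated names such as boost_thread-vc80-mt-gd.dll.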
+
+
+local rule join-tag ( joiner ? : tag ? )
+{
+ if ! $(joiner) { joiner = - ; }
+ return $(joiner)$(tag) ;
+}
+
+
+local rule toolset-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ switch [ $(property-set).get <toolset> ]
+ {
+ case borland* : tag += bcb ;
+ case clang* :
+ {
+ switch [ $(property-set).get <toolset-clang:platform> ]
+ {
+ case darwin : tag += clang-darwin ;
+ case linux : tag += clang ;
+ case win : tag += clang-win ;
+ }
+ }
+ case como* : tag += como ;
+ case cw : tag += cw ;
+ case darwin* : tag += xgcc ;
+ case edg* : tag += edg ;
+ case gcc* :
+ {
+ switch [ $(property-set).get <toolset-gcc:flavor> ]
+ {
+ case *mingw* : tag += mgw ;
+ case * : tag += gcc ;
+ }
+ }
+ case intel :
+ if [ $(property-set).get <toolset-intel:platform> ] = win
+ {
+ tag += iw ;
+ }
+ else
+ {
+ tag += il ;
+ }
+ case kcc* : tag += kcc ;
+ case kylix* : tag += bck ;
+ #case metrowerks* : tag += cw ;
+ #case mingw* : tag += mgw ;
+ case mipspro* : tag += mp ;
+ case msvc* : tag += vc ;
+ case qcc* : tag += qcc ;
+ case sun* : tag += sw ;
+ case tru64cxx* : tag += tru ;
+ case vacpp* : tag += xlc ;
+ }
+ local version = [ MATCH <toolset.*version>([0123456789]+)[.]([0123456789]*)
+ : $(properties) ] ;
+ # For historical reasons, vc6.0 and vc7.0 use different naming.
+ if $(tag) = vc
+ {
+ if $(version[1]) = 6
+ {
+ # Cancel minor version.
+ version = 6 ;
+ }
+ else if $(version[1]) = 7 && $(version[2]) = 0
+ {
+ version = 7 ;
+ }
+ }
+ # On Intel, the version is not added because it does not matter; it is the
+ # version of vc used as the backend that matters. Ideally, we should encode
+ # the backend version, but that would break compatibility with V1.
+ if $(tag) = iw
+ {
+ version = ;
+ }
+ if $(tag) = clang-win
+ {
+ local my_tmp = [ $(property-set).get <toolset-clang:compatibility> ] ;
+ version = $(version[1])_$(version[2])_$(my_tmp) ;
+ }
+
+ # On borland, version is not added for compatibility with V1.
+ if $(tag) = bcb
+ {
+ version = ;
+ }
+
+ tag += $(version) ;
+
+ return $(tag:J=) ;
+}
+
+
+local rule threading-tag ( name : type ? : property-set )
+{
+ if <threading>multi in [ $(property-set).raw ]
+ {
+ return mt ;
+ }
+}
+
+
+local rule runtime-tag ( name : type ? : property-set )
+{
+ local tag = ;
+
+ local properties = [ $(property-set).raw ] ;
+ if <runtime-link>static in $(properties) { tag += s ; }
+
+ # This is an ugly thing. In V1, there is code to automatically detect which
+ # properties affect a target. So, if <runtime-debugging> does not affect the
+ # gcc toolset, the tag rules will not even see <runtime-debugging>. Similar
+ # functionality in V2 is not implemented yet, so we just check for toolsets
+ # known to care about runtime debugging.
+ if ( <toolset>msvc in $(properties) ) ||
+ ( <stdlib>stlport in $(properties) ) ||
+ ( <toolset-intel:platform>win in $(properties) )
+ {
+ if <runtime-debugging>on in $(properties) { tag += g ; }
+ }
+
+ if <python-debugging>on in $(properties) { tag += y ; }
+ if <variant>debug in $(properties) { tag += d ; }
+ if <stdlib>stlport in $(properties) { tag += p ; }
+ if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
+
+ return $(tag:J=) ;
+}
+
+
+# Create a tag for the Qt library version
+# "<qt>4.6.0" will result in tag "qt460"
+local rule qt-tag ( name : type ? : property-set )
+{
+ local v = [ MATCH ([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*) :
+ [ $(property-set).get <qt> ] ] ;
+ return qt$(v:J=) ;
+}
+
+
+# Create a tag for the address-model
+# <address-model>64 will simply generate "64"
+local rule address-model-tag ( name : type ? : property-set )
+{
+ return [ $(property-set).get <address-model> ] ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ local save-os = [ modules.peek os : .name ] ;
+
+ modules.poke os : .name : LINUX ;
+ assert.result "PATH=\"foo:bar:baz\"\nexport PATH\n"
+ : path-variable-setting-command PATH : foo bar baz ;
+ assert.result "PATH=\"foo:bar:$PATH\"\nexport PATH\n"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : NT ;
+ assert.result "set PATH=foo;bar;baz\n"
+ : path-variable-setting-command PATH : foo bar baz ;
+ assert.result "set PATH=foo;bar;%PATH%\n"
+ : prepend-path-variable-command PATH : foo bar ;
+
+ modules.poke os : .name : $(save-os) ;
+}
diff --git a/tools/build/v2/tools/como-linux.jam b/tools/build/src/tools/como-linux.jam
index 5c554c8f8b..5c554c8f8b 100644
--- a/tools/build/v2/tools/como-linux.jam
+++ b/tools/build/src/tools/como-linux.jam
diff --git a/tools/build/v2/tools/como-win.jam b/tools/build/src/tools/como-win.jam
index d21a70d6f1..d21a70d6f1 100644
--- a/tools/build/v2/tools/como-win.jam
+++ b/tools/build/src/tools/como-win.jam
diff --git a/tools/build/v2/tools/como.jam b/tools/build/src/tools/como.jam
index 04a05a94b1..04a05a94b1 100644
--- a/tools/build/v2/tools/como.jam
+++ b/tools/build/src/tools/como.jam
diff --git a/tools/build/v2/tools/convert.jam b/tools/build/src/tools/convert.jam
index ac1d701015..ac1d701015 100644
--- a/tools/build/v2/tools/convert.jam
+++ b/tools/build/src/tools/convert.jam
diff --git a/tools/build/src/tools/cray.jam b/tools/build/src/tools/cray.jam
new file mode 100644
index 0000000000..a64f1080a4
--- /dev/null
+++ b/tools/build/src/tools/cray.jam
@@ -0,0 +1,125 @@
+# Copyright 2001 David Abrahams.
+# Copyright 2004, 2005 Markus Schoepflin.
+# Copyright 2011, John Maddock
+# Copyright 2013, Cray, Inc.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Cray C++ Compiler
+# See http://docs.cray.com/books/S-2179-50/html-S-2179-50/S-2179-50-toc.html
+#
+
+import feature generators common ;
+import toolset : flags ;
+
+feature.extend toolset : cray ;
+
+# Inherit from Unix toolset to get library ordering magic.
+toolset.inherit cray : unix ;
+
+generators.override cray.prebuilt : builtin.lib-generator ;
+generators.override cray.prebuilt : builtin.prebuilt ;
+generators.override cray.searched-lib-generator : searched-lib-generator ;
+
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters cray : version $(version) ] ;
+
+ local command = [ common.get-invocation-command cray : CC : $(command) ] ;
+
+ if $(command)
+ {
+ local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+
+ if $(root)
+ {
+ flags cray .root $(condition) : "\"$(root)\"/" ;
+ }
+ }
+ # If we can't find 'CC' anyway, at least show 'CC' in the commands
+ command ?= CC ;
+
+ common.handle-options cray : $(condition) : $(command) : $(options) ;
+}
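+
+# Illustrative configuration sketch (not part of the original file): the
+# toolset is typically enabled from user-config.jam with
+#   using cray ;
+# or, to point at a specific compiler driver (the path below is hypothetical),
+#   using cray : : /opt/cray/bin/CC ;
+# and then selected on the b2 command line with "toolset=cray".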
+
+generators.register-c-compiler cray.compile.c++ : CPP : OBJ : <toolset>cray ;
+generators.register-c-compiler cray.compile.c : C : OBJ : <toolset>cray ;
+
+
+# Unlike most compilers, Cray defaults to static linking.
+# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
+# flags cray.compile OPTIONS <debug-symbols>on : -G0 ;
+flags cray.compile OPTIONS <debug-symbols>on : -g ;
+# flags cray.link OPTIONS <debug-symbols>on : -G0 ;
+flags cray.link OPTIONS <debug-symbols>on : -g ;
+
+flags cray.compile OPTIONS <optimization>off : -O0 ;
+flags cray.compile OPTIONS <optimization>speed : -O2 ;
+flags cray.compile OPTIONS <optimization>space : -O1 ;
+
+# flags cray.compile OPTIONS <inlining>off : -hipa0 ;
+# flags cray.compile OPTIONS <inlining>on : ;
+# flags cray.compile OPTIONS <inlining>full : -hipa5 ;
+
+flags cray.compile OPTIONS <cflags> ;
+flags cray.compile.c++ OPTIONS <cxxflags> ;
+flags cray.compile DEFINES <define> ;
+flags cray.compile INCLUDES <include> ;
+flags cray.link OPTIONS <linkflags> ;
+
+flags cray.compile OPTIONS : -hgnu -fPIC -h system_alloc -h tolerant -h ipa0 ;
+flags cray.compile OPTIONS <link>shared : -dynamic ;
+flags cray.compile OPTIONS <link>static : -static ;
+flags cray.link OPTIONS <link>static : -static ;
+flags cray.link OPTIONS <link>shared ;
+flags cray.link LOPTIONS <link>shared : -dynamic ;
+
+flags cray.link LIBPATH <library-path> ;
+flags cray.link LIBRARIES <library-file> ;
+flags cray.link FINDLIBS-ST <find-static-library> ;
+flags cray.link FINDLIBS-SA <find-shared-library> ;
+
+actions link bind LIBRARIES
+{
+ $(CONFIG_COMMAND) $(OPTIONS) $(LOPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
+}
+
+# When creating dynamic libraries, we don't want to be warned about unresolved
+# symbols; therefore all unresolved symbols are marked as expected by
+# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
+# chain.
+
+actions link.dll bind LIBRARIES
+{
+ $(CONFIG_COMMAND) -o "$(<[1])" -Wl,-h -Wl,$(<[-1]:D=) -shared -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
+
+
+# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
+# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
+# is the default, no special flag is needed.
+actions compile.c
+{
+ $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Note: The compiler is forced to compile the files as C++ (-x cxx) because
+# otherwise it will silently ignore files with no file extension.
+#
+# Note: We deliberately don't suppress any warnings on the compiler command
+# line, the user can always do this in a customized toolset later on.
+
+actions compile.c++
+{
+ $(CONFIG_COMMAND) -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
+}
+
+# Always create the archive from scratch. See the gcc toolset for the rationale.
+RM = [ common.rm-command ] ;
+actions together piecemeal archive
+{
+ $(RM) "$(<)"
+ ar rc $(<) $(>)
+}
diff --git a/tools/build/v2/tools/cw-config.jam b/tools/build/src/tools/cw-config.jam
index 1211b7c049..1211b7c049 100644
--- a/tools/build/v2/tools/cw-config.jam
+++ b/tools/build/src/tools/cw-config.jam
diff --git a/tools/build/v2/tools/cw.jam b/tools/build/src/tools/cw.jam
index ddcbfeb2b6..ddcbfeb2b6 100644
--- a/tools/build/v2/tools/cw.jam
+++ b/tools/build/src/tools/cw.jam
diff --git a/tools/build/src/tools/darwin.jam b/tools/build/src/tools/darwin.jam
new file mode 100644
index 0000000000..edd6b7a4e6
--- /dev/null
+++ b/tools/build/src/tools/darwin.jam
@@ -0,0 +1,590 @@
+# Copyright 2003 Christopher Currie
+# Copyright 2006 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2005-2007 Mat Marcus
+# Copyright 2005-2007 Adobe Systems Incorporated
+# Copyright 2007-2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
+# for an explanation of why it is a separate toolset.
+
+import feature : feature ;
+import toolset : flags ;
+import type ;
+import common ;
+import generators ;
+import path : basename ;
+import version ;
+import property-set ;
+import regex ;
+import errors ;
+
+## Use a framework.
+feature framework : : free ;
+
+## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
+feature macosx-version : : propagated link-incompatible symmetric optional ;
+
+## The minimal MacOSX version to target.
+feature macosx-version-min : : propagated optional ;
+
+## A dependency that is forced to be included in the link.
+feature force-load : : free dependency incidental ;
+
+#############################################################################
+
+_ = " " ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+feature.extend toolset : darwin ;
+import gcc ;
+toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
+
+generators.override darwin.prebuilt : builtin.prebuilt ;
+generators.override darwin.searched-lib-generator : searched-lib-generator ;
+
+# Override default do-nothing generators.
+generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
+generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+type.set-generated-target-suffix PCH : <toolset>darwin : gch ;
+
+toolset.inherit-rules darwin : gcc : localize ;
+toolset.inherit-flags darwin : gcc
+ : <runtime-link>static
+ <architecture>arm/<address-model>32
+ <architecture>arm/<address-model>64
+ <architecture>arm/<instruction-set>
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ <architecture>x86/<instruction-set>
+ <architecture>power/<address-model>32
+ <architecture>power/<address-model>64
+ <architecture>power/<instruction-set> ;
+
+# Options:
+#
+# <root>PATH
+# Platform root path. The common autodetection will set this to
+# "/Developer". When a command is given, it will be set to
+# the corresponding "*.platform/Developer" directory.
+#
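+# Illustrative configuration sketch (not in the original file): user-config.jam
+# might contain a line such as
+#
+#   using darwin : 4.2 : g++-4.2 : <root>/Developer ;
+#
+# where the version, command, and <root> path are examples only and depend on
+# the local Xcode installation.
+#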
+rule init ( version ? : command * : options * : requirement * )
+{
+ # First time around, figure out what the host OSX version is.
+ if ! $(.host-osx-version)
+ {
+ .host-osx-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: OSX version on this machine is $(.host-osx-version) ;
+ }
+ }
+
+ # - The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+
+ # - The bin directory where to find the commands to execute.
+ local bin ;
+
+ # - The configured compile driver command.
+ local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;
+
+ # The version as reported by the compiler
+ local real-version ;
+
+ # - Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
+ if $(bin) = "/usr/bin"
+ {
+ root ?= /Developer ;
+ }
+ else
+ {
+ local r = $(bin:D) ;
+ r = $(r:D) ;
+ root ?= $(r) ;
+ }
+ }
+
+ # - Autodetect the version if not given.
+ if $(command)
+ {
+ # - The 'command' variable can have multiple elements. When calling
+ # the SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ real-version = [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ version ?= $(real-version) ;
+ }
+
+ .real-version.$(version) = $(real-version) ;
+
+ # - Define the condition for this toolset instance.
+ local condition =
+ [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;
+
+ # - Set the toolset generic common options.
+ common.handle-options darwin : $(condition) : $(command) : $(options) ;
+
+ # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates.
+ if $(real-version) < "4.0.0"
+ {
+ flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
+ }
+ # - GCC 4.2 and higher in Darwin does not have -Wno-long-double.
+ if $(real-version) < "4.2.0"
+ {
+ flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
+ }
+ # - For GCC on Darwin with -pedantic, suppress the unsupported 'long long' warning.
+ flags darwin.compile OPTIONS $(condition)/<warnings>all : -Wno-long-long ;
+
+ # - Set the link flags common with the GCC toolset.
+ gcc.init-link-flags darwin darwin $(condition) ;
+
+ # - The symbol strip program.
+ local strip ;
+ if <striper> in $(options)
+ {
+ # We can turn off strip by specifying it as empty, in which
+ # case we switch to using the linker to do the strip.
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
+ flags darwin.link.dll OPTIONS
+ $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
+ flags darwin.link OPTIONS
+ $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
+ }
+ else
+ {
+ # Otherwise we need to find a strip program to use. And hence
+ # also tell the link action that we need to use a strip
+ # post-process.
+ flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
+ strip =
+ [ common.get-invocation-command darwin
+ : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.link .STRIP $(condition) : $(strip[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using strip for $(condition) at $(strip[1]) ;
+ }
+ }
+
+ # - The archive builder (libtool is the default, as creating
+ # archives in darwin is complicated).
+ local archiver =
+ [ common.get-invocation-command darwin
+ : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
+ flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using archiver for $(condition) at $(archiver[1]) ;
+ }
+
+ # - Initialize the SDKs available in the root for this tool.
+ local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;
+
+ #~ ECHO --- ;
+ #~ ECHO --- bin :: $(bin) ;
+ #~ ECHO --- root :: $(root) ;
+ #~ ECHO --- version :: $(version) ;
+ #~ ECHO --- condition :: $(condition) ;
+ #~ ECHO --- strip :: $(strip) ;
+ #~ ECHO --- archiver :: $(archiver) ;
+ #~ ECHO --- sdks :: $(sdks) ;
+ #~ ECHO --- ;
+ #~ EXIT ;
+}
+
+# Add and set options for a discovered SDK version.
+local rule init-sdk ( condition * : root ? : version + : version-feature ? )
+{
+ local rule version-to-feature ( version + )
+ {
+ switch $(version[1])
+ {
+ case iphone* :
+ {
+ return $(version[1])-$(version[2-]:J=.) ;
+ }
+ case mac* :
+ {
+ return $(version[2-]:J=.) ;
+ }
+ case * :
+ {
+ return $(version:J=.) ;
+ }
+ }
+ }
+
+ if $(version-feature)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(root) ;
+ }
+
+ # Add the version to the features for specifying them.
+ if ! $(version-feature) in [ feature.values macosx-version ]
+ {
+ feature.extend macosx-version : $(version-feature) ;
+ }
+ if ! $(version-feature) in [ feature.values macosx-version-min ]
+ {
+ feature.extend macosx-version-min : $(version-feature) ;
+ }
+
+ # Set the flags the version needs to compile with, first
+ # generic options.
+ flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(root) ;
+ flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
+ : -isysroot $(root) ;
+
+ # Then device variation options.
+ switch $(version[1])
+ {
+ case iphonesim* :
+ {
+ local N = $(version[2]) ;
+ if ! $(version[3]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
+ else { N += 0$(version[3]) ; }
+ if ! $(version[4]) { N += 00 ; }
+ else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
+ else { N += 0$(version[4]) ; }
+ N = $(N:J=) ;
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
+ }
+
+ case iphone* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -miphoneos-version-min=$(version[2-]:J=.) ;
+ }
+
+ case mac* :
+ {
+ flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
+ : -mmacosx-version-min=$(version[2-]:J=.) ;
+ }
+ }
+
+ if $(version[3]) > 0
+ {
+ # We have a minor version of an SDK. We want to set up
+ # previous minor versions, plus the current minor version.
+ # So we recurse to set up the previous minor versions, up to
+ # the current version.
+ local minor-minus-1 = [ CALC $(version[3]) - 1 ] ;
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-2]) $(minor-minus-1) : [ version-to-feature $(version[1-2]) $(minor-minus-1) ] ]
+ $(version-feature) ;
+ }
+ else
+ {
+ return $(version-feature) ;
+ }
+ }
+ else if $(version[4])
+ {
+ # We have a patch version of an SDK. We want to set up
+ # both the specific patch version, and the minor version.
+ # So we recurse to set up the patch version. Plus the minor version.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+ else
+ {
+ # Yes, this is intentionally recursive.
+ return
+ [ init-sdk $(condition) : $(root)
+ : $(version) : [ version-to-feature $(version) ] ] ;
+ }
+}
+
+# Determine the MacOSX SDK versions installed and their locations.
+local rule init-available-sdk-versions ( condition * : root ? )
+{
+ root ?= /Developer ;
+ local sdks-root = $(root)/SDKs ;
+ local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
+ local result ;
+ for local sdk in $(sdks)
+ {
+ local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ;
+ local sdk-platform = $(sdk-match[1]:L) ;
+ local sdk-version = $(sdk-match[2-]) ;
+ if $(sdk-version)
+ {
+ switch $(sdk-platform)
+ {
+ case macosx :
+ {
+ sdk-version = mac $(sdk-version) ;
+ }
+ case iphoneos :
+ {
+ sdk-version = iphone $(sdk-version) ;
+ }
+ case iphonesimulator :
+ {
+ sdk-version = iphonesim $(sdk-version) ;
+ }
+ case * :
+ {
+ sdk-version = $(sdk-version:J=-) ;
+ }
+ }
+ result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+# Generic options.
+flags darwin.compile OPTIONS <flags> ;
+
+# The following adds objective-c support to darwin.
+# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
+
+generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
+generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local ps = [ property-set.create $(properties) ] ;
+ local arch = [ $(ps).get <architecture> ] ;
+ local instruction-set = [ $(ps).get <instruction-set> ] ;
+ local address-model = [ $(ps).get <address-model> ] ;
+ local osx-version = [ $(ps).get <macosx-version> ] ;
+ local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
+ gcc-version = $(.real-version.$(gcc-version)) ;
+ local options ;
+
+ local support-ppc64 = 1 ;
+
+ osx-version ?= $(.host-osx-version) ;
+
+ switch $(osx-version)
+ {
+ case iphone* :
+ {
+ support-ppc64 = ;
+ }
+
+ case * :
+ if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
+ {
+ # When targeting 10.6:
+ # - gcc 4.2 will give a compiler error if ppc64 compilation is requested
+ # - gcc 4.0 will compile fine, somehow, but then fail at link time
+ support-ppc64 = ;
+ }
+ }
+ switch $(arch)
+ {
+ case combined :
+ {
+ if $(address-model) = 32_64 {
+ if $(support-ppc64) {
+ options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
+ } else {
+ # Build 3-way binary
+ options = -arch i386 -arch ppc -arch x86_64 ;
+ }
+ } else if $(address-model) = 64 {
+ if $(support-ppc64) {
+ options = -arch x86_64 -arch ppc64 ;
+ } else {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+ } else {
+ options = -arch i386 -arch ppc ;
+ }
+ }
+
+ case x86 :
+ {
+ if $(address-model) = 32_64 {
+ options = -arch i386 -arch x86_64 ;
+ } else if $(address-model) = 64 {
+ options = -arch x86_64 ;
+ } else {
+ options = -arch i386 ;
+ }
+ }
+
+ case power :
+ {
+ if ! $(support-ppc64)
+ && ( $(address-model) = 32_64 || $(address-model) = 64 )
+ {
+ errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
+ }
+
+ if $(address-model) = 32_64 {
+ options = -arch ppc -arch ppc64 ;
+ } else if $(address-model) = 64 {
+ options = -arch ppc64 ;
+ } else {
+ options = -arch ppc ;
+ }
+ }
+
+ case arm :
+ {
+ if $(instruction-set) {
+ options = -arch$(_)$(instruction-set) ;
+ } else {
+ options = -arch arm ;
+ }
+ }
+ }
+
+ if $(options)
+ {
+ OPTIONS on $(targets) += $(options) ;
+ }
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+ gcc.setup-threading $(targets) : $(sources) : $(properties) ;
+}
+
+rule setup-fpic ( targets * : sources * : properties * )
+{
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+}
+
+rule compile.m ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c" ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.m
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.mm ( targets * : sources * : properties * )
+{
+ LANG on $(<) = "-x objective-c++" ;
+ gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.mm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Set the max header padding to allow renaming of libs for installation.
+flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
+
+# To link the static runtime we need to link to all the core runtime libraries.
+flags darwin.link OPTIONS <runtime-link>static
+ : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
+
+# Strip as much as possible when optimizing.
+flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
+
+# Dynamic/shared linking.
+flags darwin.compile OPTIONS <link>shared : -dynamic ;
+
+# Misc options.
+flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ;
+#~ flags darwin.link OPTIONS : -fexceptions ;
+
+# Add the framework names to use.
+flags darwin.link FRAMEWORK <framework> ;
+
+#
+flags darwin.link FORCE_LOAD <force-load> ;
+
+# This flag is useful for debugging the link step;
+# uncomment to see what libtool is doing under the hood.
+#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
+
+# Set up the -F option to include the paths to any frameworks used.
+local rule prepare-framework-path ( target + )
+{
+ # The -framework option only takes the basename of the framework.
+ # The -F option specifies the directories where a framework
+ # is searched for. So, if we find a <framework> feature
+ # with some path, we need to generate the corresponding -F option.
+ local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ;
+
+ # Be sure to generate no -F if there's no path.
+ for local framework-path in $(framework-paths)
+ {
+ if $(framework-path) != ""
+ {
+ FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
+ }
+ }
+}
+
+rule link ( targets * : sources * : properties * )
+{
+ DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+# Note that using strip without any options was reported to result in broken
+# binaries, at least on OS X 10.5.5, see:
+# http://svn.boost.org/trac/boost/ticket/2347
+# So we pass -S -x.
+actions link bind LIBRARIES FORCE_LOAD
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+ $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ prepare-framework-path $(<) ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
+}
+
+# We use libtool instead of ar to support universal binary linking
+# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
+actions piecemeal archive
+{
+ "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
+}
diff --git a/tools/build/v2/tools/darwin.py b/tools/build/src/tools/darwin.py
index c29196060b..c29196060b 100644
--- a/tools/build/v2/tools/darwin.py
+++ b/tools/build/src/tools/darwin.py
diff --git a/tools/build/v2/tools/dmc.jam b/tools/build/src/tools/dmc.jam
index 8af8725a8a..8af8725a8a 100644
--- a/tools/build/v2/tools/dmc.jam
+++ b/tools/build/src/tools/dmc.jam
diff --git a/tools/build/src/tools/docutils.jam b/tools/build/src/tools/docutils.jam
new file mode 100644
index 0000000000..02b2794b20
--- /dev/null
+++ b/tools/build/src/tools/docutils.jam
@@ -0,0 +1,99 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for docutils reStructuredText processing.
+
+import type ;
+import scanner ;
+import generators ;
+import os ;
+import common ;
+import toolset ;
+import path ;
+import feature : feature ;
+import property ;
+import errors ;
+
+.initialized = ;
+
+type.register ReST : rst ;
+
+class rst-scanner : common-scanner
+{
+ rule __init__ ( paths * )
+ {
+ common-scanner.__init__ . $(paths) ;
+ }
+
+ rule pattern ( )
+ {
+ return "^[ ]*\\.\\.[ ]+include::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+image::[ ]+([^
+]+)"
+ "^[ ]*\\.\\.[ ]+figure::[ ]+([^
+]+)"
+ ;
+ }
+}
+
+scanner.register rst-scanner : include ;
+type.set-scanner ReST : rst-scanner ;
+
+generators.register-standard docutils.html : ReST : HTML ;
+
+rule init ( docutils-dir ? : tools-dir ? )
+{
+ docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
+ tools-dir ?= $(docutils-dir)/tools ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+ .docutils-dir = $(docutils-dir) ;
+ .tools-dir = $(tools-dir:R="") ;
+
+ .setup = [
+ common.prepend-path-variable-command PYTHONPATH
+ : $(.docutils-dir) $(.docutils-dir)/extras ] ;
+ RST2XXX = [ common.find-tool rst2html ] ;
+ }
+}
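+
+# Illustrative configuration sketch (not part of the original file):
+# user-config.jam or project-config.jam might contain
+#   using docutils : /usr/share/docutils ;
+# where the path is an example; it is where Ubuntu's 'docutils-common' package
+# installs docutils, as the error message below also notes.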
+
+rule html ( target : source : properties * )
+{
+ if ! [ on $(target) return $(RST2XXX) ]
+ {
+ local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
+ if ! $(.tools-dir) {
+ errors.user-error
+ "The docutils module is used, but not configured. "
+ : ""
+ : "Please modify your user-config.jam or project-config.jam to contain:"
+ : ""
+ : " using docutils : <docutils-dir> ;"
+ : ""
+ : "On Ubuntu, 'docutils-common' package will create /usr/share/docutils."
+ : "Other flavours of Linux likely have docutils as package as well."
+ : "On Windows, you can install from http://docutils.sourceforge.net/."
+ ;
+ }
+ RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
+ }
+}
+
+
+feature docutils : : free ;
+feature docutils-html : : free ;
+feature docutils-cmd : : free ;
+toolset.flags docutils COMMON-FLAGS : <docutils> ;
+toolset.flags docutils HTML-FLAGS : <docutils-html> ;
+toolset.flags docutils RST2XXX : <docutils-cmd> ;
+
+actions html
+{
+ $(.setup)
+ "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
+}
+
diff --git a/tools/build/v2/tools/doxproc.py b/tools/build/src/tools/doxproc.py
index 4cbd5edd2f..4cbd5edd2f 100644
--- a/tools/build/v2/tools/doxproc.py
+++ b/tools/build/src/tools/doxproc.py
diff --git a/tools/build/v2/tools/doxygen-config.jam b/tools/build/src/tools/doxygen-config.jam
index 2cd2ccaeb1..2cd2ccaeb1 100644
--- a/tools/build/v2/tools/doxygen-config.jam
+++ b/tools/build/src/tools/doxygen-config.jam
diff --git a/tools/build/src/tools/doxygen.jam b/tools/build/src/tools/doxygen.jam
new file mode 100644
index 0000000000..6a56ccdc29
--- /dev/null
+++ b/tools/build/src/tools/doxygen.jam
@@ -0,0 +1,775 @@
+# Copyright 2003, 2004 Douglas Gregor
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Copyright 2006 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines rules to handle generation of various outputs from source
+# files documented with doxygen comments. The supported transformations are:
+#
+# * Source -> Doxygen XML -> BoostBook XML
+# * Source -> Doxygen HTML
+#
+# The type of transformation is selected based on the target requested. For
+# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an
+# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen
+# HTML specifying a target with an ".html" suffix will produce a directory
+# <target> with the Doxygen html files, and a <target>.html file redirecting to
+# that directory.
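+#
+# Illustrative usage sketch (not part of the original header): a project that
+# has initialized this module (and BoostBook) might declare
+#
+#   doxygen autodoc : [ glob *.hpp ] : <doxygen:param>EXTRACT_ALL=YES ;
+#
+# where the target name and parameter are examples only; an ".html" target
+# name would request Doxygen HTML output instead, as described above.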
+
+import alias ;
+import boostbook ;
+import "class" : new ;
+import common ;
+import feature ;
+import make ;
+import modules ;
+import generators ;
+import os ;
+import path ;
+import print ;
+import project ;
+import property ;
+import stage ;
+import targets ;
+import toolset ;
+import type ;
+import utility ;
+import xsltproc ;
+import virtual-target ;
+
+
+# Used to specify extra configuration parameters. These get translated into a
+# doxyfile which configures the building of the docs.
+feature.feature doxygen:param : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
+feature.feature prefix : : free ;
+
+# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
+feature.feature reftitle : : free ;
+
+# Which processor to use for various translations from Doxygen.
+feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
+
+# To generate, or not, index sections.
+feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
+
+# The ID for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.id : : free ;
+
+# The title for the resulting BoostBook reference section.
+feature.feature doxygen.doxproc.title : : free ;
+
+# Location for images when generating XML
+feature.feature doxygen:xml-imagedir : : free ;
+
+# Indicates whether the entire directory should be deleted
+feature.feature doxygen.rmdir : off on : optional incidental ;
+
+# Doxygen configuration input file.
+type.register DOXYFILE : doxyfile ;
+
+# Doxygen XML multi-file output.
+type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
+
+# Doxygen XML coalesced output.
+type.register DOXYGEN_XML : doxygen : XML ;
+
+# Doxygen HTML multifile directory.
+type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
+
+# Redirection HTML file to HTML multifile directory.
+type.register DOXYGEN_HTML : : HTML ;
+
+type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
+
+
+# Initialize the Doxygen module. Parameters are:
+# name: the name of the 'doxygen' executable. If not specified, the name
+# 'doxygen' will be used
+#
+rule init ( name ? )
+{
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ .doxproc = [ modules.binding $(__name__) ] ;
+ .doxproc = $(.doxproc:D)/doxproc.py ;
+
+ generators.register-composing doxygen.headers-to-doxyfile
+ : H HPP CPP : DOXYFILE ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_XML_MULTIFILE ;
+ generators.register-standard doxygen.xml-dir-to-boostbook
+ : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
+ generators.register-standard doxygen.xml-to-boostbook
+ : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
+ generators.register-standard doxygen.collect
+ : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
+ generators.register-standard doxygen.run
+ : DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
+ generators.register-standard doxygen.html-redirect
+ : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
+ generators.register-standard doxygen.copy-latex-pngs
+ : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
+
+ IMPORT $(__name__) : doxygen : : doxygen ;
+ }
+
+ if $(name)
+ {
+ modify-config ;
+ .doxygen = $(name) ;
+ check-doxygen ;
+ }
+
+ if ! $(.doxygen)
+ {
+ check-doxygen ;
+ }
+}
+
+
+local rule freeze-config ( )
+{
+ if ! $(.initialized)
+ {
+ import errors ;
+ errors.user-error doxygen must be initialized before it can be used. ;
+ }
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+ if [ .is-cygwin ]
+ {
+ .is-cygwin = true ;
+ }
+ }
+}
+
+
+local rule modify-config ( )
+{
+ if $(.config-frozen)
+ {
+ import errors ;
+ errors.user-error "Cannot change doxygen after it has been used." ;
+ }
+}
+
+
+local rule check-doxygen ( )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO "notice:" using doxygen ":" $(.doxygen) ;
+ }
+ local extra-paths ;
+ if [ os.name ] = NT
+ {
+ local ProgramFiles = [ modules.peek : ProgramFiles ] ;
+ if $(ProgramFiles)
+ {
+ extra-paths = "$(ProgramFiles:J= )" ;
+ }
+ else
+ {
+ extra-paths = "C:\\Program Files" ;
+ }
+ }
+ .doxygen = [ common.get-invocation-command doxygen : doxygen : $(.doxygen) :
+ $(extra-paths) ] ;
+}
+
+
+rule name ( )
+{
+ freeze-config ;
+ return $(.doxygen) ;
+}
+
+
+local rule .is-cygwin ( )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native [ path.join [ path.parent $(file) ] doxygen ]
+ ] ;
+ local command = cd \"$(dir)\" "&&" \"$(.doxygen)\"
+ windows-paths-check.doxyfile 2>&1 ;
+ command = $(command:J=" ") ;
+ result = [ SHELL $(command) ] ;
+ if [ MATCH "(Parsing file /)" : $(result) ]
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Runs Doxygen on the given Doxygen configuration file (the source) to generate
+# the Doxygen files. The output is dumped according to the settings in the
+# Doxygen configuration file, not according to the target! Because of this, we
+# essentially "touch" the target file, in effect making it look like we have
+# really written something useful to it. Anyone that uses this action must deal
+# with this behavior.
+#
+actions doxygen-action
+{
+ $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
+}
+
+
+# Runs the Python doxproc XML processor.
+#
+actions doxproc
+{
+ python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
+}
+
+
+rule translate-path ( path )
+{
+ freeze-config ;
+ if [ os.on-windows ]
+ {
+ if [ os.name ] = CYGWIN
+ {
+ if $(.is-cygwin)
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return $(path:W) ;
+ }
+ }
+ else
+ {
+ if $(.is-cygwin)
+ {
+ match = [ MATCH ^(.):(.*) : $(path) ] ;
+ if $(match)
+ {
+ return /cygdrive/$(match[1])$(match[2]:T) ;
+ }
+ else
+ {
+ return $(path:T) ;
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+ }
+ }
+ else
+ {
+ return $(path) ;
+ }
+}
+
+
+# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
+# and a property list that may contain <doxygen:param> features.
+#
+rule headers-to-doxyfile ( target : sources * : properties * )
+{
+ local text = "# Generated by Boost.Build version 2" ;
+
+ local output-dir ;
+
+ # Translate <doxygen:param> properties into doxyfile settings.
+ for local param in [ feature.get-values <doxygen:param> : $(properties) ]
+ {
+ local namevalue = [ MATCH ([^=]*)=(.*) : $(param) ] ;
+ if $(namevalue[1]) = OUTPUT_DIRECTORY
+ {
+ output-dir = [ translate-path [ utility.unquote $(namevalue[2]) ] ]
+ ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+ else
+ {
+ text += "$(namevalue[1]) = $(namevalue[2])" ;
+ }
+ }
+
+ if ! $(output-dir)
+ {
+ output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
+ text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
+ }
+
+ local headers ;
+ for local header in $(sources:G=)
+ {
+ header = [ translate-path $(header) ] ;
+ headers += \"$(header)\" ;
+ }
+
+ # Doxygen generates LaTeX by default. So disable it unconditionally, or at
+ # least until someone needs, and hence writes support for, LaTeX output.
+ text += "GENERATE_LATEX = NO" ;
+ text += "INPUT = $(headers:J= )" ;
+ print.output $(target) plain ;
+ print.text $(text) : true ;
+}
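+
+# Illustrative note (not part of the original rule): a property such as
+#   <doxygen:param>EXTRACT_ALL=YES
+# is written into the generated doxyfile as the line
+#   EXTRACT_ALL = YES
+# while OUTPUT_DIRECTORY is handled specially, as shown above.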
+
+
+# Run Doxygen. See doxygen-action for a description of the strange properties of
+# this rule.
+#
+rule run ( target : source : properties * )
+{
+ freeze-config ;
+ if <doxygen.rmdir>on in $(properties)
+ {
+ local output-dir = [ path.make [ MATCH
+ <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) : $(properties) ] ] ;
+ local html-dir = [ path.make [ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
+ $(properties) ] ] ;
+ if $(output-dir) && $(html-dir) &&
+ [ path.glob $(output-dir) : $(html-dir) ]
+ {
+ HTMLDIR on $(target) = [ path.native [ path.join $(output-dir)
+ $(html-dir) ] ] ;
+ rm-htmldir $(target) ;
+ }
+ }
+ doxygen-action $(target) : $(source) ;
+ NAME on $(target) = $(.doxygen) ;
+ RM on $(target) = [ modules.peek common : RM ] ;
+ *.XML on $(target) = [ path.native [ path.join [ path.make [ on $(target)
+ return $(LOCATE) ] ] $(target:B:S=) *.xml ] ] ;
+}
+
+
+if [ os.name ] = NT
+{
+ RMDIR = rmdir /s /q ;
+}
+else
+{
+ RMDIR = rm -rf ;
+}
+
+actions quietly rm-htmldir
+{
+ $(RMDIR) $(HTMLDIR)
+}
+
+
+# The rules below require BoostBook stylesheets, so we need some code to check
+# that the boostbook module has actually been initialized.
+#
+rule check-boostbook ( )
+{
+ if ! [ modules.peek boostbook : .initialized ]
+ {
+ import errors ;
+ errors.user-error
+ : The boostbook module is not initialized. You have attempted to use
+ : the 'doxygen' toolset, which requires BoostBook, but never
+ : initialized BoostBook.
+ : Hint: add 'using boostbook ;' to your user-config.jam. ;
+ }
+}
+
+
+# Collect the set of Doxygen XML files into a single XML source file that can be
+# handled by an XSLT processor. The source is completely ignored (see
+# doxygen-action), because this action picks up the Doxygen XML index file xml/
+# index.xml. This is because we can not teach Doxygen to act like a NORMAL
+# program and take a "-o output.xml" argument (grrrr). The target of the
+# collection will be a single Doxygen XML file.
+#
+rule collect ( target : source : properties * )
+{
+ check-boostbook ;
+ local collect-xsl-dir
+ = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
+ local source-path
+ = [ path.make [ on $(source) return $(LOCATE) ] ] ;
+ local collect-path
+ = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
+ local native-path
+ = [ path.native $(collect-path) ] ;
+ local real-source
+ = [ path.native [ path.join $(collect-path) index.xml ] ] ;
+ xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
+ : <xsl:param>doxygen.xml.path=$(native-path) ;
+}
+
+
+# Translate Doxygen XML into BoostBook.
+#
+rule xml-to-boostbook ( target : source : properties * )
+{
+ check-boostbook ;
+ local xsl-dir = [ boostbook.xsl-dir ] ;
+ local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
+ doxygen2boostbook.xsl ] ] ;
+
+ local xslt-properties = $(properties) ;
+ for local prefix in [ feature.get-values <prefix> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
+ }
+ for local title in [ feature.get-values <reftitle> : $(properties) ]
+ {
+ xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
+ }
+
+ xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
+}
+
+
+toolset.flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes :
+ --enable-index ;
+toolset.flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
+toolset.flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
+
+
+rule xml-dir-to-boostbook ( target : source : properties * )
+{
+ DOXPROC on $(target) = $(.doxproc) ;
+ LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
+ doxygen.doxproc $(target) : $(source:S=) ;
+}
+
+
+# Generate an HTML file that redirects to the HTML multifile directory's
+# index.html file.
+#
+rule html-redirect ( target : source : properties * )
+{
+ local uri = "$(target:B)/index.html" ;
+ print.output $(target) plain ;
+ print.text
+"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
+ \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
+<html xmlns=\"http://www.w3.org/1999/xhtml\">
+<head>
+ <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
+
+ <title></title>
+</head>
+
+<body>
+ Automatic redirection failed, please go to <a href=
+ \"$(uri)\">$(uri)</a>.
+</body>
+</html>
+"
+ : true ;
+}
+
+rule copy-latex-pngs ( target : source : requirements * )
+{
+ local directory = [ path.native [ feature.get-values <doxygen:xml-imagedir>
+ : $(requirements) ] ] ;
+ local location = [ on $(target) return $(LOCATE) ] ;
+
+ local pdf-location = [ path.native [ path.join [ path.make $(location) ]
+ [ path.make $(directory) ] ] ] ;
+ local html-location = [ path.native [ path.join . html [ path.make
+ $(directory) ] ] ] ;
+
+ common.MkDir $(pdf-location) ;
+ common.MkDir $(html-location) ;
+
+ DEPENDS $(target) : $(pdf-location) $(html-location) ;
+
+ if [ os.name ] = NT
+ {
+ CP on $(target) = copy /y ;
+ FROM on $(target) = \\*.png ;
+ TOHTML on $(target) = .\\html\\$(directory) ;
+ TOPDF on $(target) = \\$(directory) ;
+ }
+ else
+ {
+ CP on $(target) = cp ;
+ FROM on $(target) = /*.png ;
+ TOHTML on $(target) = ./html/$(directory) ;
+ TOPDF on $(target) = $(target:D)/$(directory) ;
+ }
+}
+
+actions copy-latex-pngs
+{
+ $(CP) $(>:S=)$(FROM) $(TOHTML)
+ $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
+ echo "Stamped" > "$(<)"
+}
+
+
+# Building LaTeX images for doxygen XML depends on latex, dvips, and gs being in
+# your PATH. This is true for most Unix installs, but not on Win32, where you
+# will need to install MiKTeX and Ghostscript and add these tools to your path.
+
+actions check-latex
+{
+ latex -version >$(<)
+}
+
+actions check-dvips
+{
+ dvips -version >$(<)
+}
+
+if [ os.name ] = "NT"
+{
+ actions check-gs
+ {
+ gswin32c -version >$(<)
+ }
+}
+else
+{
+ actions check-gs
+ {
+ gs -version >$(<)
+ }
+}
+
+
+local rule check-tools-targets ( project )
+{
+ if ! $(.check-tools-targets)
+ {
+ # Find the root project.
+ #
+ # This is a best-effort attempt to avoid using different locations for
+ # storing *.check files depending on which project imported the doxygen
+ # toolset first. The files are stored in a location related to the
+ # project's root project. Note that this location may change depending
+ # on the folder the build was run from in case the build uses multiple
+ # related projects, each with its own separate Jamroot module.
+ local project-module = [ $(project).project-module ] ;
+ local root-module = [ project.get-jamroot-module $(project-module) ] ;
+ if ! $(root-module)
+ {
+ import errors ;
+ if [ project.is-config-module $(project-module) ]
+ {
+ errors.user-error doxygen targets can not be declared in Boost
+ Build's configuration modules. ;
+ }
+ else
+ {
+ errors.user-error doxygen targets can not be declared in
+ standalone projects. : use a Jamfile/Jamroot project
+ instead. ;
+ }
+ }
+ local root-project = [ project.target $(root-module) ] ;
+
+ local targets =
+ [ new file-target latex.check : : $(root-project) : [ new action :
+ doxygen.check-latex ] ]
+ [ new file-target dvips.check : : $(root-project) : [ new action :
+ doxygen.check-dvips ] ]
+ [ new file-target gs.check : : $(root-project) : [ new action :
+ doxygen.check-gs ] ] ;
+
+ for local target in $(targets)
+ {
+ .check-tools-targets += [ virtual-target.register $(target) ] ;
+ }
+ }
+ return $(.check-tools-targets) ;
+}
+
+
+project.initialize $(__name__) ;
+project doxygen ;
+
+class doxygen-check-tools-target-class : basic-target
+{
+ rule construct ( name : sources * : property-set )
+ {
+ IMPORT doxygen : check-tools-targets : $(__name__) :
+ doxygen.check-tools-targets ;
+ return [ property-set.empty ] [ doxygen.check-tools-targets [ project ]
+ ] ;
+ }
+}
+
+
+# Declares a metatarget for collecting version information on different external
+# tools used in this module.
+#
+rule check-tools ( target )
+{
+ freeze-config ;
+ targets.create-metatarget doxygen-check-tools-target-class :
+ [ project.current ] : $(target) ;
+}
+
+
+# User-level rule to generate HTML files or BoostBook XML from a set of headers
+# via Doxygen.
+#
+rule doxygen ( target : sources + : requirements * : default-build * :
+ usage-requirements * )
+{
+ freeze-config ;
+ local project = [ project.current ] ;
+
+ if $(target:S) = .html
+ {
+ # Build an HTML directory from the sources.
+ local html-location = [ feature.get-values <location> : $(requirements)
+ ] ;
+ local output-dir ;
+ if [ $(project).get build-dir ]
+ {
+ # Explicitly specified build dir. Add html at the end.
+ output-dir = [ path.join [ $(project).build-dir ]
+ $(html-location:E=html) ] ;
+ }
+ else
+ {
+ # Trim 'bin' from implicit build dir, for no other reason than
+ # backward compatibility.
+ output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
+ $(html-location:E=html) ] ;
+ }
+ output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
+ local output-dir-native = [ path.native $(output-dir) ] ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+
+ # The doxygen configuration file.
+ targets.create-typed-target DOXYFILE : $(project) : $(target:S=.tag)
+ : $(sources)
+ : $(requirements)
+ <doxygen:param>GENERATE_HTML=YES
+ <doxygen:param>GENERATE_XML=NO
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target:S=.tag) ;
+
+ # The html directory to generate by running doxygen.
+ targets.create-typed-target DOXYGEN_HTML_MULTIFILE : $(project)
+ : $(target:S=.dir) # Name.
+ : $(target:S=.tag) # Sources.
+ : $(requirements)
+ <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
+ <doxygen:param>HTML_OUTPUT=$(target:B)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target:S=.dir) ;
+
+ # The redirect html file into the generated html.
+ targets.create-typed-target DOXYGEN_HTML : $(project) : $(target)
+ : $(target:S=.dir) # Sources.
+ : $(requirements) <location>$(output-dir)
+ : $(default-build) ;
+ }
+ else
+ {
+ # Build a BoostBook XML file from the sources.
+ local location-xml = [ feature.get-values <location> : $(requirements) ]
+ ;
+ requirements = [ property.change $(requirements) : <location> ] ;
+ local target-xml = $(target:B=$(target:B)-xml) ;
+
+ # Check whether we need to build images.
+ local images-location = [ feature.get-values <doxygen:xml-imagedir> :
+ $(requirements) ] ;
+ if $(images-location)
+ {
+ # Prepare a metatarget for collecting version information on the
+ # external tools used. We use only one such metatarget as they always
+ # produce the same files and we do not want to deal with multiple
+ # metatargets having matching names, causing 'ambiguous variants'
+ # errors.
+ if ! $(.check-tools)
+ {
+ # FIXME: Since we have the check-tools target object reference,
+ # see how we can use that instead of having to construct a valid
+ # target reference string for use in <dependency> property
+ # values.
+ local project-id = --doxygen.check-tools-project-- ;
+ local target-id = --doxygen.check-tools-- ;
+ local pm = [ $(project).project-module ] ;
+ project.register-id $(project-id) : $(pm) ;
+ check-tools $(target-id) ;
+ .check-tools = /$(project-id)//$(target-id) ;
+ }
+
+ doxygen $(target).doxygen-xml-images.html : $(sources) :
+ $(requirements)
+ <doxygen.rmdir>on
+ <doxygen:param>QUIET=YES
+ <doxygen:param>WARNINGS=NO
+ <doxygen:param>WARN_IF_UNDOCUMENTED=NO
+ <dependency>$(.check-tools) ;
+ $(project).mark-target-as-explicit $(target).doxygen-xml-images.html
+ ;
+
+ targets.create-typed-target DOXYGEN_XML_IMAGES : $(project)
+ : $(target).doxygen-xml-images # Name.
+ : $(target).doxygen-xml-images.html # Sources.
+ : $(requirements)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target).doxygen-xml-images ;
+
+ if ! [ MATCH (/)$ : $(images-location) ]
+ {
+ images-location = $(images-location)/ ;
+ }
+
+ requirements +=
+ <dependency>$(target).doxygen-xml-images
+ <xsl:param>boost.doxygen.formuladir=$(images-location) ;
+ }
+
+ # The doxygen configuration file.
+ targets.create-typed-target DOXYFILE : $(project) : $(target-xml:S=.tag)
+ : $(sources)
+ : $(requirements)
+ <doxygen:param>GENERATE_HTML=NO
+ <doxygen:param>GENERATE_XML=YES
+ <doxygen:param>XML_OUTPUT=$(target-xml)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target-xml:S=.tag) ;
+
+ # The Doxygen XML directory for the processed source files.
+ targets.create-typed-target DOXYGEN_XML_MULTIFILE : $(project)
+ : $(target-xml:S=.dir) # Name.
+ : $(target-xml:S=.tag) # Sources.
+ : $(requirements)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target-xml:S=.dir) ;
+
+        # The resulting BoostBook file is generated by the processor tool,
+        # which can be either xsltproc plus the accompanying XSL scripts or the
+        # Python doxproc.py script.
+ targets.create-typed-target BOOSTBOOK : $(project) : $(target-xml)
+ : $(target-xml:S=.dir) # Sources.
+ : $(requirements)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target-xml) ;
+
+ stage $(target:S=.xml) # Name.
+ : $(target-xml) # Sources.
+ : $(requirements)
+ <location>$(location-xml:E=.)
+ <name>$(target:S=.xml)
+ : $(default-build) ;
+ $(project).mark-target-as-explicit $(target:S=.xml) ;
+
+ # TODO: See why this alias target is used here instead of simply naming
+ # the previous stage target $(target) and having it specify the alias
+ # target's usage requirements directly.
+ alias $(target) : : $(requirements) : $(default-build) :
+ $(usage-requirements) <dependency>$(target:S=.xml) ;
+ }
+}
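+
+# An illustrative sketch of invoking this rule from a Jamfile (target and file
+# names are hypothetical): a target name with an .html suffix produces the HTML
+# directory plus the redirect page, while any other name produces BoostBook XML.
+#
+#   doxygen api.html : [ glob include/*.hpp ] : <doxygen:param>PROJECT_NAME=Example ;
+#   doxygen reference : [ glob include/*.hpp ] ;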
diff --git a/tools/build/v2/tools/doxygen/windows-paths-check.doxyfile b/tools/build/src/tools/doxygen/windows-paths-check.doxyfile
index 9b969df9c7..9b969df9c7 100644
--- a/tools/build/v2/tools/doxygen/windows-paths-check.doxyfile
+++ b/tools/build/src/tools/doxygen/windows-paths-check.doxyfile
diff --git a/tools/build/v2/tools/doxygen/windows-paths-check.hpp b/tools/build/src/tools/doxygen/windows-paths-check.hpp
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/tools/doxygen/windows-paths-check.hpp
+++ b/tools/build/src/tools/doxygen/windows-paths-check.hpp
diff --git a/tools/build/v2/tools/fop.jam b/tools/build/src/tools/fop.jam
index c24b8725f9..c24b8725f9 100644
--- a/tools/build/v2/tools/fop.jam
+++ b/tools/build/src/tools/fop.jam
diff --git a/tools/build/v2/tools/fortran.jam b/tools/build/src/tools/fortran.jam
index 37665825ef..37665825ef 100644
--- a/tools/build/v2/tools/fortran.jam
+++ b/tools/build/src/tools/fortran.jam
diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam
new file mode 100644
index 0000000000..ce9be9d6df
--- /dev/null
+++ b/tools/build/src/tools/gcc.jam
@@ -0,0 +1,1189 @@
+# Copyright 2001 David Abrahams
+# Copyright 2002-2006 Rene Rivera
+# Copyright 2002-2003 Vladimir Prus
+# Copyright 2005 Reece H. Dunn
+# Copyright 2006 Ilya Sokolov
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import feature ;
+import fortran ;
+import generators ;
+import os ;
+import pch ;
+import property ;
+import property-set ;
+import rc ;
+import regex ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+
+feature.extend toolset : gcc ;
+# feature.subfeature toolset gcc : flavor : : optional ;
+
+toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
+toolset.inherit-flags gcc : unix ;
+toolset.inherit-rules gcc : unix ;
+
+generators.override gcc.prebuilt : builtin.prebuilt ;
+generators.override gcc.searched-lib-generator : searched-lib-generator ;
+
+# Make gcc toolset object files use the "o" suffix on all platforms.
+type.set-generated-target-suffix OBJ : <toolset>gcc : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ;
+type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ;
+
+
+# Initializes the gcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or
+# sun and the default value will be selected based on the current OS.
+# Example:
+# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+# The compiler command to use is detected in three steps:
+# 1) If an explicit command is specified by the user, it will be used and must
+# be available.
+# 2) If only a certain version is specified, it is enforced:
+# - either the 'g++-VERSION' command must be available
+# - or the default command 'g++' must be available and match the exact
+# version.
+# 3) Without user-provided restrictions use default 'g++'.
+#
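+# Illustrative configurations matching the three steps above (version numbers
+# and the command path are hypothetical):
+#
+#   using gcc ;                            # 3) plain default 'g++'
+#   using gcc : 4.9 ;                      # 2) needs g++-4.9 or a matching g++
+#   using gcc : : /usr/local/bin/g++ ;     # 1) explicit command
+#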
+rule init ( version ? : command * : options * )
+{
+ #1): use user-provided command
+ local tool-command = ;
+ if $(command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ :
+ $(command) ] ;
+ if ! $(tool-command)
+ {
+ import errors ;
+ errors.error toolset gcc initialization:
+ : provided command '$(command)' not found
+ : initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ #2): enforce user-provided version
+ else if $(version)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc :
+ "g++-$(version[1])" ] ;
+
+ #2.1) fallback: check whether "g++" reports the requested version
+ if ! $(tool-command)
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ]
+ ;
+ if $(tool-command)
+ {
+ local tool-command-string = $(tool-command:J=" ") ;
+ local tool-version = [ MATCH "^([0-9.]+)" :
+ [ SHELL "$(tool-command-string) -dumpversion" ] ] ;
+ if $(tool-version) != $(version)
+ {
+                    # Permit a match between a two-digit version specified by
+                    # the user (e.g. 4.4) and a 3-digit version reported by gcc.
+                    # Since only two digits are present in the binary name
+                    # anyway, insisting that the user specify the 3-digit
+                    # version when configuring Boost.Build, while it is not
+                    # required on the command line, would be strange.
+ local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" :
+ $(tool-version) ] ;
+ if $(stripped) != $(version)
+ {
+ import errors ;
+ errors.error toolset gcc initialization:
+ : version '$(version)' requested but
+ 'g++-$(version)' not found and version
+ '$(tool-version)' of default '$(tool-command)'
+ does not match
+ : initialized from [ errors.nearest-user-location ]
+ ;
+ tool-command = ;
+ }
+ # Use full 3-digit version to be compatible with the
+ # 'using gcc ;' case
+ version = $(tool-version) ;
+ }
+ }
+ else
+ {
+ import errors ;
+ errors.error toolset gcc initialization:
+ : version '$(version)' requested but neither
+ 'g++-$(version)' nor default 'g++' found
+ : initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+ }
+ #3) default: no command and no version specified, try using "g++"
+ else
+ {
+ tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
+ if ! $(tool-command)
+ {
+ import errors ;
+ errors.error toolset gcc initialization:
+ : no command provided, default command 'g++' not found
+ : initialized from [ errors.nearest-user-location ] ;
+ }
+ }
+
+
+ # Information about the gcc command...
+ # The command.
+ local command = $(tool-command) ;
+ # The 'command' variable can have multiple elements but when calling the
+ # SHELL builtin we need a single string.
+ local command-string = $(command:J=" ") ;
+ # The root directory of the tool install.
+ local root = [ feature.get-values <root> : $(options) ] ;
+ # The bin directory where to find the command to execute.
+ local bin ;
+ # The compiler flavor.
+ local flavor = [ feature.get-values <flavor> : $(options) ] ;
+ # Autodetect the root and bin dir if not given.
+ if $(command)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+ }
+ # Autodetect the version and flavor if not given.
+ if $(command)
+ {
+ local machine = [ MATCH "^([^ ]+)" :
+ [ SHELL "$(command-string) -dumpmachine" ] ] ;
+ version ?= [ MATCH "^([0-9.]+)" :
+ [ SHELL "$(command-string) -dumpversion" ] ] ;
+ switch $(machine:L)
+ {
+ case *mingw* : flavor ?= mingw ;
+ }
+ }
+
+ local condition ;
+ if $(flavor)
+ {
+ condition = flavor $(flavor) ;
+ }
+ condition = [ common.check-init-parameters gcc : version $(version)
+ : $(condition) ] ;
+
+ common.handle-options gcc : $(condition) : $(command) : $(options) ;
+
+ local linker = [ feature.get-values <linker-type> : $(options) ] ;
+ # TODO: The logic below should actually be keyed on <target-os>.
+ if ! $(linker)
+ {
+ switch [ os.name ]
+ {
+ case OSF : linker = osf ;
+ case HPUX : linker = hpux ;
+ case AIX : linker = aix ;
+ case SOLARIS : linker = sun ;
+ case * : linker = gnu ;
+ }
+ }
+ init-link-flags gcc $(linker) $(condition) ;
+
+ # If gcc is installed in a non-standard location, we would need to add
+ # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+ # rules).
+ if $(command)
+ {
+        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+        # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+        # right ones. Note that we do not provide a clean way to build a 32-bit
+        # binary using a 64-bit compiler, but the user can always pass -m32
+        # manually.
+ local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ;
+ }
+ toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+
+    # If we are not using a system gcc installation, we should adjust the
+    # various programs as needed to prefer their installation-specific
+    # versions. This is essential for correct use of MinGW and for
+    # cross-compiling.
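+    #
+    # An illustrative sketch (the toolset alias and the cross-compiler path are
+    # hypothetical) of configuring such a non-system gcc in user-config.jam, so
+    # that the matching ar/ranlib/windres found next to it are preferred by the
+    # detection below:
+    #
+    #   using gcc : arm : /opt/cross/bin/arm-linux-gnueabihf-g++ ;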
+
+ local nl = "
+" ;
+
+ # - Archive builder.
+ local archiver = [ common.get-invocation-command gcc
+ : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" :
+ [ SHELL "$(command-string) -print-prog-name=ar" ] ] ]
+ : [ feature.get-values <archiver> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ;
+ }
+
+ # - Ranlib.
+ local ranlib = [ common.get-invocation-command gcc
+ : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" :
+ [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ]
+ : [ feature.get-values <ranlib> : $(options) ]
+ : $(bin)
+ : search-path ] ;
+ toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ;
+ }
+
+ # - Resource compiler.
+ local rc = [ common.get-invocation-command-nodefault gcc : windres :
+ [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
+ local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
+ rc-type ?= windres ;
+ if ! $(rc)
+ {
+        # If we cannot find an RC compiler we fall back to a null one that
+        # creates empty object files. This allows the same Jamfiles to work
+        # across the board. The null RC uses the assembler to create the empty
+        # objects, so configure that.
+ rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ]
+ ;
+ rc-type = null ;
+ }
+ rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
+}
+
+if [ os.name ] = NT
+{
+ # This causes single-line command invocation to not go through .bat files,
+ # thus avoiding command-line length limitations.
+ # TODO: Set JAMSHELL on specific targets instead of globally.
+ JAMSHELL = % ;
+}
+
+generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
+generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
+generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;
+
+# pch support
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended
+# to the full header name.
+
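+# An illustrative sketch (target and file names are hypothetical) of requesting
+# a precompiled header from a Jamfile; the pch module provides the cpp-pch main
+# target rule used here:
+#
+#   cpp-pch pch : pch.hpp ;
+#   exe app : pch app.cpp ;
+#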
+type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
+
+# GCC-specific pch generator.
+class gcc-pch-generator : pch-generator
+{
+ import project ;
+ import property-set ;
+ import type ;
+
+ rule run-pch ( project name ? : property-set : sources + )
+ {
+ # Find the header in sources. Ignore any CPP sources.
+ local header ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ header = $(s) ;
+ }
+ }
+
+ # Error handling: base header file name should be the same as the base
+ # precompiled header name.
+ local header-name = [ $(header).name ] ;
+ local header-basename = $(header-name:B) ;
+ if $(header-basename) != $(name)
+ {
+ local location = [ $(project).project-module ] ;
+ import errors : user-error : errors.user-error ;
+ errors.user-error "in" $(location): pch target name '$(name)' should
+ be the same as the base name of header file '$(header-name)' ;
+ }
+
+ local pch-file = [ generator.run $(project) $(name) : $(property-set)
+ : $(header) ] ;
+
+ # Return result of base class and pch-file property as
+ # usage-requirements.
+ return
+ [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ]
+ $(pch-file)
+ ;
+ }
+
+    # Calls the base version specifying the source's name as the name of the
+    # created target. As a result, the PCH will be named whatever.hpp.gch, and
+    # not whatever.gch.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ name = [ $(sources[1]).name ] ;
+ return [ generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
+# latter have HPP type, but HPP type is derived from H. The type of compilation
+# is determined entirely by the destination type.
+generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
+generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
+
+# Override default do-nothing generators.
+generators.override gcc.compile.c.pch : pch.default-c-pch-generator ;
+generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ;
+
+toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
+
+# Declare flags and action for compilation.
+toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags gcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
+toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
+
+toolset.flags gcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ;
+toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ;
+toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.compile OPTIONS <profiling>on : -pg ;
+
+toolset.flags gcc.compile.c++ OPTIONS <rtti>off : -fno-rtti ;
+toolset.flags gcc.compile.c++ OPTIONS <exception-handling>off : -fno-exceptions ;
+
+rule setup-fpic ( targets * : sources * : properties * )
+{
+ local link = [ feature.get-values link : $(properties) ] ;
+ if $(link) = shared
+ {
+ local target = [ feature.get-values target-os : $(properties) ] ;
+
+ # This logic will add -fPIC for all compilations:
+ #
+ # lib a : a.cpp b ;
+ # obj b : b.cpp ;
+ # exe c : c.cpp a d ;
+ # obj d : d.cpp ;
+ #
+ # This all is fine, except that 'd' will be compiled with -fPIC even
+ # though it is not needed, as 'd' is used only in exe. However, it is
+ # hard to detect where a target is going to be used. Alternatively, we
+        # can set -fPIC only when the main target type is LIB, but then 'b'
+        # would be compiled without -fPIC, which would lead to link errors on
+        # x86-64. So, compile everything with -fPIC.
+ #
+ # Yet another alternative would be to create a propagated <sharedable>
+ # feature and set it when building shared libraries, but that would be
+ # hard to implement and would increase the target path length even more.
+
+        # On Windows, -fPIC is the default, and specifying -fPIC explicitly
+        # leads to a warning.
+ if ! $(target) in cygwin windows
+ {
+ OPTIONS on $(targets) += -fPIC ;
+ }
+ }
+}
+
+rule setup-address-model ( targets * : sources * : properties * )
+{
+ local model = [ feature.get-values address-model : $(properties) ] ;
+ if $(model)
+ {
+ local option ;
+ local os = [ feature.get-values target-os : $(properties) ] ;
+ if $(os) = aix
+ {
+ if $(model) = 32
+ {
+ option = -maix32 ;
+ }
+ else
+ {
+ option = -maix64 ;
+ }
+ }
+ else if $(os) = hpux
+ {
+ if $(model) = 32
+ {
+ option = -milp32 ;
+ }
+ else
+ {
+ option = -mlp64 ;
+ }
+ }
+ else
+ {
+ local arch = [ feature.get-values architecture : $(properties) ] ;
+ if $(arch) != arm
+ {
+ if $(model) = 32
+ {
+ option = -m32 ;
+ }
+ else if $(model) = 64
+ {
+ option = -m64 ;
+ }
+ }
+ # For darwin, the model can be 32_64. darwin.jam will handle that
+ # on its own.
+ }
+ OPTIONS on $(targets) += $(option) ;
+ }
+}
+
+
+# FIXME: this should not use os.name.
+if ! [ os.name ] in NT OSF HPUX AIX
+{
+ # OSF does have an option called -soname but it does not seem to work as
+ # expected, therefore it has been disabled.
+ HAVE_SONAME = "" ;
+ SONAME_OPTION = -h ;
+}
+
+# HPUX, for some reason, seems to use '+h' instead of '-h'.
+if [ os.name ] = HPUX
+{
+ HAVE_SONAME = "" ;
+ SONAME_OPTION = +h ;
+}
+
+toolset.flags gcc.compile USER_OPTIONS <cflags> ;
+toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
+toolset.flags gcc.compile DEFINES <define> ;
+toolset.flags gcc.compile INCLUDES <include> ;
+toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ;
+
+rule compile.c++.pch ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++.pch
+{
+ "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c.pch ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c.pch
+{
+ "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.c++.preprocess ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c.preprocess ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+    # If we use the name g++ then the default file suffix -> language mapping
+    # does not work, so we have to pass the -x option. Maybe we can work around
+    # this by allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.c++ ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+ if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
+ {
+ LANG on $(<) = "-x c++" ;
+ }
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
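+    #
+    # An illustrative way (hypothetical target name) for a project to override
+    # this value explicitly through the corresponding feature:
+    #
+    #   exe app : app.cpp : <c++-template-depth>256 ;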
+ local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(<) = 128 ;
+ }
+}
+
+rule compile.c ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+
+    # If we use the name g++ then the default file suffix -> language mapping
+    # does not work, so we have to pass the -x option. Maybe we can work around
+    # this by allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ LANG on $(<) = "-x c" ;
+ #}
+ DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
+}
+
+rule compile.fortran ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+}
+
+actions compile.c++ bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)"
+}
+
+actions compile.c bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
+}
+
+actions compile.c.preprocess bind PCH_FILE
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.asm ( targets * : sources * : properties * )
+{
+ setup-fpic $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ LANG on $(<) = "-x assembler-with-cpp" ;
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Class checking that we do not try to use the <runtime-link>static property
+# while creating or using a shared library, since it is not supported by
+# gcc/libc.
+class gcc-linking-generator : unix-linking-generator
+{
+ rule run ( project name ? : property-set : sources + )
+ {
+ # TODO: Replace this with the use of a target-os property.
+ local no-static-link = ;
+ if [ modules.peek : UNIX ]
+ {
+ switch [ modules.peek : JAMUNAME ]
+ {
+ case * : no-static-link = true ;
+ }
+ }
+
+ local properties = [ $(property-set).raw ] ;
+ local reason ;
+ if $(no-static-link) && <runtime-link>static in $(properties)
+ {
+ if <link>shared in $(properties)
+ {
+ reason = On gcc, DLLs can not be built with
+ '<runtime-link>static'. ;
+ }
+ else if [ type.is-derived $(self.target-types[1]) EXE ]
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ reason = On gcc, using DLLs together with the
+ '<runtime-link>static' option is not possible. ;
+ }
+ }
+ }
+ }
+ if $(reason)
+ {
+ ECHO warning: $(reason) ;
+ ECHO warning: It is suggested to use '<runtime-link>static' together
+ with '<link>static'. ;
+ }
+ else
+ {
+ return [ unix-linking-generator.run $(project) $(name) :
+ $(property-set) : $(sources) ] ;
+ }
+ }
+}
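+
+# An illustrative request (hypothetical target name) that avoids the warning
+# issued above by combining a static runtime with static linking:
+#
+#   exe app : app.cpp : <link>static <runtime-link>static ;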
+
+# The set of permissible input types is different on mingw. So, define two sets
+# of generators, with mingw generators selected when target-os=windows.
+
+local g ;
+g = [ new gcc-linking-generator gcc.mingw.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.mingw.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>windows ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.register
+ [ new gcc-linking-generator gcc.link
+ : LIB OBJ
+ : EXE
+ : <toolset>gcc ] ;
+generators.register
+ [ new gcc-linking-generator gcc.link.dll
+ : LIB OBJ
+ : SHARED_LIB
+ : <toolset>gcc ] ;
+
+generators.override gcc.mingw.link : gcc.link ;
+generators.override gcc.mingw.link.dll : gcc.link.dll ;
+
+# Cygwin is similar to msvc and mingw in that it uses import libraries. While
+# in simple cases it can link directly to a shared library, this is believed to
+# be slower and is not always possible. Define cygwin-specific generators here.
+
+g = [ new gcc-linking-generator gcc.cygwin.link
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : EXE
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link ;
+generators.register $(g) ;
+
+g = [ new gcc-linking-generator gcc.cygwin.link.dll
+ : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
+ : IMPORT_LIB SHARED_LIB
+ : <toolset>gcc <target-os>cygwin ] ;
+$(g).set-rule-name gcc.link.dll ;
+generators.register $(g) ;
+
+generators.override gcc.cygwin.link : gcc.link ;
+generators.override gcc.cygwin.link.dll : gcc.link.dll ;
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ;
+toolset.flags gcc.link OPTIONS <profiling>on : -pg ;
+toolset.flags gcc.link USER_OPTIONS <linkflags> ;
+toolset.flags gcc.link LINKPATH <library-path> ;
+toolset.flags gcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags gcc.link LIBRARIES <library-file> ;
+
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
+toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
+if [ os.name ] != HPUX
+{
+ toolset.flags gcc.link OPTIONS <runtime-link>static : -static ;
+}
+
+# Now, the vendor specific flags.
+# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun.
+rule init-link-flags ( toolset linker condition )
+{
+ switch $(linker)
+ {
+ case aix :
+ # On AIX we *have* to use the native linker.
+ #
+ # Using -brtl, the AIX linker will look for libraries with both the .a
+ # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
+ # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
+ # file that may contain shared objects and is different from static libs
+ # as on Linux.
+ #
+ # The -bnoipath strips the prepending (relative) path of libraries from
+ # the loader section in the target library or executable. Hence, during
+ # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
+ # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
+ # this option, the prepending (relative) path + library name is
+ # hard-coded in the loader section, causing *only* this path to be
+ # searched during load-time. Note that the AIX linker does not have an
+ # -soname equivalent, this is as close as it gets.
+ #
+        # The -bbigtoc option instructs the linker to create a TOC bigger than
+        # 64k. This is necessary for some submodules such as math, but it does
+        # make running the tests a tad slower.
+        #
+        # The above options are definitely for AIX 5.x, and most likely also
+        # for AIX 4.x and AIX 6.x. For details about the AIX linker see:
+ # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
+ #
+
+ toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath -Wl,-bbigtoc
+ : unchecked ;
+
+ case darwin :
+ # On Darwin, the -s option to ld does not work unless we pass -static,
+ # and passing -static unconditionally is a bad idea. So, do not pass -s
+ # at all and darwin.jam will use a separate 'strip' invocation.
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> :
+ unchecked ;
+ toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> :
+ unchecked ;
+
+ case gnu :
+        # Strip the binary when no debugging is needed. We use the --strip-all
+        # flag as opposed to -s since icc (Intel's compiler) is generally
+        # option-compatible with and inherits from the gcc toolset, but does
+        # not support -s.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
+ toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
+ toolset.flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
+ toolset.flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
+
+ # gnu ld has the ability to change the search behaviour for libraries
+ # referenced by the -l switch. These modifiers are -Bstatic and
+ # -Bdynamic and change search for -l switches that follow them. The
+ # following list shows the tried variants. Search stops at the first
+ # variant that has a match.
+ #
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw, cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw, cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+ # On *nix mixing shared libs with static runtime is not a good idea.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX
+ $(condition)/<runtime-link>shared : -Wl,-Bstatic : unchecked ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX
+ $(condition)/<runtime-link>shared : -Wl,-Bdynamic : unchecked ;
+
+        # On Windows, however, mixing static and dynamic libs with a static
+        # runtime is allowed, so set the search prefixes accordingly.
+ toolset.flags $(toolset).link FINDLIBS-ST-PFX
+ $(condition)/<runtime-link>static/<target-os>windows : -Wl,-Bstatic
+ : unchecked ;
+ toolset.flags $(toolset).link FINDLIBS-SA-PFX
+ $(condition)/<runtime-link>static/<target-os>windows : -Wl,-Bdynamic
+ : unchecked ;
+ toolset.flags $(toolset).link OPTIONS
+ $(condition)/<runtime-link>static/<target-os>windows : -Wl,-Bstatic
+ : unchecked ;
+
+ case hpux :
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,-s :
+ unchecked ;
+ toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared : -fPIC
+ : unchecked ;
+
+ case osf :
+ # No --strip-all, just -s.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,-s :
+ unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> :
+ unchecked ;
+ # This does not support -R.
+ toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath :
+ unchecked ;
+ # -rpath-link is not supported at all.
+
+ case sun :
+ toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,-s :
+ unchecked ;
+ toolset.flags $(toolset).link RPATH $(condition) : <dll-path> :
+ unchecked ;
+ # Solaris linker does not have a separate -rpath-link, but allows using
+ # -L for the same purpose.
+ toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path> :
+ unchecked ;
+
+ # This permits shared libraries with non-PIC code on Solaris.
+ # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the
+ # following is not needed. Whether -fPIC should be hardcoded, is a
+ # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared :
+ -mimpure-text : unchecked ;
+
+ case * :
+ import errors ;
+ errors.user-error $(toolset) initialization: invalid linker '$(linker)'
+ : The value '$(linker)' specified for <linker> is not recognized.
+ : Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'
+ ;
+ }
+}
+
+
+# Enclose the RPATH variable on 'targets' in double quotes, unless it is already
+# enclosed in single quotes. This special casing is done because it is common to
+# pass '$ORIGIN' to the linker -- and it has to be in single quotes to prevent
+# shell expansion -- and if we added double quotes the protection provided by
+# the single quotes would disappear.
+#
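+# For example (illustrative values): an RPATH value of '$ORIGIN/../lib' is left
+# untouched, while a plain path such as /opt/lib is wrapped in double quotes
+# before being substituted into the link actions below.
+#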
+rule quote-rpath ( targets * )
+{
+ local r = [ on $(targets[1]) return $(RPATH) ] ;
+ if ! [ MATCH ('.*') : $(r) ]
+ {
+ r = \"$(r)\" ;
+ }
+ RPATH on $(targets) = $(r) ;
+}
+
+# Declare actions for linking.
+rule link ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+    # Serialize execution of the 'link' action, since running N links in
+    # parallel is just slower. For now, serialize only gcc links; it might be a
+    # good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+
+# Default value. Mostly for the sake of intel-linux, which inherits from gcc but
+# does not have the same logic to set the .AR variable. We could put the same
+# logic in intel-linux, but that is hardly worth the trouble as, on Linux, 'ar'
+# is always available.
+.AR = ar ;
+.RANLIB = ranlib ;
+
+toolset.flags gcc.archive AROPTIONS <archiveflags> ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here is the rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+# Declare the action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove the archive first, we do not care about replacement, but there is no
+# option "add without replacement".
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+#
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+}
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ setup-threading $(targets) : $(sources) : $(properties) ;
+ setup-address-model $(targets) : $(sources) : $(properties) ;
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
+ quote-rpath $(targets) ;
+}
+
+# Differs from 'link' above only by -shared.
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
+}
+
+rule setup-threading ( targets * : sources * : properties * )
+{
+ local threading = [ feature.get-values threading : $(properties) ] ;
+ if $(threading) = multi
+ {
+ local target = [ feature.get-values target-os : $(properties) ] ;
+ local option ;
+ local libs ;
+
+ switch $(target)
+ {
+ case android : # No threading options, everything is in already.
+ case windows : option = -mthreads ;
+ case cygwin : option = -mthreads ;
+ case solaris : option = -pthreads ; libs = rt ;
+ case beos : # No threading options.
+ case *bsd : option = -pthread ; # There is no -lrt on BSD.
+ case sgi : # gcc on IRIX does not support multi-threading.
+ case darwin : # No threading options.
+ case * : option = -pthread ; libs = rt ;
+ }
+
+ if $(option)
+ {
+ OPTIONS on $(targets) += $(option) ;
+ }
+ if $(libs)
+ {
+ FINDLIBS-SA on $(targets) += $(libs) ;
+ }
+ }
+}
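+
+# An illustrative request (hypothetical target name) that triggers the
+# threading setup above, adding e.g. -pthread and the rt library on generic
+# *nix targets:
+#
+#   exe app : app.cpp : <threading>multi ;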
+
+
+local rule cpu-flags ( toolset variable : architecture : instruction-set + :
+ values + : default ? )
+{
+ if $(default)
+ {
+ toolset.flags $(toolset) $(variable)
+ <architecture>$(architecture)/<instruction-set> : $(values) ;
+ }
+ toolset.flags $(toolset) $(variable)
+ <architecture>/<instruction-set>$(instruction-set)
+ <architecture>$(architecture)/<instruction-set>$(instruction-set)
+ : $(values) ;
+}
+
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+# The 'native' option appeared in gcc 4.2, so we cannot safely use it as the
+# default. Use i686 instead for 32-bit.
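+#
+# An illustrative Jamfile requirement (hypothetical target name) selecting a
+# specific CPU, which maps to the matching -march flag declared below:
+#
+#   exe app : app.cpp : <architecture>x86 <instruction-set>core2 ;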
+toolset.flags gcc OPTIONS <architecture>x86/<address-model>32/<instruction-set> : -march=i686 ;
+cpu-flags gcc OPTIONS : x86 : native : -march=native ;
+cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
+cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
+cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
+cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ;
+cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ;
+cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ;
+cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ;
+cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ;
+cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ;
+cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ;
+cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ;
+cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ;
+cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ;
+cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ;
+cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : conroe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : conroe-xe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : conroe-l : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : allendale : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : wolfdale : -march=core2 -msse4.1 ;
+cpu-flags gcc OPTIONS : x86 : merom : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : merom-xe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : kentsfield : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : kentsfield-xe : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : yorksfield : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : penryn : -march=core2 ;
+cpu-flags gcc OPTIONS : x86 : corei7 : -march=corei7 ;
+cpu-flags gcc OPTIONS : x86 : nehalem : -march=corei7 ;
+cpu-flags gcc OPTIONS : x86 : corei7-avx : -march=corei7-avx ;
+cpu-flags gcc OPTIONS : x86 : sandy-bridge : -march=corei7-avx ;
+cpu-flags gcc OPTIONS : x86 : core-avx-i : -march=core-avx-i ;
+cpu-flags gcc OPTIONS : x86 : ivy-bridge : -march=core-avx-i ;
+cpu-flags gcc OPTIONS : x86 : haswell : -march=core-avx-i -mavx2 -mfma -mbmi -mbmi2 -mlzcnt ;
+cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
+cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
+cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
+cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ;
+cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ;
+cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ;
+cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ;
+cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ;
+##
+cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ;
+cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ;
+cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ;
+cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ;
+cpu-flags gcc OPTIONS : x86 : k8-sse3 : -march=k8-sse3 ;
+cpu-flags gcc OPTIONS : x86 : opteron-sse3 : -march=opteron-sse3 ;
+cpu-flags gcc OPTIONS : x86 : athlon64-sse3 : -march=athlon64-sse3 ;
+cpu-flags gcc OPTIONS : x86 : amdfam10 : -march=amdfam10 ;
+cpu-flags gcc OPTIONS : x86 : barcelona : -march=barcelona ;
+cpu-flags gcc OPTIONS : x86 : bdver1 : -march=bdver1 ;
+cpu-flags gcc OPTIONS : x86 : bdver2 : -march=bdver2 ;
+cpu-flags gcc OPTIONS : x86 : bdver3 : -march=bdver3 ;
+cpu-flags gcc OPTIONS : x86 : btver1 : -march=btver1 ;
+cpu-flags gcc OPTIONS : x86 : btver2 : -march=btver2 ;
+cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
+cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
+cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
+cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ;
+##
+cpu-flags gcc OPTIONS : x86 : atom : -march=atom ;
+# Sparc
+cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ;
+cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ;
+cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ;
+cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ;
+cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ;
+cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ;
+cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ;
+cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ;
+cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ;
+cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ;
+cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ;
+cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ;
+cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ;
+# RS/6000 & PowerPC
+cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ;
+cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ;
+cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ;
+cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ;
+cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ;
+cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ;
+cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ;
+cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ;
+cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ;
+cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ;
+cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ;
+cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ;
+cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ;
+cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ;
+cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ;
+cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ;
+cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ;
+cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ;
+cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ;
+cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ;
+cpu-flags gcc OPTIONS : power : power : -mcpu=power ;
+cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ;
+cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ;
+cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ;
+cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ;
+cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ;
+cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ;
+cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ;
+cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ;
+cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ;
+cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ;
+cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ;
+# AIX variant of RS/6000 & PowerPC
+toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X64" ;
diff --git a/tools/build/src/tools/gcc.py b/tools/build/src/tools/gcc.py
new file mode 100644
index 0000000000..97f1e79d45
--- /dev/null
+++ b/tools/build/src/tools/gcc.py
@@ -0,0 +1,842 @@
+# Status: being ported by Steven Watanabe
+# Base revision: 47077
+# TODO: common.jam needs to be ported
+# TODO: generators.jam needs to have register_c_compiler.
+#
+# Copyright 2001 David Abrahams.
+# Copyright 2002-2006 Rene Rivera.
+# Copyright 2002-2003 Vladimir Prus.
+# Copyright (c) 2005 Reece H. Dunn.
+# Copyright 2006 Ilya Sokolov.
+# Copyright 2007 Roland Schwarz
+# Copyright 2007 Boris Gubenko.
+# Copyright 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import os
+import subprocess
+import re
+
+import bjam
+
+from b2.tools import unix, common, rc, pch, builtin
+from b2.build import feature, type, toolset, generators, property_set
+from b2.build.property import Property
+from b2.util.utility import os_name, on_windows
+from b2.manager import get_manager
+from b2.build.generators import Generator
+from b2.build.toolset import flags
+from b2.util.utility import to_seq
+
+
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+feature.extend('toolset', ['gcc'])
+
+
+toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll'])
+toolset.inherit_flags('gcc', 'unix')
+toolset.inherit_rules('gcc', 'unix')
+
+generators.override('gcc.prebuilt', 'builtin.prebuilt')
+generators.override('gcc.searched-lib-generator', 'searched-lib-generator')
+
+# Target naming is determined by types/lib.jam and the settings below this
+# comment.
+#
+# On *nix:
+# libxxx.a static library
+# libxxx.so shared library
+#
+# On windows (mingw):
+# libxxx.lib static library
+# xxx.dll DLL
+# xxx.lib import library
+#
+# On windows (cygwin) i.e. <target-os>cygwin
+# libxxx.a static library
+# xxx.dll DLL
+# libxxx.dll.a import library
+#
+# Note: the user can always override this by using the <tag>@rule.
+# These settings have been chosen so that mingw is in line with msvc naming
+# conventions. For cygwin, the cygwin naming convention has been chosen.
+
+# Make the gcc toolset use the "o" object-file suffix on all platforms.
+type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o')
+type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a')
+
+type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a')
+type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib')
+
+__machine_match = re.compile('^([^ ]+)')
+__version_match = re.compile('^([0-9.]+)')
+
+def init(version = None, command = None, options = None):
+ """
+ Initializes the gcc toolset for the given version. If necessary, command may
+ be used to specify where the compiler is located. The parameter 'options' is a
+ space-delimited list of options, each one specified as
+ <option-name>option-value. Valid option names are: cxxflags, linkflags and
+ linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
+ and the default value will be selected based on the current OS.
+ Example:
+ using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+ """
+
+ options = to_seq(options)
+ command = to_seq(command)
+
+ # Information about the gcc command...
+ # The command.
+ command = to_seq(common.get_invocation_command('gcc', 'g++', command))
+ # The root directory of the tool install.
+    root = feature.get_values('<root>', options)
+ # The bin directory where to find the command to execute.
+ bin = None
+ # The flavor of compiler.
+ flavor = feature.get_values('<flavor>', options)
+ # Autodetect the root and bin dir if not given.
+ if command:
+ if not bin:
+ bin = common.get_absolute_tool_path(command[-1])
+ if not root:
+ root = os.path.dirname(bin)
+ # Autodetect the version and flavor if not given.
+ if command:
+ machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0]
+ machine = __machine_match.search(machine_info).group(1)
+
+ version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0]
+ version = __version_match.search(version_info).group(1)
+ if not flavor and machine.find('mingw') != -1:
+ flavor = 'mingw'
+
+ condition = None
+ if flavor:
+ condition = common.check_init_parameters('gcc', None,
+ ('version', version),
+ ('flavor', flavor))
+ else:
+ condition = common.check_init_parameters('gcc', None,
+ ('version', version))
+
+ if command:
+ command = command[0]
+
+ common.handle_options('gcc', condition, command, options)
+
+ linker = feature.get_values('<linker-type>', options)
+ if not linker:
+ if os_name() == 'OSF':
+ linker = 'osf'
+ elif os_name() == 'HPUX':
+            linker = 'hpux'
+ else:
+ linker = 'gnu'
+
+ init_link_flags('gcc', linker, condition)
+
+    # If gcc is installed in a non-standard location, we would need to add
+    # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
+    # rules).
+ if command:
+        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
+        # and all must be added to LD_LIBRARY_PATH. The linker will pick the
+        # right ones. Note that we don't provide a clean way to build a 32-bit
+        # binary with a 64-bit compiler, but the user can always pass -m32
+        # manually.
+ lib_path = [os.path.join(root, 'bin'),
+ os.path.join(root, 'lib'),
+ os.path.join(root, 'lib32'),
+ os.path.join(root, 'lib64')]
+ if debug():
+ print 'notice: using gcc libraries ::', condition, '::', lib_path
+ toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path)
+
+    # If it's not a system gcc install, we should adjust the various programs
+    # as needed to prefer the install-specific versions. This is essential for
+    # correct use of MinGW and for cross-compiling.
+
+ # - The archive builder.
+ archiver = common.get_invocation_command('gcc',
+ 'ar', feature.get_values('<archiver>', options), [bin], path_last=True)
+ toolset.flags('gcc.archive', '.AR', condition, [archiver])
+ if debug():
+ print 'notice: using gcc archiver ::', condition, '::', archiver
+
+ # - Ranlib
+ ranlib = common.get_invocation_command('gcc',
+ 'ranlib', feature.get_values('<ranlib>', options), [bin], path_last=True)
+ toolset.flags('gcc.archive', '.RANLIB', condition, [ranlib])
+ if debug():
+        print 'notice: using gcc ranlib ::', condition, '::', ranlib
+
+ # - The resource compiler.
+ rc_command = common.get_invocation_command_nodefault('gcc',
+ 'windres', feature.get_values('<rc>', options), [bin], path_last=True)
+ rc_type = feature.get_values('<rc-type>', options)
+
+ if not rc_type:
+ rc_type = 'windres'
+
+ if not rc_command:
+ # If we can't find an RC compiler we fallback to a null RC compiler that
+ # creates empty object files. This allows the same Jamfiles to work
+ # across the board. The null RC uses the assembler to create the empty
+ # objects, so configure that.
+ rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True)
+ rc_type = 'null'
+ rc.configure(rc_command, condition, '<rc-type>' + rc_type)
+
+###if [ os.name ] = NT
+###{
+### # This causes single-line command invocation to not go through .bat files,
+### # thus avoiding command-line length limitations.
+### JAMSHELL = % ;
+###}
+
+#FIXME: when register_c_compiler is moved to
+# generators, these should be updated
+builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
+builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc'])
+
+# pch support
+
+# The compiler looks for a precompiled header in each directory just before it
+# looks for the include file in that directory. The name searched for is the
+# name specified in the #include directive with a ".gch" suffix appended. The
+# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is
+# appended to the full header name.
+
+type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch')
+
+# GCC-specific pch generator.
+class GccPchGenerator(pch.PchGenerator):
+
+ # Inherit the __init__ method
+
+ def run_pch(self, project, name, prop_set, sources):
+ # Find the header in sources. Ignore any CPP sources.
+ header = None
+ for s in sources:
+ if type.is_derived(s.type(), 'H'):
+ header = s
+
+ # Error handling: Base header file name should be the same as the base
+ # precompiled header name.
+ header_name = header.name()
+ header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
+ if header_basename != name:
+ location = project.project_module
+ ###FIXME:
+ raise Exception()
+ ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
+
+ pch_file = Generator.run(self, project, name, prop_set, [header])
+
+ # return result of base class and pch-file property as usage-requirements
+ # FIXME: what about multiple results from generator.run?
+ return (property_set.create([Property('pch-file', pch_file[0]),
+ Property('cflags', '-Winvalid-pch')]),
+ pch_file)
+
+    # Calls the base version specifying the source's name as the name of the
+    # created target. As a result, the PCH will be named whatever.hpp.gch, and
+    # not whatever.gch.
+ def generated_targets(self, sources, prop_set, project, name = None):
+ name = sources[0].name()
+ return Generator.generated_targets(self, sources,
+ prop_set, project, name)
+
+# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
+# latter have HPP type, but HPP type is derived from H. The type of compilation
+# is determined entirely by the destination type.
+generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ]))
+generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ]))
+
+# Override default do-nothing generators.
+generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator')
+generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator')
+
+flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>'])
+
+# Declare flags and action for compilation
+flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
+flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
+flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
+
+flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
+flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
+flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
+
+flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w'])
+flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall'])
+flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic'])
+flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror'])
+
+flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
+
+flags('gcc.compile.c++', 'OPTIONS', ['<rtti>off'], ['-fno-rtti'])
+flags('gcc.compile.c++', 'OPTIONS', ['<exception-handling>off'], ['-fno-exceptions'])
+
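+# For illustration only (editor's note; the flags come from the declarations
+# above, while the exact command line is a sketch): a build request such as
+#
+#   b2 toolset=gcc variant=release warnings=all cxxflags=-std=c++03
+#
+# makes a C++ compile step expand to roughly
+#
+#   g++ ... -O3 -finline-functions -Wno-inline -Wall -pedantic -std=c++03 ...
+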
+# On cygwin and mingw, gcc generates position-independent code by default and
+# warns if -fPIC is specified. This might not be the right way of checking
+# whether we're using cygwin. For example, it's possible to run cygwin gcc from
+# an NT shell, or to use it for cross-compiling. But we'll solve that problem
+# when it's time. In that case we'll just add another parameter to 'init' and
+# move this logic inside 'init'.
+if os_name() not in ['CYGWIN', 'NT']:
+ # This logic will add -fPIC for all compilations:
+ #
+ # lib a : a.cpp b ;
+ # obj b : b.cpp ;
+ # exe c : c.cpp a d ;
+ # obj d : d.cpp ;
+ #
+    # This is all fine, except that 'd' will be compiled with -fPIC even though
+    # it's not needed, as 'd' is used only in an exe. However, it's hard to
+    # detect where a target is going to be used. Alternatively, we could set
+    # -fPIC only when the main target type is LIB, but then 'b' would be
+    # compiled without -fPIC. On x86-64 that will lead to link errors. So,
+    # compile everything with -fPIC.
+    #
+    # Yet another alternative would be to create a propagated <sharedable>
+    # feature and set it when building shared libraries, but that's hard to
+    # implement and would increase the target path length even more.
+ flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC'])
+
+if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX':
+ # OSF does have an option called -soname but it doesn't seem to work as
+ # expected, therefore it has been disabled.
+ HAVE_SONAME = ''
+ SONAME_OPTION = '-h'
+
+
+flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>'])
+flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>'])
+flags('gcc.compile', 'DEFINES', [], ['<define>'])
+flags('gcc.compile', 'INCLUDES', [], ['<include>'])
+
+engine = get_manager().engine()
+
+engine.register_action('gcc.compile.c++.pch',
+ '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
+
+engine.register_action('gcc.compile.c.pch',
+ '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
+
+
+def gcc_compile_cpp(targets, sources, properties):
+ # Some extensions are compiled as C++ by default. For others, we need to
+ # pass -x c++. We could always pass -x c++ but distcc does not work with it.
+    extension = os.path.splitext(sources[0])[1]
+    lang = ''
+    if extension not in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
+        lang = '-x c++'
+    get_manager().engine().set_target_variable(targets, 'LANG', lang)
+ engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
+
+def gcc_compile_c(targets, sources, properties):
+ engine = get_manager().engine()
+    # If we use the name g++, then the default file-suffix -> language mapping
+    # does not work, so we have to pass the -x option. Maybe we can work around
+    # this by allowing the user to specify both C and C++ compiler names.
+ #if $(>:S) != .c
+ #{
+ engine.set_target_variable (targets, 'LANG', '-x c')
+ #}
+ engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
+
+engine.register_action(
+ 'gcc.compile.c++',
+ '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
+ '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' +
+ '-c -o "$(<:W)" "$(>:W)"',
+ function=gcc_compile_cpp,
+ bound_list=['PCH_FILE'])
+
+engine.register_action(
+ 'gcc.compile.c',
+ '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' +
+ '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+ function=gcc_compile_c,
+ bound_list=['PCH_FILE'])
+
+def gcc_compile_asm(targets, sources, properties):
+ get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp')
+
+engine.register_action(
+ 'gcc.compile.asm',
+ '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
+ function=gcc_compile_asm)
+
+
+class GccLinkingGenerator(unix.UnixLinkingGenerator):
+    """
+    The class which checks that we don't try to use the <runtime-link>static
+    property while creating or using a shared library, since it's not supported
+    by gcc/libc.
+    """
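+    # For example (editor's note): on UNIX a build request such as
+    #
+    #   b2 toolset=gcc link=shared runtime-link=static
+    #
+    # is rejected for shared-library targets by the check in run() below: a
+    # warning is printed and no target is produced.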
+ def run(self, project, name, ps, sources):
+ # TODO: Replace this with the use of a target-os property.
+
+ no_static_link = False
+ if bjam.variable('UNIX'):
+            no_static_link = True
+ ##FIXME: what does this mean?
+## {
+## switch [ modules.peek : JAMUNAME ]
+## {
+## case * : no-static-link = true ;
+## }
+## }
+
+ reason = None
+ if no_static_link and ps.get('runtime-link') == 'static':
+ if ps.get('link') == 'shared':
+                reason = "On gcc, DLL can't be built with '<runtime-link>static'."
+ elif type.is_derived(self.target_types[0], 'EXE'):
+ for s in sources:
+ source_type = s.type()
+ if source_type and type.is_derived(source_type, 'SHARED_LIB'):
+                        reason = "On gcc, using DLLs together with the " +\
+                                 "'<runtime-link>static' option is not possible."
+ if reason:
+ print 'warning:', reason
+ print 'warning:',\
+ "It is suggested to use '<runtime-link>static' together",\
+                "with '<link>static'."
+ return
+ else:
+ generated_targets = unix.UnixLinkingGenerator.run(self, project,
+ name, ps, sources)
+ return generated_targets
+
+if on_windows():
+ flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,'])
+ generators.register(
+ GccLinkingGenerator('gcc.link', True,
+ ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
+ [ 'EXE' ],
+ [ '<toolset>gcc' ]))
+ generators.register(
+ GccLinkingGenerator('gcc.link.dll', True,
+ ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
+ ['IMPORT_LIB', 'SHARED_LIB'],
+ ['<toolset>gcc']))
+else:
+ generators.register(
+ GccLinkingGenerator('gcc.link', True,
+ ['LIB', 'OBJ'],
+ ['EXE'],
+ ['<toolset>gcc']))
+ generators.register(
+ GccLinkingGenerator('gcc.link.dll', True,
+ ['LIB', 'OBJ'],
+ ['SHARED_LIB'],
+ ['<toolset>gcc']))
+
+# Declare flags for linking.
+# First, the common flags.
+flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
+flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
+flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>'])
+flags('gcc.link', 'LINKPATH', [], ['<library-path>'])
+flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
+flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
+flags('gcc.link', 'LIBRARIES', [], ['<library-file>'])
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link. On HP-UX not all system libraries exist as archived libraries (for
+# example, there is no libunwind.a), so, on this platform, the -static option
+# cannot be specified.
+if os_name() != 'HPUX':
+ flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static'])
+
+# Now, the vendor specific flags.
+# The parameter linker can be either gnu, darwin, osf, hpux or sun.
+def init_link_flags(toolset, linker, condition):
+    """
+    Declares the vendor-specific link flags.
+    The 'linker' parameter can be either 'gnu', 'darwin', 'osf', 'hpux' or 'sun'.
+    """
+ toolset_link = toolset + '.link'
+ if linker == 'gnu':
+        # Strip the binary when no debugging is needed. We use the --strip-all
+        # flag as opposed to -s since icc (Intel's compiler) is generally
+        # option-compatible with, and inherits from, the gcc toolset, but does
+        # not support -s.
+
+ # FIXME: what does unchecked translate to?
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all']) # : unchecked ;
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+ flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
+ flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ;
+ flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ;
+
+        # gnu ld has the ability to change the search behaviour for libraries
+        # referenced by the -l switch. The modifiers are -Bstatic and -Bdynamic,
+        # and they change the search for the -l switches that follow them. The
+        # following list shows the variants tried; the search stops at the first
+        # variant that has a match.
+ # *nix: -Bstatic -lxxx
+ # libxxx.a
+ #
+ # *nix: -Bdynamic -lxxx
+ # libxxx.so
+ # libxxx.a
+ #
+ # windows (mingw,cygwin) -Bstatic -lxxx
+ # libxxx.a
+ # xxx.lib
+ #
+ # windows (mingw,cygwin) -Bdynamic -lxxx
+ # libxxx.dll.a
+ # xxx.dll.a
+ # libxxx.a
+ # xxx.lib
+ # cygxxx.dll (*)
+ # libxxx.dll
+ # xxx.dll
+ # libxxx.a
+ #
+ # (*) This is for cygwin
+ # Please note that -Bstatic and -Bdynamic are not a guarantee that a
+ # static or dynamic lib indeed gets linked in. The switches only change
+ # search patterns!
+
+        # On *nix mixing shared libs with static runtime is not a good idea.
+        flags(toolset_link, 'FINDLIBS-ST-PFX',
+              map(lambda x: x + '/<runtime-link>shared', condition),
+              ['-Wl,-Bstatic']) # : unchecked ;
+        flags(toolset_link, 'FINDLIBS-SA-PFX',
+              map(lambda x: x + '/<runtime-link>shared', condition),
+              ['-Wl,-Bdynamic']) # : unchecked ;
+
+        # On windows allow mixing of static and dynamic libs with static
+        # runtime.
+        flags(toolset_link, 'FINDLIBS-ST-PFX',
+              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+              ['-Wl,-Bstatic']) # : unchecked ;
+        flags(toolset_link, 'FINDLIBS-SA-PFX',
+              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+              ['-Wl,-Bdynamic']) # : unchecked ;
+        flags(toolset_link, 'OPTIONS',
+              map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
+              ['-Wl,-Bstatic']) # : unchecked ;
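+
+        # For illustration only (editor's sketch; 'foo' and 'bar' are
+        # hypothetical library names): with <runtime-link>shared on *nix these
+        # prefixes make the final link line contain roughly
+        #
+        #   ... -Wl,-Bstatic -lfoo -Wl,-Bdynamic -lbar ...
+        #
+        # so libraries given via <find-static-library> are searched in static
+        # mode before the search mode is switched back to dynamic.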
+
+ elif linker == 'darwin':
+        # On Darwin, the -s option to ld does not work unless we pass -static,
+        # and passing -static unconditionally is a bad idea. So don't pass -s at
+        # all; darwin.jam will use a separate 'strip' invocation.
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+ flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
+
+ elif linker == 'osf':
+ # No --strip-all, just -s.
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
+ # : unchecked ;
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        # This linker does not support -R.
+ flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ;
+ # -rpath-link is not supported at all.
+
+ elif linker == 'sun':
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
+ # : unchecked ;
+ flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
+        # The Solaris linker does not have a separate -rpath-link, but allows
+        # using -L for the same purpose.
+ flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ;
+
+ # This permits shared libraries with non-PIC code on Solaris.
+ # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the
+ # following is not needed. Whether -fPIC should be hardcoded, is a
+ # separate question.
+ # AH, 2004/10/16: it is still necessary because some tests link against
+ # static libraries that were compiled without PIC.
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text'])
+ # : unchecked ;
+
+ elif linker == 'hpux':
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition),
+ ['-Wl,-s']) # : unchecked ;
+ flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition),
+ ['-fPIC']) # : unchecked ;
+
+ else:
+        errors.user_error(
+            "%s initialization: invalid linker '%s'. " % (toolset, linker) +
+            "The value '%s' specified for <linker> is not recognized. " % linker +
+            "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'.")
+
+# Declare actions for linking.
+def gcc_link(targets, sources, properties):
+ engine = get_manager().engine()
+ engine.set_target_variable(targets, 'SPACE', ' ')
+ # Serialize execution of the 'link' action, since running N links in
+    # parallel is just slower. For now, serialize only gcc links; it might be a
+    # good idea to serialize all links.
+ engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+
+engine.register_action(
+ 'gcc.link',
+ '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
+ '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
+ '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' +
+ '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
+ '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
+ '$(OPTIONS) $(USER_OPTIONS)',
+ function=gcc_link,
+ bound_list=['LIBRARIES'])
+
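+# For illustration only (editor's sketch; 'app', 'main.o', 'bar' and the
+# library path are hypothetical): with the gnu linker settings declared in
+# init_link_flags, this action expands to something like
+#
+#   g++ -L"/opt/bar/lib" -Wl,-R -Wl,"/opt/bar/lib" -o "app" \
+#       -Wl,--start-group "main.o" ... -lbar -Wl,--end-group -g
+#
+# where the elided part carries the -Bstatic/-Bdynamic prefixes discussed above.
+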
+# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
+# does not have the same logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
+# always available.
+__AR = 'ar'
+
+flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>'])
+
+def gcc_archive(targets, sources, properties):
+ # Always remove archive and start again. Here's rationale from
+ #
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
+ # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
+ # errors. After some debugging I traced it back to the fact that a1.o was
+ # *still* in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM onto the
+ # archive action. That makes archives fail to build on NT when they have
+ # many files because it will no longer execute the action directly and blow
+ # the line length limit. Instead we remove the file in a different action,
+ # just before building the archive.
+ clean = targets[0] + '(clean)'
+ bjam.call('TEMPORARY', clean)
+ bjam.call('NOCARE', clean)
+ engine = get_manager().engine()
+ engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE'))
+ engine.add_dependency(clean, sources)
+ engine.add_dependency(targets, clean)
+ engine.set_update_action('common.RmTemps', clean, targets)
+
+# Declare the action for creating static libraries.
+# The letter 'r' means to add files to the archive with replacement. Since we
+# remove the archive first, we don't care about replacement, but there is no
+# "add without replacement" option.
+# The letter 'c' suppresses the warning in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+engine.register_action('gcc.archive',
+ '''"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "$(.RANLIB)" "$(<)"
+ ''',
+ function=gcc_archive,
+ flags=['piecemeal'])
+
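+# For illustration only (editor's sketch; 'libfoo.a', 'foo.o' and 'bar.o' are
+# hypothetical names): with the default 'ar'/'ranlib' tools the action above
+# amounts to running
+#
+#   ar rc "libfoo.a" "foo.o" "bar.o"
+#   ranlib "libfoo.a"
+#
+# after the stale archive has been removed by the clean target set up in
+# gcc_archive().
+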
+def gcc_link_dll(targets, sources, properties):
+ engine = get_manager().engine()
+ engine.set_target_variable(targets, 'SPACE', ' ')
+ engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
+ engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME)
+ engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION)
+
+engine.register_action(
+ 'gcc.link.dll',
+    # Differs from 'link' above only by -shared.
+ '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
+ '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
+ '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' +
+ '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' +
+ '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
+ '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
+ '$(OPTIONS) $(USER_OPTIONS)',
+ function = gcc_link_dll,
+ bound_list=['LIBRARIES'])
+
+# Set up threading support. It's somewhat contrived, so perform it at the end,
+# to avoid cluttering other code.
+
+if on_windows():
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
+elif bjam.variable('UNIX'):
+ jamuname = bjam.variable('JAMUNAME')
+ host_os_name = jamuname[0]
+ if host_os_name.startswith('SunOS'):
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
+ flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+ elif host_os_name == 'BeOS':
+ # BeOS has no threading options, don't set anything here.
+ pass
+ elif host_os_name.endswith('BSD'):
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+ # there is no -lrt on BSD
+ elif host_os_name == 'DragonFly':
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+        # there is no -lrt on BSD; DragonFly is a FreeBSD variant,
+        # which annoyingly doesn't say it's a *BSD.
+ elif host_os_name == 'IRIX':
+ # gcc on IRIX does not support multi-threading, don't set anything here.
+ pass
+ elif host_os_name == 'Darwin':
+ # Darwin has no threading options, don't set anything here.
+ pass
+ else:
+ flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
+ flags('gcc', 'FINDLIBS-SA', [], ['rt'])
+
+def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
+ #FIXME: for some reason this fails. Probably out of date feature code
+## if default:
+## flags(toolset, variable,
+## ['<architecture>' + architecture + '/<instruction-set>'],
+## values)
+ flags(toolset, variable,
+ #FIXME: same as above
+ [##'<architecture>/<instruction-set>' + instruction_set,
+ '<architecture>' + architecture + '/<instruction-set>' + instruction_set],
+ values)
+
+# Set architecture/instruction-set options.
+#
+# x86 and compatible
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'native', ['-march=native'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'core2', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'conroe-l', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'allendale', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'wolfdale', ['-march=core2', '-msse4.1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'merom', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'merom-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'kentsfield', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'kentsfield-xe', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'yorksfield', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'penryn', ['-march=core2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'corei7', ['-march=corei7'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'nehalem', ['-march=corei7'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'corei7-avx', ['-march=corei7-avx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'sandy-bridge', ['-march=corei7-avx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'core-avx-i', ['-march=core-avx-i'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'ivy-bridge', ['-march=core-avx-i'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'haswell', ['-march=core-avx-i', '-mavx2', '-mfma', '-mbmi', '-mbmi2', '-mlzcnt'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp'])
+##
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'k8-sse3', ['-march=k8-sse3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron-sse3', ['-march=opteron-sse3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64-sse3', ['-march=athlon64-sse3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'amdfam10', ['-march=amdfam10'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'barcelona', ['-march=barcelona'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver1', ['-march=bdver1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver2', ['-march=bdver2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'bdver3', ['-march=bdver3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'btver1', ['-march=btver1'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'btver2', ['-march=btver2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3'])
+cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2'])
+##
+cpu_flags('gcc', 'OPTIONS', 'x86', 'atom', ['-march=atom'])
+# Sparc
+flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True)
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc'])
+cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3'])
+# RS/6000 & PowerPC
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32'])
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64'])
+cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403'])
+cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505'])
+cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601'])
+cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602'])
+cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603'])
+cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e'])
+cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604'])
+cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e'])
+cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620'])
+cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630'])
+cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740'])
+cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400'])
+cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450'])
+cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750'])
+cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801'])
+cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821'])
+cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823'])
+cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860'])
+cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970'])
+cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc'])
+cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64'])
+# AIX variant of RS/6000 & PowerPC
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32'])
+flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64'])
+flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X64'])
diff --git a/tools/build/v2/tools/generate.jam b/tools/build/src/tools/generate.jam
index 6732fa3551..6732fa3551 100644
--- a/tools/build/v2/tools/generate.jam
+++ b/tools/build/src/tools/generate.jam
diff --git a/tools/build/v2/tools/gettext.jam b/tools/build/src/tools/gettext.jam
index 99a43ffe9a..99a43ffe9a 100644
--- a/tools/build/v2/tools/gettext.jam
+++ b/tools/build/src/tools/gettext.jam
diff --git a/tools/build/v2/tools/gfortran.jam b/tools/build/src/tools/gfortran.jam
index 0aa69b85cc..0aa69b85cc 100644
--- a/tools/build/v2/tools/gfortran.jam
+++ b/tools/build/src/tools/gfortran.jam
diff --git a/tools/build/v2/tools/hp_cxx.jam b/tools/build/src/tools/hp_cxx.jam
index 86cd783e29..86cd783e29 100644
--- a/tools/build/v2/tools/hp_cxx.jam
+++ b/tools/build/src/tools/hp_cxx.jam
diff --git a/tools/build/v2/tools/hpfortran.jam b/tools/build/src/tools/hpfortran.jam
index 96e8d18b5a..96e8d18b5a 100644
--- a/tools/build/v2/tools/hpfortran.jam
+++ b/tools/build/src/tools/hpfortran.jam
diff --git a/tools/build/v2/tools/ifort.jam b/tools/build/src/tools/ifort.jam
index eb7c198818..eb7c198818 100644
--- a/tools/build/v2/tools/ifort.jam
+++ b/tools/build/src/tools/ifort.jam
diff --git a/tools/build/src/tools/intel-darwin.jam b/tools/build/src/tools/intel-darwin.jam
new file mode 100644
index 0000000000..8c77834afb
--- /dev/null
+++ b/tools/build/src/tools/intel-darwin.jam
@@ -0,0 +1,227 @@
+# Copyright Vladimir Prus 2004.
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import intel ;
+import feature : feature ;
+import os ;
+import toolset ;
+import toolset : flags ;
+import gcc ;
+import common ;
+import errors ;
+import generators ;
+
+feature.extend-subfeature toolset intel : platform : darwin ;
+
+toolset.inherit-generators intel-darwin
+ <toolset>intel <toolset-intel:platform>darwin
+ : gcc
+ # Don't inherit PCH generators. They were not tested, and probably
+ # don't work for this compiler.
+ : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
+ ;
+
+generators.override intel-darwin.prebuilt : builtin.lib-generator ;
+generators.override intel-darwin.prebuilt : builtin.prebuilt ;
+generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
+
+toolset.inherit-rules intel-darwin : gcc ;
+toolset.inherit-flags intel-darwin : gcc
+ : <inlining>off <inlining>on <inlining>full <optimization>space
+ <warnings>off <warnings>all <warnings>on
+ <architecture>x86/<address-model>32
+ <architecture>x86/<address-model>64
+ ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# vectorization diagnostics
+feature vectorize : off on full ;
+
+# Initializes the intel-darwin toolset.
+# version is mandatory.
+# name (default icc) is used to invoke the specified intel compiler.
+# compile and link options allow you to specify additional command line options
+# for each version.
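+#
+# Example (editor's sketch; the version and command path are hypothetical):
+#
+#   using intel-darwin : 11.1 : /opt/intel/bin/icc ;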
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters intel-darwin
+ : version $(version) ] ;
+
+ command = [ common.get-invocation-command intel-darwin : icc
+ : $(command) : /opt/intel_cc_80/bin ] ;
+
+ common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
+
+ gcc.init-link-flags intel-darwin darwin $(condition) ;
+
+ # handle <library-path>
+ # local library-path = [ feature.get-values <library-path> : $(options) ] ;
+ # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
+
+ local root = [ feature.get-values <root> : $(options) ] ;
+ local bin ;
+ if $(command) || $(root)
+ {
+ bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
+ root ?= $(bin:D) ;
+
+ if $(root)
+ {
+ # Libraries required to run the executable may be in either
+ # $(root)/lib (10.1 and earlier)
+ # or
+            # $(root)/lib/architecture-name (11.0 and later)
+ local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
+ }
+ flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
+ }
+ }
+
+ local m = [ MATCH (..).* : $(version) ] ;
+ local n = [ MATCH (.)\\. : $(m) ] ;
+ if $(n) {
+ m = $(n) ;
+ }
+
+ local major = $(m) ;
+
+ if $(major) = "9" {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
+ }
+ else {
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
+ flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
+ flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
+ }
+
+ local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
+
+ # wchar_t char_traits workaround for compilers older than 10.2
+ if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
+ flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
+ }
+}
+
+SPACE = " " ;
+
+flags intel-darwin.compile OPTIONS <cflags> ;
+flags intel-darwin.compile.c++ OPTIONS <cxxflags> ;
+# flags intel-darwin.compile INCLUDES <include> ;
+
+flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
+
+#
+.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom
+ merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3
+ athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3 btver1 btver2 ;
+.cpu-type-x86-64 = $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+
+flags intel-darwin.compile OPTIONS <instruction-set>$(.cpu-type-x86-64)/<address-model>32 : -m32 ; # -mcmodel=small ;
+flags intel-darwin.compile OPTIONS <instruction-set>$(.cpu-type-x86-64)/<address-model>64 : -m64 ; # -mcmodel=large ;
+
+flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
+
+flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
+flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+flags intel-darwin ARFLAGS <archiveflags> ;
+
+# Default value. Mostly for the sake of intel-linux
+# that inherits from gcc, but does not have the same
+# logic to set the .AR variable. We can put the same
+# logic in intel-linux, but that's hardly worth the trouble
+# as on Linux, 'ar' is always available.
+.AR = ar ;
+
+rule archive ( targets * : sources * : properties * )
+{
+ # Always remove archive and start again. Here's rationale from
+ # Andre Hentz:
+ #
+ # I had a file, say a1.c, that was included into liba.a.
+ # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
+ # My program was crashing with absurd errors.
+ # After some debugging I traced it back to the fact that a1.o was *still*
+ # in liba.a
+ #
+ # Rene Rivera:
+ #
+ # Originally removing the archive was done by splicing an RM
+ # onto the archive action. That makes archives fail to build on NT
+ # when they have many files because it will no longer execute the
+ # action directly and blow the line length limit. Instead we
+ # remove the file in a different action, just before the building
+ # of the archive.
+ #
+ local clean.a = $(targets[1])(clean) ;
+ TEMPORARY $(clean.a) ;
+ NOCARE $(clean.a) ;
+ LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
+ DEPENDS $(clean.a) : $(sources) ;
+ DEPENDS $(targets) : $(clean.a) ;
+ common.RmTemps $(clean.a) : $(targets) ;
+}
+
+actions piecemeal archive
+{
+ "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
+ "ranlib" -cs "$(<)"
+}
+
+flags intel-darwin.link USER_OPTIONS <linkflags> ;
+
+# Declare actions for linking
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since
+ # running N links in parallel is just slower.
+ JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
+}
diff --git a/tools/build/v2/tools/intel-linux.jam b/tools/build/src/tools/intel-linux.jam
index d9164add80..d9164add80 100644
--- a/tools/build/v2/tools/intel-linux.jam
+++ b/tools/build/src/tools/intel-linux.jam
diff --git a/tools/build/src/tools/intel-win.jam b/tools/build/src/tools/intel-win.jam
new file mode 100644
index 0000000000..bccdb1fa3a
--- /dev/null
+++ b/tools/build/src/tools/intel-win.jam
@@ -0,0 +1,487 @@
+# Copyright Vladimir Prus 2004.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt
+# or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+# Importing common is needed because the rules we inherit here depend on it.
+# That is nasty.
+import common ;
+import errors ;
+import feature ;
+import intel ;
+import msvc ;
+import os ;
+import set ;
+import toolset ;
+import generators ;
+import type ;
+import path ;
+
+feature.extend-subfeature toolset intel : platform : win ;
+
+toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
+toolset.inherit-flags intel-win : msvc : : YLOPTION ;
+toolset.inherit-rules intel-win : msvc ;
+
+# Override default do-nothing generators.
+generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
+generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
+generators.override intel-win.compile.rc : rc.compile.resource ;
+generators.override intel-win.compile.mc : mc.compile ;
+
+toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
+
+toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
+
+# Initializes the intel toolset for windows
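+#
+# Example (editor's sketch; the intel and vc versions are hypothetical):
+#
+#   using intel-win : 14.0 : : <compatibility>vc12 ;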
+rule init ( version ? : # the compiler version
+ command * : # the command to invoke the compiler itself
+ options * # Additional option: <compatibility>
+ # either 'vc6', 'vc7', 'vc7.1'
+ # or 'native'(default).
+ )
+{
+ if $(version)
+ {
+ configure $(version) : $(command) : $(options) ;
+ }
+ else
+ {
+ if $(command)
+ {
+ errors.error "Autodetect of version from command not implemented!" ;
+ }
+ local intel_versions = [ get-autodetect-versions () ] ;
+ if ! $(intel_versions)
+ {
+ errors.error "No intel compiler version found!" ;
+ }
+ else
+ {
+            # In auto-config mode the user can still request an msvc backend.
+            # If some intel compiler doesn't support it, don't try to configure it!
+            local msvc-version = [ feature.get-values <compatibility> : $(options) ] ;
+ msvc-version = [ get-msvc-version-from-vc-string $(msvc-version) ] ;
+ for local v in $(intel_versions)
+ {
+ if [ is-msvc-supported $(v) : $(msvc-version) ]
+ {
+ configure $(v) : : $(options) ;
+ }
+ }
+ }
+ }
+}
+
+local rule configure ( version ? : command * : options * )
+{
+ local compatibility =
+ [ feature.get-values <compatibility> : $(options) ] ;
+    # Allow specifying the toolset and Visual Studio backend from the command
+    # line, e.g. --toolset=intel-14.0-vc10
+ local vc_in_version = [ MATCH (vc[0-9]+(\\.[0-9]+)?)$ : $(version) ] ;
+ vc_in_version = $(vc_in_version[1]) ;
+ if $(compatibility) && $(vc_in_version)
+ {
+ if $(compatibility) != $(vc_in_version)
+ {
+ errors.error "feature compatibility and vc version in toolset present!" ;
+ }
+ }
+
+ if $(vc_in_version) && ! $(compatibility)
+ {
+ # vc Version must be stripped before check-init-parameters is called!
+ version = [ MATCH (.+)-vc.+$ : $(version) ] ;
+
+ compatibility = $(vc_in_version) ;
+ options += <compatibility>$(vc_in_version) ;
+ }
+ if $(compatibility)
+ {
+ configure-really $(version) : $(command) : $(options) : $(compatibility) ;
+ }
+ else
+ {
+ local msvc_versions = [ feature.values <toolset-msvc:version> ] ;
+ if ! $(msvc_versions)
+ {
+ ECHO notice: no msvc versions detected. trying auto detect ;
+ toolset.using msvc : all ;
+ msvc_versions = [ feature.values <toolset-msvc:version> ] ;
+ }
+ if ! $(.iclvars-$(version)-supported-vcs)
+ {
+ errors.error "Supported msvc versions not known for intel $(version)" ;
+ }
+
+ for local v in $(msvc_versions)
+ {
+ if [ MATCH "($(v))" : $(.iclvars-$(version)-supported-vcs) ]
+ {
+                # Strip the trailing .0 from the msvc version, as the intel
+                # compiler currently uses only the major version for /Qvc.
+ local m = [ MATCH ([0-9]+).0$ : $(v) ] ;
+ if $(m)
+ {
+ v = $(m) ;
+ }
+ v = "vc$(v)" ;
+ local options_really = $(options) ;
+ options_really += <compatibility>$(v) ;
+ if $(.debug-configuration)
+ {
+ ECHO "configure: intel version: $(version) msvc version: $(v)" ;
+ }
+ configure-really $(version) : $(command) : $(options) : $(v) ;
+ }
+ }
+ if ! [ feature.values <toolset-intel:version> ]
+ {
+ errors.error "Failed to register an intel toolset!" ;
+ }
+ }
+}
+
+local rule configure-really ( version ? : command * : options * : compatibility )
+{
+ local rewrite-setupscript = [ feature.get-values <rewrite-setup-scripts> : $(options) ] ;
+ local condition = [ common.check-init-parameters intel-win
+ : version $(version) : compatibility $(compatibility) ] ;
+
+ local m = [ MATCH ([0-9]+).* : $(version) ] ;
+ local major = $(m[1]) ;
+ if ! $(major)
+ {
+ errors.error "Major version not found: $(version)" ;
+ }
+
+ local msvc-version = [ get-msvc-version-from-vc-string $(compatibility) ] ;
+ if ! $(msvc-version)
+ {
+ errors.user-error "Invalid value for compatibility option:"
+ $(compatibility) ;
+ }
+
+ command = [ get-compiler-invocation-cmd $(major) : $(command) ] ;
+
+ common.handle-options intel-win : $(condition) : $(command) : $(options) ;
+
+ local root ;
+ if $(command)
+ {
+ root = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ if $(major) >= 12
+ {
+ root = [ path.make $(root) ] ;
+ root = [ path.parent $(root) ] ;
+ }
+ root = $(root)/ ;
+ }
+
+ local setup ;
+ setup = [ path.glob $(root) : iclvars_*.bat ] ;
+ if ! $(setup)
+ {
+ setup = [ path.join $(root) "iclvars.bat" ] ;
+ setup = [ path.native $(setup) ] ;
+ }
+
+ local target_types ;
+ local iclvars_vs_arg ;
+ if $(major) >= 12
+ {
+        # If we have a known intel toolset, check for Visual Studio
+        # compatibility; if not, trust the given parameters.
+ if ! [ is-msvc-supported $(version) : $(msvc-version) ]
+ {
+ errors.error "msvc $(msvc-version) not supported for intel toolset version $(version)" ;
+ }
+ if $(.iclvars-version-alias-$(compatibility))
+ {
+ iclvars_vs_arg = $(.iclvars-version-alias-$(compatibility)) ;
+ }
+ else
+ {
+ errors.error "Don't know what parameter to pass for vc version ( $(compatibility) )" ;
+ }
+        # There are two possible paths for the 64-bit intel compiler: one for
+        # the IA32-to-Intel64 cross compiler, and one for the native 64-bit
+        # compiler. We prefer the latter if it is installed, and we don't rely
+        # on the OS reporting whether we're 64- or 32-bit, as that really only
+        # tells us which subsystem bjam is running in:
+ #
+ local intel64_path = [ path.join $(root) intel64 ] ;
+ if [ path.glob $(intel64_path) : icl.exe ]
+ {
+ target_types = ia32 intel64 ;
+ }
+ else
+ {
+ target_types = ia32 ia32_intel64 ;
+ }
+ }
+ else
+ {
+ target_types = default ;
+ iclvars_vs_arg = $(compatibility) ;
+ }
+
+ local default-assembler-intel64 = ml64 ;
+ local default-assembler-ia32_intel64 = ml64 ;
+ local default-assembler-ia32 = "ml -coff" ;
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
+
+ for local c in $(target_types)
+ {
+ local cpu-conditions ;
+ local setup-call ;
+ if $(major) >= 12
+ {
+ local t = [ msvc.maybe-rewrite-setup intel-win : "\"$(setup)\"" : "$(c) $(iclvars_vs_arg)" : $(version) : $(rewrite-setupscript) ] ;
+ setup-call = "call $(t) > nul " ;
+ cpu-conditions = $(condition)/$(.cpu-arch-$(c)) ;
+ }
+ else
+ {
+ setup-call = "call \""$(setup)"\" $(compatibility) > nul " ;
+ cpu-conditions = $(condition) ;
+ }
+
+
+ if [ os.name ] = NT
+ {
+ setup-call = $(setup-call)"
+ " ;
+ }
+ else
+ {
+ setup-call = "cmd /S /C "$(setup-call)" \"&&\" " ;
+ }
+
+ if $(.debug-configuration)
+ {
+ for local cond in $(cpu-conditions)
+ {
+ ECHO "notice: [intel-cfg] condition: '$(cond)', setup: '$(setup-call)'" ;
+ }
+ }
+
+ local cpu-assembler = $(assembler) ;
+ cpu-assembler ?= $(default-assembler-$(c)) ;
+
+ toolset.flags intel-win.compile .CC $(cpu-conditions) : $(setup-call)icl ;
+ toolset.flags intel-win.link .LD $(cpu-conditions) : $(setup-call)xilink /nologo ;
+ toolset.flags intel-win.archive .LD $(cpu-conditions) : $(setup-call)xilink /lib /nologo ;
+ toolset.flags intel-win.link .MT $(cpu-conditions) : $(setup-call)mt -nologo ;
+ toolset.flags intel-win.compile .ASM $(cpu-conditions) : $(setup-call)$(cpu-assembler) -nologo ;
+ toolset.flags intel-win.compile .MC $(cpu-conditions) : $(setup-call)mc ;
+ toolset.flags intel-win.compile .RC $(cpu-conditions) : $(setup-call)rc ;
+ }
+
+    # Depending on the settings, running tests may require some runtime DLLs.
+ toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
+
+
+ local C++FLAGS ;
+
+ C++FLAGS += /nologo ;
+
+ # Reduce the number of spurious error messages
+ C++FLAGS += /Qwn5 /Qwd985 ;
+
+ # Enable ADL
+ C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too
+
+ # Disable Microsoft "secure" overloads in Dinkumware libraries since they
+ # cause compile errors with Intel versions 9 and 10.
+ if $(major) < 12
+ {
+ C++FLAGS += -D_SECURE_SCL=0 ;
+ }
+
+ if $(major) > 5
+ {
+ C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping.
+ }
+
+ # Add options recognized only by intel7 and above.
+ if $(major) >= 7
+ {
+ C++FLAGS += /Qansi_alias ;
+ }
+
+ if $(compatibility) = vc6
+ {
+ C++FLAGS +=
+ # Emulate VC6
+ /Qvc6
+
+ # No wchar_t support in vc6 dinkum library. Furthermore, in vc6
+ # compatibility-mode, wchar_t is not a distinct type from unsigned
+ # short.
+ -DBOOST_NO_INTRINSIC_WCHAR_T
+ ;
+ }
+ else
+ {
+ if $(major) > 5
+ {
+ # Add support for wchar_t
+ C++FLAGS += /Zc:wchar_t
+ # Tell the dinkumware library about it.
+ -D_NATIVE_WCHAR_T_DEFINED
+ ;
+ }
+ }
+
+ if $(compatibility) && $(compatibility) != native
+ {
+ C++FLAGS += /Q$(compatibility) ;
+ }
+ else
+ {
+ C++FLAGS +=
+ -Qoption,cpp,--arg_dep_lookup
+ # The following options were intended to disable the Intel compiler's
+ # 'bug-emulation' mode, but were later reported to be causing ICE with
+ # Intel-Win 9.0. It is not yet clear which options can be safely used.
+ # -Qoption,cpp,--const_string_literals
+ # -Qoption,cpp,--new_for_init
+ # -Qoption,cpp,--no_implicit_typename
+ # -Qoption,cpp,--no_friend_injection
+ # -Qoption,cpp,--no_microsoft_bugs
+ ;
+ }
+
+ toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
+    # By default, when creating a PCH, intel adds 'i' to the explicitly
+    # specified name of the PCH file. Of course, Boost.Build is not happy when
+    # the compiler produces a file other than the one it was asked for. The
+    # option below stops this behaviour.
+ toolset.flags intel-win CFLAGS : -Qpchi- ;
+
+ if ! $(compatibility)
+ {
+ # If there's no backend version, assume 7.1.
+ compatibility = vc7.1 ;
+ }
+
+ msvc-version = [ msvc.resolve-possible-msvc-version-alias $(msvc-version) ] ;
+ msvc.configure-version-specific intel-win : $(msvc-version) : $(condition) ;
+}
+
+local rule get-autodetect-versions
+{
+ local result ;
+ for local v in $(.intel-autodetect-versions)
+ {
+ local major = [ MATCH ([0-9]+).* : $(v) ] ; # Use only major version
+ if [ get-icl-path-from-environment $(major) ]
+ {
+ result += $(v) ;
+ }
+ }
+ return $(result) ;
+}
+
+local rule get-icl-path-from-environment ( major_version )
+{
+ local path = [ os.environ ICPP_COMPILER$(major_version) ] ;
+ if $(path)
+ {
+ path = [ path.make $(path) ] ;
+ local cmdpath ;
+ local subdirs = $(.icl-target-subdirectories) ;
+ while $(subdirs)
+ {
+ cmdpath = [ path.join $(path) "bin/$(subdirs[0])/icl.exe" ] ;
+ cmdpath = [ path.native $(cmdpath) ] ;
+ if [ path.exists $(cmdpath) ]
+ {
+ subdirs = ;
+ } else {
+ cmdpath = ;
+ subdirs = $(subdirs[2-]) ;
+ }
+ }
+ path = $(cmdpath) ;
+ }
+ return $(path) ;
+}
+
+local rule get-compiler-invocation-cmd ( major_version : command * )
+{
+ if $(command)
+ {
+ return [ common.get-invocation-command intel-win : icl.exe : $(command) ] ;
+ }
+ else
+ {
+ local path = [ get-icl-path-from-environment $(major_version) ] ;
+ return [ common.get-invocation-command intel-win : icl.exe : $(path) ] ;
+ }
+}
+
+local rule is-msvc-supported ( intel-version : msvc-version )
+{
+ if ! $(msvc-version)
+ {
+ return true ;
+ }
+ else
+ {
+ if $(.iclvars-$(intel-version)-supported-vcs)
+ {
+ if [ MATCH "($(msvc-version))" : $(.iclvars-$(intel-version)-supported-vcs) ]
+ {
+ return true ;
+ }
+ }
+ else
+ {
+ return true ;
+ }
+ }
+}
+
+local rule get-msvc-version-from-vc-string ( vc-string )
+{
+ local r = [ MATCH "^vc([0-9]+(\\.[0-9]+)?)$" : $(vc-string) ] ;
+ return $(r[1]) ;
+}
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Copied from msvc.jam
+# Supported CPU architectures.
+.cpu-arch-ia32 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-intel64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-ia32_intel64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.intel-autodetect-versions = 14.0 13.0 12.0 ;
+.iclvars-12.0-supported-vcs = "10.0 9.0 8.0" ;
+.iclvars-12.1-supported-vcs = "10.0 9.0 8.0" ;
+.iclvars-13.0-supported-vcs = "11.0 10.0 9.0" ;
+.iclvars-14.0-supported-vcs = "12.0 11.0 10.0 9.0" ;
+.iclvars-15.0-supported-vcs = "12.0 11.0 10.0 9.0" ;
+.iclvars-version-alias-vc12 = vs2013 ;
+.iclvars-version-alias-vc11 = vs2012 ;
+.iclvars-version-alias-vc10 = vs2010 ;
+.iclvars-version-alias-vc9 = vs2008 ;
+.iclvars-version-alias-vc8 = vs2005 ;
+.icl-target-subdirectories = ia32 ia32_intel64 intel64 ;
+
+toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
+
+toolset.flags intel-win YLOPTION ;
+
diff --git a/tools/build/v2/tools/intel.jam b/tools/build/src/tools/intel.jam
index 67038aa287..67038aa287 100644
--- a/tools/build/v2/tools/intel.jam
+++ b/tools/build/src/tools/intel.jam
diff --git a/tools/build/v2/tools/lex.jam b/tools/build/src/tools/lex.jam
index 75d641318c..75d641318c 100644
--- a/tools/build/v2/tools/lex.jam
+++ b/tools/build/src/tools/lex.jam
diff --git a/tools/build/src/tools/libjpeg.jam b/tools/build/src/tools/libjpeg.jam
new file mode 100644
index 0000000000..309b59df61
--- /dev/null
+++ b/tools/build/src/tools/libjpeg.jam
@@ -0,0 +1,232 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the libjpeg library
+#
+# After 'using libjpeg', the following targets are available:
+#
+# /libjpeg//libjpeg -- The libjpeg library
+
+import project ;
+import ac ;
+import errors ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import errors ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = jpeglib.h ;
+
+names = jpeg ;
+
+sources = jaricom.c jcapimin.c jcapistd.c jcarith.c jccoefct.c jccolor.c
+ jcdctmgr.c jchuff.c jcinit.c jcmainct.c jcmarker.c jcmaster.c
+ jcomapi.c jcparam.c jcprepct.c jcsample.c jctrans.c jdapimin.c
+ jdapistd.c jdarith.c jdatadst.c jdatasrc.c jdcoefct.c jdcolor.c
+ jddctmgr.c jdhuff.c jdinput.c jdmainct.c jdmarker.c jdmaster.c
+ jdmerge.c jdpostct.c jdsample.c jdtrans.c jerror.c jfdctflt.c
+ jfdctfst.c jfdctint.c jidctflt.c jidctfst.c jidctint.c jquant1.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the libjpeg library.
+#
+# libjpeg can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt libjpeg::
+#
+# <search>
+# The directory containing the libjpeg binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the libjpeg headers.
+#
+# If none of these options is specified, then the environment
+# variables LIBJPEG_LIBRARY_PATH, LIBJPEG_NAME, and LIBJPEG_INCLUDE will
+# be used instead.
+#
+# Options for building libjpeg from source::
+#
+# <source>
+# The libjpeg source directory. Defaults to the environmental variable
+# LIBJPEG_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find libjpeg in the default system location
+# using libjpeg ;
+# # Build libjpeg from source
+# using libjpeg : 8c : <source>/home/steven/libjpeg-8c ;
+# # Find libjpeg in /usr/local
+# using libjpeg : 8c
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build libjpeg from source for msvc and find
+# # prebuilt binaries for gcc.
+# using libjpeg : 8c : <source>C:/Devel/src/libjpeg-8c : <toolset>msvc ;
+# using libjpeg : 8c : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The libjpeg version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the libjpeg target
+
+ : is-default ?
+ # Default configurations are only used when libjpeg
+ # has not yet been configured.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project libjpeg ;
+ }
+
+ local library-path = [ property.select <search> : $(options) ] ;
+ library-path = $(library-path:G=) ;
+ local include-path = [ property.select <include> : $(options) ] ;
+ include-path = $(include-path:G=) ;
+ local source-path = [ property.select <source> : $(options) ] ;
+ source-path = $(source-path:G=) ;
+ local library-name = [ property.select <name> : $(options) ] ;
+ library-name = $(library-name:G=) ;
+ local tag = [ property.select <tag> : $(options) ] ;
+ tag = $(tag:G=) ;
+ local build-name = [ property.select <build-name> : $(options) ] ;
+ build-name = $(build-name:G=) ;
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ local no-build-from-source ;
+    # Ignore the LIBJPEG_SOURCE environment variable if this initialization
+    # asks to search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for libjpeg:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ else
+ {
+ no-build-from-source = true ;
+ }
+ }
+
+    source-path ?= [ modules.peek : LIBJPEG_SOURCE ] ;
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [libjpeg] libjpeg is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "libjpeg is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path) && ! $(no-build-from-source)
+ {
+ build-name ?= jpeg ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [libjpeg] Building libjpeg from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [libjpeg] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [libjpeg] found libjpeg source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [libjpeg] could not find libjpeg source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<define>ZLIB_DLL
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library libjpeg : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [libjpeg] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [libjpeg] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library libjpeg : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/tools/build/src/tools/libpng.jam b/tools/build/src/tools/libpng.jam
new file mode 100644
index 0000000000..46e3cc9ae3
--- /dev/null
+++ b/tools/build/src/tools/libpng.jam
@@ -0,0 +1,226 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the libpng library
+#
+# After 'using libpng', the following targets are available:
+#
+# /libpng//libpng -- The libpng library
+
+import project ;
+import ac ;
+import errors ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import errors ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = png.h ;
+names = png ;
+
+sources = png.c pngerror.c pngget.c pngmem.c pngpread.c pngread.c pngrio.c pngrtran.c pngrutil.c
+ pngset.c pngtrans.c pngwio.c pngwrite.c pngwtran.c pngwutil.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the libpng library.
+#
+# libpng can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt libpng::
+#
+# <search>
+# The directory containing the libpng binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the libpng headers.
+#
+# If none of these options is specified, then the environmental
+# variables LIBPNG_LIBRARY_PATH, LIBPNG_NAME, and LIBPNG_INCLUDE will
+# be used instead.
+#
+# Options for building libpng from source::
+#
+# <source>
+# The libpng source directory. Defaults to the environmental variable
+# LIBPNG_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find libpng in the default system location
+# using libpng ;
+# # Build libpng from source
+# using libpng : 1.5.4 : <source>/home/steven/libpng-1.5.4 ;
+# # Find libpng in /usr/local
+# using libpng : 1.5.4
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build libpng from source for msvc and find
+# # prebuilt binaries for gcc.
+# using libpng : 1.5.4 : <source>C:/Devel/src/libpng-1.5.4 : <toolset>msvc ;
+# using libpng : 1.5.4 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The libpng version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the libpng target
+
+ : is-default ?
+ # Default configurations are only used when libpng
+ # has not yet been configured.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project libpng ;
+ }
+
+ local library-path = [ property.select <search> : $(options) ] ;
+ library-path = $(library-path:G=) ;
+ local include-path = [ property.select <include> : $(options) ] ;
+ include-path = $(include-path:G=) ;
+ local source-path = [ property.select <source> : $(options) ] ;
+ source-path = $(source-path:G=) ;
+ local library-name = [ property.select <name> : $(options) ] ;
+ library-name = $(library-name:G=) ;
+ local tag = [ property.select <tag> : $(options) ] ;
+ tag = $(tag:G=) ;
+ local build-name = [ property.select <build-name> : $(options) ] ;
+ build-name = $(build-name:G=) ;
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ local no-build-from-source ;
+    # Ignore the environmental LIBPNG_SOURCE if this initialization
+    # requests a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for libpng:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ else
+ {
+ no-build-from-source = true ;
+ }
+ }
+
+ source-path ?= [ modules.peek : LIBPNG_SOURCE ] ;
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [libpng] libpng is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "libpng is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path) && ! $(no-build-from-source)
+ {
+ build-name ?= png ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [libpng] Building libpng from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [libpng] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [libpng] found libpng source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [libpng] could not find libpng source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<define>LIBPNG_DLL
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library libpng : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [libpng] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [libpng] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library libpng : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/tools/build/src/tools/libtiff.jam b/tools/build/src/tools/libtiff.jam
new file mode 100644
index 0000000000..cbd8ad0151
--- /dev/null
+++ b/tools/build/src/tools/libtiff.jam
@@ -0,0 +1,229 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the libtiff library
+#
+# After 'using libtiff', the following targets are available:
+#
+# /libtiff//libtiff -- The libtiff library
+
+import project ;
+import ac ;
+import errors ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import errors ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = tiff.h ;
+names = tiff ;
+
+sources = tif_aux.c tif_close.c tif_codec.c tif_color.c tif_compress.c tif_dir.c tif_dirinfo.c
+ tif_dirread.c tif_dirwrite.c tif_dumpmode.c tif_error.c tif_extension.c tif_fax3.c tif_fax3sm.c
+ tif_getimage.c tif_jbig.c tif_jpeg.c tif_jpeg_12.c tif_ojpeg.c tif_flush.c tif_luv.c tif_lzw.c
+ tif_next.c tif_open.c tif_packbits.c tif_pixarlog.c tif_predict.c tif_print.c tif_read.c tif_stream.cxx
+ tif_swab.c tif_strip.c tif_thunder.c tif_tile.c tif_version.c tif_warning.c tif_write.c tif_zip.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the libtiff library.
+#
+# libtiff can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt libtiff::
+#
+# <search>
+# The directory containing the libtiff binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the libtiff headers.
+#
+# If none of these options is specified, then the environmental
+# variables LIBTIFF_LIBRARY_PATH, LIBTIFF_NAME, and LIBTIFF_INCLUDE will
+# be used instead.
+#
+# Options for building libtiff from source::
+#
+# <source>
+# The libtiff source directory. Defaults to the environmental variable
+# LIBTIFF_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find libtiff in the default system location
+# using libtiff ;
+# # Build libtiff from source
+# using libtiff : 4.0.1 : <source>/home/steven/libtiff-4.0.1 ;
+# # Find libtiff in /usr/local
+# using libtiff : 4.0.1
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build libtiff from source for msvc and find
+# # prebuilt binaries for gcc.
+# using libtiff : 4.0.1 : <source>C:/Devel/src/libtiff-4.0.1 : <toolset>msvc ;
+# using libtiff : 4.0.1 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The libtiff version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the libtiff target
+
+ : is-default ?
+ # Default configurations are only used when libtiff
+ # has not yet been configured.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project libtiff ;
+ }
+
+ local library-path = [ property.select <search> : $(options) ] ;
+ library-path = $(library-path:G=) ;
+ local include-path = [ property.select <include> : $(options) ] ;
+ include-path = $(include-path:G=) ;
+ local source-path = [ property.select <source> : $(options) ] ;
+ source-path = $(source-path:G=) ;
+ local library-name = [ property.select <name> : $(options) ] ;
+ library-name = $(library-name:G=) ;
+ local tag = [ property.select <tag> : $(options) ] ;
+ tag = $(tag:G=) ;
+ local build-name = [ property.select <build-name> : $(options) ] ;
+ build-name = $(build-name:G=) ;
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ local no-build-from-source ;
+    # Ignore the environmental LIBTIFF_SOURCE if this initialization
+    # requests a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for libtiff:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ else
+ {
+ no-build-from-source = true ;
+ }
+ }
+
+    source-path ?= [ modules.peek : LIBTIFF_SOURCE ] ;
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [libtiff] libtiff is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "libtiff is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path) && ! $(no-build-from-source)
+ {
+ build-name ?= tiff ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [libtiff] Building libtiff from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [libtiff] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [libtiff] found libtiff source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [libtiff] could not find libtiff source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<define>ZLIB_DLL
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library libtiff : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [libtiff] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [libtiff] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library libtiff : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
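All three modules expose the configured library as a project-qualified target, as noted in their headers (/libjpeg//libjpeg, /libpng//libpng, /libtiff//libtiff). A minimal Jamfile sketch that consumes them; the executable and source names are placeholders:

    exe image_tool
        : image_tool.cpp
          /libjpeg//libjpeg
          /libpng//libpng
          /libtiff//libtiff
        ;

Whether this resolves to a pre-installed binary or to a from-source build is decided entirely by the corresponding 'using' statement; the consuming Jamfile stays the same.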
diff --git a/tools/build/src/tools/link.jam b/tools/build/src/tools/link.jam
new file mode 100644
index 0000000000..4a7034b8ac
--- /dev/null
+++ b/tools/build/src/tools/link.jam
@@ -0,0 +1,500 @@
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import os ;
+import targets ;
+import project ;
+import "class" : new ;
+import virtual-target ;
+import configure ;
+import path ;
+import property ;
+import property-set ;
+import common ;
+
+rule get-root-project ( project )
+{
+ # Find the root project.
+ local root-project = $(project) ;
+ root-project = [ $(root-project).project-module ] ;
+ while
+ [ project.attribute $(root-project) parent-module ] &&
+ [ project.attribute $(root-project) parent-module ] != user-config &&
+ [ project.attribute $(root-project) parent-module ] != project-config
+ {
+ root-project = [ project.attribute $(root-project) parent-module ] ;
+ }
+ return $(root-project) ;
+}
+
+TOUCH = [ common.file-touch-command ] ;
+
+actions touch {
+ $(TOUCH) "$(<)"
+}
+
+rule can-symlink ( project : ps )
+{
+ if ! $(.can-symlink)
+ {
+ local root-project = [ get-root-project $(project) ] ;
+
+ local source-target = [ new file-target test-symlink-source : :
+ $(project) : [ new action : link.touch ] ] ;
+ local target = [ new file-target test-symlink : :
+ $(project) : [ new action $(source-target) : link.mklink ] ] ;
+
+ if [ configure.try-build $(target) : $(ps) : "symlinks supported" ]
+ {
+ .can-symlink = true ;
+ }
+ else
+ {
+ .can-symlink = false ;
+ }
+ }
+ if $(.can-symlink) = true
+ {
+ return true ;
+ }
+}
+
+if [ os.name ] = NT
+{
+
+# Test for Windows junctions (mklink /J)
+rule can-junction ( project : ps )
+{
+ if ! $(.can-junction)
+ {
+ local root-project = [ get-root-project $(project) ] ;
+
+ local source-target = [ new file-target test-junction-source : :
+ $(project) : [ new action : common.mkdir ] ] ;
+ local target = [ new file-target test-junction : :
+ $(project) : [ new action $(source-target) : link.junction ] ] ;
+
+ if [ configure.try-build $(target) : $(ps) : "junctions supported" ]
+ {
+ .can-junction = true ;
+ }
+ else
+ {
+ .can-junction = false ;
+ }
+ }
+ if $(.can-junction) = true
+ {
+ return true ;
+ }
+}
+
+}
+else
+{
+
+rule can-junction ( project : ps )
+{
+}
+
+}
+
+rule can-hardlink ( project : ps )
+{
+ if ! $(.can-hardlink)
+ {
+ local root-project = [ get-root-project $(project) ] ;
+
+ local source-target = [ new file-target test-hardlink-source : :
+ $(project) : [ new action : link.touch ] ] ;
+ # Use <location-prefix> so that the destination link is created
+ # in a different directory. AFS refuses to make hard links
+ # between files in different directories, so we want to check
+        # for that case.
+ local target = [ new file-target test-hardlink : :
+ $(project) : [ new action $(source-target) : link.hardlink
+ : [ new property-set <location-prefix>symlink ]
+ ] ] ;
+
+ if [ configure.try-build $(target) : $(ps) : "hardlinks supported" ]
+ {
+ .can-hardlink = true ;
+ }
+ else
+ {
+ .can-hardlink = false ;
+ }
+ }
+ if $(.can-hardlink) = true
+ {
+ return true ;
+ }
+}
+
+class file-or-directory-reference : basic-target
+{
+ import virtual-target ;
+ import property-set ;
+ import path ;
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
+ [ location ] : $(self.project) ] ;
+ }
+
+ # Returns true if the referred file really exists.
+ rule exists ( )
+ {
+ location ;
+ return $(self.file-path) ;
+ }
+
+ # Returns the location of target. Needed by 'testing.jam'.
+ rule location ( )
+ {
+ if ! $(self.file-location)
+ {
+ local source-location = [ $(self.project).get source-location ] ;
+ for local src-dir in $(source-location)
+ {
+ if ! $(self.file-location)
+ {
+ local location = [ path.root $(self.name) $(src-dir) ] ;
+ if [ path.exists [ path.native $(location) ] ]
+ {
+ self.file-location = $(src-dir) ;
+ self.file-path = $(location) ;
+ }
+ }
+ }
+ }
+ return $(self.file-location) ;
+ }
+}
+
+class symlink-target-class : basic-target
+{
+ import path ;
+ import virtual-target ;
+ import link ;
+ import os ;
+ import type ;
+ rule construct ( name : source-target : property-set )
+ {
+ local location = [ path.join
+ [ $(source-target).path ] [ $(source-target).name ] ] ;
+ local files = [ path.glob-tree $(location) : * ] ;
+ local targets ;
+
+ # If we have symlinks, don't bother checking
+ # for hardlinks and junctions.
+ if ! [ link.can-symlink $(self.project) : $(property-set) ]
+ {
+ link.can-junction $(self.project) : $(property-set) ;
+ link.can-hardlink $(self.project) : $(property-set) ;
+ }
+
+ if [ $(property-set).get <location> ]
+ {
+ property-set = [ property-set.create
+ [ property.select <location> : [ $(property-set).raw ] ] ] ;
+ }
+ else
+ {
+ local path,relative-to-build-dir = [ $(property-set).target-path ] ;
+ local path = $(path,relative-to-build-dir[1]) ;
+ local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
+
+ if $(relative-to-build-dir)
+ {
+ path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
+ }
+
+ property-set = [ property-set.create <location>$(path) ] ;
+ }
+
+ local a = [ new non-scanning-action $(source-target) :
+ link.do-link-recursively : $(property-set) ] ;
+
+ local t = [ new notfile-target $(name)
+ : $(self.project) : $(a) ] ;
+
+ return [ property-set.empty ] [ virtual-target.register $(t) ] ;
+ }
+}
+
+rule do-file-link
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ local source = [ path.native [ path.relative-to [ path.pwd ] $(>) ] ] ;
+ LOCATE on $(target) = . ;
+ DEPENDS $(.current-target) : $(target) ;
+ if $(.can-symlink) = true
+ {
+ link.mklink $(target) : $(source) ;
+ }
+ else if $(.can-hardlink) = true
+ {
+ DEPENDS $(target) : $(source) ;
+ link.hardlink $(target) : $(source) ;
+ }
+ else
+ {
+ DEPENDS $(target) : $(source) ;
+ common.copy $(target) : $(source) ;
+ }
+}
+
+rule do-link
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ local source = [ path.native [ path.relative-to [ path.pwd ] $(>) ] ] ;
+ local relative = [ path.native [ path.relative-to [ path.parent $(<) ] $(>) ] ] ;
+ if ! [ on $(target) return $(MKLINK_OR_DIR) ]
+ {
+ LOCATE on $(target) = . ;
+ DEPENDS $(.current-target) : $(target) ;
+ mklink-or-dir $(target) : $(source) ;
+ }
+ if [ os.name ] = NT
+ {
+ if $(.can-symlink) = true
+ {
+ MKLINK_OR_DIR on $(target) = mklink /D \"$(target)\" \"$(relative)\" ;
+ }
+ else
+ {
+ # This function should only be called
+ # if either symlinks or junctions are supported.
+ # To get here $(.can-junction) must be true.
+ mklink-opt = /J ;
+ MKLINK_OR_DIR on $(target) = mklink /J \"$(target)\" \"$(source)\" ;
+ }
+ }
+ else
+ {
+ MKLINK_OR_DIR on $(target) = ln -s $(relative) $(target) ;
+ }
+}
+
+rule do-split
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ if ! [ on $(target) return $(MKLINK_OR_DIR) ]
+ {
+ LOCATE on $(target) = . ;
+ DEPENDS $(.current-target) : $(target) ;
+ common.mkdir $(target) ;
+ }
+ MKLINK_OR_DIR on $(target) = mkdir \"$(target)\" ;
+}
+
+rule do-rm
+{
+ local target = [ path.native [ path.relative-to [ path.pwd ] $(<) ] ] ;
+ ALWAYS $(target) ;
+ RM on $(target) = rmdir ;
+ link.rm $(target) ;
+}
+
+rule mklink-or-dir
+{
+ NOUPDATE $(<) ;
+}
+
+actions mklink-or-dir
+{
+ $(MKLINK_OR_DIR)
+}
+
+rule link-entries ( target : files * : split ? )
+{
+ for local s in $(files)
+ {
+ local t = [ path.join $(target) [ path.basename $(s) ] ] ;
+ if ! $(.known-dirs.$(t))
+ {
+ local t = [ path.native [ path.relative-to [ path.pwd ] $(t) ] ] ;
+ local s = [ path.native [ path.relative-to [ path.pwd ] $(target) ] ] ;
+ LOCATE on $(t) = . ;
+ DEPENDS $(t) : $(s) ;
+ NOUPDATE $(s) ;
+ }
+ if $(split)
+ {
+ link-recursively $(t) : $(s) ;
+ }
+ else
+ {
+ link-entries $(t) : [ path.glob $(s) : * ] ;
+ }
+ }
+ if ! $(.known-dirs.$(target))
+ {
+ .known-dirs.$(target) += $(files) ;
+ .known-dirs.base.$(target) = $(.current-target) ;
+ }
+}
+
+rule link-recursively ( target : source : no-recurse ? )
+{
+ local split ;
+ if [ CHECK_IF_FILE [ path.native $(source) ] ]
+ {
+ do-file-link $(target) : $(source) ;
+ }
+ else if $(.known-dirs.$(target)) && ! $(no-recurse)
+ {
+ split = true ;
+ if ! $(.split-dirs.$(target))
+ {
+ local .current-target = $(.known-dirs.base.$(target)) ;
+ for local s in $(.known-dirs.$(target))
+ {
+ local t = [ path.join $(target) [ path.basename $(s) ] ] ;
+ link-recursively $(t) : $(s) : flat ;
+ }
+ if [ READLINK [ path.native $(target) ] ]
+ {
+ do-rm $(target) ;
+ }
+ do-split $(target) ;
+ .split-dirs.$(target) = true ;
+ }
+ }
+ else if [ path.exists [ path.native $(target) ] ]
+ {
+ local link-target = [ READLINK [ path.native $(target) ] ] ;
+ if $(link-target)
+ {
+ local full-path =
+ [ path.root [ path.make $(link-target) ] [ path.parent $(target) ] ] ;
+ if $(full-path) != $(source)
+ {
+ do-rm $(target) ;
+ do-split $(target) ;
+ split = true ;
+ }
+ }
+ else
+ {
+ do-split $(target) ;
+ split = true ;
+ }
+ }
+ else if $(.can-symlink) = false && $(.can-junction) = false
+ {
+ if [ READLINK [ path.native $(target) ] ]
+ {
+ do-rm $(target) ;
+ }
+ do-split $(target) ;
+ split = true ;
+ }
+ else
+ {
+ do-link $(target) : $(source) ;
+ }
+
+ if ! $(no-recurse)
+ {
+ link-entries $(target) : [ path.glob $(source) : * ] : $(split) ;
+ }
+}
+
+rule do-link-recursively ( target : source : properties * )
+{
+ local target-path = [ property.select <location> : $(properties) ] ;
+ local source-path = [ on $(source) return $(LOCATE) ] [ on $(source) return $(SEARCH) ] ;
+
+ local absolute-target = [ path.root
+ [ path.join [ path.make $(target-path[1]:G=) ]
+ [ path.basename [ path.make $(source:G=) ] ] ]
+ [ path.pwd ] ] ;
+
+ local absolute-source = [ path.root
+ [ path.root [ path.make $(source:G=) ]
+ [ path.make $(source-path[1]) ] ]
+ [ path.pwd ] ] ;
+
+ local .current-target = $(target) ;
+
+ link-recursively $(absolute-target) : $(absolute-source) ;
+}
+
+rule mklink
+{
+ local target-path = [ on $(<) return $(LOCATE) ] [ on $(<) return $(SEARCH) ] . ;
+ local source-path = [ on $(>) return $(LOCATE) ] [ on $(>) return $(SEARCH) ] . ;
+ local relative-path = [ path.relative-to
+ [ path.parent [ path.join [ path.root [ path.make $(target-path[1]) ] [ path.pwd ] ] [ path.make $(<:G=) ] ] ]
+ [ path.join [ path.root [ path.make $(source-path[1]) ] [ path.pwd ] ] [ path.make $(>:G=) ] ] ] ;
+
+ PATH_TO_SOURCE on $(<) = [ path.native $(relative-path) ] ;
+ NOUPDATE $(<) ;
+}
+
+if [ os.name ] = NT
+{
+
+actions junction
+{
+ if exist "$(<)" del "$(<)"
+ mklink /J "$(<)" "$(>)"
+}
+
+actions mklink
+{
+ if exist "$(<)" del "$(<)"
+ mklink "$(<)" "$(PATH_TO_SOURCE)"
+}
+
+actions hardlink
+{
+ if exist "$(<)" del "$(<)"
+ mklink /H "$(<)" "$(>)"
+}
+
+actions rm
+{
+ rmdir "$(<)"
+}
+
+}
+else
+{
+
+actions mklink
+{
+ ln -f -s "$(PATH_TO_SOURCE)" "$(<)"
+}
+
+actions hardlink
+{
+ ln -f "$(>)" "$(<)"
+}
+
+actions rm
+{
+ rm "$(<)"
+}
+
+}
+
+rule link-directory ( name : sources : requirements * : default-build * : usage-requirements * )
+{
+ local project = [ project.current ] ;
+ sources = [ new file-or-directory-reference $(sources) : $(project) ] ;
+ targets.main-target-alternative $(sources) ;
+ return [ targets.main-target-alternative
+ [ new symlink-target-class $(name) : $(project)
+ : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements $(usage-requirements) :
+ $(project) ] ] ] ;
+}
+
+IMPORT $(__name__) : link-directory : : link-directory ;
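link-directory mirrors a source directory into another location, preferring symlinks and falling back to junctions, hard links, or plain copies according to the can-symlink/can-junction/can-hardlink probes above. A hedged usage sketch; the directory names and the <location> value are placeholders:

    # Mirror a header tree into the current directory; the rule picks
    # symlink, junction, hardlink, or copy automatically.
    link-directory mylib-headers
        : libs/mylib/include/boost
        : <location>.
        ;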
diff --git a/tools/build/src/tools/make.jam b/tools/build/src/tools/make.jam
new file mode 100644
index 0000000000..40b59faf38
--- /dev/null
+++ b/tools/build/src/tools/make.jam
@@ -0,0 +1,63 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'make' main target rule.
+
+import "class" : new ;
+import project ;
+import property-set ;
+import targets ;
+
+
+class make-target-class : basic-target
+{
+ import "class" : new ;
+ import type ;
+ import virtual-target ;
+
+ rule __init__ ( name : project : sources * : requirements *
+ : default-build * : usage-requirements * )
+ {
+ basic-target.__init__ $(name) : $(project) : $(sources) :
+ $(requirements) : $(default-build) : $(usage-requirements) ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ local action-name = [ $(property-set).get <action> ] ;
+ # 'm' will always be set -- we add '@' ourselves in the 'make' rule
+ # below.
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+
+ local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
+ local t = [ new file-target $(self.name) exact : [ type.type
+ $(self.name) ] : $(self.project) : $(a) ] ;
+ return [ property-set.empty ] [ virtual-target.register $(t) ] ;
+ }
+}
+
+
+# Declares the 'make' main target.
+#
+rule make ( target-name : sources * : generating-rule + : requirements * :
+ usage-requirements * )
+{
+    # The '@' sign causes the feature.jam module to qualify the rule name with
+    # the module name of the current project, if needed.
+ local m = [ MATCH ^(@).* : $(generating-rule) ] ;
+ if ! $(m)
+ {
+ generating-rule = @$(generating-rule) ;
+ }
+ targets.create-metatarget make-target-class : [ project.current ] :
+ $(target-name) : $(sources) : $(requirements) <action>$(generating-rule)
+ : : $(usage-requirements) ;
+}
+
+
+IMPORT $(__name__) : make : : make ;
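The make rule attaches a user-supplied action to a single generated file. A minimal Jamfile sketch, assuming hypothetical file names and a Unix 'cp' in the action body (the action contents are purely illustrative):

    # Produce config.h from config.h.in via a project-local action.
    make config.h : config.h.in : @generate-config ;

    actions generate-config
    {
        cp "$(>)" "$(<)"
    }

The leading '@' in the call is optional; as the rule shows, it is added automatically so that feature.jam can qualify the action name with the current project's module.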
diff --git a/tools/build/v2/tools/make.py b/tools/build/src/tools/make.py
index 10baa1cb41..10baa1cb41 100644
--- a/tools/build/v2/tools/make.py
+++ b/tools/build/src/tools/make.py
diff --git a/tools/build/v2/tools/mc.jam b/tools/build/src/tools/mc.jam
index 578377735e..578377735e 100644
--- a/tools/build/v2/tools/mc.jam
+++ b/tools/build/src/tools/mc.jam
diff --git a/tools/build/v2/tools/mc.py b/tools/build/src/tools/mc.py
index c194acdff7..c194acdff7 100644
--- a/tools/build/v2/tools/mc.py
+++ b/tools/build/src/tools/mc.py
diff --git a/tools/build/src/tools/message.jam b/tools/build/src/tools/message.jam
new file mode 100644
index 0000000000..672b6e0bce
--- /dev/null
+++ b/tools/build/src/tools/message.jam
@@ -0,0 +1,62 @@
+# Copyright 2008 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Defines the 'message' main target rule, which prints a message the first time
+# the target is built.
+
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+
+class message-target-class : basic-target
+{
+ rule set-message ( * )
+ {
+ self.1 = $(1) ;
+ self.2 = $(2) ;
+ self.3 = $(3) ;
+ self.4 = $(4) ;
+ self.5 = $(5) ;
+ self.6 = $(6) ;
+ self.7 = $(7) ;
+ self.8 = $(8) ;
+ self.9 = $(9) ;
+ self.built = ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ if ! $(self.built)
+ {
+ for i in 1 2 3 4 5 6 7 8 9
+ {
+ if $(self.$(i))
+ {
+ ECHO $(self.$(i)) ;
+ }
+ }
+ self.built = 1 ;
+ }
+
+ return [ property-set.empty ] ;
+ }
+}
+
+
+rule message ( name : * )
+{
+ local project = [ project.current ] ;
+
+ local result = [ targets.main-target-alternative
+ [ new message-target-class $(name) : $(project)
+ : [ targets.main-target-sources : $(name) ]
+ : [ targets.main-target-requirements : $(project) ]
+ : [ targets.main-target-default-build : $(project) ]
+ : [ targets.main-target-usage-requirements : $(project) ]
+ ] ] ;
+ $(result).set-message $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ return $(result) ;
+}
+IMPORT $(__name__) : message : : message ;
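A message target produces no files; it simply echoes its arguments the first time it is built, which is useful for one-time notices. A minimal sketch with a hypothetical target name and text:

    message pch-notice
        : "note: precompiled headers are disabled for this toolset" ;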
diff --git a/tools/build/v2/tools/message.py b/tools/build/src/tools/message.py
index cc0b946ff1..cc0b946ff1 100644
--- a/tools/build/v2/tools/message.py
+++ b/tools/build/src/tools/message.py
diff --git a/tools/build/v2/tools/midl.jam b/tools/build/src/tools/midl.jam
index 0aa5dda31c..0aa5dda31c 100644
--- a/tools/build/v2/tools/midl.jam
+++ b/tools/build/src/tools/midl.jam
diff --git a/tools/build/src/tools/midl.py b/tools/build/src/tools/midl.py
new file mode 100644
index 0000000000..86c1f34b6c
--- /dev/null
+++ b/tools/build/src/tools/midl.py
@@ -0,0 +1,134 @@
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Microsoft Interface Definition Language (MIDL) related routines
+from b2.build import scanner, type
+from b2.build.toolset import flags
+from b2.build.feature import feature
+from b2.manager import get_manager
+from b2.tools import builtin, common
+from b2.util import regex, utility
+
+# Standard-library and engine modules used in MidlScanner.process below.
+import bjam
+import os
+
+def init():
+ pass
+
+type.register('IDL', ['idl'])
+
+# A type library (.tlb) is generated by the MIDL compiler and can be included
+# in the resources of an application (.rc). In order to be found by the
+# resource compiler, its target type should be derived from 'H'; otherwise
+# the property '<implicit-dependency>' will be ignored.
+type.register('MSTYPELIB', ['tlb'], 'H')
+
+# Register scanner for MIDL files
+class MidlScanner(scanner.Scanner):
+ def __init__ (self, includes=[]):
+ scanner.Scanner.__init__(self)
+ self.includes = includes
+
+ # List of quoted strings
+ re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
+
+ # 'import' and 'importlib' directives
+ self.re_import = "import" + re_strings + "[ \t]*;" ;
+ self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;" ;
+
+ # C preprocessor 'include' directive
+ self.re_include_angle = "#[ \t]*include[ \t]*<(.*)>" ;
+ self.re_include_quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
+
+    def pattern(self):
+ # Match '#include', 'import' and 'importlib' directives
+ return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)"
+
+ def process(self, target, matches, binding):
+ included_angle = regex.transform(matches, self.re_include_angle)
+ included_quoted = regex.transform(matches, self.re_include_quoted)
+ imported = regex.transform(matches, self.re_import, [1, 3])
+ imported_tlbs = regex.transform(matches, self.re_importlib, [1, 3])
+
+        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ g = bjam.call('get-target-variable', target, 'HDRGRIST')[0]
+ b = os.path.normpath(os.path.dirname(binding))
+
+        # Attach the binding of the including file to the included targets.
+        # When a target is created directly from a virtual target this
+        # extra information is unnecessary. But in other cases it allows
+        # us to distinguish between two headers of the same name included
+        # from different places.
+ g2 = g + "#" + b
+
+ g = "<" + g + ">"
+ g2 = "<" + g2 + ">"
+
+ included_angle = [ g + x for x in included_angle ]
+ included_quoted = [ g + x for x in included_quoted ]
+ imported = [ g + x for x in imported ]
+ imported_tlbs = [ g + x for x in imported_tlbs ]
+
+ all = included_angle + included_quoted + imported
+
+ bjam.call('INCLUDES', [target], all)
+ bjam.call('DEPENDS', [target], imported_tlbs)
+ bjam.call('NOCARE', all + imported_tlbs)
+        # Set the search paths on the included targets through the build engine.
+        engine = get_manager().engine()
+        engine.set_target_variable(included_angle , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+        engine.set_target_variable(included_quoted, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+        engine.set_target_variable(imported       , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+        engine.set_target_variable(imported_tlbs  , 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+
+ get_manager().scanners().propagate(type.get_scanner('CPP', PropertySet(self.includes)), included_angle + included_quoted)
+ get_manager().scanners().propagate(self, imported)
+
+scanner.register(MidlScanner, 'include')
+type.set_scanner('IDL', MidlScanner)
+
+
+# Command line options
+feature('midl-stubless-proxy', ['yes', 'no'], ['propagated'] )
+feature('midl-robust', ['yes', 'no'], ['propagated'] )
+
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>yes'], ['/Oicf' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>no' ], ['/Oic' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>yes' ], ['/robust' ])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>no' ], ['/no_robust'])
+
+# Architecture-specific options
+architecture_x86 = ['<architecture>' , '<architecture>x86']
+address_model_32 = ['<address-model>', '<address-model>32']
+address_model_64 = ['<address-model>', '<address-model>64']
+
+flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/' + m for ar in architecture_x86 for m in address_model_32 ], ['/win32'])
+flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/<address-model>64' for ar in architecture_x86], ['/x64'])
+flags('midl.compile.idl', 'MIDLFLAGS', ['<architecture>ia64/' + m for m in address_model_64], ['/ia64'])
+
+flags('midl.compile.idl', 'DEFINES', [], ['<define>'])
+flags('midl.compile.idl', 'UNDEFS', [], ['<undef>'])
+flags('midl.compile.idl', 'INCLUDES', [], ['<include>'])
+
+
+builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], [])
+
+
+# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
+# depends on the contents of the source IDL file. The touch commands appended
+# below ensure that both files are created so bjam will not try to recreate
+# them constantly.
+get_manager().engine().register_action(
+ 'midl.compile.idl',
+ '''midl /nologo @"@($(<[1]:W).rsp:E=
+"$(>:W)"
+-D$(DEFINES)
+"-I$(INCLUDES)"
+-U$(UNDEFS)
+$(MIDLFLAGS)
+/tlb "$(<[1]:W)"
+/h "$(<[2]:W)"
+/iid "$(<[3]:W)"
+/proxy "$(<[4]:W)"
+/dlldata "$(<[5]:W)")"
+{touch} "$(<[4]:W)"
+{touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
diff --git a/tools/build/v2/tools/mipspro.jam b/tools/build/src/tools/mipspro.jam
index 417eaefcf7..417eaefcf7 100644
--- a/tools/build/v2/tools/mipspro.jam
+++ b/tools/build/src/tools/mipspro.jam
diff --git a/tools/build/src/tools/mpi.jam b/tools/build/src/tools/mpi.jam
new file mode 100644
index 0000000000..a161101bc4
--- /dev/null
+++ b/tools/build/src/tools/mpi.jam
@@ -0,0 +1,600 @@
+# Support for the Message Passing Interface (MPI)
+#
+# (C) Copyright 2005, 2006 Trustees of Indiana University
+# (C) Copyright 2005 Douglas Gregor
+#
+# Distributed under the Boost Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
+#
+# Authors: Douglas Gregor
+# Andrew Lumsdaine
+#
+# ==== MPI Configuration ====
+#
+# For many users, MPI support can be enabled simply by adding the following
+# line to your user-config.jam file:
+#
+# using mpi ;
+#
+# This should auto-detect MPI settings based on the MPI wrapper compiler in
+# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
+# has a different name, you can pass the name of the wrapper compiler as the
+# first argument to the mpi module:
+#
+# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
+#
+# If your MPI implementation does not have a wrapper compiler, or the MPI
+# auto-detection code does not work with your MPI's wrapper compiler,
+# you can pass MPI-related options explicitly via the second parameter to the
+# mpi module:
+#
+# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
+# <find-shared-library>mpi <find-shared-library>lam
+# <find-shared-library>dl ;
+#
+# To see the results of MPI auto-detection, pass "--debug-configuration" on
+# the bjam command line.
+#
+# The (optional) third argument configures Boost.MPI for running
+# regression tests. These parameters specify the executable used to
+# launch jobs (default: "mpirun") followed by any necessary arguments,
+# ending with the option that tells the launcher to expect the number
+# of processes to follow (default: "-np"). With the default parameters,
+# for instance, the test harness will execute, e.g.,
+#
+# mpirun -np 4 all_gather_test
+#
+# ==== Linking Against the MPI Libraries ===
+#
+# To link against the MPI libraries, import the "mpi" module and add the
+# following requirement to your target:
+#
+# <library>/mpi//mpi
+#
+# Since MPI support is not always available, you should check
+# "mpi.configured" before trying to link against the MPI libraries.
+
+import "class" : new ;
+import common ;
+import feature : feature ;
+import generators ;
+import os ;
+import project ;
+import property ;
+import testing ;
+import toolset ;
+import type ;
+import path ;
+
+# Make this module a project
+project.initialize $(__name__) ;
+project mpi ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Assuming the first part of the command line is the given prefix
+# followed by some non-empty value, remove the first argument. Returns
+# either nothing (if there was no prefix or no value) or a pair
+#
+# <name>value rest-of-cmdline
+#
+# This is a subroutine of cmdline_to_features
+rule add_feature ( prefix name cmdline )
+{
+ local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+
+ # If there was no value associated with the prefix, abort
+ if ! $(match) {
+ return ;
+ }
+
+ local value = $(match[1]) ;
+
+ if [ MATCH " +" : $(value) ] {
+ value = "\"$(value)\"" ;
+ }
+
+ return "<$(name)>$(value)" $(match[2]) ;
+}
+
+# Strip any end-of-line characters off the given string and return the
+# result.
+rule strip-eol ( string )
+{
+ local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
+
+ if $(match)
+ {
+ return $(match[1]) ;
+ }
+ else
+ {
+ return $(string) ;
+ }
+}
+
+# Split a command-line into a set of features. Certain kinds of
+# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
+# with their Boost.Build equivalents (e.g., <include>, <define>,
+# <library-path>, <find-library>). All other arguments are introduced
+# using the features in the unknown-features parameter, because we
+# don't know how to deal with them. The incoming command line should be
+# a string starting with an executable (e.g., "g++ -I/include/path") and
+# may contain any number of command-line arguments thereafter. The
+# result is a list of features corresponding to the given command line,
+# ignoring the executable.
+rule cmdline_to_features ( cmdline : unknown-features ? )
+{
+ local executable ;
+ local features ;
+ local otherflags ;
+ local result ;
+
+ unknown-features ?= <cxxflags> <linkflags> ;
+
+ # Pull the executable out of the command line. At this point, the
+ # executable is just thrown away.
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ executable = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # List the prefix/feature pairs that we will be able to transform.
+ # Every kind of parameter not mentioned here will be placed in both
+ # cxxflags and linkflags, because we don't know where they should go.
+ local feature_kinds-D = "define" ;
+ local feature_kinds-I = "include" ;
+ local feature_kinds-L = "library-path" ;
+ local feature_kinds-l = "find-shared-library" ;
+
+ while $(cmdline) {
+
+ # Check for one of the feature prefixes we know about. If we
+ # find one (and the associated value is nonempty), convert it
+ # into a feature.
+ local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
+ local matched ;
+ if $(match) && $(match[2]) {
+ local prefix = $(match[1]) ;
+ if $(feature_kinds$(prefix)) {
+ local name = $(feature_kinds$(prefix)) ;
+ local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
+
+ if $(add) {
+
+ if $(add[1]) = <find-shared-library>pthread
+ {
+ # Uhm. It's not really nice that this MPI implementation
+ # uses -lpthread as opposed to -pthread. We do want to
+ # set <threading>multi, instead of -lpthread.
+ result += "<threading>multi" ;
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else
+ {
+ result += $(add[1]) ;
+ }
+
+ cmdline = $(add[2]) ;
+ matched = yes ;
+ }
+ }
+ }
+
+ # If we haven't matched a feature prefix, just grab the command-line
+ # argument itself. If we can map this argument to a feature
+ # (e.g., -pthread -> <threading>multi), then do so; otherwise,
+        # add it to the list of "other" flags that we don't
+ # understand.
+ if ! $(matched) {
+ match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
+ local value = $(match[1]) ;
+ cmdline = $(match[2]) ;
+
+ # Check for multithreading support
+ if $(value) = "-pthread" || $(value) = "-pthreads"
+ {
+ result += "<threading>multi" ;
+
+ # DPG: This is a hack intended to work around a BBv2 bug where
+ # requirements propagated from libraries are not checked for
+ # conflicts when BBv2 determines which "common" properties to
+ # apply to a target. In our case, the <threading>single property
+ # gets propagated from the common properties to Boost.MPI
+ # targets, even though <threading>multi is in the usage
+ # requirements of <library>/mpi//mpi.
+ MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
+ }
+ else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
+ otherflags += $(value) ;
+ }
+ }
+ }
+
+ # If there are other flags that we don't understand, add them to the
+ # result as both <cxxflags> and <linkflags>
+ if $(otherflags) {
+ for unknown in $(unknown-features)
+ {
+ result += "$(unknown)$(otherflags:J= )" ;
+ }
+ }
+
+ return $(result) ;
+}
+
+# Determine if it is safe to execute the given shell command by trying
+# to execute it and determining whether the exit code is zero or
+# not. Returns true for an exit code of zero, false otherwise.
+local rule safe-shell-command ( cmdline )
+{
+ local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
+ return [ MATCH ".*(SSCOK).*" : $(result) ] ;
+}
+
+# Initialize the MPI module.
+rule init ( mpicxx ? : options * : mpirun-with-options * )
+{
+ if ! $(options) && $(.debug-configuration)
+ {
+ ECHO "===============MPI Auto-configuration===============" ;
+ }
+
+ if ! $(mpicxx) && [ os.on-windows ]
+ {
+ # Try to auto-configure to the Microsoft Compute Cluster Pack
+ local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
+ local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
+ if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
+ }
+
+ # Pick up either the 32-bit or 64-bit library, depending on which address
+ # model the user has selected. Default to 32-bit.
+ options = <include>$(cluster_pack_path)/Include
+ <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
+ <library-path>$(cluster_pack_path)/Lib/i386
+ <find-static-library>msmpi
+ <toolset>msvc:<define>_SECURE_SCL=0
+ ;
+
+ # Setup the "mpirun" equivalent (mpiexec)
+ .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
+ .mpirun_flags = -n ;
+ }
+ else if $(.debug-configuration)
+ {
+ ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
+ }
+ }
+
+ if ! $(options)
+ {
+ # Try to auto-detect options based on the wrapper compiler
+ local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpiCC", which is used by MPICH
+ command = [ common.get-invocation-command mpi : mpiCC ] ;
+ }
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "mpicxx", which is used by OpenMPI and MPICH2
+ command = [ common.get-invocation-command mpi : mpicxx ] ;
+ }
+
+ if ! $(mpicxx) && ! $(command)
+ {
+ # Try "CC", which is used by Cray
+ command = [ common.get-invocation-command mpi : CC ] ;
+ }
+
+ local result ;
+ local compile_flags ;
+ local link_flags ;
+
+ if ! $(command)
+ {
+ # Do nothing: we'll complain later
+ }
+ # OpenMPI and newer versions of LAM-MPI have -showme:compile and
+ # -showme:link.
+ else if [ safe-shell-command "$(command) -showme:compile" ] &&
+ [ safe-shell-command "$(command) -showme:link" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ SHELL "$(command) -showme:compile" ] ;
+ link_flags = [ SHELL "$(command) -showme:link" ] ;
+
+ # Prepend COMPILER as the executable name, to match the format of
+ # other compilation commands.
+ compile_flags = "COMPILER $(compile_flags) -DOMPI_SKIP_MPICXX " ;
+ link_flags = "COMPILER $(link_flags)" ;
+ }
+ # Look for LAM-MPI's -showme
+ else if [ safe-shell-command "$(command) -showme" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
+ }
+
+ result = [ SHELL "$(command) -showme" ] ;
+ }
+ # Look for MPICH
+ else if [ safe-shell-command "$(command) -show" ]
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "Found MPICH wrapper compiler: $(command)" ;
+ }
+ compile_flags = [ SHELL "$(command) -compile_info" ] ;
+ link_flags = [ SHELL "$(command) -link_info" ] ;
+ }
+ # Sun HPC and Ibm POE
+ else if [ SHELL "$(command) -v 2>/dev/null" ]
+ {
+ compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
+
+ local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
+ if $(back)
+ {
+ # Sun HPC
+ if $(.debug-configuration)
+ {
+ ECHO "Found Sun MPI wrapper compiler: $(command)" ;
+ }
+
+ compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
+ compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
+ link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
+ link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
+
+ # strip out -v from compile options
+ local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
+ back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
+ link_flags = "$(front) $(back)" ;
+ }
+ else
+ {
+              # IBM POE
+ if $(.debug-configuration)
+ {
+ ECHO "Found IBM MPI wrapper compiler: $(command)" ;
+ }
+
+ #
+ compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
+ compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
+ local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
+ local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
+ back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
+ compile_flags = "$(front) $(back)" ;
+ link_flags = $(compile_flags) ;
+
+ # get location of mpif.h from mpxlf
+ local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
+ f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
+ front = [ MATCH "(.*)-v" : $(f_flags) ] ;
+ back = [ MATCH "-v(.*)" : $(f_flags) ] ;
+ f_flags = "$(front) $(back)" ;
+ f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
+ f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ compile_flags = "$(compile_flags) $(f_flags)" ;
+ }
+ }
+ # Cray
+ else if [ safe-shell-command "$(command) -v" ]
+ {
+ compile_flags = [ safe-shell-command "$(command) -###" ] ;
+ link_flags = [ safe-shell-command "$(command) -###" ] ;
+ # ECHO "Noel: compile_flags: $(compile_flags)" ;
+ # ECHO "Noel: link_flags: $(link_flags)" ;
+ result = " " ;
+ }
+
+    # If we got usable output from the wrapper compiler, turn it into features.
+
+ if $(result) || $(compile_flags) && $(link_flags)
+ {
+ if $(result)
+ {
+ result = [ strip-eol $(result) ] ;
+ options = [ cmdline_to_features $(result) ] ;
+ }
+ else
+ {
+ compile_flags = [ strip-eol $(compile_flags) ] ;
+ link_flags = [ strip-eol $(link_flags) ] ;
+
+ # Separately process compilation and link features, then combine
+ # them at the end.
+ local compile_features = [ cmdline_to_features $(compile_flags)
+ : "<cxxflags>" ] ;
+ local link_features = [ cmdline_to_features $(link_flags)
+ : "<linkflags>" ] ;
+ options = $(compile_features) $(link_features) ;
+ }
+
+ # If requested, display MPI configuration information.
+ if $(.debug-configuration)
+ {
+ if $(result)
+ {
+ ECHO " Wrapper compiler command line: $(result)" ;
+ }
+ else
+ {
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(compile_flags) ] ;
+ ECHO "MPI compilation flags: $(match[2])" ;
+ local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
+ : $(link_flags) ] ;
+ ECHO "MPI link flags: $(match[2])" ;
+ }
+ }
+ }
+ else
+ {
+ if $(command)
+ {
+ ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
+ ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
+ }
+ else if $(mpicxx)
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
+ }
+ else
+ {
+ ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
+ }
+ ECHO "You will need to manually configure MPI support." ;
+ }
+
+ }
+
+ # Find mpirun (or its equivalent) and its flags
+ if ! $(.mpirun)
+ {
+ .mpirun =
+ [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
+ .mpirun_flags = $(mpirun-with-options[2-]) ;
+ .mpirun_flags ?= -np ;
+ }
+
+ if $(.debug-configuration)
+ {
+ if $(options)
+ {
+ echo "MPI build features: " ;
+ ECHO $(options) ;
+ }
+
+ if $(.mpirun)
+ {
+ echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
+ }
+
+ ECHO "====================================================" ;
+ }
+
+ if $(options)
+ {
+ .configured = true ;
+
+ # Set up the "mpi" alias
+ alias mpi : : : : $(options) ;
+ }
+}
+
+# States whether MPI has been configured
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+# Returns the "extra" requirements needed to build MPI. These requirements are
+# part of the /mpi//mpi library target, but they need to be added to anything
+# that uses MPI directly to work around bugs in BBv2's propagation of
+# requirements.
+rule extra-requirements ( )
+{
+ return $(MPI_EXTRA_REQUIREMENTS) ;
+}
+
+# Support for testing; borrowed from Python
+type.register RUN_MPI_OUTPUT ;
+type.register RUN_MPI : : TEST ;
+
+class mpi-test-generator : generator
+{
+ import property-set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ # Generate an executable from the sources. This is the executable we will run.
+ local executable =
+ [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
+
+ result =
+ [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
+ }
+}
+
+# Use mpi-test-generator to generate MPI tests from sources
+generators.register
+ [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_MPI_OUTPUT : RUN_MPI ;
+
+# The number of processes to spawn when executing an MPI test.
+feature mpi:processes : : free incidental ;
+
+# The flag settings on testing.capture-output do not
+# apply to mpi.capture-output at the moment.
+# Redo this explicitly.
+toolset.flags mpi.capture-output ARGS <testing.arg> ;
+rule capture-output ( target : sources * : properties * )
+{
+ # Use the standard capture-output rule to run the tests
+ testing.capture-output $(target) : $(sources[1]) : $(properties) ;
+
+ # Determine the number of processes we should run on.
+ local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
+ num_processes = $(num_processes:G=) ;
+
+ # serialize the MPI tests to avoid overloading systems
+ JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
+
+ # We launch MPI processes using the "mpirun" equivalent specified by the user.
+ LAUNCHER on $(target) =
+ [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
+}
+
+# Creates a set of test cases to be run through the MPI launcher. The name, sources,
+# and requirements are the same as for any other test generator. However, schedule is
+# a list of numbers, which indicates how many processes each test run will use. For
+# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
+# 7 processes. The name provided is just the base name: the actual tests will be
+# the name followed by a hyphen, then the number of processes.
+rule mpi-test ( name : sources * : requirements * : schedule * )
+{
+ sources ?= $(name).cpp ;
+ schedule ?= 1 2 3 4 7 8 13 17 ;
+
+ local result ;
+ for processes in $(schedule)
+ {
+ result += [ testing.make-test
+ run-mpi : $(sources) /boost/mpi//boost_mpi
+ : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
+ }
+ return $(result) ;
+}
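mpi-test therefore expands one logical test into several run-mpi cases, one per entry in the schedule. A sketch of how a Boost.MPI test Jamfile might call it; the test name and schedule are illustrative:

    # Produces all_gather_test-1, all_gather_test-2 and all_gather_test-4,
    # run with 1, 2 and 4 MPI processes respectively.
    mpi-test all_gather_test : all_gather_test.cpp : : 1 2 4 ;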
diff --git a/tools/build/v2/tools/msvc-config.jam b/tools/build/src/tools/msvc-config.jam
index 6c71e3b002..6c71e3b002 100644
--- a/tools/build/v2/tools/msvc-config.jam
+++ b/tools/build/src/tools/msvc-config.jam
diff --git a/tools/build/src/tools/msvc.jam b/tools/build/src/tools/msvc.jam
new file mode 100644
index 0000000000..7fbe0f2e11
--- /dev/null
+++ b/tools/build/src/tools/msvc.jam
@@ -0,0 +1,1625 @@
+# Copyright (c) 2003 David Abrahams
+# Copyright (c) 2005 Vladimir Prus
+# Copyright (c) 2005 Alexey Pakhunov
+# Copyright (c) 2006 Bojan Resnik
+# Copyright (c) 2006 Ilya Sokolov
+# Copyright (c) 2007 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+# Copyright (c) 2014 Microsoft Corporation
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+import "class" : new ;
+import common ;
+import feature ;
+import generators ;
+import mc ;
+import midl ;
+import os ;
+import path ;
+import pch ;
+import property ;
+import rc ;
+import set ;
+import toolset ;
+import type ;
+
+
+type.register MANIFEST : manifest ;
+feature.feature embed-manifest : on off : incidental propagated ;
+feature.feature embed-manifest-file : : free dependency ;
+
+type.register PDB : pdb ;
+
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initialize a specific toolset version configuration. As a result, the path to
+# the compiler and, possibly, the program names are set up, and will be used when
+# that version of the compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be omitted:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+# - default - this is an equivalent to an empty version.
+#
+# Depending on the supplied version, the detected configurations, and the presence
+# of 'cl.exe' in the path, different results may be achieved. The following table
+# describes the possible scenarios:
+#
+# Nothing "x.y"
+# Passed Nothing "x.y" detected, detected,
+# version detected detected cl.exe in path cl.exe in path
+#
+# default Error Use "x.y" Create "default" Use "x.y"
+# all None Use all None Use all
+# x.y - Use "x.y" - Use "x.y"
+# a.b Error Error Create "a.b" Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+#
+rule init (
+ # The msvc version being configured. When omitted the tools invoked when no
+ # explicit version is given will be configured.
+ version ?
+
+    # The command used to invoke the compiler. If not specified:
+    #     - if version is given, the default location for that version will be
+    #       searched
+    #
+    #     - if version is not given, the default locations for MSVC 9.0, 8.0,
+    #       7.1, 7.0 and 6.* will be searched
+    #
+    #     - if the compiler is not found in the default locations, PATH will
+    #       be searched.
+ : command *
+
+ # Options may include:
+ #
+ # All options shared by multiple toolset types as handled by the
+ # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>,
+ # <fflags> & <linkflags>.
+ #
+ # <assembler>
+ # <compiler>
+ # <idl-compiler>
+ # <linker>
+ # <mc-compiler>
+ # <resource-compiler>
+ # Exact tool names to be used by this msvc toolset configuration.
+ #
+ # <compiler-filter>
+ # Command through which to pipe the output of running the compiler.
+ # For example to pass the output to STLfilt.
+ #
+ # <setup>
+ # Global setup command to invoke before running any of the msvc tools.
+ # It will be passed additional option parameters depending on the actual
+ # target platform.
+ #
+ # <setup-amd64>
+ # <setup-i386>
+ # <setup-ia64>
+ # <setup-arm>
+ # <setup-phone-i386>
+ # <setup-phone-arm>
+    #      Platform-specific setup command to invoke before running any of the
+    #      msvc tools used when building a target for a specific platform, e.g.
+    #      when building a 32 or 64 bit executable.
+ #
+ # <rewrite-setup-scripts>
+    #       Whether to rewrite setup scripts. New scripts will be written to
+    #       the TEMP directory and will be used instead of the originals in
+    #       build actions.
+ # Possible values:
+ # * on - rewrite scripts, if they do not already exist (default)
+ # * always - always rewrite scripts, even if they already exist
+ # * off - use original setup scripts
+ : options *
+)
+{
+ if $(command)
+ {
+ options += <command>$(command) ;
+ }
+ configure $(version) : $(options) ;
+}
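+
+# As a purely illustrative sketch (the path and option values below are
+# hypothetical), a user-config.jam entry exercising the options documented
+# above might look like:
+#
+#   using msvc : 12.0 : : <setup>"C:/hypothetical/vcvarsall.bat"
+#       <rewrite-setup-scripts>always <compiler-filter>STLfilt ;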
+
+
+# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
+# a part of the 'options' list. See the 'init' rule comment for more detailed
+# information.
+#
+rule configure ( version ? : options * )
+{
+ switch $(version)
+ {
+ case "all" :
+ if $(options)
+ {
+ import errors ;
+ errors.error "MSVC toolset configuration: options should be"
+ "empty when '$(version)' is specified." ;
+ }
+
+ # Configure (i.e. mark as used) all registered versions.
+ local all-versions = [ $(.versions).all ] ;
+ if ! $(all-versions)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO "notice: [msvc-cfg] Asked to configure all registered"
+ "msvc toolset versions when there are none currently"
+ "registered." ;
+ }
+ }
+ else
+ {
+ for local v in $(all-versions)
+ {
+ # Note that there is no need to skip already configured
+ # versions here as this will request configure-really rule
+ # to configure the version using default options which will
+ # in turn cause it to simply do nothing in case the version
+ # has already been configured.
+ configure-really $(v) ;
+ }
+ }
+
+ case "default" :
+ configure-really : $(options) ;
+
+ case * :
+ configure-really $(version) : $(options) ;
+ }
+}
+
+
+# Sets up flag definitions dependent on the compiler version used.
+# - 'toolset' is the toolset for which flag settings are to be defined.
+#   This makes the rule reusable for other msvc-option-compatible compilers.
+# - 'version' is the version of the compiler in N.M format.
+# - 'conditions' is the property set to be used as flag conditions.
+#
+rule configure-version-specific ( toolset : version : conditions )
+{
+ toolset.push-checking-for-flags-module unchecked ;
+    # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+    # /Zc:wchar_t options that improve C++ standard conformance, but those
+    # options are off by default. If we are sure that the msvc version is at
+    # least 7.0, add those options explicitly. We can be sure either if the
+    # user specified version 7.* explicitly or if we auto-detected the version
+    # ourselves.
+ if ! [ MATCH ^(6\\.) : $(version) ]
+ {
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ;
+ toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ;
+
+ # Explicitly disable the 'function is deprecated' warning. Some msvc
+ # versions have a bug, causing them to emit the deprecation warning even
+ # with /W0.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ;
+
+ if [ MATCH ^([78]\\.) : $(version) ]
+ {
+ # 64-bit compatibility warning deprecated since 9.0, see
+ # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ;
+ }
+ }
+
+ #
+ # Processor-specific optimization.
+ #
+
+ if [ MATCH ^([67]) : $(version) ]
+ {
+ # 8.0 deprecates some of the options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ;
+
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ;
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ;
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
+ }
+ else
+ {
+ # 8.0 and above adds some more options.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ;
+
+        # 8.0 and above only have a multi-threaded static RTL.
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
+ toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
+
+ # Specify target machine type so the linker will not need to guess.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : /MACHINE:X86 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : /MACHINE:IA64 ;
+ toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-arm) : /MACHINE:ARM ;
+
+        # Make sure that a manifest will be generated even if there are no
+        # dependencies to put there.
+ toolset.flags $(toolset).link LINKFLAGS $(conditions) : /MANIFEST ;
+ }
+
+    # Starting with Visual Studio 2013 the CRT is split into desktop and app
+    # DLLs. If targeting WinRT with the 12.0 toolset, set the lib path to link
+    # against the app CRT.
+ if [ MATCH "(12)" : $(version) ]
+ {
+ local VCPath = [ path.parent [ path.make [ default-path $(version) ] ] ] ;
+ local storeLibPath = [ path.join [ path.join $(VCPath) "lib" ] "store" ] ;
+ toolset.flags $(toolset).link LINKPATH $(conditions)/<windows-api>store/$(.cpu-arch-i386) : [ path.native $(storeLibPath) ] ;
+ toolset.flags $(toolset).link LINKPATH $(conditions)/<windows-api>store/$(.cpu-arch-amd64) : [ path.native [ path.join $(storeLibPath) "amd64" ] ] ;
+ toolset.flags $(toolset).link LINKPATH $(conditions)/<windows-api>store/$(.cpu-arch-arm) : [ path.native [ path.join $(storeLibPath) "arm" ] ] ;
+ }
+
+ toolset.pop-checking-for-flags-module ;
+}
+
+# Feature for handling targeting different Windows API sets.
+feature.feature windows-api : desktop store phone : propagated composite link-incompatible ;
+feature.compose <windows-api>store : <define>WINAPI_FAMILY=WINAPI_FAMILY_APP <define>_WIN32_WINNT=0x0602
+ <linkflags>/APPCONTAINER ;
+feature.compose <windows-api>phone : <define>WINAPI_FAMILY=WINAPI_FAMILY_PHONE_APP <define>_WIN32_WINNT=0x0602
+ <linkflags>/APPCONTAINER <linkflags>/NODEFAULTLIB:ole32.lib <linkflags>/NODEFAULTLIB:kernel32.lib <linkflags>WindowsPhoneCore.lib ;
+feature.set-default windows-api : desktop ;
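+
+# Illustrative note (not part of the original interface documentation): since
+# windows-api is a propagated feature, a Windows Store build could be requested
+# on the command line roughly as:
+#
+#   b2 toolset=msvc-12.0 windows-api=store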
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+#
+rule register-toolset ( )
+{
+ if ! msvc in [ feature.values toolset ]
+ {
+ register-toolset-really ;
+ }
+}
+
+rule resolve-possible-msvc-version-alias ( version )
+{
+ if $(.version-alias-$(version))
+ {
+ version = $(.version-alias-$(version)) ;
+ }
+ return $(version) ;
+}
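+
+# For example (illustrative only): with the aliases registered near the end of
+# this module, resolving version '12' yields '12.0', while an already exact
+# value such as '12.0' is returned unchanged.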
+
+
+# Declare the action for creating static libraries. If the library already
+# exists, remove it before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for the rationale.
+if [ os.name ] in NT
+{
+    # The 'DEL' command would issue a message to stdout if the file does not
+    # exist, so we need a check.
+ actions archive
+ {
+ if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+else
+{
+ actions archive
+ {
+ $(.RM) "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+}
+
+
+# For the assembler the following options are turned on by default:
+#
+# -Zp4 align structures to 4 bytes
+# -Cp preserve case of user identifiers
+# -Cx preserve case in publics, externs
+#
+actions compile.asm
+{
+ $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
+}
+
+
+rule compile.c ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.preprocess ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets) : -TC ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c.pch ( targets + : sources * : properties * )
+{
+ C++FLAGS on $(targets[1]) = ;
+ get-rspline $(targets[1]) : -TC ;
+ get-rspline $(targets[2]) : -TC ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+toolset.flags msvc YLOPTION : "-Yl" ;
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes in this action with intel-win
+#
+# Notes regarding PDB generation, for when we use
+# <debug-symbols>on/<debug-store>database:
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring
+# that the /Fd flag is dropped if PDB_CFLAG is empty.
+#
+# 2. When compiling an executable's source files, PDB_NAME is set on a
+#    per-source file basis by rule compile-c-c++. The linker will pull these
+#    into the executable's PDB.
+#
+# 3. When compiling a library's source files, PDB_NAME is updated to
+#    <libname>.pdb for each source file by rule archive, as in this case the
+#    compiler must be used to create a single PDB for our library.
+#
+actions compile-c-c++ bind PDB_NAME
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+actions preprocess-c-c++ bind PDB_NAME
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
+}
+
+rule compile-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<[1]:S=.pdb) ;
+ LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
+}
+
+rule preprocess-c-c++ ( targets + : sources * )
+{
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
+ DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
+ PDB_NAME on $(<) = $(<:S=.pdb) ;
+ LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
+}
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+#
+# The global .escaped-double-quote variable is used to avoid messing up Emacs
+# syntax highlighting in the messy N-quoted code below.
+actions compile-c-c++-pch
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
+}
+
+
+# Action for running the C/C++ compiler using precompiled headers. An already
+# built source file for compiling the precompiled headers is expected to be
+# given as one of the source parameters.
+actions compile-c-c++-pch-s
+{
+ $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
+}
+
+
+rule compile.c++ ( targets + : sources * : properties * )
+{
+ get-rspline $(targets) : -TP ;
+ compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+rule compile.c++.preprocess ( targets + : sources * : properties * )
+{
+ get-rspline $(targets) : -TP ;
+ preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
+}
+
+
+rule compile.c++.pch ( targets + : sources * : properties * )
+{
+ get-rspline $(targets[1]) : -TP ;
+ get-rspline $(targets[2]) : -TP ;
+ local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
+ if $(pch-source)
+ {
+ DEPENDS $(<) : $(pch-source) ;
+ compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
+ }
+ else
+ {
+ compile-c-c++-pch $(targets) : $(sources) ;
+ }
+}
+
+
+# See midl.jam for details.
+#
+actions compile.idl
+{
+ $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
+ $(.TOUCH_FILE) "$(<[4]:W)"
+ $(.TOUCH_FILE) "$(<[5]:W)"
+}
+
+
+actions compile.mc
+{
+ $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
+}
+
+
+actions compile.rc
+{
+ $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
+}
+
+
+rule link ( targets + : sources * : properties * )
+{
+ if <embed-manifest>on in $(properties)
+ {
+ if [ feature.get-values <embed-manifest-file> : $(properties) ]
+ {
+ DEPENDS $(<) : [ on $(<) return $(EMBED_MANIFEST_FILE) ] ;
+ msvc.manifest.user $(targets) $(EMBED_MANIFEST_FILE) : $(sources) : $(properties) ;
+ }
+ else
+ {
+ msvc.manifest $(targets) : $(sources) : $(properties) ;
+ }
+ }
+}
+
+rule link.dll ( targets + : sources * : properties * )
+{
+ DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
+ if <embed-manifest>on in $(properties)
+ {
+ if [ feature.get-values <embed-manifest-file> : $(properties) ]
+ {
+ DEPENDS $(<) : [ on $(<) return $(EMBED_MANIFEST_FILE) ] ;
+ msvc.manifest.dll.user $(targets) $(EMBED_MANIFEST_FILE) : $(sources) : $(properties) ;
+ }
+ else
+ {
+ msvc.manifest.dll $(targets) : $(sources) : $(properties) ;
+ }
+ }
+}
+
+# Incrementally linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# Windows manifest is a new way to specify dependencies on managed DotNet
+# assemblies and Windows native DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if [ os.name ] in NT
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest
+ {
+ if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ )
+ }
+
+ actions manifest.user bind EMBED_MANIFEST_FILE
+ {
+ $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);1"
+ }
+
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
+ }
+
+ actions manifest.dll
+ {
+ if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ )
+ }
+ actions manifest.dll.user bind EMBED_MANIFEST_FILE
+ {
+ $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
+ }
+}
+else
+{
+ actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1"
+ fi
+ }
+
+ actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
+ {
+ $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+ }
+
+ actions manifest.dll
+ {
+ if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
+ fi
+ }
+
+ actions manifest.dll.user bind EMBED_MANIFEST_FILE
+ {
+ $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
+ }
+}
+
+# This rule sets up the pdb file that will be used when generating static
+# libraries and the debug-store option is database, so that the compiler puts
+# all the debug info into a single .pdb file named after the library.
+#
+# Poking at source targets this way is probably not clean, but it is the
+# easiest approach.
+#
+rule archive ( targets + : sources * : properties * )
+{
+ PDB_NAME on $(>) = $(<[1]:S=.pdb) ;
+ LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
+}
+
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class msvc-pch-generator : pch-generator
+{
+ import property-set ;
+
+ rule run-pch ( project name ? : property-set : sources * )
+ {
+ # Searching for the header and source file in the sources.
+ local pch-header ;
+ local pch-source ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] H ]
+ {
+ pch-header = $(s) ;
+ }
+ else if
+ [ type.is-derived [ $(s).type ] CPP ] ||
+ [ type.is-derived [ $(s).type ] C ]
+ {
+ pch-source = $(s) ;
+ }
+ }
+
+ if ! $(pch-header)
+ {
+ import errors : user-error : errors.user-error ;
+ errors.user-error "can not build pch without pch-header" ;
+ }
+
+ # If we do not have the PCH source - that is fine. We will just create a
+ # temporary .cpp file in the action.
+
+ local generated = [ generator.run $(project) $(name)
+ : [ property-set.create
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ <pch-source>$(pch-source)
+ [ $(property-set).raw ] ]
+ : $(pch-header) ] ;
+
+ local pch-file ;
+ for local g in $(generated)
+ {
+ if [ type.is-derived [ $(g).type ] PCH ]
+ {
+ pch-file = $(g) ;
+ }
+ }
+
+ return [ property-set.create <pch-header>$(pch-header)
+ <pch-file>$(pch-file) ] $(generated) ;
+ }
+}
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects versions listed in '.known-versions' by checking registry information,
+# environment variables & default paths. Supports both native Windows and
+# Cygwin.
+#
+local rule auto-detect-toolset-versions ( )
+{
+ if [ os.name ] in NT CYGWIN
+ {
+ # Get installation paths from the registry.
+ for local i in $(.known-versions)
+ {
+ if $(.version-$(i)-reg)
+ {
+ local vc-path ;
+ for local x in "" "Wow6432Node\\"
+ {
+ vc-path += [ W32_GETREG
+ "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
+ : "ProductDir" ] ;
+ }
+
+ if $(vc-path)
+ {
+ vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ;
+ register-configuration $(i) : [ path.native $(vc-path[1]) ] ;
+ }
+ }
+ }
+ }
+
+ # Check environment and default installation paths.
+ for local i in $(.known-versions)
+ {
+ if ! $(i) in [ $(.versions).all ]
+ {
+ register-configuration $(i) : [ default-path $(i) ] ;
+ }
+ }
+}
+
+# Helper rule to generate a faster alternative to MSVC setup scripts.
+# We used to call MSVC setup scripts directly in every action; however, in
+# newer MSVC versions (10.0+) they make long-lasting registry queries
+# that have a significant impact on build time.
+rule maybe-rewrite-setup ( toolset : setup-script : setup-options : version : rewrite-setup ? )
+{
+ local result = $(setup-script)" "$(setup-options) ;
+ # At the moment we only know how to rewrite scripts with cmd shell.
+ if ( [ os.name ] in NT ) && ( $(rewrite-setup) != off )
+ {
+ setup-script-id = b2_$(toolset)_$(version)_$(setup-script:B) ;
+ if $(setup-options)-is-not-empty
+ {
+ setup-script-id = $(setup-script-id)_$(setup-options) ;
+ }
+
+ if $(.$(setup-script-id))
+ {
+ errors.error rewriting setup script for the second time ;
+ }
+
+ local tmpdir = [ os.environ TEMP ] ;
+ local replacement = [ path.native $(tmpdir)/$(setup-script-id).cmd ] ;
+ if ( $(rewrite-setup) = always ) || ( ! [ path.exists $(replacement) ] )
+ {
+ local original-vars = [ SPLIT_BY_CHARACTERS [ SHELL set ] : "\n" ] ;
+ local new-vars = [ SPLIT_BY_CHARACTERS [ SHELL "$(setup-script) $(setup-options)>nul && set" ] : "\n" ] ;
+ local diff-vars = [ set.difference $(new-vars) : $(original-vars) ] ;
+ if $(diff-vars)
+ {
+ local target = <new-setup-script>$(replacement) ;
+ FILE_CONTENTS on $(target) = "SET "$(diff-vars) ;
+ ALWAYS $(target) ;
+ msvc.write-setup-script $(target) ;
+ UPDATE_NOW $(target) : : ignore-minus-n ;
+ .$(setup-script-id) = $(replacement) ;
+ result = "\""$(replacement)"\"" ;
+ }
+ }
+ else
+ {
+ result = "\""$(replacement)"\"" ;
+ }
+ }
+ return $(result) ;
+}
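+
+# As a rough sketch of the effect (file name and variable values below are
+# hypothetical), the rewritten script, e.g.
+# %TEMP%\b2_msvc_12.0_vcvarsall_x86.cmd, contains only the environment
+# variables that the original setup script added or changed:
+#
+#   SET INCLUDE=C:\hypothetical\VC\include;...
+#   SET LIB=C:\hypothetical\VC\lib;...
+#   SET PATH=C:\hypothetical\VC\bin;...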
+
+actions write-setup-script
+{
+ @($(STDOUT):E=$(FILE_CONTENTS:J=$(.nl))) > "$(<)"
+}
+
+
+# Local helper rule to create the vcvars setup command for given architecture
+# and options.
+#
+local rule generate-setup-cmd ( version : command : parent : options * : cpu : global-setup : default-global-setup-options : default-setup )
+{
+ local setup-prefix = "call " ;
+ local setup-suffix = " >nul"$(.nl) ;
+ if ! [ os.name ] in NT
+ {
+ setup-prefix = "cmd.exe /S /C call " ;
+ setup-suffix = " \">nul\" \"&&\" " ;
+ }
+
+ local setup-options ;
+ local setup = [ feature.get-values <setup-$(cpu)> : $(options) ] ;
+
+ if ! $(setup)-is-defined
+ {
+ if $(global-setup)-is-defined
+ {
+ setup = $(global-setup) ;
+
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ;
+ setup-options ?= $(default-global-setup-options) ;
+ }
+ else
+ {
+ setup = [ locate-default-setup $(command) : $(parent) : $(default-setup) ] ;
+ }
+ }
+
+ # Cygwin to Windows path translation.
+ setup = "\""$(setup:W)"\"" ;
+
+ # Append setup options to the setup name and add the final setup
+ # prefix & suffix.
+ setup-options ?= "" ;
+ local rewrite = [ feature.get-values <rewrite-setup-scripts> : $(options) ] ;
+ setup = [ maybe-rewrite-setup msvc : $(setup:J=" ") : $(setup-options:J=" ") : $(version) : $(rewrite) ] ;
+ setup = $(setup-prefix)$(setup)$(setup-suffix) ;
+
+ return $(setup) ;
+}
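+
+# Illustrative example only (the installation path is hypothetical): on native
+# Windows, for the i386 cpu with a global vcvarsall.bat setup script, the
+# returned command looks roughly like
+#
+#   call "C:\hypothetical\VC\vcvarsall.bat" x86 >nul
+#
+# followed by a newline, ready to be prepended to the tool command lines.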
+
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+local rule configure-really ( version ? : options * )
+{
+ local v = $(version) ;
+
+ # Decide what the 'default' version is.
+ if ! $(v)
+ {
+ # Take the first registered (i.e. auto-detected) version.
+ version = [ $(.versions).all ] ;
+ version = $(version[1]) ;
+ v = $(version) ;
+
+ # Note: 'version' can still be empty at this point if no versions have
+ # been auto-detected.
+ version ?= "default" ;
+ }
+
+ # Version alias -> real version number.
+ version = [ resolve-possible-msvc-version-alias $(version) ] ;
+
+ # Check whether the selected configuration is already in use.
+ if $(version) in [ $(.versions).used ]
+ {
+ # Allow multiple 'toolset.using' calls for the same configuration if the
+ # identical sets of options are used.
+ if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
+ {
+ import errors ;
+ errors.error "MSVC toolset configuration: Toolset version"
+ "'$(version)' already configured." ;
+ }
+ }
+ else
+ {
+ # Register a new configuration.
+ $(.versions).register $(version) ;
+
+        # Add user-supplied options to the auto-detected options.
+ options = [ $(.versions).get $(version) : options ] $(options) ;
+
+ # Mark the configuration as 'used'.
+ $(.versions).use $(version) ;
+
+ # Generate conditions and save them.
+ local conditions = [ common.check-init-parameters msvc : version $(v) ]
+ ;
+
+ $(.versions).set $(version) : conditions : $(conditions) ;
+
+ local command = [ feature.get-values <command> : $(options) ] ;
+
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
+ command = [ common.get-invocation-command msvc : cl.exe : $(command) :
+ [ default-paths $(version) ] : $(version) ] ;
+
+ common.handle-options msvc : $(conditions) : $(command) : $(options) ;
+
+ if ! $(version)
+ {
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
+ # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+ # 9.0express as 9.0 here.
+ if [ MATCH "(Microsoft Visual Studio 14)" : $(command) ]
+ {
+ version = 14.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 12)" : $(command) ]
+ {
+ version = 12.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 11)" : $(command) ]
+ {
+ version = 11.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
+ {
+ version = 10.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
+ {
+ version = 9.0 ;
+ }
+ else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
+ {
+ version = 8.0 ;
+ }
+ else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.1 ;
+ }
+ else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" :
+ $(command) ]
+ {
+ version = 7.1toolkit ;
+ }
+ else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ]
+ {
+ version = 7.0 ;
+ }
+ else
+ {
+ version = 6.0 ;
+ }
+ }
+
+ # Generate and register setup command.
+
+ local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ;
+ local below-11.0 = [ MATCH ^([6789]\\.|10\\.) : $(version) ] ;
+
+ local cpu = i386 amd64 ia64 arm ;
+ if $(below-8.0)
+ {
+ cpu = i386 ;
+ }
+ if $(below-11.0)
+ {
+ cpu = i386 amd64 ia64 ;
+ }
+
+ local setup-amd64 ;
+ local setup-i386 ;
+ local setup-ia64 ;
+ local setup-arm ;
+ local setup-phone-i386 ;
+ local setup-phone-arm ;
+
+ if $(command)
+ {
+            # TODO: Note that if we specify a non-existent toolset version then
+ # this rule may find and use a corresponding compiler executable
+ # belonging to an incorrect toolset version. For example, if you
+ # have only MSVC 7.1 installed, have its executable on the path and
+ # specify you want Boost Build to use MSVC 9.0, then you want Boost
+ # Build to report an error but this may cause it to silently use the
+ # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
+ # toolset version.
+ command = [ common.get-absolute-tool-path $(command[-1]) ] ;
+ }
+
+ if $(command)
+ {
+ local parent = [ path.make $(command) ] ;
+ parent = [ path.parent $(parent) ] ;
+ parent = [ path.native $(parent) ] ;
+
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+            # be used instead. Setup scripts may be global or
+            # architecture/platform/cpu specific. Setup options are used only
+            # in the case of global setup scripts.
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top level script but to
+ # support those we need to fall back to using the worker scripts
+ # directly in case the top level script can not be found.
+
+ local global-setup = [ feature.get-values <setup> : $(options) ] ;
+ global-setup = $(global-setup[1]) ;
+ local global-setup-phone = $(global-setup) ;
+ if ! $(below-8.0)
+ {
+ global-setup ?= [ locate-default-setup $(command) : $(parent) :
+ vcvarsall.bat ] ;
+ }
+
+ local default-setup-amd64 = vcvarsx86_amd64.bat ;
+ local default-setup-i386 = vcvars32.bat ;
+ local default-setup-ia64 = vcvarsx86_ia64.bat ;
+ local default-setup-arm = vcvarsx86_arm.bat ;
+ local default-setup-phone-i386 = vcvarsphonex86.bat ;
+ local default-setup-phone-arm = vcvarsphonex86_arm.bat ;
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, that seems to be a documentation bug
+ # and x86_ia64 is the correct option.
+ local default-global-setup-options-amd64 = x86_amd64 ;
+ local default-global-setup-options-i386 = x86 ;
+ local default-global-setup-options-ia64 = x86_ia64 ;
+ local default-global-setup-options-arm = x86_arm ;
+
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+            # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+            # 'x86' when running 32-bit Windows, no matter which processor is
+            # used, and 'AMD64' on 64-bit Windows running on x86-64 hardware
+            # (either AMD64 or EM64T).
+ #
+ if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
+ {
+ default-global-setup-options-amd64 = amd64 ;
+ }
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ else if $(somehow-detect-the-itanium-platform)
+ {
+ default-global-setup-options-ia64 = ia64 ;
+ }
+
+ for local c in $(cpu)
+ {
+ setup-$(c) = [ generate-setup-cmd $(version) : $(command) : $(parent) : $(options) : $(c) : $(global-setup) : $(default-global-setup-options-$(c)) : $(default-setup-$(c)) ] ;
+ }
+
+            # Windows Phone has different setup scripts, located in a
+            # different directory hierarchy. The 11.0 toolset can target
+            # Windows Phone 8.0 and the 12.0 toolset can target Windows Phone
+            # 8.1, each of which has a different directory for its vcvars
+            # setup scripts.
+ local phone-parent = [ path.native [ path.join $(parent) WPSDK ] ] ;
+ local phone-directory = $(phone-parent) ;
+ if [ MATCH "(11.0)" : $(version) ]
+ {
+ phone-directory = [ path.native [ path.join $(phone-directory) WP80 ] ] ;
+ }
+ else if [ MATCH "(12.0)" : $(version) ]
+ {
+ phone-directory = [ path.native [ path.join $(phone-directory) WP81 ] ] ;
+ }
+ global-setup-phone ?= [ locate-default-setup $(phone-directory) : $(phone-parent) : vcvarsphoneall.bat ] ;
+
+            # If we can't locate the default phone setup script then this VS
+            # version doesn't support Windows Phone.
+ if $(global-setup-phone)-is-defined
+ {
+ # i386 CPU is for the Windows Phone emulator in Visual Studio.
+ local phone-cpu = i386 arm ;
+ for local c in $(phone-cpu)
+ {
+ setup-phone-$(c) = [ generate-setup-cmd $(version) : $(phone-directory) : $(phone-parent) : $(options) : $(c) : $(global-setup-phone) : $(default-global-setup-options-$(c)) : $(default-setup-phone-$(c)) ] ;
+ }
+ }
+ }
+
+ # Get tool names (if any) and finish setup.
+
+ compiler = [ feature.get-values <compiler> : $(options) ] ;
+ compiler ?= cl ;
+
+ linker = [ feature.get-values <linker> : $(options) ] ;
+ linker ?= link ;
+
+ resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
+ resource-compiler ?= rc ;
+
+ # Turn on some options for i386 assembler
+ # -coff generate COFF format object file (compatible with cl.exe output)
+ local default-assembler-amd64 = ml64 ;
+ local default-assembler-i386 = "ml -coff" ;
+ local default-assembler-ia64 = ias ;
+    local default-assembler-arm = armasm ;
+
+ assembler = [ feature.get-values <assembler> : $(options) ] ;
+
+ idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
+ idl-compiler ?= midl ;
+
+ mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
+ mc-compiler ?= mc ;
+
+ manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
+ manifest-tool ?= mt ;
+
+ local cc-filter = [ feature.get-values <compiler-filter> : $(options) ]
+ ;
+
+ for local c in $(cpu)
+ {
+ # Setup script is not required in some configurations.
+ setup-$(c) ?= "" ;
+
+ local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ;
+
+ if $(.debug-configuration)
+ {
+ for local cpu-condition in $(cpu-conditions)
+ {
+ ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ;
+ }
+ }
+
+ local cpu-assembler = $(assembler) ;
+ cpu-assembler ?= $(default-assembler-$(c)) ;
+
+ toolset.flags msvc.compile .RC <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(resource-compiler) ;
+ toolset.flags msvc.compile .IDL <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(idl-compiler) ;
+ toolset.flags msvc.compile .MC <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(mc-compiler) ;
+ toolset.flags msvc.link .MT <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ;
+
+ for api in desktop store phone
+ {
+ local setup-script = $(setup-$(c)) ;
+ if $(api) = phone
+ {
+ setup-script = $(setup-phone-$(c)) ;
+ }
+ toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 -nologo ;
+ toolset.flags msvc.compile .ASM <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(cpu-assembler) -nologo ;
+ toolset.flags msvc.link .LD <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(linker) /NOLOGO /INCREMENTAL:NO ;
+ toolset.flags msvc.archive .LD <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(linker) /lib /NOLOGO ;
+ }
+
+ if $(cc-filter)
+ {
+ toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ;
+ }
+ }
+
+ # Set version-specific flags.
+ configure-version-specific msvc : $(version) : $(conditions) ;
+ }
+}
+
+
+# Returns the default installation path for the given version.
+#
+local rule default-path ( version )
+{
+ # Use auto-detected path if possible.
+ local path = [ feature.get-values <command> : [ $(.versions).get $(version)
+ : options ] ] ;
+
+ if $(path)
+ {
+ path = $(path:D) ;
+ }
+ else
+ {
+ # Check environment.
+ if $(.version-$(version)-env)
+ {
+ local vc-path = [ os.environ $(.version-$(version)-env) ] ;
+ if $(vc-path)
+ {
+ vc-path = [ path.make $(vc-path) ] ;
+ vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ;
+ vc-path = [ path.native $(vc-path) ] ;
+
+ path = $(vc-path) ;
+ }
+ }
+
+ # Check default path.
+ if ! $(path) && $(.version-$(version)-path)
+ {
+ path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ;
+ }
+ }
+
+ return $(path) ;
+}
+
+
+# Returns either the default installation path (if 'version' is not empty) or
+# list of all known default paths (if no version is given)
+#
+local rule default-paths ( version ? )
+{
+ local possible-paths ;
+
+ if $(version)
+ {
+ possible-paths += [ default-path $(version) ] ;
+ }
+ else
+ {
+ for local i in $(.known-versions)
+ {
+ possible-paths += [ default-path $(i) ] ;
+ }
+ }
+
+ return $(possible-paths) ;
+}
+
+
+rule get-rspline ( target : lang-opt )
+{
+ CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS)
+ $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES)
+ $(.nl)\"-I$(INCLUDES:W)\" ] ;
+}
+
+class msvc-linking-generator : linking-generator
+{
+    # Calls the base version and, if necessary, creates additional targets for
+    # the PDB and manifest files paired with the main target, forcing their
+    # exact names so they match the main target's name.
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local result = [ linking-generator.generated-targets $(sources)
+ : $(property-set) : $(project) $(name) ] ;
+
+ if $(result)
+ {
+ local name-main = [ $(result[0]).name ] ;
+ local action = [ $(result[0]).action ] ;
+
+ if [ $(property-set).get <debug-symbols> ] = "on"
+ {
+ # We force the exact name on PDB. The reason is tagging -- the
+ # tag rule may reasonably special case some target types, like
+ # SHARED_LIB. The tag rule will not catch PDBs, and it cannot
+ # even easily figure out if a PDB is paired with a SHARED_LIB,
+ # EXE or something else. Because PDBs always get the same name
+ # as the main target, with .pdb as extension, just force it.
+ local target = [ class.new file-target $(name-main:S=.pdb) exact
+ : PDB : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ]
+ ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+
+ if [ $(property-set).get <embed-manifest> ] = "off"
+ {
+                # Manifest is an evil target. It has .manifest appended to the
+ # name of the main target, including extension, e.g.
+ # a.exe.manifest. We use the 'exact' name to achieve this
+ # effect.
+ local target = [ class.new file-target $(name-main).manifest
+ exact : MANIFEST : $(project) : $(action) ] ;
+ local registered-target = [ virtual-target.register $(target) ]
+ ;
+ if $(target) != $(registered-target)
+ {
+ $(action).replace-targets $(target) : $(registered-target) ;
+ }
+ result += $(registered-target) ;
+ }
+ }
+ return $(result) ;
+ }
+}
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+#
+local rule register-toolset-really ( )
+{
+ feature.extend toolset : msvc ;
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature toolset msvc : vendor : intel : propagated optional ;
+
+ # Inherit MIDL flags.
+ toolset.inherit-flags msvc : midl ;
+
+ # Inherit MC flags.
+ toolset.inherit-flags msvc : mc ;
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add-requirements
+ <toolset>msvc,<runtime-link>shared:<threading>multi ;
+
+ # Declare msvc toolset specific features.
+ {
+ feature.feature debug-store : object database : propagated ;
+ feature.feature pch-source : : dependency free ;
+ }
+
+ # Declare generators.
+ {
+ # TODO: Is it possible to combine these? Make the generators
+ # non-composing so that they do not convert each source into a separate
+ # .rsp file.
+ generators.register [ new msvc-linking-generator msvc.link :
+ OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ;
+ generators.register [ new msvc-linking-generator msvc.link.dll :
+ OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB :
+ <toolset>msvc <suppress-import-lib>false ] ;
+ generators.register [ new msvc-linking-generator msvc.link.dll :
+ OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB :
+ <toolset>msvc <suppress-import-lib>true ] ;
+
+ generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ;
+ generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ;
+
+ # Using 'register-c-compiler' adds the build directory to INCLUDES.
+ generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ;
+ generators.override msvc.compile.rc : rc.compile.resource ;
+ generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ;
+
+ generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ;
+ generators.override msvc.compile.idl : midl.compile.idl ;
+
+ generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ;
+ generators.override msvc.compile.mc : mc.compile ;
+
+ # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
+ # the latter have their HPP type derived from H. The type of compilation
+ # is determined entirely by the destination type.
+ generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
+ generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
+
+ generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
+ generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
+ }
+
+ toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
+ toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
+ toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
+
+ #
+ # Declare flags for compilation.
+ #
+
+ toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ;
+ toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ;
+
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ;
+ toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ;
+
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ;
+ toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ;
+ toolset.flags msvc.compile CFLAGS <optimization>off : /Od ;
+ toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ;
+ toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ;
+ toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ;
+
+ toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
+ toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
+
+ # By default 8.0 enables rtti support while prior versions disabled it. We
+ # simply enable or disable it explicitly so we do not have to depend on this
+ # default behaviour.
+ toolset.flags msvc.compile CFLAGS <rtti>on : /GR ;
+ toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
+
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
+ toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
+
+ toolset.flags msvc.compile OPTIONS <cflags> : ;
+ toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ;
+
+ toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ;
+
+ toolset.flags msvc.compile DEFINES <define> ;
+ toolset.flags msvc.compile UNDEFS <undef> ;
+ toolset.flags msvc.compile INCLUDES <include> ;
+
+ # Declare flags for the assembler.
+ toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ;
+
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ;
+ toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ;
+
+ toolset.flags msvc.compile.asm DEFINES <define> ;
+
+ # Declare flags for linking.
+ {
+ toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
+ toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
+ toolset.flags msvc.link DEF_FILE <def-file> ;
+
+ # The linker disables the default optimizations when using /DEBUG so we
+ # have to enable them manually for release builds with debug symbols.
+ toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ;
+
+ toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ;
+ toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ;
+ toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ;
+ toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ;
+ toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ;
+
+ toolset.flags msvc.link OPTIONS <linkflags> ;
+ toolset.flags msvc.link LINKPATH <library-path> ;
+
+ toolset.flags msvc.link FINDLIBS_ST <find-static-library> ;
+ toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ;
+ toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
+ toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
+
+ toolset.flags msvc.link.dll LINKFLAGS <suppress-import-lib>true : /NOENTRY ;
+ }
+
+ toolset.flags msvc.archive AROPTIONS <archiveflags> ;
+}
+
+
+# Locates the requested setup script under the given folder and returns its full
+# path or nothing in case the script can not be found. In case multiple scripts
+# are found only the first one is returned.
+#
+# TODO: There used to exist a code comment for the msvc.init rule stating that
+# we do not correctly detect the location of the vcvars32.bat setup script for
+# the free VC7.1 tools in case the user explicitly provides a path. This should
+# be tested, or this whole comment should simply be removed if this toolset
+# version is no longer important.
+#
+local rule locate-default-setup ( command : parent : setup-name )
+{
+ local result = [ GLOB $(command) $(parent) : $(setup-name) ] ;
+ if $(result[1])
+ {
+ return $(result[1]) ;
+ }
+}
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+local rule register-configuration ( version : path ? )
+{
+ if $(path)
+ {
+ local command = [ GLOB $(path) : cl.exe ] ;
+
+ if $(command)
+ {
+ if $(.debug-configuration)
+ {
+ ECHO notice: [msvc-cfg] msvc-$(version) detected, command:
+ '$(command)' ;
+ }
+
+ $(.versions).register $(version) ;
+ $(.versions).set $(version) : options : <command>$(command) ;
+ }
+ }
+}
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+# Miscellaneous constants.
+.RM = [ common.rm-command ] ;
+.nl = "
+" ;
+.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ;
+.escaped-double-quote = "\"" ;
+.TOUCH_FILE = [ common.file-touch-command ] ;
+
+# List of all registered configurations.
+.versions = [ new configurations ] ;
+
+# Supported CPU architectures.
+.cpu-arch-i386 =
+ <architecture>/<address-model>
+ <architecture>/<address-model>32
+ <architecture>x86/<address-model>
+ <architecture>x86/<address-model>32 ;
+
+.cpu-arch-amd64 =
+ <architecture>/<address-model>64
+ <architecture>x86/<address-model>64 ;
+
+.cpu-arch-ia64 =
+ <architecture>ia64/<address-model>
+ <architecture>ia64/<address-model>64 ;
+
+.cpu-arch-arm =
+ <architecture>arm/<address-model>
+ <architecture>arm/<address-model>32 ;
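+
+# Informative example (added for clarity): a build requesting the properties
+# <architecture>x86 <address-model>64 matches .cpu-arch-amd64 above, so the
+# 8.0+ flags conditioned on that architecture (e.g. /MACHINE:X64) apply.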
+
+
+# Supported CPU types (only Itanium optimization options are supported from
+# VC++ 2005 on). See
+# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
+# detailed information.
+.cpu-type-g5 = i586 pentium pentium-mmx ;
+.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6
+ k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ;
+.cpu-type-em64t = prescott nocona core2 corei7 corei7-avx core-avx-i
+ conroe conroe-xe conroe-l allendale merom
+ merom-xe kentsfield kentsfield-xe penryn wolfdale
+ yorksfield nehalem sandy-bridge ivy-bridge haswell ;
+.cpu-type-amd64 = k8 opteron athlon64 athlon-fx k8-sse3 opteron-sse3
+ athlon64-sse3 amdfam10 barcelona bdver1 bdver2 bdver3
+ btver1 btver2 ;
+.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp
+ athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ;
+.cpu-type-itanium = itanium itanium1 merced ;
+.cpu-type-itanium2 = itanium2 mckinley ;
+.cpu-type-arm = armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
+ armv7 armv7s ;
+
+# Known toolset versions, in order of preference.
+.known-versions = 14.0 12.0 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1
+ 7.1toolkit 7.0 6.0 ;
+
+# Version aliases.
+.version-alias-6 = 6.0 ;
+.version-alias-6.5 = 6.0 ;
+.version-alias-7 = 7.0 ;
+.version-alias-8 = 8.0 ;
+.version-alias-9 = 9.0 ;
+.version-alias-10 = 10.0 ;
+.version-alias-11 = 11.0 ;
+.version-alias-12 = 12.0 ;
+.version-alias-14 = 14.0 ;
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
+.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ;
+.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ;
+.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ;
+.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ;
+.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ;
+.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
+.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
+.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
+.version-11.0-reg = "VisualStudio\\11.0\\Setup\\VC" ;
+.version-12.0-reg = "VisualStudio\\12.0\\Setup\\VC" ;
+.version-14.0-reg = "VisualStudio\\14.0\\Setup\\VC" ;
+
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ;
+.version-7.1toolkit-env = VCToolkitInstallDir ;
+
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
+.version-7.1toolkit-envpath = "bin" ;
+
+
+# Auto-detect all the available msvc installations on the system.
+auto-detect-toolset-versions ;
+
+
+# And finally trigger the actual Boost Build toolset registration.
+register-toolset ;
diff --git a/tools/build/src/tools/msvc.py b/tools/build/src/tools/msvc.py
new file mode 100644
index 0000000000..02dce9f9ed
--- /dev/null
+++ b/tools/build/src/tools/msvc.py
@@ -0,0 +1,1237 @@
+# Copyright (c) 2003 David Abrahams.
+# Copyright (c) 2005 Vladimir Prus.
+# Copyright (c) 2005 Alexey Pakhunov.
+# Copyright (c) 2006 Bojan Resnik.
+# Copyright (c) 2006 Ilya Sokolov.
+# Copyright (c) 2007 Rene Rivera
+# Copyright (c) 2008 Jurko Gospodnetic
+# Copyright (c) 2011 Juraj Ivancic
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+################################################################################
+#
+# MSVC Boost Build toolset module.
+# --------------------------------
+#
+# All toolset versions need to have their location either auto-detected or
+# explicitly specified except for the special 'default' version that expects the
+# environment to find the needed tools or report an error.
+#
+################################################################################
+
+from os import environ
+import os.path
+import re
+import _winreg
+
+import bjam
+
+from b2.tools import common, rc, pch, builtin, mc, midl
+from b2.build import feature, type, toolset, generators, property_set
+from b2.build.property import Property
+from b2.util import path
+from b2.manager import get_manager
+from b2.build.generators import Generator
+from b2.build.toolset import flags
+from b2.util.utility import to_seq, on_windows
+from b2.tools.common import Configurations
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+
+# It is not yet clear what to do with Cygwin on python port.
+def on_cygwin():
+ return False
+
+
+type.register('MANIFEST', ['manifest'])
+feature.feature('embed-manifest', ['on', 'off'], ['incidental', 'propagated'])
+
+type.register('PDB',['pdb'])
+
+################################################################################
+#
+# Public rules.
+#
+################################################################################
+
+# Initialize a specific toolset version configuration. As a result, the path
+# to the compiler and, possibly, program names are set up and will be used
+# when that version of the compiler is requested. For example, you might have:
+#
+# using msvc : 6.5 : cl.exe ;
+# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
+#
+# The version parameter may be omitted:
+#
+# using msvc : : Z:/foo/bar/cl.exe ;
+#
+# The following keywords have special meanings when specified as versions:
+# - all - all detected but not yet used versions will be marked as used
+# with their default options.
+#   - default - this is equivalent to an empty version.
+#
+# Depending on the supplied version, the detected configurations and the
+# presence of 'cl.exe' in the path, different results may be achieved. The
+# following table describes the possible scenarios:
+#
+# Nothing "x.y"
+# Passed Nothing "x.y" detected, detected,
+# version detected detected cl.exe in path cl.exe in path
+#
+# default Error Use "x.y" Create "default" Use "x.y"
+# all None Use all None Use all
+# x.y - Use "x.y" - Use "x.y"
+# a.b Error Error Create "a.b" Create "a.b"
+#
+# "x.y" - refers to a detected version;
+# "a.b" - refers to an undetected version.
+#
+# FIXME: Currently the command parameter and the <compiler> property parameter
+# seem to overlap in duties. Remove this duplication. This seems to be related
+# to why someone started preparing to replace init with configure rules.
+
+def init(version = None, command = None, options = None):
+ # When initialized from
+ # using msvc : x.0 ;
+ # we get version as a single element list i.e. ['x.0'],
+ # but when specified from the command line we get a string i.e. 'x.0'.
+ # We want to work with a string, so unpack the list if needed.
+ is_single_element_list = (isinstance(version,list) and len(version) == 1)
+ assert(version==None or isinstance(version,str) or is_single_element_list)
+ if is_single_element_list:
+ version = version[0]
+
+ options = to_seq(options)
+ command = to_seq(command)
+
+ if command:
+ options.extend("<command>"+cmd for cmd in command)
+ configure(version,options)
+
+def configure(version=None, options=None):
+ if version == "all":
+ if options:
+ raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version))
+
+ # Configure (i.e. mark as used) all registered versions.
+ all_versions = __versions.all()
+ if not all_versions:
+ if debug():
+ print "notice: [msvc-cfg] Asked to configure all registered" \
+ "msvc toolset versions when there are none currently" \
+ "registered." ;
+ else:
+ for v in all_versions:
+ # Note that there is no need to skip already configured
+ # versions here: this requests the configure_really rule to
+ # configure the version using default options, which in turn
+ # causes it to simply do nothing if the version has already
+ # been configured.
+ configure_really(v)
+ elif version == "default":
+ configure_really(None,options)
+ else:
+ configure_really(version, options)
+
+def extend_conditions(conditions,exts):
+ return [ cond + '/' + ext for cond in conditions for ext in exts ]
+
+def configure_version_specific(toolset_arg, version, conditions):
+ # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
+ # /Zc:wchar_t options that improve C++ standard conformance, but those
+ # options are off by default. If we are sure that the msvc version is at
+ # least 7.*, add those options explicitly. We can be sure either if the user
+ # specified version 7.* explicitly or if we auto-detected the version
+ # ourselves.
+ if not re.search('^6\\.', version):
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',conditions, ['/Zc:forScope','/Zc:wchar_t'])
+ toolset.flags('{}.compile.c++'.format(toolset_arg), 'C++FLAGS',conditions, ['/wd4675'])
+
+ # Explicitly disable the 'function is deprecated' warning. Some msvc
+ # versions have a bug, causing them to emit the deprecation warning even
+ # with /W0.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>off']), ['/wd4996'])
+ if re.search('^[78]\.', version):
+ # 64-bit compatibility warning deprecated since 9.0, see
+ # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>all']), ['/Wp64'])
+
+ #
+ # Processor-specific optimization.
+ #
+ if re.search('^[67]', version ):
+ # 8.0 deprecates some of the options.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed','<optimization>space']), ['/Ogiy', '/Gs'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed']), ['/Ot'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>space']), ['/Os'])
+
+ cpu_arch_i386_cond = extend_conditions(conditions, __cpu_arch_i386)
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>']),['/GB'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i486']),['/G4'])
+
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g5]), ['/G5'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g6]), ['/G6'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g7]), ['/G7'])
+
+ # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
+ # tests will fail.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', conditions, ['/Op'])
+
+ # 7.1 and below have single-threaded static RTL.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/ML'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MLd'])
+ else:
+ # 8.0 and above adds some more options.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' for a in __cpu_arch_amd64]), ['/favor:blend'])
+
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_em64t]), ['/favor:EM64T'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_amd64]), ['/favor:AMD64'])
+
+ # 8.0 and above only has multi-threaded static RTL.
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/MT'])
+ toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MTd'])
+
+ # Specify target machine type so the linker will not need to guess.
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64'])
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86'])
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64'])
+
+ # Make sure that a manifest will be generated even if there are no
+ # dependencies to put in it.
+ toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', conditions, ['/MANIFEST'])
+
+
+# Registers this toolset including all of its flags, features & generators. Does
+# nothing on repeated calls.
+
+def register_toolset():
+ if not 'msvc' in feature.values('toolset'):
+ register_toolset_really()
+
+
+engine = get_manager().engine()
+
+# This rule sets up the .pdb file that will be used when generating static
+# libraries and the debug-store option is 'database', so that the compiler
+# puts all debug info into a single .pdb file named after the library.
+#
+# Poking at source targets this way is probably not clean, but it is the
+# easiest approach.
+def archive(targets, sources=None, properties=None):
+ bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
+
+# Declare action for creating static libraries. If library exists, remove it
+# before adding files. See
+# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
+if not on_cygwin():
+ engine.register_action(
+ 'msvc.archive',
+ '''if exist "$(<[1])" DEL "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+ function=archive)
+else:
+ engine.register_action(
+ 'msvc.archive',
+ '''{rm} "$(<[1])"
+ $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()),
+ function=archive)
+
+# For the assembler the following options are turned on by default:
+#
+# -Zp4 align structures to 4 bytes
+# -Cp preserve case of user identifiers
+# -Cx preserve case in publics, externs
+#
+engine.register_action(
+ 'msvc.compile.asm',
+ '$(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"' )
+
+
+# Equivalent to [ on $(target) return $(prefix)$(var)$(suffix) ]. Note that $(var) can be a list.
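+# For example, if DEFINES is set to ['A', 'B'] on the target, then
+# expand_target_variable(t, 'DEFINES', '\n-D') evaluates to '\n-DA \n-DB'
+# (the expanded elements are joined with single spaces).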
+def expand_target_variable(target,var,prefix=None,suffix=None):
+ values = bjam.call( 'get-target-variable', target, var )
+ return " ".join([ ("" if prefix is None else prefix) + elem + ("" if suffix is None else suffix) for elem in values ])
+
+
+def get_rspline(targets, lang_opt):
+ result = lang_opt + '\n' + \
+ expand_target_variable(targets, 'UNDEFS' , '\n-U' ) + \
+ expand_target_variable(targets, 'CFLAGS' , '\n' ) + \
+ expand_target_variable(targets, 'C++FLAGS', '\n' ) + \
+ expand_target_variable(targets, 'OPTIONS' , '\n' ) + '\n-c' + \
+ expand_target_variable(targets, 'DEFINES' , '\n-D' , '\n' ) + \
+ expand_target_variable(targets, 'INCLUDES', '\n"-I', '"\n' )
+ bjam.call('set-target-variable', targets, 'CC_RSPLINE', result)
+
+def compile_c(targets, sources = [], properties = None):
+ get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' )
+ get_rspline(targets, '-TC')
+ compile_c_cpp(targets,sources)
+
+def compile_c_preprocess(targets, sources = [], properties = None):
+ get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' )
+ get_rspline(targets, '-TC')
+ preprocess_c_cpp(targets,sources)
+
+def compile_c_pch(targets, sources = [], properties = []):
+ get_manager().engine().set_target_variable( targets[0], 'C++FLAGS', '' )
+ get_rspline([targets[0]], '-TC')
+ get_rspline([targets[1]], '-TC')
+
+toolset.flags( 'msvc', 'YLOPTION', [], ['-Yl'] )
+
+def compile_cpp(targets,sources=[],properties=None):
+ get_rspline(targets,'-TP')
+ bjam.call('set-target-variable', targets, 'PCH_FILE', sources)
+ compile_c_cpp(targets,sources)
+
+def compile_cpp_preprocess(targets,sources=[],properties=None):
+ get_rspline(targets,'-TP')
+ preprocess_c_cpp(targets,sources)
+
+def compile_cpp_pch(targets,sources=[],properties=None):
+ get_rspline([targets[0]], '-TP')
+ get_rspline([targets[1]], '-TP')
+
+
+# Action for running the C/C++ compiler without using precompiled headers.
+#
+# WARNING: Synchronize any changes in this action with intel-win
+#
+# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
+#
+# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
+#
+# 2. When compiling an executable's source files, PDB_NAME is set on a per-source-file basis by the rule compile-c-c++.
+# The linker will pull these into the executable's PDB.
+#
+# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by the rule archive,
+# as in this case the compiler must be used to create a single PDB for our library.
+#
+
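+# Wraps a bjam action whose source list has to be adjusted right before the
+# action is run. '__call__' first invokes 'setup_func', which may modify
+# 'sources' and returns the name of the concrete bjam action to execute; that
+# action is then bound to the targets via 'set-update-action', and the
+# optional 'function' is finally called like an ordinary action function.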
+class SetupAction:
+ def __init__(self, setup_func, function):
+ self.setup_func = setup_func
+ self.function = function
+
+ def __call__(self, targets, sources, property_set):
+ assert(callable(self.setup_func))
+ # This can modify sources.
+ action_name = self.setup_func(targets, sources, property_set)
+ # Bjam actions defined from Python have only the command
+ # to execute, and no associated jam procedural code. So
+ # passing 'property_set' to it is not necessary.
+ bjam.call("set-update-action", action_name, targets, sources, [])
+ if self.function:
+ self.function(targets, sources, property_set)
+
+def register_setup_action(action_name,setup_function,function=None):
+ global engine
+ if engine.actions.has_key(action_name):
+ raise "Bjam action %s is already defined" % action_name
+ engine.actions[action_name] = SetupAction(setup_function, function)
+
+
+engine.register_action('compile-c-c++',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)''',
+bound_list=['PDB_NAME'])
+
+def setup_compile_c_cpp_action(targets, sources, properties):
+ sources += bjam.call('get-target-variable',targets,'PCH_FILE')
+ sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
+ return 'compile-c-c++'
+
+
+register_setup_action(
+ 'msvc.compile.c',
+ setup_compile_c_cpp_action,
+ function=compile_c)
+
+register_setup_action(
+ 'msvc.compile.c++',
+ setup_compile_c_cpp_action,
+ function=compile_cpp)
+
+
+engine.register_action('preprocess-c-c++',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"',
+bound_list=['PDB_NAME'])
+
+def setup_preprocess_c_cpp_action(targets, sources, properties):
+ sources += bjam.call('get-target-variable',targets,'PCH_FILE')
+ sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
+ return 'preprocess-c-c++'
+
+register_setup_action(
+ 'msvc.preprocess.c',
+ setup_preprocess_c_cpp_action,
+ function=compile_c_preprocess)
+
+register_setup_action(
+ 'msvc.preprocess.c++',
+ setup_preprocess_c_cpp_action,
+ function=compile_cpp_preprocess)
+
+def compile_c_cpp(targets,sources=None):
+ pch_header = bjam.call('get-target-variable',targets[0],'PCH_HEADER')
+ pch_file = bjam.call('get-target-variable',targets[0],'PCH_FILE')
+ if pch_header: get_manager().engine().add_dependency(targets[0],pch_header)
+ if pch_file: get_manager().engine().add_dependency(targets[0],pch_file)
+ bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
+
+def preprocess_c_cpp(targets,sources=None):
+ #same as above
+ return compile_c_cpp(targets,sources)
+
+# Action for running the C/C++ compiler using precompiled headers. In addition
+# to whatever else it needs to compile, this action also adds a temporary source
+# .cpp file used to compile the precompiled headers themselves.
+
+
+engine.register_action('compile-c-c++-pch',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include "$(>[1]:D=)"\n)" $(.CC.FILTER)')
+
+engine.register_action('compile-c-c++-pch-s',
+'$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)')
+
+def setup_c_cpp_pch(targets, sources, properties):
+ pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE')
+ if pch_source:
+ sources += pch_source
+ get_manager().engine().add_dependency(targets,pch_source)
+ return 'compile-c-c++-pch-s'
+ else:
+ return 'compile-c-c++-pch'
+
+register_setup_action(
+ 'msvc.compile.c.pch',
+ setup_c_cpp_pch,
+ function=compile_c_pch)
+
+register_setup_action(
+ 'msvc.compile.c++.pch',
+ setup_c_cpp_pch,
+ function=compile_cpp_pch)
+
+
+# See midl.py for details.
+#
+engine.register_action(
+ 'msvc.compile.idl',
+ '''$(.IDL) /nologo @"@($(<[1]:W).rsp:E=
+"$(>:W)"
+-D$(DEFINES)
+"-I$(INCLUDES:W)"
+-U$(UNDEFS)
+$(MIDLFLAGS)
+/tlb "$(<[1]:W)"
+/h "$(<[2]:W)"
+/iid "$(<[3]:W)"
+/proxy "$(<[4]:W)"
+/dlldata "$(<[5]:W)")"
+ {touch} "$(<[4]:W)"
+ {touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
+
+engine.register_action(
+ 'msvc.compile.mc',
+ '$(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"')
+
+engine.register_action(
+ 'msvc.compile.rc',
+ '$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"')
+
+def link_dll(targets,sources=None,properties=None):
+ get_manager().engine().add_dependency(targets,bjam.call('get-target-variable',targets,'DEF_FILE'))
+ manifest(targets, sources, properties)
+
+def manifest(targets,sources=None,properties=None):
+ if 'on' in properties.get('<embed-manifest>'):
+ get_manager().engine().set_update_action('msvc.manifest', targets, sources, properties)
+
+
+# Incremental linking a DLL causes no end of problems: if the actual exports do
+# not change, the import .lib file is never updated. Therefore, the .lib is
+# always out-of-date and gets rebuilt every time. I am not sure that incremental
+# linking is such a great idea in general, but in this case I am sure we do not
+# want it.
+
+# A Windows manifest is a new way to specify dependencies on managed .NET
+# assemblies and native Windows DLLs. The manifests are embedded as resources
+# and are useful in any PE target (both DLL and EXE).
+
+if not on_cygwin():
+ engine.register_action(
+ 'msvc.link',
+ '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
+ function=manifest,
+ bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest',
+ '''if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ )''')
+
+ engine.register_action(
+ 'msvc.link.dll',
+ '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
+if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
+ function=link_dll,
+ bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest.dll',
+ '''if exist "$(<[1]).manifest" (
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ )''')
+else:
+ engine.register_action(
+ 'msvc.link',
+ '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+ function=manifest,
+ bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest',
+ '''if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
+ fi''')
+
+ engine.register_action(
+ 'msvc.link.dll',
+ '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
+"$(>)"
+$(LIBRARIES_MENTIONED_BY_FILE)
+$(LIBRARIES)
+"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
+"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
+ function=link_dll,
+ bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
+
+ engine.register_action(
+ 'msvc.manifest.dll',
+ '''if test -e "$(<[1]).manifest"; then
+ $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
+ fi''')
+
+
+################################################################################
+#
+# Classes.
+#
+################################################################################
+
+class MsvcPchGenerator(pch.PchGenerator):
+
+ # Inherit the __init__ method
+ def run_pch(self, project, name, prop_set, sources):
+ # Find the header in sources. Ignore any CPP sources.
+ pch_header = None
+ pch_source = None
+ for s in sources:
+ if type.is_derived(s.type(), 'H'):
+ pch_header = s
+ elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'):
+ pch_source = s
+
+ if not pch_header:
+ raise RuntimeError( "can not build pch without pch-header" )
+
+ # If we do not have the PCH source - that is fine. We will just create a
+ # temporary .cpp file in the action.
+ properties = prop_set.all()
+ # Passing of <pch-source> is a dirty trick, needed because
+ # non-composing generators with multiple inputs are subtly
+ # broken. For more detailed information see:
+ # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
+ if pch_source:
+ properties.append(Property('pch-source',pch_source))
+ generated = Generator.run(self,project,name,property_set.create(properties),[pch_header])
+ pch_file = None
+ for g in generated:
+ if type.is_derived(g.type(), 'PCH'):
+ pch_file = g
+ result_props = []
+ if pch_header:
+ result_props.append(Property('pch-header', pch_header))
+ if pch_file:
+ result_props.append(Property('pch-file', pch_file))
+
+ return property_set.PropertySet(result_props), generated
+
+
+################################################################################
+#
+# Local rules.
+#
+################################################################################
+
+# Detects the versions listed in '_known_versions' by checking registry
+# information, environment variables & default paths. Supports both native
+# Windows and Cygwin.
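+#
+# For example, for version '12.0' this consults the registry key (also the
+# Wow6432Node variant on 64-bit Windows):
+#
+#   HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\VisualStudio\12.0\Setup\VC
+#
+# and reads its 'ProductDir' value.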
+def auto_detect_toolset_versions():
+ if on_windows() or on_cygwin():
+ for version in _known_versions:
+ versionVarName = '__version_{}_reg'.format(version.replace('.','_'))
+ if versionVarName in globals():
+ vc_path = None
+ for x64elt in [ '', 'Wow6432Node\\' ]:
+ try:
+ with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\{}Microsoft\\{}'.format(x64elt, globals()[versionVarName])) as reg_key:
+ vc_path = _winreg.QueryValueEx(reg_key, "ProductDir")[0]
+ except:
+ pass
+ if vc_path:
+ vc_path = os.path.join(vc_path,'bin')
+ register_configuration(version,os.path.normpath(vc_path))
+
+ for i in _known_versions:
+ if not i in __versions.all():
+ register_configuration(i,default_path(i))
+
+
+# Worker rule for toolset version configuration. Takes an explicit version id or
+# nothing in case it should configure the default toolset version (the first
+# registered one or a new 'default' one in case no toolset versions have been
+# registered yet).
+#
+
+def configure_really(version=None, options=[]):
+ v = version
+ if not v:
+ # Take the first registered (i.e. auto-detected) version.
+ version = __versions.first()
+ v = version
+
+ # Note: 'version' can still be empty at this point if no versions have
+ # been auto-detected.
+ if not version:
+ version = "default"
+
+ # Version alias -> real version number.
+ version = globals().get("__version_alias_{}".format(version), version)
+
+ # Check whether the selected configuration is already in use.
+ if version in __versions.used():
+ # Allow multiple 'toolset.using' calls for the same configuration if the
+ # identical sets of options are used.
+ if options and options != __versions.get(version,'options'):
+ raise RuntimeError("MSVC toolset configuration: Toolset version '$(version)' already configured.".format(version))
+ else:
+ # Register a new configuration.
+ __versions.register(version)
+
+ # Add the user-supplied options to the auto-detected ones.
+ version_opts = __versions.get(version, 'options')
+ if (version_opts):
+ options = version_opts + options
+
+ # Mark the configuration as 'used'.
+ __versions.use(version)
+ # Generate conditions and save them.
+ conditions = common.check_init_parameters('msvc', None, ('version', v))
+ __versions.set(version, 'conditions', conditions)
+ command = feature.get_values('<command>', options)
+
+ # If version is specified, we try to search first in default paths, and
+ # only then in PATH.
+ command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version))
+ common.handle_options('msvc', conditions, command, options)
+
+ if not version:
+ # Even if version is not explicitly specified, try to detect the
+ # version from the path.
+ # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
+ # 9.0express as 9.0 here.
+ if re.search("Microsoft Visual Studio 14", command):
+ version = '14.0'
+ elif re.search("Microsoft Visual Studio 12", command):
+ version = '12.0'
+ elif re.search("Microsoft Visual Studio 11", command):
+ version = '11.0'
+ elif re.search("Microsoft Visual Studio 10", command):
+ version = '10.0'
+ elif re.search("Microsoft Visual Studio 9", command):
+ version = '9.0'
+ elif re.search("Microsoft Visual Studio 8", command):
+ version = '8.0'
+ elif re.search("NET 2003[\/\\]VC7", command):
+ version = '7.1'
+ elif re.search("Microsoft Visual C\\+\\+ Toolkit 2003", command):
+ version = '7.1toolkit'
+ elif re.search(".NET[\/\\]VC7", command):
+ version = '7.0'
+ else:
+ version = '6.0'
+
+ # Generate and register setup command.
+
+ below_8_0 = re.search("^[67]\\.",version) != None
+
+ if below_8_0:
+ cpu = ['i386']
+ else:
+ cpu = ['i386', 'amd64', 'ia64']
+
+ setup_scripts = {}
+
+ if command:
+ # TODO: Note that if we specify a non-existent toolset version then
+ # this rule may find and use a corresponding compiler executable
+ # belonging to an incorrect toolset version. For example, if you
+ # have only MSVC 7.1 installed, have its executable on the path and
+ # specify you want Boost Build to use MSVC 9.0, then you want Boost
+ # Build to report an error but this may cause it to silently use the
+ # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
+ # toolset version.
+ command = common.get_absolute_tool_path(command)
+
+ if command:
+ parent = os.path.dirname(os.path.normpath(command))
+ # Setup will be used if the command name has been specified. If
+ # setup is not specified explicitly then a default setup script will
+ # be used instead. Setup scripts may be global or arhitecture/
+ # /platform/cpu specific. Setup options are used only in case of
+ # global setup scripts.
+
+ # Default setup scripts provided with different VC distributions:
+ #
+ # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
+ # builds. It was located in the bin folder for the regular version
+ # and in the root folder for the free VC 7.1 tools.
+ #
+ # Later 8.0 & 9.0 versions introduce separate platform specific
+ # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
+ # located in or under the bin folder. Most also include a global
+ # vcvarsall.bat helper script located in the root folder which runs
+ # one of the aforementioned vcvars*.bat scripts based on the options
+ # passed to it. So far only the version coming with some PlatformSDK
+ # distributions does not include this top level script but to
+ # support those we need to fall back to using the worker scripts
+ # directly in case the top level script can not be found.
+
+ global_setup = feature.get_values('<setup>',options)
+ if global_setup:
+ global_setup = global_setup[0]
+ else:
+ global_setup = None
+
+ if not below_8_0 and not global_setup:
+ global_setup = locate_default_setup(command,parent,'vcvarsall.bat')
+
+
+ default_setup = {
+ 'amd64' : 'vcvarsx86_amd64.bat',
+ 'i386' : 'vcvars32.bat',
+ 'ia64' : 'vcvarsx86_ia64.bat' }
+
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
+ # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
+ # mention an x86_IPF option, that seems to be a documentation bug
+ # and x86_ia64 is the correct option.
+ default_global_setup_options = {
+ 'amd64' : 'x86_amd64',
+ 'i386' : 'x86',
+ 'ia64' : 'x86_ia64' }
+
+ somehow_detect_the_itanium_platform = None
+ # When using 64-bit Windows, and targeting 64-bit, it is possible to
+ # use a native 64-bit compiler, selected by the "amd64" & "ia64"
+ # parameters to vcvarsall.bat. There are two variables we can use --
+ # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
+ # 'x86' when running 32-bit Windows, no matter which processor is
+ # used, and 'AMD64' when running 64-bit Windows on an x86-family
+ # processor (either AMD64 or EM64T).
+ #
+ if re.search( 'AMD64', environ[ "PROCESSOR_ARCHITECTURE" ] ) != None:
+ default_global_setup_options[ 'amd64' ] = 'amd64'
+ # TODO: The same 'native compiler usage' should be implemented for
+ # the Itanium platform by using the "ia64" parameter. For this
+ # though we need someone with access to this platform who can find
+ # out how to correctly detect this case.
+ elif somehow_detect_the_itanium_platform:
+ default_global_setup_options[ 'ia64' ] = 'ia64'
+
+ setup_prefix = "call "
+ setup_suffix = """ >nul\n"""
+ if on_cygwin():
+ setup_prefix = "cmd.exe /S /C call "
+ setup_suffix = " \">nul\" \"&&\" "
+
+ for c in cpu:
+ setup_options = None
+ setup_cpu = feature.get_values('<setup-{}>'.format(c),options)
+
+ if not setup_cpu:
+ if global_setup:
+ setup_cpu = global_setup
+ # If needed we can easily add using configuration flags
+ # here for overriding which options get passed to the
+ # global setup command for which target platform:
+ # setup_options = feature.get_values('<setup-options-{}>'.format(c),options)
+ if not setup_options:
+ setup_options = default_global_setup_options[ c ]
+ else:
+ setup_cpu = locate_default_setup(command, parent, default_setup[ c ])
+
+ # Cygwin to Windows path translation.
+ # setup-$(c) = "\""$(setup-$(c):W)"\"" ;
+
+ # Append setup options to the setup name and add the final setup
+ # prefix & suffix.
+ setup_scripts[ c ] = '{}"{}" {}{}'.format(setup_prefix, setup_cpu, setup_options, setup_suffix)
+
+ # Get tool names (if any) and finish setup.
+ compiler = feature.get_values("<compiler>", options)
+ if not compiler:
+ compiler = "cl"
+
+ linker = feature.get_values("<linker>", options)
+ if not linker:
+ linker = "link"
+
+ resource_compiler = feature.get_values("<resource-compiler>", options)
+ if not resource_compiler:
+ resource_compiler = "rc"
+
+ # Turn on some options for i386 assembler
+ # -coff generate COFF format object file (compatible with cl.exe output)
+ default_assembler_amd64 = 'ml64'
+ default_assembler_i386 = 'ml -coff'
+ default_assembler_ia64 = 'ias'
+
+ assembler = feature.get_values('<assembler>',options)
+
+ idl_compiler = feature.get_values('<idl-compiler>',options)
+ if not idl_compiler:
+ idl_compiler = 'midl'
+
+ mc_compiler = feature.get_values('<mc-compiler>',options)
+ if not mc_compiler:
+ mc_compiler = 'mc'
+
+ manifest_tool = feature.get_values('<manifest-tool>',options)
+ if not manifest_tool:
+ manifest_tool = 'mt'
+
+ cc_filter = feature.get_values('<compiler-filter>',options)
+
+ for c in cpu:
+ cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ]
+
+ setup_script = setup_scripts.get(c, '')
+
+ if debug():
+ for cpu_condition in cpu_conditions:
+ print "notice: [msvc-cfg] condition: '{}', setup: '{}'".format(cpu_condition,setup_script)
+
+ cpu_assembler = assembler
+ if not cpu_assembler:
+ cpu_assembler = locals()['default_assembler_{}'.format(c)]
+
+ toolset.flags('msvc.compile', '.CC' , cpu_conditions, ['{}{} /Zm800 -nologo' .format(setup_script, compiler)])
+ toolset.flags('msvc.compile', '.RC' , cpu_conditions, ['{}{}' .format(setup_script, resource_compiler)])
+ toolset.flags('msvc.compile', '.ASM', cpu_conditions, ['{}{} -nologo' .format(setup_script, cpu_assembler)])
+ toolset.flags('msvc.link' , '.LD' , cpu_conditions, ['{}{} /NOLOGO /INCREMENTAL:NO'.format(setup_script, linker)])
+ toolset.flags('msvc.archive', '.LD' , cpu_conditions, ['{}{} /lib /NOLOGO' .format(setup_script, linker)])
+ toolset.flags('msvc.compile', '.IDL', cpu_conditions, ['{}{}' .format(setup_script, idl_compiler)])
+ toolset.flags('msvc.compile', '.MC' , cpu_conditions, ['{}{}' .format(setup_script, mc_compiler)])
+ toolset.flags('msvc.link' , '.MT' , cpu_conditions, ['{}{} -nologo' .format(setup_script, manifest_tool)])
+
+ if cc_filter:
+ toolset.flags('msvc', '.CC.FILTER', cpu_conditions, ['"|" {}'.format(cc_filter)])
+
+ # Set version-specific flags.
+ configure_version_specific('msvc', version, conditions)
+
+
+# Returns the default installation path for the given version.
+#
+def default_path(version):
+ # Use auto-detected path if possible.
+ options = __versions.get(version, 'options')
+ tmp_path = None
+ if options:
+ tmp_path = feature.get_values('<command>', options)
+
+ if tmp_path:
+ tmp_path="".join(tmp_path)
+ tmp_path=os.path.dirname(tmp_path)
+ else:
+ env_var_var_name = '__version_{}_env'.format(version.replace('.','_'))
+ vc_path = None
+ if env_var_var_name in globals():
+ env_var_name = globals()[env_var_var_name]
+ if env_var_name in os.environ:
+ vc_path = environ[env_var_name]
+ if vc_path:
+ vc_path = os.path.join(vc_path,globals()['__version_{}_envpath'.format(version.replace('.','_'))])
+ tmp_path = os.path.normpath(vc_path)
+
+ var_name = '__version_{}_path'.format(version.replace('.','_'))
+ if not tmp_path and var_name in globals():
+ tmp_path = os.path.normpath(os.path.join(common.get_program_files_dir(), globals()[var_name]))
+ return tmp_path
+
+
+# Returns either the default installation path (if 'version' is not empty) or
+# a list of all known default paths (if no version is given).
+#
+def default_paths(version = None):
+ possible_paths = []
+ if version:
+ path = default_path(version)
+ if path:
+ possible_paths.append(path)
+ else:
+ for i in _known_versions:
+ path = default_path(i)
+ if path:
+ possible_paths.append(path)
+ return possible_paths
+
+
+class MsvcLinkingGenerator(builtin.LinkingGenerator):
+ # Calls the base version. If necessary, also creates targets for the
+ # PDB and manifest files that accompany the main target, forcing their
+ # names to be derived from the main target's name.
+ def generated_targets(self, sources, prop_set, project, name):
+ result = builtin.LinkingGenerator.generated_targets(self, sources, prop_set, project, name)
+ if result:
+ name_main = result[0].name()
+ action = result[0].action()
+
+ if prop_set.get('<debug-symbols>') == 'on':
+ # We force an exact name on the PDB. The reason is tagging -- the tag rule
+ # may reasonably special-case some target types, like SHARED_LIB. The tag
+ # rule will not catch PDB, and it cannot easily figure out whether a PDB is
+ # paired with a SHARED_LIB, an EXE or something else. Because the PDB always
+ # gets the same name as the main target, with .pdb as the extension, just
+ # force it.
+ target = FileTarget(name_main.split_ext()[0]+'.pdb','PDB',project,action,True)
+ registered_target = virtual_target.register(target)
+ if target != registered_target:
+ action.replace_targets(target,registered_target)
+ result.append(registered_target)
+ if prop_set.get('<embed-manifest>') == 'off':
+ # The manifest is an evil target: it has .manifest appended to the name of
+ # the main target, including the extension (e.g. a.exe.manifest). We use an
+ # 'exact' name to achieve this effect.
+ target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True)
+ registered_target = virtual_target.register(target)
+ if target != registered_target:
+ action.replace_targets(target,registered_target)
+ result.append(registered_target)
+ return result
+
+
+# Unsafe worker rule for the register-toolset() rule. Must not be called
+# multiple times.
+
+def register_toolset_really():
+ feature.extend('toolset', ['msvc'])
+
+ # Intel and msvc supposedly have link-compatible objects.
+ feature.subfeature( 'toolset', 'msvc', 'vendor', 'intel', ['propagated', 'optional'])
+
+ # Inherit MIDL flags.
+ toolset.inherit_flags('msvc', 'midl')
+
+ # Inherit MC flags.
+ toolset.inherit_flags('msvc','mc')
+
+ # Dynamic runtime comes only in MT flavour.
+ toolset.add_requirements(['<toolset>msvc,<runtime-link>shared:<threading>multi'])
+
+ # Declare msvc toolset specific features.
+ feature.feature('debug-store', ['object', 'database'], ['propagated'])
+ feature.feature('pch-source', [], ['dependency', 'free'])
+
+ # Declare generators.
+
+ # TODO: Is it possible to combine these? Make the generators
+ # non-composing so that they do not convert each source into a separate
+ # .rsp file.
+ generators.register(MsvcLinkingGenerator('msvc.link', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['EXE'], ['<toolset>msvc']))
+ generators.register(MsvcLinkingGenerator('msvc.link.dll', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['SHARED_LIB','IMPORT_LIB'], ['<toolset>msvc']))
+
+ builtin.register_archiver('msvc.archive', ['OBJ'], ['STATIC_LIB'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c', ['C'], ['OBJ'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['<toolset>msvc'])
+ builtin.register_c_compiler('msvc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['<toolset>msvc'])
+
+ # Using 'register-c-compiler' adds the build directory to INCLUDES.
+ builtin.register_c_compiler('msvc.compile.rc', ['RC'], ['OBJ(%_res)'], ['<toolset>msvc'])
+ generators.override('msvc.compile.rc', 'rc.compile.resource')
+ generators.register_standard('msvc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>msvc'])
+
+ builtin.register_c_compiler('msvc.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], ['<toolset>msvc'])
+ generators.override('msvc.compile.idl', 'midl.compile.idl')
+
+ generators.register_standard('msvc.compile.mc', ['MC'], ['H','RC'], ['<toolset>msvc'])
+ generators.override('msvc.compile.mc', 'mc.compile')
+
+ # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
+ # the latter have their HPP type derived from H. The type of compilation
+ # is determined entirely by the destination type.
+ generators.register(MsvcPchGenerator('msvc.compile.c.pch', False, ['H'], ['C_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
+ generators.register(MsvcPchGenerator('msvc.compile.c++.pch', False, ['H'], ['CPP_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
+
+ generators.override('msvc.compile.c.pch', 'pch.default-c-pch-generator')
+ generators.override('msvc.compile.c++.pch', 'pch.default-cpp-pch-generator')
+
+ toolset.flags('msvc.compile', 'PCH_FILE' , ['<pch>on'], ['<pch-file>' ])
+ toolset.flags('msvc.compile', 'PCH_SOURCE', ['<pch>on'], ['<pch-source>'])
+ toolset.flags('msvc.compile', 'PCH_HEADER', ['<pch>on'], ['<pch-header>'])
+
+ #
+ # Declare flags for compilation.
+ #
+ toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>speed'], ['/O2'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>space'], ['/O1'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium ], ['/G1'])
+ toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium2 ], ['/G2'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>object'], ['/Z7'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>database'], ['/Zi'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>off'], ['/Od'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>off'], ['/Ob0'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>on'], ['/Ob1'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>full'], ['/Ob2'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>on'], ['/W3'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>off'], ['/W0'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>all'], ['/W4'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<warnings-as-errors>on'], ['/WX'])
+
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off'], ['/EHs'])
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on'], ['/EHsc'])
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off'], ['/EHa'])
+ toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on'], ['/EHac'])
+
+ # By default 8.0 enables rtti support while prior versions disabled it. We
+ # simply enable or disable it explicitly so we do not have to depend on this
+ # default behaviour.
+ toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>on'], ['/GR'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>off'], ['/GR-'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>shared'], ['/MD'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>shared'], ['/MDd'])
+
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>static/<threading>multi'], ['/MT'])
+ toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>static/<threading>multi'], ['/MTd'])
+
+ toolset.flags('msvc.compile', 'OPTIONS', [], ['<cflags>'])
+ toolset.flags('msvc.compile.c++', 'OPTIONS', [], ['<cxxflags>'])
+
+ toolset.flags('msvc.compile', 'PDB_CFLAG', ['<debug-symbols>on/<debug-store>database'],['/Fd'])
+
+ toolset.flags('msvc.compile', 'DEFINES', [], ['<define>'])
+ toolset.flags('msvc.compile', 'UNDEFS', [], ['<undef>'])
+ toolset.flags('msvc.compile', 'INCLUDES', [], ['<include>'])
+
+ # Declare flags for the assembler.
+ toolset.flags('msvc.compile.asm', 'USER_ASMFLAGS', [], ['<asmflags>'])
+
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<debug-symbols>on'], ['/Zi', '/Zd'])
+
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>on'], ['/W3'])
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>off'], ['/W0'])
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>all'], ['/W4'])
+ toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings-as-errors>on'], ['/WX'])
+
+ toolset.flags('msvc.compile.asm', 'DEFINES', [], ['<define>'])
+
+ # Declare flags for linking.
+ toolset.flags('msvc.link', 'PDB_LINKFLAG', ['<debug-symbols>on/<debug-store>database'], ['/PDB']) # not used yet
+ toolset.flags('msvc.link', 'LINKFLAGS', ['<debug-symbols>on'], ['/DEBUG'])
+ toolset.flags('msvc.link', 'DEF_FILE', [], ['<def-file>'])
+
+ # The linker disables the default optimizations when using /DEBUG so we
+ # have to enable them manually for release builds with debug symbols.
+ toolset.flags('msvc', 'LINKFLAGS', ['<debug-symbols>on/<runtime-debugging>off'], ['/OPT:REF,ICF'])
+
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>console'], ['/subsystem:console'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>gui'], ['/subsystem:windows'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>wince'], ['/subsystem:windowsce'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>native'], ['/subsystem:native'])
+ toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>auto'], ['/subsystem:posix'])
+
+ toolset.flags('msvc.link', 'OPTIONS', [], ['<linkflags>'])
+ toolset.flags('msvc.link', 'LINKPATH', [], ['<library-path>'])
+
+ toolset.flags('msvc.link', 'FINDLIBS_ST', [], ['<find-static-library>'])
+ toolset.flags('msvc.link', 'FINDLIBS_SA', [], ['<find-shared-library>'])
+ toolset.flags('msvc.link', 'LIBRARY_OPTION', ['<toolset>msvc'], [''])
+ toolset.flags('msvc.link', 'LIBRARIES_MENTIONED_BY_FILE', [], ['<library-file>'])
+
+ toolset.flags('msvc.archive', 'AROPTIONS', [], ['<archiveflags>'])
+
+
+# Locates the requested setup script under the given folder and returns its full
+# path or nothing in case the script can not be found. In case multiple scripts
+# are found only the first one is returned.
+#
+# TODO: There used to exist a code comment for the msvc.init rule stating that
+# we do not correctly detect the location of the vcvars32.bat setup script for
+# the free VC7.1 tools in case user explicitly provides a path. This should be
+# tested or simply remove this whole comment in case this toolset version is no
+# longer important.
+#
+def locate_default_setup(command, parent, setup_name):
+ for setup in [os.path.join(dir,setup_name) for dir in [command,parent]]:
+ if os.path.exists(setup):
+ return setup
+ return None
+
+
+# Validates given path, registers found configuration and prints debug
+# information about it.
+#
+def register_configuration(version, path=None):
+ if path:
+ command = os.path.join(path, 'cl.exe')
+ if os.path.exists(command):
+ if debug():
+ print "notice: [msvc-cfg] msvc-$(version) detected, command: ''".format(version,command)
+ __versions.register(version)
+ __versions.set(version,'options',['<command>{}'.format(command)])
+
+
+################################################################################
+#
+# Startup code executed when loading this module.
+#
+################################################################################
+
+# Similar to Configurations, but remembers the first registered configuration.
+class MSVCConfigurations(Configurations):
+ def __init__(self):
+ Configurations.__init__(self)
+ self.first_ = None
+
+ def register(self, id):
+ Configurations.register(self,id)
+ if not self.first_:
+ self.first_ = id
+
+ def first(self):
+ return self.first_
+
+
+# List of all registered configurations.
+__versions = MSVCConfigurations()
+
+# Supported CPU architectures.
+__cpu_arch_i386 = [
+ '<architecture>/<address-model>',
+ '<architecture>/<address-model>32',
+ '<architecture>x86/<address-model>',
+ '<architecture>x86/<address-model>32']
+
+__cpu_arch_amd64 = [
+ '<architecture>/<address-model>64',
+ '<architecture>x86/<address-model>64']
+
+__cpu_arch_ia64 = [
+ '<architecture>ia64/<address-model>',
+ '<architecture>ia64/<address-model>64']
+
+
+# Supported CPU types (only Itanium optimization options are supported from
+# VC++ 2005 on). See
+# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
+# detailed information.
+__cpu_type_g5 = ['i586', 'pentium', 'pentium-mmx' ]
+__cpu_type_g6 = ['i686', 'pentiumpro', 'pentium2', 'pentium3', 'pentium3m', 'pentium-m', 'k6',
+ 'k6-2', 'k6-3', 'winchip-c6', 'winchip2', 'c3', 'c3-2' ]
+__cpu_type_em64t = ['prescott', 'nocona', 'core2', 'corei7', 'corei7-avx', 'core-avx-i', 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'merom',
+ 'merom-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
+ 'yorksfield', 'nehalem', 'sandy-bridge', 'ivy-bridge', 'haswell' ]
+__cpu_type_amd64 = ['k8', 'opteron', 'athlon64', 'athlon-fx', 'k8-sse3', 'opteron-sse3', 'athlon64-sse3', 'amdfam10', 'barcelona',
+ 'bdver1', 'bdver2', 'bdver3', 'btver1', 'btver2' ]
+__cpu_type_g7 = ['pentium4', 'pentium4m', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
+ 'athlon-mp'] + __cpu_type_em64t + __cpu_type_amd64
+__cpu_type_itanium = ['itanium', 'itanium1', 'merced']
+__cpu_type_itanium2 = ['itanium2', 'mckinley']
+
+
+# Known toolset versions, in order of preference.
+_known_versions = ['14.0', '12.0', '11.0', '10.0', '10.0express', '9.0', '9.0express', '8.0', '8.0express', '7.1', '7.1toolkit', '7.0', '6.0']
+
+# Version aliases.
+__version_alias_6 = '6.0'
+__version_alias_6_5 = '6.0'
+__version_alias_7 = '7.0'
+__version_alias_8 = '8.0'
+__version_alias_9 = '9.0'
+__version_alias_10 = '10.0'
+__version_alias_11 = '11.0'
+__version_alias_12 = '12.0'
+__version_alias_14 = '14.0'
+
+# Names of registry keys containing the Visual C++ installation path (relative
+# to "HKEY_LOCAL_MACHINE\SOFTWARE\\Microsoft").
+__version_6_0_reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++"
+__version_7_0_reg = "VisualStudio\\7.0\\Setup\\VC"
+__version_7_1_reg = "VisualStudio\\7.1\\Setup\\VC"
+__version_8_0_reg = "VisualStudio\\8.0\\Setup\\VC"
+__version_8_0express_reg = "VCExpress\\8.0\\Setup\\VC"
+__version_9_0_reg = "VisualStudio\\9.0\\Setup\\VC"
+__version_9_0express_reg = "VCExpress\\9.0\\Setup\\VC"
+__version_10_0_reg = "VisualStudio\\10.0\\Setup\\VC"
+__version_10_0express_reg = "VCExpress\\10.0\\Setup\\VC"
+__version_11_0_reg = "VisualStudio\\11.0\\Setup\\VC"
+__version_12_0_reg = "VisualStudio\\12.0\\Setup\\VC"
+__version_14_0_reg = "VisualStudio\\14.0\\Setup\\VC"
+
+# Visual C++ Toolkit 2003 does not store its installation path in the registry.
+# The environment variable 'VCToolkitInstallDir' and the default installation
+# path will be checked instead.
+__version_7_1toolkit_path = 'Microsoft Visual C++ Toolkit 2003\\bin'
+__version_7_1toolkit_env = 'VCToolkitInstallDir'
+
+# Path to the folder containing "cl.exe" relative to the value of the
+# corresponding environment variable.
+__version_7_1toolkit_envpath = 'bin'
+#
+#
+# Auto-detect all the available msvc installations on the system.
+auto_detect_toolset_versions()
+
+# And finally trigger the actual Boost Build toolset registration.
+register_toolset()
diff --git a/tools/build/src/tools/notfile.jam b/tools/build/src/tools/notfile.jam
new file mode 100644
index 0000000000..7d0985b456
--- /dev/null
+++ b/tools/build/src/tools/notfile.jam
@@ -0,0 +1,65 @@
+# Copyright (c) 2005 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import generators ;
+import project ;
+import targets ;
+import toolset ;
+import type ;
+
+
+type.register NOTFILE_MAIN ;
+
+
+class notfile-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8)
+ : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) :
+ $(17) : $(18) : $(19) ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local action ;
+ local action-name = [ $(property-set).get <action> ] ;
+ local m = [ MATCH ^@(.*) : $(action-name) ] ;
+ if $(m)
+ {
+ action = [ new action $(sources) : $(m[1]) : $(property-set) ] ;
+ }
+ else
+ {
+ action = [ new action $(sources) : notfile.run : $(property-set) ] ;
+ }
+ local t = [ new notfile-target $(name) : $(project) : $(action) ] ;
+ return [ virtual-target.register $(t) ] ;
+ }
+}
+
+
+generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
+
+
+toolset.flags notfile.run ACTION : <action> ;
+
+
+actions run
+{
+ $(ACTION)
+}
+
+
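+# Declares a target that is not produced from files but by running an action.
+# The action is either the name of an existing rule prefixed with '@' or a
+# literal command line that is run as-is. A hypothetical example:
+#
+#   notfile say-hello : "echo hello" ;
+#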
+rule notfile ( target-name : action + : sources * : requirements * :
+ default-build * )
+{
+ targets.create-typed-target NOTFILE_MAIN : [ project.current ] :
+ $(target-name) : $(sources) : $(requirements) <action>$(action) :
+ $(default-build) ;
+}
+
+IMPORT $(__name__) : notfile : : notfile ;
diff --git a/tools/build/v2/tools/notfile.py b/tools/build/src/tools/notfile.py
index afbf68fb0e..afbf68fb0e 100644
--- a/tools/build/v2/tools/notfile.py
+++ b/tools/build/src/tools/notfile.py
diff --git a/tools/build/v2/tools/package.jam b/tools/build/src/tools/package.jam
index 198c223151..198c223151 100644
--- a/tools/build/v2/tools/package.jam
+++ b/tools/build/src/tools/package.jam
diff --git a/tools/build/v2/tools/package.py b/tools/build/src/tools/package.py
index aa081b4f49..aa081b4f49 100644
--- a/tools/build/v2/tools/package.py
+++ b/tools/build/src/tools/package.py
diff --git a/tools/build/src/tools/pathscale.jam b/tools/build/src/tools/pathscale.jam
new file mode 100644
index 0000000000..94abcf1c83
--- /dev/null
+++ b/tools/build/src/tools/pathscale.jam
@@ -0,0 +1,178 @@
+# Copyright 2006 Noel Belcourt
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import toolset : flags ;
+import feature ;
+import type ;
+import os ;
+import common ;
+import fortran ;
+
+feature.extend toolset : pathscale ;
+toolset.inherit pathscale : unix ;
+generators.override pathscale.prebuilt : builtin.prebuilt ;
+generators.override pathscale.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description are located at
+# http://www.pathscale.com/docs.html
+
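+# Typical configuration in user-config.jam (the version number and install
+# path here are hypothetical; by default pathCC is searched for in
+# /opt/ekopath/bin and on PATH):
+#
+#   using pathscale : 3.2 : /opt/ekopath/bin/pathCC ;
+#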
+rule init ( version ? : command * : options * )
+{
+ command = [ common.get-invocation-command pathscale : pathCC : $(command)
+ : /opt/ekopath/bin ] ;
+
+ # Determine the version
+ local command-string = $(command:J=" ") ;
+ if $(command)
+ {
+ version ?= [ MATCH "^([0-9.]+)"
+ : [ SHELL "$(command-string) -dumpversion" ] ] ;
+ }
+
+ local condition = [ common.check-init-parameters pathscale
+ : version $(version) ] ;
+
+ common.handle-options pathscale : $(condition) : $(command) : $(options) ;
+
+ toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
+ [ feature.get-values <fflags> : $(options) ] : unchecked ;
+
+ command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
+
+ toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ # fortran support
+ local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
+ local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+ local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
+
+ toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
+ toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pathscale.link FINDLIBS-SA : rt : unchecked ;
+
+ switch [ os.name ]
+ {
+ case SOLARIS :
+ toolset.flags pathscale.link RPATH_OPTION $(condition) : -Wl,-R, -Wl, : unchecked ;
+
+ case * : # GNU
+ toolset.flags pathscale.link RPATH_OPTION $(condition) : -Wl,-rpath= : unchecked ;
+ }
+}
+
+# Declare generators
+generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
+generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
+generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
+generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
+
+# Declare flags and actions for compilation
+flags pathscale.compile OPTIONS <optimization>off : -O0 ;
+flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
+flags pathscale.compile OPTIONS <optimization>space : -Os ;
+
+flags pathscale.compile OPTIONS <inlining>off : -noinline ;
+flags pathscale.compile OPTIONS <inlining>on : -inline ;
+flags pathscale.compile OPTIONS <inlining>full : -inline ;
+
+flags pathscale.compile OPTIONS <warnings>off : -woffall ;
+flags pathscale.compile OPTIONS <warnings>on : -Wall ;
+flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
+flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
+
+flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
+flags pathscale.compile OPTIONS <profiling>on : -pg ;
+flags pathscale.compile OPTIONS <link>shared : -fPIC ;
+flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
+flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
+
+flags pathscale.compile USER_OPTIONS <cflags> ;
+flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
+flags pathscale.compile DEFINES <define> ;
+flags pathscale.compile INCLUDES <include> ;
+
+flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
+flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+rule compile.fortran90 ( targets * : sources * : properties * )
+{
+ # The SPACE variable is used to insert spaces between targets; this is
+ # necessary.
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the compile.fortran90 action. F90 sources must be
+ # compiled in a particular order, so we serialize the build because a
+ # parallel F90 compile might fail.
+ JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
+}
+
+actions compile.fortran90
+{
+ "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
+# Strip the binary when no debugging is needed
+flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
+flags pathscale.link OPTIONS <profiling>on : -pg ;
+flags pathscale.link USER_OPTIONS <linkflags> ;
+flags pathscale.link LINKPATH <library-path> ;
+flags pathscale.link FINDLIBS-ST <find-static-library> ;
+flags pathscale.link FINDLIBS-SA <find-shared-library> ;
+flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
+flags pathscale.link LIBRARIES <library-file> ;
+flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
+flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pathscale.link RPATH <dll-path> ;
+# On gcc, there are separate options for the dll path at runtime and at
+# link time. On Solaris, there is only one: -R, so we have to use it,
+# even though it is a bad idea.
+flags pathscale.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" $(RPATH_OPTION:E=-Wl,-rpath=)"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" $(RPATH_OPTION:E=-Wl,-rpath=)"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
+}
+
+# Declare action for creating static libraries
+# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
+actions piecemeal archive
+{
+ ar $(ARFLAGS) ru "$(<)" "$(>)"
+}
diff --git a/tools/build/v2/tools/pch.jam b/tools/build/src/tools/pch.jam
index 0c6e98fac7..0c6e98fac7 100644
--- a/tools/build/v2/tools/pch.jam
+++ b/tools/build/src/tools/pch.jam
diff --git a/tools/build/v2/tools/pch.py b/tools/build/src/tools/pch.py
index 71cb7166eb..71cb7166eb 100644
--- a/tools/build/v2/tools/pch.py
+++ b/tools/build/src/tools/pch.py
diff --git a/tools/build/src/tools/pgi.jam b/tools/build/src/tools/pgi.jam
new file mode 100644
index 0000000000..90e827d5c7
--- /dev/null
+++ b/tools/build/src/tools/pgi.jam
@@ -0,0 +1,147 @@
+# Copyright Noel Belcourt 2007.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import property ;
+import generators ;
+import os ;
+import toolset : flags ;
+import feature ;
+import fortran ;
+import type ;
+import common ;
+import gcc ;
+
+feature.extend toolset : pgi ;
+toolset.inherit pgi : unix ;
+generators.override pgi.prebuilt : builtin.lib-generator ;
+generators.override pgi.searched-lib-generator : searched-lib-generator ;
+
+# Documentation and toolchain description located at
+# http://www.pgroup.com/resources/docs.htm
+
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters pgi : version $(version) ] ;
+
+ local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ;
+
+ common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
+
+ command_c = $(command_c[1--2]) $(l_command[-1]:B=pgcc) ;
+
+ toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
+
+ flags pgi.compile DEFINES $(condition) :
+ [ feature.get-values <define> : $(options) ] : unchecked ;
+
+ # IOV_MAX support
+ flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ;
+
+ # set link flags
+ flags pgi.link FINDLIBS-ST : [
+ feature.get-values <find-static-library> : $(options) ] : unchecked ;
+
+ # always link lib rt to resolve clock_gettime()
+ flags pgi.link FINDLIBS-SA : rt [
+ feature.get-values <find-shared-library> : $(options) ] : unchecked ;
+
+ gcc.init-link-flags pgi gnu $(condition) ;
+}
+
+# Declare generators
+generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
+generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
+generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
+
+# Declare flags and actions for compilation
+flags pgi.compile OPTIONS : -Kieee ;
+flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
+flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
+flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
+flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
+flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ;
+# flags pgi.compile OPTIONS <threading>multi : -mt ;
+
+flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
+flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
+
+flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
+
+flags pgi.compile OPTIONS <cflags> ;
+flags pgi.compile.c++ OPTIONS <cxxflags> ;
+flags pgi.compile DEFINES <define> ;
+flags pgi.compile INCLUDES <include> ;
+
+flags pgi.compile.fortran OPTIONS <fflags> ;
+
+actions compile.c
+{
+ "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.fortran
+{
+ "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+# Declare flags and actions for linking
+flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
+# Strip the binary when no debugging is needed
+flags pgi.link OPTIONS <debug-symbols>off : -s ;
+flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
+flags pgi.link OPTIONS <linkflags> ;
+flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
+flags pgi.link LINKPATH <library-path> ;
+flags pgi.link FINDLIBS-ST <find-static-library> ;
+flags pgi.link FINDLIBS-SA <find-shared-library> ;
+flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
+flags pgi.link LIBRARIES <library-file> ;
+flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
+flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
+flags pgi.link RPATH <dll-path> ;
+
+# On gcc, there are separate options for dll path at runtime and
+# link time. On Solaris, there's only one: -R, so we have to use
+# it, even though it's a bad idea.
+flags pgi.link RPATH <xdll-path> ;
+
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+# Reddish can only link statically and, somehow, the presence of -Bdynamic on the link line
+# marks the executable as dynamically linked even though no dynamic libraries are supplied.
+# Yod on redstorm refuses to load an executable that is dynamically linked.
+# Removing the dynamic link options should get us where we need to be on redstorm.
+# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME)
+}
+
+# Slight mods for dlls
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+}
+
+# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
+}
+
+actions updated together piecemeal pgi.archive
+{
+ ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
+}
+
diff --git a/tools/build/v2/tools/python-config.jam b/tools/build/src/tools/python-config.jam
index 40aa825bc7..40aa825bc7 100644
--- a/tools/build/v2/tools/python-config.jam
+++ b/tools/build/src/tools/python-config.jam
diff --git a/tools/build/src/tools/python.jam b/tools/build/src/tools/python.jam
new file mode 100644
index 0000000000..783b9cee81
--- /dev/null
+++ b/tools/build/src/tools/python.jam
@@ -0,0 +1,1258 @@
+# Copyright 2004 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for Python and the Boost.Python library.
+#
+# This module defines
+#
+# - a project 'python' with a target 'python' in it, that corresponds to the
+# python library
+#
+# - a main target rule 'python-extension' which can be used to build a python
+# extension.
+#
+# Extensions that use Boost.Python must explicitly link to it.
+
+import type ;
+import testing ;
+import generators ;
+import project ;
+import errors ;
+import targets ;
+import "class" : new ;
+import os ;
+import common ;
+import toolset ;
+import regex ;
+import numbers ;
+import string ;
+import property ;
+import sequence ;
+import path ;
+import feature ;
+import set ;
+import builtin ;
+
+
+# Make this module a project.
+project.initialize $(__name__) ;
+project python ;
+
+# Save the project so that if 'init' is called several times we define new
+# targets in the python project, not in whatever project we were called by.
+.project = [ project.current ] ;
+
+# Dynamic linker lib. Necessary to specify it explicitly on some platforms.
+lib dl ;
+# This contains the 'openpty' function needed by python. Again, on some systems
+# it needs to be passed to the linker explicitly.
+lib util ;
+# Python uses pthread symbols.
+lib pthread ;
+# Extra library needed by pthread on some platforms.
+lib rt ;
+
+# The pythonpath feature specifies additional elements for the PYTHONPATH
+# environment variable, set by run-pyd. For example, pythonpath can be used to
+# access Python modules that are part of the product being built, but are not
+# installed in the development system's default paths.
+feature.feature pythonpath : : free optional path ;
+
+# Initializes the Python toolset. Note that all parameters are optional.
+#
+# - version -- the version of Python to use. Should be in Major.Minor format,
+# for example 2.3. Do not include the subminor version.
+#
+# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter.
+# Alternatively, the installation prefix for Python libraries and includes. If
+# empty, will be guessed from the version, the platform's installation
+# patterns, and the python executables that can be found in PATH.
+#
+# - includes: the include path to Python headers. If empty, will be guessed.
+#
+# - libraries: the path to Python library binaries. If empty, will be guessed.
+# On MacOS/Darwin, you can also pass the path of the Python framework.
+#
+# - condition: if specified, should be a set of properties that are matched
+# against the build configuration when Boost.Build selects a Python
+# configuration to use.
+#
+# - extension-suffix: A string to append to the name of extension modules before
+# the true filename extension. Ordinarily we would just compute this based on
+# the value of the <python-debugging> feature. However ubuntu's python-dbg
+# package uses the windows convention of appending _d to debug-build extension
+# modules. We have no way of detecting ubuntu, or of probing python for the
+# "_d" requirement, and if you configure and build python using
+# --with-pydebug, you'll be using the standard *nix convention. Defaults to ""
+# (or "_d" when targeting windows and <python-debugging> is set).
+#
+# Example usage:
+#
+# using python : 2.3 ;
+# using python : 2.3 : /usr/local/bin/python ;
+#
+rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
+ : condition * : extension-suffix ? )
+{
+ project.push-current $(.project) ;
+
+ debug-message Configuring python... ;
+ for local v in version cmd-or-prefix includes libraries condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): \"$($(v))\" ;
+ }
+ }
+
+ configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
+
+ project.pop-current ;
+}
+
+# A simpler version of SHELL that grabs stderr as well as stdout, but returns
+# nothing if there was an error.
+#
+local rule shell-cmd ( cmd )
+{
+ debug-message running command '$(cmd)" 2>&1"' ;
+ x = [ SHELL $(cmd)" 2>&1" : exit-status ] ;
+ if $(x[2]) = 0
+ {
+ return $(x[1]) ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Try to identify Cygwin symlinks. Invoking such a file directly as an NT
+# executable from a native Windows build of bjam would be fatal to the bjam
+# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove
+# that those are not also symlinks. ;-)
+#
+# If a symlink is found, this rule returns non-empty; we try to extract the
+# target of the symlink from the file and return that.
+#
+# Note: 1. this only works on NT; 2. path is a native path.
+local rule is-cygwin-symlink ( path )
+{
+ local is-symlink = ;
+
+ # Look for a file with the given path having the S attribute set, as cygwin
+ # symlinks do. /-C means "do not use thousands separators in file sizes."
+ local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ;
+
+ if $(dir-listing)
+ {
+ # Escape any special regex characters in the base part of the path.
+ local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ;
+
+ # Extract the file's size from the directory listing.
+ local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ;
+
+ # If the file has a reasonably small size, look for the special symlink
+ # identification text.
+ if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ]
+ {
+ local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ;
+ if $(link[2]) != 0
+ {
+ local nl = "
+
+" ;
+ is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ;
+ if $(is-symlink)
+ {
+ is-symlink = [ *nix-path-to-native $(is-symlink) ] ;
+ is-symlink = $(is-symlink:R=$(path:D)) ;
+ }
+
+ }
+ }
+ }
+ return $(is-symlink) ;
+}
+
+
+# Append ext to each member of names that does not contain '.'.
+#
+local rule default-extension ( names * : ext * )
+{
+ local result ;
+ for local n in $(names)
+ {
+ switch $(n)
+ {
+ case *.* : result += $(n) ;
+ case * : result += $(n)$(ext) ;
+ }
+ }
+ return $(result) ;
+}
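+
+# For illustration (the names and extension below are examples only):
+# [ default-extension python foo.py : .exe ] yields "python.exe foo.py" --
+# the extension is appended only to names that do not already contain a dot.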
+
+
+# Tries to determine whether invoking "cmd" would actually attempt to launch a
+# cygwin symlink.
+#
+# Note: only works on NT.
+#
+local rule invokes-cygwin-symlink ( cmd )
+{
+ local dirs = $(cmd:D) ;
+ if ! $(dirs)
+ {
+ dirs = . [ os.executable-path ] ;
+ }
+ local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ;
+ local paths = [ GLOB $(dirs) : $(base) ] ;
+ if $(paths)
+ {
+ # Make sure we have not run into a Cygwin symlink. Invoking such a file
+ # as an NT executable would be fatal for the bjam process.
+ return [ is-cygwin-symlink $(paths[1]) ] ;
+ }
+}
+
+
+local rule debug-message ( message * )
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO notice: [python-cfg] $(message) ;
+ }
+}
+
+
+# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and
+# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result
+# found. Also accounts for the fact that on 64-bit machines, 32-bit software has
+# its own area, under SOFTWARE\Wow6432node.
+#
+local rule software-registry-value ( path : data ? )
+{
+ local result ;
+ for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE
+ {
+ for local x64elt in "" Wow6432node\\ # Account for 64-bit windows
+ {
+ if ! $(result)
+ {
+ result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ;
+ }
+ }
+
+ }
+ return $(result) ;
+}
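+
+# For illustration (the key shown is an example): a call such as
+# [ software-registry-value "Python\\PythonCore\\2.7\\InstallPath" ] checks
+# HKEY_CURRENT_USER\SOFTWARE and HKEY_LOCAL_MACHINE\SOFTWARE, including their
+# Wow6432node variants, and returns the first value found.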
+
+
+.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ;
+.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ;
+
+.working-directory = [ PWD ] ;
+.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ;
+.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ;
+
+
+local rule windows-to-cygwin-path ( path )
+{
+ # If path is rooted with a drive letter, rewrite it using the /cygdrive
+ # mountpoint.
+ local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ;
+
+ # Else if path is rooted without a drive letter, use the working directory.
+ p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ;
+
+ # Else return the path unchanged.
+ return $(p:E=$(path:T)) ;
+}
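+
+# For illustration (the path is an example): "C:\\Tools\\python" maps to
+# "/cygdrive/C/Tools/python", while a relative path is returned unchanged.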
+
+
+# :W only works in Cygwin builds of bjam. This one works on NT builds as well.
+#
+local rule cygwin-to-windows-path ( path )
+{
+ path = $(path:R="") ; # strip any trailing slash
+
+ local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ;
+ if $(drive-letter)
+ {
+ path = $(drive-letter) ;
+ }
+ else if $(path:R=/x) = $(path) # already rooted?
+ {
+ # Look for a cygwin mount that includes each head sequence in $(path).
+ local head = $(path) ;
+ local tail = "" ;
+
+ while $(head)
+ {
+ local root = [ software-registry-value
+ "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ;
+
+ if $(root)
+ {
+ path = $(tail:R=$(root)) ;
+ head = ;
+ }
+ tail = $(tail:R=$(head:D=)) ;
+
+ if $(head) = /
+ {
+ head = ;
+ }
+ else
+ {
+ head = $(head:D) ;
+ }
+ }
+ }
+ return [ regex.replace $(path:R="") / \\ ] ;
+}
+
+
+# Convert a *nix path to native.
+#
+local rule *nix-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ path = [ cygwin-to-windows-path $(path) ] ;
+ }
+ return $(path) ;
+}
+
+
+# Convert an NT path to native.
+#
+local rule windows-path-to-native ( path )
+{
+ if [ os.name ] = NT
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ windows-to-cygwin-path $(path) ] ;
+ }
+}
+
+
+# Return nonempty if path looks like a windows path, i.e. it starts with a drive
+# letter or contains backslashes.
+#
+local rule guess-windows-path ( path )
+{
+ return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ;
+}
+
+
+local rule path-to-native ( paths * )
+{
+ local result ;
+
+ for local p in $(paths)
+ {
+ if [ guess-windows-path $(p) ]
+ {
+ result += [ windows-path-to-native $(p) ] ;
+ }
+ else
+ {
+ result += [ *nix-path-to-native $(p:T) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
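+
+# For illustration (version strings are examples): split-version 2.7 is
+# accepted and returned as its major and minor parts, "2 7", while anything
+# else (e.g. 2.7.1) produces the warning above.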
+
+
+# Build a list of versions from 3.4 down to 1.5. Because bjam can not enumerate
+# registry sub-keys, we have no way of finding a version with a 2-digit minor
+# version, e.g. 2.10 -- let us hope that never happens.
+#
+.version-countdown = ;
+for local v in [ numbers.range 15 34 ]
+{
+ .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ;
+}
+
+
+local rule windows-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+ local interpreters ;
+
+ for local v in $(version)
+ {
+ local install-path = [
+ software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ;
+
+ if $(install-path)
+ {
+ install-path = [ windows-path-to-native $(install-path) ] ;
+ debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ;
+ }
+
+ interpreters += $(:E=python:R=$(install-path)) ;
+ }
+ return $(interpreters) ;
+}
+
+
+local rule darwin-installed-pythons ( version ? )
+{
+ version ?= $(.version-countdown) ;
+
+ local prefix
+ = [ GLOB /System/Library/Frameworks /Library/Frameworks
+ : Python.framework ] ;
+
+ return $(prefix)/Versions/$(version)/bin/python ;
+}
+
+
+# Assume "python-cmd" invokes a python interpreter and invoke it to extract all
+# the information we care about from its "sys" module. Returns void if
+# unsuccessful.
+#
+local rule probe ( python-cmd )
+{
+ # Avoid invoking a Cygwin symlink on NT.
+ local skip-symlink ;
+ if [ os.name ] = NT
+ {
+ skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ;
+ }
+
+ if $(skip-symlink)
+ {
+ debug-message -------------------------------------------------------------------- ;
+ debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ;
+ debug-message causing a bjam built for Windows to hang. ;
+ debug-message ;
+ debug-message If you intend to target a Cygwin build of Python, please ;
+ debug-message replace the path to the link with the path to a real executable ;
+ debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ;
+ debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ;
+ debug-message backslashes ;
+ debug-message -------------------------------------------------------------------- ;
+ }
+ else
+ {
+ # Prepare a List of Python format strings and expressions that can be
+ # used to print the constants we want from the sys module.
+
+ # We do not really want sys.version since that is a complicated string,
+ # so get the information from sys.version_info instead.
+ local format = "version=%d.%d" ;
+ local exprs = "version_info[0]" "version_info[1]" ;
+
+ for local s in $(sys-elements[2-])
+ {
+ format += $(s)=%s ;
+ exprs += $(s) ;
+ }
+
+ # Invoke Python and ask it for all those values.
+ local full-cmd =
+ $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ;
+
+ local output = [ shell-cmd $(full-cmd) ] ;
+ if $(output)
+ {
+ # Parse the output to get all the results.
+ local nl = "
+
+" ;
+ for s in $(sys-elements)
+ {
+ # These variables are expected to be declared local in the
+ # caller, so Jam's dynamic scoping will set their values there.
+ sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ;
+ }
+ }
+ return $(output) ;
+ }
+}
+
+
+# Make sure the "libraries" and "includes" variables (in an enclosing scope)
+# have a value based on the information given.
+#
+local rule compute-default-paths ( target-os : version ? : prefix ? :
+ exec-prefix ? )
+{
+ exec-prefix ?= $(prefix) ;
+
+ if $(target-os) = windows
+ {
+ # The exec_prefix is where you're supposed to look for machine-specific
+ # libraries.
+ local default-library-path = $(exec-prefix)\\libs ;
+ local default-include-path = $(:E=Include:R=$(prefix)) ;
+
+ # If the interpreter was found in a directory called "PCBuild" or
+ # "PCBuild8," assume we're looking at a Python built from the source
+ # distro, and go up one additional level to the default root. Otherwise,
+ # the default root is the directory where the interpreter was found.
+
+ # We ask Python itself what the executable path is in case of
+ # intermediate symlinks or shell scripts.
+ local executable-dir = $(sys.executable:D) ;
+
+ if [ MATCH ^(PCBuild) : $(executable-dir:D=) ]
+ {
+ debug-message "This Python appears to reside in a source distribution;" ;
+ debug-message "prepending \""$(executable-dir)"\" to default library search path" ;
+
+ default-library-path = $(executable-dir) $(default-library-path) ;
+
+ default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ;
+
+ debug-message "and \""$(default-include-path[1])"\" to default #include path" ;
+ }
+
+ libraries ?= $(default-library-path) ;
+ includes ?= $(default-include-path) ;
+ }
+ else
+ {
+ includes ?= $(prefix)/include/python$(version) ;
+
+ local lib = $(exec-prefix)/lib ;
+ libraries ?= $(lib)/python$(version)/config $(lib) ;
+ }
+}
+
+# The version of the python interpreter to use.
+feature.feature python : : propagated ;
+feature.feature python.interpreter : : free ;
+
+toolset.flags python.capture-output PYTHON : <python.interpreter> ;
+
+#
+# Support for Python configured --with-pydebug
+#
+feature.feature python-debugging : off on : propagated ;
+builtin.variant debug-python : debug : <python-debugging>on ;
+
+
+# Return a list of candidate commands to try when looking for a Python
+# interpreter. prefix is expected to be a native path.
+#
+local rule candidate-interpreters ( version ? : prefix ? : target-os )
+{
+ local bin-path = bin ;
+ if $(target-os) = windows
+ {
+ # On Windows, look in the root directory itself and, to work with the
+ # result of a build-from-source, the PCBuild directory.
+ bin-path = PCBuild8 PCBuild "" ;
+ }
+
+ bin-path = $(bin-path:R=$(prefix)) ;
+
+ if $(target-os) in windows darwin
+ {
+ return # Search:
+ $(:E=python:R=$(bin-path)) # Relative to the prefix, if any
+ python # In the PATH
+ [ $(target-os)-installed-pythons $(version) ] # Standard install locations
+ ;
+ }
+ else
+ {
+ # Search relative to the prefix, or if none supplied, in PATH.
+ local unversioned = $(:E=python:R=$(bin-path:E=)) ;
+
+ # If a version was specified, look for a python with that specific
+ # version appended before looking for one called, simply, "python"
+ return $(unversioned)$(version) $(unversioned) ;
+ }
+}
+
+
+# Compute system library dependencies for targets linking with static Python
+# libraries.
+#
+# On many systems, Python uses libraries such as pthreads or libdl. Since static
+# libraries carry no library dependency information of their own that the linker
+# can extract, these extra dependencies have to be given explicitly on the link
+# line of the client. The information about these dependencies is packaged into
+# the "python" target below.
+#
+# Even where Python itself uses pthreads, it never allows extension modules to
+# be entered concurrently (unless they explicitly give up the interpreter lock).
+# Therefore, extension modules do not need the efficiency overhead of threadsafe
+# code as produced by <threading>multi, and we handle libpthread along with
+# other libraries here. Note: this optimization is based on an assumption that
+# the compiler generates link-compatible code in both the single- and
+# multi-threaded cases, and that system libraries do not change their ABIs
+# either.
+#
+# Returns a list of usage-requirements that link to the necessary system
+# libraries.
+#
+local rule system-library-dependencies ( target-os )
+{
+ switch $(target-os)
+ {
+ case s[uo][nl]* : # solaris, sun, sunos
+ # Add a librt dependency for the gcc toolset on SunOS (the sun
+ # toolset adds -lrt unconditionally). While this appears to
+ # duplicate the logic already in gcc.jam, it does not as long as
+ # we are not forcing <threading>multi.
+
+ # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields
+ # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem
+ # to be the right list for extension modules. For example, on my
+ # installation, adding -ldl causes at least one test to fail because
+ # the library can not be found and removing it causes no failures.
+
+ # Apparently, though, we need to add -lrt for gcc.
+ return <toolset>gcc:<library>rt ;
+
+ case osf : return <library>pthread <toolset>gcc:<library>rt ;
+
+ case qnx* : return ;
+ case darwin : return ;
+ case windows : return ;
+
+ case hpux : return <library>rt ;
+ case *bsd : return <library>pthread <toolset>gcc:<library>util ;
+
+ case aix : return <library>pthread <library>dl ;
+
+ case * : return <library>pthread <library>dl
+ <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ;
+ }
+}
+
+
+# Declare a target to represent Python's library.
+#
+local rule declare-libpython-target ( version ? : requirements * )
+{
+ # Compute the representation of Python version in the name of Python's
+ # library file.
+ local lib-version = $(version) ;
+ if <target-os>windows in $(requirements)
+ {
+ local major-minor = [ split-version $(version) ] ;
+ lib-version = $(major-minor:J="") ;
+ if <python-debugging>on in $(requirements)
+ {
+ lib-version = $(lib-version)_d ;
+ }
+ }
+
+ if ! $(lib-version)
+ {
+ ECHO *** warning: could not determine Python version, which will ;
+ ECHO *** warning: probably prevent us from linking with the python ;
+ ECHO *** warning: library. Consider explicitly passing the version ;
+ ECHO *** warning: to 'using python'. ;
+ }
+
+ # Declare it.
+ lib python.lib : : <name>python$(lib-version) $(requirements) ;
+}
+
+
+# Implementation of init.
+local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
+ condition * : extension-suffix ? )
+{
+ local prefix ;
+ local exec-prefix ;
+ local cmds-to-try ;
+ local interpreter-cmd ;
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+
+ if $(target-os) = windows && <python-debugging>on in $(condition)
+ {
+ extension-suffix ?= _d ;
+ }
+ extension-suffix ?= "" ;
+
+ # Normalize and dissect any version number.
+ local major-minor ;
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+
+ local cmds-to-try ;
+
+ if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ]
+ {
+ # If the user did not pass a command, whatever we got was a prefix.
+ prefix = $(cmd-or-prefix) ;
+ cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ;
+ }
+ else
+ {
+ # Work with the command the user gave us.
+ cmds-to-try = $(cmd-or-prefix) ;
+
+ # On Windows, do not nail down the interpreter command just yet in case
+ # the user specified something that turns out to be a cygwin symlink,
+ # which could bring down bjam if we invoke it.
+ if $(target-os) != windows
+ {
+ interpreter-cmd = $(cmd-or-prefix) ;
+ }
+ }
+
+ # Values to use in case we can not really find anything in the system.
+ local fallback-cmd = $(cmds-to-try[1]) ;
+ local fallback-version ;
+
+ # Anything left to find or check?
+ if ! ( $(interpreter-cmd) && $(includes) && $(libraries) )
+ {
+ # Values to be extracted from python's sys module. These will be set by
+ # the probe rule, above, using Jam's dynamic scoping.
+ local sys-elements = version platform prefix exec_prefix executable ;
+ local sys.$(sys-elements) ;
+
+ # Compute the string Python's sys.platform needs to match. If not
+ # targeting Windows or cygwin we will assume only native builds can
+ # possibly run, so we will not require a match and we leave sys.platform
+ # blank.
+ local platform ;
+ switch $(target-os)
+ {
+ case windows : platform = win32 ;
+ case cygwin : platform = cygwin ;
+ }
+
+ while $(cmds-to-try)
+ {
+ # Pop top command.
+ local cmd = $(cmds-to-try[1]) ;
+ cmds-to-try = $(cmds-to-try[2-]) ;
+
+ debug-message Checking interpreter command \"$(cmd)\"... ;
+ if [ probe $(cmd) ]
+ {
+ fallback-version ?= $(sys.version) ;
+
+ # Check for version/platform validity.
+ for local x in version platform
+ {
+ if $($(x)) && $($(x)) != $(sys.$(x))
+ {
+ debug-message ...$(x) "mismatch (looking for"
+ $($(x)) but found $(sys.$(x))")" ;
+ cmd = ;
+ }
+ }
+
+ if $(cmd)
+ {
+ debug-message ...requested configuration matched! ;
+
+ exec-prefix = $(sys.exec_prefix) ;
+
+ compute-default-paths $(target-os) : $(sys.version) :
+ $(sys.prefix) : $(sys.exec_prefix) ;
+
+ version = $(sys.version) ;
+ interpreter-cmd ?= $(cmd) ;
+ cmds-to-try = ; # All done.
+ }
+ }
+ else
+ {
+ debug-message ...does not invoke a working interpreter ;
+ }
+ }
+ }
+
+ # Anything left to compute?
+ if $(includes) && $(libraries)
+ {
+ .configured = true ;
+ }
+ else
+ {
+ version ?= $(fallback-version) ;
+ version ?= 2.5 ;
+ exec-prefix ?= $(prefix) ;
+ compute-default-paths $(target-os) : $(version) : $(prefix:E=) ;
+ }
+
+ if ! $(interpreter-cmd)
+ {
+ fallback-cmd ?= python ;
+ debug-message No working Python interpreter found. ;
+ if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ]
+ {
+ interpreter-cmd = $(fallback-cmd) ;
+ debug-message falling back to \"$(interpreter-cmd)\" ;
+ }
+ }
+
+ includes = [ path-to-native $(includes) ] ;
+ libraries = [ path-to-native $(libraries) ] ;
+
+ debug-message "Details of this Python configuration:" ;
+ debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ;
+ debug-message " include path:" \"$(includes:E=<empty>)\" ;
+ debug-message " library path:" \"$(libraries:E=<empty>)\" ;
+ if $(target-os) = windows
+ {
+ debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ;
+ }
+
+ #
+ # End autoconfiguration sequence.
+ #
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values python ]
+ {
+ feature.extend python : $(version) ;
+ }
+ target-requirements += <python>$(version:E=default) ;
+ }
+
+ target-requirements += <target-os>$(target-os) ;
+
+ # See if we can find a framework directory on darwin.
+ local framework-directory ;
+ if $(target-os) = darwin
+ {
+ # Search upward for the framework directory.
+ local framework-directory = $(libraries[-1]) ;
+ while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
+ {
+ framework-directory = $(framework-directory:D) ;
+ }
+
+ if $(framework-directory:D=) = Python.framework
+ {
+ debug-message framework directory is \"$(framework-directory)\" ;
+ }
+ else
+ {
+ debug-message "no framework directory found; using library path" ;
+ framework-directory = ;
+ }
+ }
+
+ local dll-path = $(libraries) ;
+
+ # Make sure that we can find the Python DLL on Windows.
+ if ( $(target-os) = windows ) && $(exec-prefix)
+ {
+ dll-path += $(exec-prefix) ;
+ }
+
+ #
+ # Prepare usage requirements.
+ #
+ local usage-requirements = [ system-library-dependencies $(target-os) ] ;
+ usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ;
+ if <python-debugging>on in $(condition)
+ {
+ if $(target-os) = windows
+ {
+ # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define
+ # Py_DEBUG we will get multiple definition warnings.
+ usage-requirements += <define>_DEBUG ;
+ }
+ else
+ {
+ usage-requirements += <define>Py_DEBUG ;
+ }
+ }
+
+ # Global, but conditional, requirements to give access to the interpreter
+ # for general utilities, like other toolsets, that run Python scripts.
+ toolset.add-requirements
+ $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ;
+
+ # Register the right suffix for extensions.
+ register-extension-suffix $(extension-suffix) : $(target-requirements) ;
+
+ #
+ # Declare the "python" target. This should really be called
+ # python_for_embedding.
+ #
+
+ if $(framework-directory)
+ {
+ alias python
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <framework>$(framework-directory)
+ ;
+ }
+ else
+ {
+ declare-libpython-target $(version) : $(target-requirements) ;
+
+ # This is an evil hack. On Windows, when Python is embedded, nothing
+ # seems to set up sys.path to include Python's standard library
+ # (http://article.gmane.org/gmane.comp.python.general/544986). The evil
+ # here, aside from the workaround necessitated by Python's bug, is that:
+ #
+ # a. we're guessing the location of the python standard library from the
+ # location of pythonXX.lib
+ #
+ # b. we're hijacking the <testing.launcher> property to get the
+ # environment variable set up, and the user may want to use it for
+ # something else (e.g. launch the debugger).
+ local set-PYTHONPATH ;
+ if $(target-os) = windows
+ {
+ set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH :
+ $(libraries:D)/Lib ] ;
+ }
+
+ alias python
+ :
+ : $(target-requirements)
+ :
+ # Why python.lib must be listed here instead of along with the
+ # system libs is a mystery, but if we do not do it, on cygwin,
+ # -lpythonX.Y never appears in the command line (although it does on
+ # linux).
+ : $(usage-requirements)
+ <testing.launcher>$(set-PYTHONPATH)
+ <library-path>$(libraries) <dll-path>$(dll-path) <library>python.lib
+ ;
+ }
+
+ # On *nix, we do not want to link either Boost.Python or Python extensions
+ # to libpython, because the Python interpreter itself provides all those
+ # symbols. If we linked to libpython, we would get duplicate symbols. So
+ # declare two targets -- one for building extensions and another for
+ # embedding.
+ #
+ # Unlike most *nix systems, Mac OS X's linker does not permit undefined
+ # symbols when linking a shared library. So, we still need to link against
+ # the Python framework, even when building extensions. Note that framework
+ # builds of Python always use shared libraries, so we do not need to worry
+ # about duplicate Python symbols.
+ if $(target-os) in windows cygwin darwin
+ {
+ alias python_for_extensions : python : $(target-requirements) ;
+ }
+ # On AIX we need Python extensions and Boost.Python to import symbols from
+ # the Python interpreter. Dynamic libraries opened with dlopen() do not
+ # inherit the symbols from the Python interpreter.
+ else if $(target-os) = aix
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp
+ ;
+ }
+ else
+ {
+ alias python_for_extensions
+ :
+ : $(target-requirements)
+ :
+ : $(usage-requirements)
+ ;
+ }
+}
+
+
+rule configured ( )
+{
+ return $(.configured) ;
+}
+
+
+type.register PYTHON_EXTENSION : : SHARED_LIB ;
+
+
+local rule register-extension-suffix ( root : condition * )
+{
+ local suffix ;
+
+ switch [ feature.get-values target-os : $(condition) ]
+ {
+ case windows : suffix = pyd ;
+ case cygwin : suffix = dll ;
+ case hpux :
+ {
+ if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
+ {
+ suffix = sl ;
+ }
+ else
+ {
+ suffix = so ;
+ }
+ }
+ case * : suffix = so ;
+ }
+
+ type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
+}
+
+
+# Unset 'lib' prefix for PYTHON_EXTENSION
+type.set-generated-target-prefix PYTHON_EXTENSION : : "" ;
+
+
+rule python-extension ( name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ if [ configured ]
+ {
+ requirements += <use>/python//python_for_extensions ;
+ }
+ requirements += <suppress-import-lib>true ;
+
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
+ : [ targets.main-target-sources $(sources) : $(name) ]
+ : [ targets.main-target-requirements $(requirements) : $(project) ]
+ : [ targets.main-target-default-build $(default-build) : $(project) ]
+ ] ;
+}
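+
+# A minimal usage sketch (the target and source names are illustrative);
+# extensions using Boost.Python must also link to it explicitly, as noted at
+# the top of this module:
+#
+#   python-extension hello_ext : hello.cpp /boost/python//boost_python ;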
+
+IMPORT python : python-extension : : python-extension ;
+
+rule py2to3
+{
+ common.copy $(<) : $(>) ;
+ 2to3 $(<) ;
+}
+
+actions 2to3
+{
+ 2to3 -wn --no-diffs "$(<)"
+ 2to3 -dwn --no-diffs "$(<)"
+}
+
+
+# Support for testing.
+type.register PY : py ;
+type.register RUN_PYD_OUTPUT ;
+type.register RUN_PYD : : TEST ;
+
+
+class python-test-generator : generator
+{
+ import set ;
+
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ self.composing = true ;
+ }
+
+ rule run ( project name ? : property-set : sources * : multiple ? )
+ {
+ local pyversion = [ $(property-set).get <python> ] ;
+ local python ;
+ local other-pythons ;
+
+ # Make a new target that converts the Python source with 2to3 when running under Python 3.
+ local rule make-2to3-source ( source )
+ {
+ if $(pyversion) >= 3.0
+ {
+ local a = [ new action $(source) : python.py2to3 : $(property-set) ] ;
+ local t = [ utility.basename [ $(s).name ] ] ;
+ local p = [ new file-target $(t) : PY : $(project) : $(a) ] ;
+ return $(p) ;
+ }
+ else
+ {
+ return $(source) ;
+ }
+ }
+
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PY
+ {
+ if ! $(python)
+ {
+ # First Python source ends up on command line.
+ python = [ make-2to3-source $(s) ] ;
+
+ }
+ else
+ {
+ # Other Python sources become dependencies.
+ other-pythons += [ make-2to3-source $(s) ] ;
+ }
+ }
+ }
+
+ local extensions ;
+ for local s in $(sources)
+ {
+ if [ $(s).type ] = PYTHON_EXTENSION
+ {
+ extensions += $(s) ;
+ }
+ }
+
+ local libs ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] LIB ]
+ && ! $(s) in $(extensions)
+ {
+ libs += $(s) ;
+ }
+ }
+
+ local new-sources ;
+ for local s in $(sources)
+ {
+ if [ type.is-derived [ $(s).type ] CPP ]
+ {
+ local name = [ utility.basename [ $(s).name ] ] ;
+ if $(name) = [ utility.basename [ $(python).name ] ]
+ {
+ name = $(name)_ext ;
+ }
+ local extension = [ generators.construct $(project) $(name) :
+ PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
+
+ # The important part of the usage requirements returned by the
+ # PYTHON_EXTENSION generator is the xdll-path properties that will
+ # allow us to find the python extension at runtime.
+ property-set = [ $(property-set).add $(extension[1]) ] ;
+
+ # Ignore usage requirements. We're a top-level generator and
+ # nobody is going to use what we generate.
+ new-sources += $(extension[2-]) ;
+ }
+ }
+
+ property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
+
+ return [ construct-result $(python) $(extensions) $(new-sources) :
+ $(project) $(name) : $(property-set) ] ;
+ }
+}
+
+
+generators.register
+ [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ;
+
+generators.register-standard testing.expect-success
+ : RUN_PYD_OUTPUT : RUN_PYD ;
+
+
+# There are two different ways of spelling OS names. One is used for [ os.name ]
+# and the other is used for the <host-os> and <target-os> properties. Until that
+# is remedied, this sets up a crude mapping from the latter to the former, that
+# will work *for the purposes of cygwin/NT cross-builds only*. Could not think
+# of a better name than "translate".
+#
+.translate-os-windows = NT ;
+.translate-os-cygwin = CYGWIN ;
+local rule translate-os ( src-os )
+{
+ local x = $(.translate-os-$(src-os)) [ os.name ] ;
+ return $(x[1]) ;
+}
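+
+# For illustration: [ translate-os windows ] yields NT and
+# [ translate-os cygwin ] yields CYGWIN; any other value falls back to the
+# current [ os.name ].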
+
+
+# Extract the path to a single ".pyd" source. This is used to build the
+# PYTHONPATH for running bpl tests.
+#
+local rule pyd-pythonpath ( source )
+{
+ return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
+}
+
+
+# The flag settings on testing.capture-output do not apply to
+# python.capture-output at the moment. Redo this explicitly.
+toolset.flags python.capture-output ARGS <testing.arg> ;
+
+
+rule capture-output ( target : sources * : properties * )
+{
+ # Setup up a proper DLL search path. Here, $(sources[1]) is a python module
+ # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to
+ # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not
+ # consulted. Move it over explicitly.
+ RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
+
+ PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
+ PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
+
+ # After test is run, we remove the Python module, but not the Python script.
+ testing.capture-output $(target) : $(sources[1]) : $(properties) :
+ $(sources[2-]) ;
+
+ # PYTHONPATH is different; it will be interpreted by whichever Python is
+ # invoked and so must follow path rules for the target os. The only OSes
+ # where we can run python for other OSes currently are NT and CYGWIN so we
+ # only need to handle those cases.
+ local target-os = [ feature.get-values target-os : $(properties) ] ;
+ # Oddly, host-os is not in properties, so grab the default value.
+ local host-os = [ feature.defaults host-os ] ;
+ host-os = $(host-os:G=) ;
+ if $(target-os) != $(host-os) && $(target-os) in windows cygwin && $(host-os) in windows cygwin
+ {
+ PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path :
+ $(PYTHONPATH) ] ;
+ }
+ local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
+ local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
+ $(PYTHONPATH:J=$(path-separator)) ] ;
+ LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
+}
+
+
+rule bpl-test ( name : sources * : requirements * )
+{
+ local s ;
+ sources ?= $(name).py $(name).cpp ;
+ return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python
+ : $(requirements) : $(name) ] ;
+}
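+
+# For illustration (the test name is an example): 'bpl-test greet ;' builds and
+# runs a test from greet.py and greet.cpp, linking against
+# /boost/python//boost_python, unless explicit sources are given.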
+
+
+IMPORT $(__name__) : bpl-test : : bpl-test ;
diff --git a/tools/build/src/tools/qcc.jam b/tools/build/src/tools/qcc.jam
new file mode 100644
index 0000000000..3b3557894b
--- /dev/null
+++ b/tools/build/src/tools/qcc.jam
@@ -0,0 +1,238 @@
+# Copyright (c) 2001 David Abrahams.
+# Copyright (c) 2002-2003 Rene Rivera.
+# Copyright (c) 2002-2003 Vladimir Prus.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : new ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import property ;
+import set ;
+import toolset ;
+import type ;
+import unix ;
+
+feature.extend toolset : qcc ;
+
+toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
+generators.override builtin.lib-generator : qcc.prebuilt ;
+toolset.inherit-flags qcc : unix ;
+toolset.inherit-rules qcc : unix ;
+
+# Initializes the qcc toolset for the given version. If necessary, command may
+# be used to specify where the compiler is located. The parameter 'options' is a
+# space-delimited list of options, each one being specified as
+# <option-name>option-value. Valid option names are: cxxflags, linkflags and
+# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
+# default.
+#
+# Example:
+# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
+#
+rule init ( version ? : command * : options * )
+{
+ local condition = [ common.check-init-parameters qcc : version $(version) ] ;
+ local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
+ common.handle-options qcc : $(condition) : $(command) : $(options) ;
+}
+
+
+generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
+generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
+
+
+# Declare flags for compilation.
+toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
+
+# Declare flags and action for compilation.
+toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
+toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
+toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
+
+toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
+toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
+
+toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
+toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
+toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
+
+toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
+
+toolset.flags qcc.compile OPTIONS <cflags> ;
+toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
+toolset.flags qcc.compile DEFINES <define> ;
+toolset.flags qcc.compile INCLUDES <include> ;
+
+toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
+
+toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
+
+
+rule compile.c++
+{
+ # Here we want to raise the template-depth parameter value to something
+ # higher than the default value of 17. Note that we could do this using the
+ # feature.set-default rule but we do not want to set the default value for
+ # all toolsets as well.
+ #
+ # TODO: This 'modified default' has been inherited from some 'older Boost
+ # Build implementation' and has most likely been added to make some Boost
+ # library parts compile correctly. We should see what exactly prompted this
+ # and whether we can get around the problem more locally.
+ local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
+ if ! $(template-depth)
+ {
+ TEMPLATE_DEPTH on $(1) = 128 ;
+ }
+}
+
+actions compile.c++
+{
+ "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.c
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+actions compile.asm
+{
+ "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
+}
+
+
+# A class that checks that we do not try to use the <runtime-link>static
+# property while creating or using a shared library, since that is not
+# supported by qcc/libc.
+#
+class qcc-linking-generator : unix-linking-generator
+{
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ if <runtime-link>static in [ $(property-set).raw ]
+ {
+ local m ;
+ if [ id ] = "qcc.link.dll"
+ {
+ m = "on qcc, DLL can't be built with <runtime-link>static" ;
+ }
+ if ! $(m)
+ {
+ for local s in $(sources)
+ {
+ local type = [ $(s).type ] ;
+ if $(type) && [ type.is-derived $(type) SHARED_LIB ]
+ {
+ m = "on qcc, using DLLs together with the <runtime-link>static option is not possible" ;
+ }
+ }
+ }
+ if $(m)
+ {
+ errors.user-error $(m) : "It is suggested to use"
+ "<runtime-link>static together with <link>static." ;
+ }
+ }
+
+ return [ unix-linking-generator.generated-targets
+ $(sources) : $(property-set) : $(project) $(name) ] ;
+ }
+}
+
+generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
+ : <toolset>qcc ] ;
+
+generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
+ : SHARED_LIB : <toolset>qcc ] ;
+
+generators.override qcc.prebuilt : builtin.prebuilt ;
+generators.override qcc.searched-lib-generator : searched-lib-generator ;
+
+
+# Declare flags for linking.
+# First, the common flags.
+toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
+toolset.flags qcc.link OPTIONS <profiling>on : -p ;
+toolset.flags qcc.link OPTIONS <linkflags> ;
+toolset.flags qcc.link LINKPATH <library-path> ;
+toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
+toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
+toolset.flags qcc.link LIBRARIES <library-file> ;
+
+toolset.flags qcc.link FINDLIBS-SA : m ;
+
+# For <runtime-link>static we made sure there are no dynamic libraries in the
+# link.
+toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
+
+# Assuming this is just like with gcc.
+toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
+toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
+
+
+# Declare actions for linking.
+#
+rule link ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ # Serialize execution of the 'link' action, since running N links in
+ # parallel is just slower. For now, serialize only qcc links while it might
+ # be a good idea to serialize all links.
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+actions link bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
+
+
+# Always remove archive and start again. Here is the rationale from Andre Hentz:
+# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
+# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
+# After some debugging I traced it back to the fact that a1.o was *still* in
+# liba.a
+RM = [ common.rm-command ] ;
+if [ os.name ] = NT
+{
+ RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
+}
+
+
+# Declare action for creating static libraries. The 'r' letter means to add
+# files to the archive with replacement. Since we remove the archive, we do not
+# care about replacement, but there is no option to "add without replacement".
+# The 'c' letter suppresses warnings in case the archive does not exist yet.
+# That warning is produced only on some platforms, for whatever reason.
+#
+# Use qcc driver to create archive, see
+# http://www.qnx.com/developers/docs/6.3.2/neutrino/utilities/q/qcc.html
+actions piecemeal archive
+{
+ $(RM) "$(<)"
+ "$(CONFIG_COMMAND)" -A "$(<)" "$(>)"
+}
+
+
+rule link.dll ( targets * : sources * : properties * )
+{
+ SPACE on $(targets) = " " ;
+ JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
+}
+
+
+# Differ from 'link' above only by -shared.
+#
+actions link.dll bind LIBRARIES
+{
+ "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
+}
diff --git a/tools/build/v2/tools/qt.jam b/tools/build/src/tools/qt.jam
index 8aa7ca266c..8aa7ca266c 100644
--- a/tools/build/v2/tools/qt.jam
+++ b/tools/build/src/tools/qt.jam
diff --git a/tools/build/v2/tools/qt3.jam b/tools/build/src/tools/qt3.jam
index f82cf0ac33..f82cf0ac33 100644
--- a/tools/build/v2/tools/qt3.jam
+++ b/tools/build/src/tools/qt3.jam
diff --git a/tools/build/src/tools/qt4.jam b/tools/build/src/tools/qt4.jam
new file mode 100644
index 0000000000..a3aac61b42
--- /dev/null
+++ b/tools/build/src/tools/qt4.jam
@@ -0,0 +1,755 @@
+# Copyright 2002-2006 Vladimir Prus
+# Copyright 2005 Alo Sarv
+# Copyright 2005-2009 Juergen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Qt4 library support module
+#
+# The module attempts to auto-detect the Qt installation location from the
+# QTDIR environment variable; failing that, the installation location can be
+# passed as an argument:
+#
+# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
+#
+# The module supports code generation from .ui and .qrc files, as well as
+# running the moc preprocessor on headers. Note that you must list all your
+# moc-able headers in sources.
+#
+# Example:
+#
+# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
+# /qt4//QtGui /qt4//QtNetwork ;
+#
+# It's also possible to run moc on cpp sources:
+#
+# import cast ;
+#
+# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
+#
+# When moccing the source file myapp.cpp you need to include "myapp.moc" from
+# myapp.cpp. When moccing .h files, the output of moc will be automatically
+# compiled and linked in; you don't need any includes.
+#
+# This is consistent with Qt guidelines:
+# http://qt-project.org/doc/qt-4.8/moc.html
+#
+# The .qrc processing utility supports various command line options (see
+# http://qt-project.org/doc/qt-4.8/rcc.html for a complete list). The
+# module provides default arguments for the "output file" and
+# "initialization function name" options. Other options can be set through
+# the <rccflags> build property. E.g. if you wish the compression settings
+# to be more aggressive than the defaults, you can apply them to all .qrc
+# files like this:
+#
+# project my-qt-project :
+# requirements
+# <rccflags>"-compress 9 -threshold 10"
+# ;
+#
+# Of course, this property can also be specified on individual targets.
+
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+
+# Qt3Support control feature
+#
+# Qt4's configure defaults to building the Qt4 libraries with Qt3Support.
+# Autodetection is missing, so we default to disabling Qt3Support.
+# This prevents the user from inadvertently using a deprecated API.
+#
+# The Qt3Support library can be activated by adding
+# "<qt3support>on" to requirements
+#
+# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
+# to get warnings about deprecated Qt3 support functions and classes.
+# Files ported by the "qt3to4" conversion tool contain _tons_ of
+# warnings, so this define is not set as default.
+#
+# Todo: Detect Qt3Support from Qt's configure data.
+# Or add more auto-configuration (like python).
+feature.feature qt3support : off on : propagated link-incompatible ;
+
+# The Qt version used for requirements
+# Valid are <qt>4.4 or <qt>4.5.0
+# Auto-detection via qmake sets '<qt>major.minor.patch'
+feature.feature qt : : propagated ;
+
+# Extra flags for rcc
+feature.feature rccflags : : free ;
+
+project.initialize $(__name__) ;
+project qt ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+
+# Helper utils for easy debug output
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = TRUE ;
+}
+
+local rule debug-message ( message * )
+{
+ if $(.debug-configuration) = TRUE
+ {
+ ECHO notice: [qt4-cfg] $(message) ;
+ }
+}
+
+# Capture qmake output line by line
+local rule read-output ( content )
+{
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ return $(lines) ;
+}
+
+# Capture Qt version from qmake
+local rule check-version ( bin_prefix )
+{
+ full-cmd = $(bin_prefix)"/qmake -v" ;
+ debug-message Running '$(full-cmd)' ;
+ local output = [ SHELL $(full-cmd) ] ;
+ for line in [ read-output $(output) ]
+ {
+ # Parse the output to get all the results.
+ if [ MATCH "QMake" : $(line) ]
+ {
+ # Skip first line of output
+ }
+ else
+ {
+ temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
+ }
+ }
+ return $(temp) ;
+}
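+
+# For illustration (the reported version is an example): if 'qmake -v' prints a
+# line containing "4.8.6", this rule returns the list "4 8 6".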
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+# Initialize the QT support module.
+# Parameters:
+# - 'prefix' parameter tells where Qt is installed.
+# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
+# - 'full_inc' optional full path to Qt top-level include directory
+# - 'full_lib' optional full path to Qt library directory
+# - 'version' optional version of Qt, else autodetected via 'qmake -v'
+# - 'condition' optional requirements
+rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
+{
+ project.push-current $(.project) ;
+
+ debug-message "==== Configuring Qt ... ====" ;
+ for local v in prefix full_bin full_inc full_lib version condition
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): '$($(v))' ;
+ }
+ }
+
+ # Needed as default value
+ .prefix = $(prefix) ;
+
+    # Pre-compute the paths so that changes on reinitialization can be detected.
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ # Globally needed variables
+ .incprefix = $(inc_prefix) ;
+ .libprefix = $(lib_prefix) ;
+ .binprefix = $(bin_prefix) ;
+
+ if ! $(.initialized)
+ {
+ # Make sure this is initialised only once
+ .initialized = true ;
+
+        # Generates cpp files from header files using the "moc" tool.
+ generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
+
+ # The OBJ result type is a fake, 'H' will be really produced. See
+ # comments on the generator class, defined below the 'init' function.
+ generators.register [ new uic-generator qt4.uic : UI : OBJ :
+ <allow>qt4 ] ;
+
+ # The OBJ result type is a fake here too.
+ generators.register [ new moc-h-generator
+ qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
+
+ generators.register [ new moc-inc-generator
+ qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
+
+ # Generates .cpp files from .qrc files.
+ generators.register-standard qt4.rcc : QRC : CPP(qrc_%) : <allow>qt4 ;
+
+ # dependency scanner for wrapped files.
+ type.set-scanner QRC : qrc-scanner ;
+
+        # Save the value of the first occurring prefix.
+ .PREFIX = $(prefix) ;
+ }
+
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+ else
+ {
+ version = [ check-version $(bin_prefix) ] ;
+ if $(version)
+ {
+ version = $(version:J=.) ;
+ }
+ debug-message Detected version '$(version)' ;
+ }
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values qt ]
+ {
+ feature.extend qt : $(version) ;
+ }
+ target-requirements += <qt>$(version:E=default) ;
+ }
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ if ! $(target-os)
+ {
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+ target-requirements += <target-os>$(target-os) ;
+ }
+
+ # Build exact requirements for the tools
+ local tools-requirements = $(target-requirements:J=/) ;
+
+ debug-message "Details of this Qt configuration:" ;
+ debug-message " prefix: " '$(prefix:E=<empty>)' ;
+ debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
+ debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
+ debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
+ debug-message " target requirements:" '$(target-requirements)' ;
+ debug-message " tool requirements: " '$(tools-requirements)' ;
+
+ # setup the paths for the tools
+ toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+
+    # TODO: 2009-02-12: Better support for directories.
+    # Most likely needed are separate getters for: include, libraries, binaries and sources.
+ toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
+
+ # Test for a buildable Qt.
+ if [ glob $(.prefix)/Jamroot ]
+ {
+        .bjam-qt = true ;
+
+ # this will declare QtCore (and qtmain on <target-os>windows)
+ add-shared-library QtCore ;
+ }
+ else
+ # Setup common pre-built Qt.
+ # Special setup for QtCore on which everything depends
+ {
+ local link = [ feature.get-values link : $(condition) ] ;
+
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <threading>multi
+ <allow>qt4 ;
+
+ if $(link) in shared
+ {
+ usage-requirements += <dll-path>$(.libprefix) ;
+ }
+
+ local suffix ;
+
+ # Since Qt-4.2, debug versions on unix have to be built
+ # separately and therefore have no suffix.
+ .suffix_version = "" ;
+ .suffix_debug = "" ;
+
+ # Control flag for auto-configuration of the debug libraries.
+ # This setup requires Qt 'configure -debug-and-release'.
+ # Only available on some platforms.
+ # ToDo: 2009-02-12: Maybe throw this away and
+ # require separate setup with <variant>debug as condition.
+ .have_separate_debug = FALSE ;
+
+        # Platform-specific setup.
+ if $(target-os) in windows cygwin
+ {
+ .have_separate_debug = TRUE ;
+
+ # On NT, the shared libs have "4" suffix, and "d" suffix in debug builds.
+ if $(link) in shared
+ {
+ .suffix_version = "4" ;
+ }
+ .suffix_debug = "d" ;
+
+ # On Windows we must link against the qtmain library
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ ;
+
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain
+ $(target-requirements)
+ ;
+ }
+ else if $(target-os) = darwin
+ {
+ # On MacOS X, both debug and release libraries are available.
+ .suffix_debug = "_debug" ;
+
+ .have_separate_debug = TRUE ;
+
+ alias qtmain ;
+ }
+ else
+ {
+ alias qtmain : : $(target-requirements) ;
+ }
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_version)
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <define>QT_NO_DEBUG
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
+
+            lib QtCore : qtmain
+ : # requirements
+ <name>QtCore$(.suffix_debug)$(.suffix_version)
+ <variant>debug
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+ }
+ }
+
+    # Initialising the remaining libraries follows a canonical pattern:
+    # parameters are 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include',
+    # where 'include' is only needed for non-canonical include paths.
+ add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
+ add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
+ add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
+ add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
+
+ add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
+ : QT_QT3SUPPORT_LIB QT3_SUPPORT
+ : <qt3support>on $(target-requirements) ;
+
+ # Dummy target to enable "<qt3support>off" and
+ # "<library>/qt//Qt3Support" at the same time. This enables quick
+ # switching from one to the other for test/porting purposes.
+ alias Qt3Support : : <qt3support>off $(target-requirements) ;
+
+ # OpenGl Support
+ add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
+
+ # SVG-Support (Qt 4.1)
+ add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
+
+ # Test-Support (Qt 4.1)
+ add-shared-library QtTest : QtCore : : $(target-requirements) ;
+
+ # Qt designer library
+ add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
+ add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ;
+
+ # Support for dynamic Widgets (Qt 4.1)
+ add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
+
+ # DBus-Support (Qt 4.2)
+ add-shared-library QtDBus : QtXml : : $(target-requirements) ;
+
+ # Script-Engine (Qt 4.3)
+ add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
+
+ # Tools for the Script-Engine (Qt 4.5)
+ add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
+
+ # WebKit (Qt 4.4)
+ add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
+
+ # Phonon Multimedia (Qt 4.4)
+ add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
+
+ # Multimedia engine (Qt 4.6)
+ add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
+
+ # XmlPatterns-Engine (Qt 4.4)
+ add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
+
+ # Help-Engine (Qt 4.4)
+ add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
+    add-shared-library QtCLucene : QtCore QtSql QtXml : : $(target-requirements) ;
+
+ # QML-Engine (Qt 4.7)
+ add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ;
+
+ # AssistantClient Support
+ # Compat library removed in 4.7.0
+ # Pre-4.4 help system, use QtHelp for new programs
+ if $(version) < "4.7"
+ {
+ add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
+ }
+ debug-message "==== Configured Qt-$(version) ====" ;
+
+ project.pop-current ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+
+# This custom generator is needed because in QT4, UI files are translated only
+# into H files, and no C++ files are created. Further, the H files need not be
+# passed via MOC. The header is used only via inclusion. If we define a standard
+# UI -> H generator, Boost.Build will run MOC on H, and then compile the
+# resulting cpp. It will give a warning, since output from moc will be empty.
+#
+# This generator is declared with a UI -> OBJ signature, so it gets invoked when
+# the linking generator tries to convert sources to OBJ, but it produces a target
+# of type H. This is non-standard, but allowed. That header won't be mocced.
+#
+class uic-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(name)
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+ }
+
+ local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
+
+ # The 'ui_' prefix is to match qmake's default behavior.
+ local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+        # Since this generator will return an H target, the linking generator
+        # won't use it at all, and won't set any dependency on it. However, we
+        # need the target to be seen by bjam, so that the dependency from sources
+        # to this generated header is detected -- if jam does not know about this
+        # target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+}
+
+
+class moc-h-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target $(name) : MOC : $(project) : $(a)
+ ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+            # Since this generator will return an H target, the linking generator
+            # won't use it at all, and won't set any dependency on it. However,
+            # we need the target to be seen by bjam, so that the dependency from
+            # sources to this generated header is detected -- if jam does not
+            # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+class moc-inc-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt4.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target moc_$(name) : CPP : $(project) :
+ $(a) ] ;
+
+            # Since this generator will return an H target, the linking generator
+            # won't use it at all, and won't set any dependency on it. However,
+            # we need the target to be seen by bjam, so that the dependency from
+            # sources to this generated header is detected -- if jam does not
+            # know about this target, it won't do anything.
+ DEPENDS all : [ $(target).actualize ] ;
+
+ return [ virtual-target.register $(target) ] ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.PREFIX) ;
+}
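+
+# For instance, a Jamfile that imports this module could stage a custom
+# designer plugin into the detected Qt tree (a sketch; the target and plugin
+# names are illustrative):
+#
+#   import qt4 ;
+#   local qt-prefix = [ qt4.directory ] ;
+#   install designer-plugin : my_widget_plugin
+#       : <location>$(qt-prefix)/plugins/designer ;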
+
+# Add a shared Qt library.
+rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a static Qt library.
+rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a Qt library.
+# Static libs are unversioned, whereas shared libs have the major number as suffix.
+# Creates both release and debug versions on platforms where both are enabled by Qt configure.
+# Parameters:
+# - lib-name Qt library Name
+# - version Qt major number used as shared library suffix (QtCore4.so)
+# - depends-on other Qt libraries
+# - usage-defines those are set by qmake, so set them when using this library
+# - requirements additional requirements
+# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
+rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
+{
+ if $(.bjam-qt)
+ {
+        # Import the Qt module.
+        # Everything will be set up there.
+ alias $(lib-name)
+ : $(.prefix)//$(lib-name)
+ :
+ :
+ : <allow>qt4 ;
+ }
+ else
+ {
+ local real_include ;
+ real_include ?= $(include) ;
+ real_include ?= $(lib-name) ;
+
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(version)
+ $(requirements)
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>$(lib-name)$(.suffix_debug)$(version)
+ $(requirements)
+ <variant>debug
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+ }
+ }
+
+    # Make the library explicit so that a simple <use>qt4 will not bring in
+    # everything, and because some components like QtDBus/Phonon may not be
+    # available on all platforms.
+ explicit $(lib-name) ;
+}
+
+# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
+# The exact match is the last one.
+
+# Get <include> and <defines> from current toolset.
+flags qt4.moc INCLUDES <include> ;
+flags qt4.moc DEFINES <define> ;
+
+# need a newline for expansion of DEFINES and INCLUDES in the response file.
+.nl = "
+" ;
+
+# Processes headers to create Qt MetaObject information. Qt4-moc has its own
+# C++ parser, so pass INCLUDES and DEFINES.
+# We use a response file with one INCLUDE/DEFINE per line.
+#
+actions moc
+{
+ $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+# When moccing files for include only, we don't need -f, otherwise the generated
+# code will include the .cpp and we'll get duplicated symbols.
+#
+actions moc.inc
+{
+ $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+
+# Get extra options for RCC
+flags qt4.rcc RCC_OPTIONS <rccflags> ;
+
+# Generates source files from resource files.
+#
+actions rcc
+{
+ $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) $(RCC_OPTIONS) -o $(<)
+}
+
+
+# Generates user-interface source from .ui files.
+#
+actions uic
+{
+ $(.BINPREFIX[-1])/uic $(>) -o $(<)
+}
+
+
+# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
+# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
+# detailed documentation of the Qt Resource System.
+#
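+# For a resource entry such as (an illustrative .qrc fragment)
+#
+#   <qresource prefix="/images">
+#       <file alias="logo">images/logo.png</file>
+#   </qresource>
+#
+# the pattern below captures "images/logo.png", so the referenced file becomes
+# a dependency of the generated qrc_*.cpp and rcc is re-run when it changes.
+#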
+class qrc-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<file.*>(.*)</file>" ;
+ }
+}
+
+
+# Wrapped files are "included".
+scanner.register qrc-scanner : include ;
diff --git a/tools/build/src/tools/qt5.jam b/tools/build/src/tools/qt5.jam
new file mode 100644
index 0000000000..46a753019b
--- /dev/null
+++ b/tools/build/src/tools/qt5.jam
@@ -0,0 +1,753 @@
+# Copyright 2002-2006 Vladimir Prus
+# Copyright 2005 Alo Sarv
+# Copyright 2005-2012 Juergen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Qt5 library support module
+#
+# The module attempts to auto-detect the Qt installation location from the
+# QTDIR environment variable; failing that, the installation location can be
+# passed as an argument:
+#
+# toolset.using qt5 : /usr/local/Trolltech/Qt-5.0.0 ;
+#
+# The module supports code generation from .ui and .qrc files, as well as
+# running the moc preprocessor on headers. Note that you must list all your
+# moc-able headers in sources.
+#
+# Example:
+#
+# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
+# /qt5//QtGui /qt5//QtNetwork ;
+#
+# It's also possible to run moc on cpp sources:
+#
+# import cast ;
+#
+# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt5//QtGui ;
+#
+# When moccing the source file myapp.cpp you need to include "myapp.moc" from
+# myapp.cpp. When moccing .h files, the output of moc will be automatically
+# compiled and linked in; you don't need any includes.
+#
+# This is consistent with Qt guidelines:
+# http://qt-project.org/doc/qt-5.0/moc.html
+
+# The .qrc processing utility supports various command line options (see
+# http://qt-project.org/doc/qt-5.0/rcc.html for a complete list). The
+# module provides default arguments for the "output file" and
+# "initialization function name" options. Other options can be set through
+# the <rccflags> build property. E.g. if you wish the compression settings
+# to be more aggressive than the defaults, you can apply them to all .qrc
+# files like this:
+#
+# project my-qt-project :
+# requirements
+# <rccflags>"-compress 9 -threshold 10"
+# ;
+#
+# Of course, this property can also be specified on individual targets.
+
+
+import modules ;
+import feature ;
+import errors ;
+import type ;
+import "class" : new ;
+import generators ;
+import project ;
+import toolset : flags ;
+import os ;
+import virtual-target ;
+import scanner ;
+
+# The Qt version used for requirements.
+# Valid values are <qt5>5.0 or <qt5>5.1.0
+# Auto-detection via qmake sets '<qt5>major.minor.patch'
+feature.feature qt5 : : propagated ;
+
+# Extra flags for rcc
+# $TODO: figure out how to declare this only once
+# feature.feature rccflags : : free ;
+
+project.initialize $(__name__) ;
+project qt5 ;
+
+# Save the project so that we tolerate 'import + using' combo.
+.project = [ project.current ] ;
+
+# Helper utils for easy debug output
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = TRUE ;
+}
+
+local rule debug-message ( message * )
+{
+ if $(.debug-configuration) = TRUE
+ {
+ ECHO notice: [qt5-cfg] $(message) ;
+ }
+}
+
+# Capture qmake output line by line
+local rule read-output ( content )
+{
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ return $(lines) ;
+}
+
+# Capture Qt version from qmake
+local rule check-version ( bin_prefix )
+{
+ full-cmd = $(bin_prefix)"/qmake -v" ;
+ debug-message Running '$(full-cmd)' ;
+ local output = [ SHELL $(full-cmd) ] ;
+ for line in [ read-output $(output) ]
+ {
+ # Parse the output to get all the results.
+ if [ MATCH "QMake" : $(line) ]
+ {
+ # Skip first line of output
+ }
+ else
+ {
+ temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
+ }
+ }
+ return $(temp) ;
+}
+
+# Validate the version string and extract the major/minor part we care about.
+#
+local rule split-version ( version )
+{
+ local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
+ if ! $(major-minor[2]) || $(major-minor[3])
+ {
+ ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
+
+ # Add a zero to account for the missing digit if necessary.
+ major-minor += 0 ;
+ }
+
+ return $(major-minor[1]) $(major-minor[2]) ;
+}
+
+# Initialize the QT support module.
+# Parameters:
+# - 'prefix' parameter tells where Qt is installed.
+# - 'version' optional version of Qt, else autodetected via 'qmake -v'
+# - 'condition' optional requirements
+# - 'namespace' optional support for configure -qtnamespace
+# - 'infix' optional support for configure -qtlibinfix
+# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
+# - 'full_inc' optional full path to Qt top-level include directory
+# - 'full_lib' optional full path to Qt library directory
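+#
+# Typical usage from user-config.jam (a sketch; the paths and version are
+# illustrative and depend on the local installation):
+#
+#   using qt5 : /usr/local/Qt-5.3.2 ;
+#   using qt5 : /opt/qt5-debug : 5.3 : <variant>debug ;
+#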
+rule init ( prefix : version ? : condition * : namespace ? : infix ? : full_bin ? : full_inc ? : full_lib ? )
+{
+ project.push-current $(.project) ;
+
+ debug-message "==== Configuring Qt ... ====" ;
+ for local v in version prefix condition namespace infix full_bin full_inc full_lib
+ {
+ if $($(v))
+ {
+ debug-message " user-specified "$(v): '$($(v))' ;
+ }
+ }
+
+ # Needed as default value
+ .prefix = $(prefix) ;
+
+    # Pre-compute the paths so that changes on reinitialization can be detected.
+ local inc_prefix lib_prefix bin_prefix ;
+ if $(full_inc)
+ {
+ inc_prefix = $(full_inc) ;
+ }
+ else
+ {
+ inc_prefix = $(prefix)/include ;
+ }
+ if $(full_lib)
+ {
+ lib_prefix = $(full_lib) ;
+ }
+ else
+ {
+ lib_prefix = $(prefix)/lib ;
+ }
+ if $(full_bin)
+ {
+ bin_prefix = $(full_bin) ;
+ }
+ else
+ {
+ bin_prefix = $(prefix)/bin ;
+ }
+
+ # Globally needed variables
+ .incprefix = $(inc_prefix) ;
+ .libprefix = $(lib_prefix) ;
+ .binprefix = $(bin_prefix) ;
+
+ if ! $(.initialized)
+ {
+ # Make sure this is initialised only once
+ .initialized = true ;
+
+        # Generates cpp files from header files using the "moc" tool.
+ generators.register-standard qt5.moc : H : CPP(moc_%) : <allow>qt5 ;
+
+ # The OBJ result type is a fake, 'H' will be really produced. See
+ # comments on the generator class, defined below the 'init' function.
+ generators.register [ new uic-5-generator qt5.uic : UI : OBJ :
+ <allow>qt5 ] ;
+
+ # The OBJ result type is a fake here too.
+ generators.register [ new moc-h-5-generator
+ qt5.moc.inc : MOCCABLE5_CPP : OBJ : <allow>qt5 ] ;
+
+ generators.register [ new moc-inc-5-generator
+ qt5.moc.inc : MOCCABLE5_H : OBJ : <allow>qt5 ] ;
+
+ # Generates .cpp files from .qrc files.
+ generators.register-standard qt5.rcc : QRC : CPP(qrc_%) : <allow>qt5 ;
+
+ # dependency scanner for wrapped files.
+ type.set-scanner QRC : qrc-5-scanner ;
+
+        # Save the value of the first occurring prefix.
+ .PREFIX = $(prefix) ;
+ }
+
+ if $(version)
+ {
+ major-minor = [ split-version $(version) ] ;
+ version = $(major-minor:J=.) ;
+ }
+ else
+ {
+ version = [ check-version $(bin_prefix) ] ;
+ if $(version)
+ {
+ version = $(version:J=.) ;
+ }
+ debug-message Detected version '$(version)' ;
+ }
+
+ local target-requirements = $(condition) ;
+
+ # Add the version, if any, to the target requirements.
+ if $(version)
+ {
+ if ! $(version) in [ feature.values qt5 ]
+ {
+ feature.extend qt5 : $(version) ;
+ }
+ target-requirements += <qt5>$(version:E=default) ;
+ }
+
+ local target-os = [ feature.get-values target-os : $(condition) ] ;
+ if ! $(target-os)
+ {
+ target-os ?= [ feature.defaults target-os ] ;
+ target-os = $(target-os:G=) ;
+ target-requirements += <target-os>$(target-os) ;
+ }
+
+ # Build exact requirements for the tools
+ local tools-requirements = $(target-requirements:J=/) ;
+
+ debug-message "Details of this Qt configuration:" ;
+ debug-message " prefix: " '$(prefix:E=<empty>)' ;
+ debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
+ debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
+ debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
+ debug-message " target requirements:" '$(target-requirements)' ;
+ debug-message " tool requirements: " '$(tools-requirements)' ;
+
+ # setup the paths for the tools
+ toolset.flags qt5.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt5.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+ toolset.flags qt5.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
+
+    # TODO: 2009-02-12: Better support for directories.
+    # Most likely needed are separate getters for: include, libraries, binaries and sources.
+ toolset.flags qt5.directory .PREFIX $(tools-requirements) : $(prefix) ;
+
+ # Test for a buildable Qt.
+ if [ glob $(.prefix)/Jamroot ]
+ {
+        .bjam-qt = true ;
+
+ # this will declare QtCore (and qtmain on <target-os>windows)
+ add-shared-library QtCore ;
+ }
+ else
+ # Setup common pre-built Qt.
+ # Special setup for QtCore on which everything depends
+ {
+ local link = [ feature.get-values link : $(condition) ] ;
+
+ local usage-requirements =
+ <include>$(.incprefix)
+ <library-path>$(.libprefix)
+ <threading>multi
+ <allow>qt5 ;
+
+ if $(link) in shared
+ {
+ usage-requirements += <dll-path>$(.libprefix) ;
+ usage-requirements += <target-os>windows:<dll-path>$(.binprefix) ;
+ }
+
+ local suffix ;
+
+ # debug versions on unix have to be built
+ # separately and therefore have no suffix.
+ .infix_version = "" ;
+ .suffix_debug = "" ;
+
+ # Control flag for auto-configuration of the debug libraries.
+ # This setup requires Qt 'configure -debug-and-release'.
+ # Only available on some platforms.
+ # ToDo: 2009-02-12: Maybe throw this away and
+ # require separate setup with <variant>debug as condition.
+ .have_separate_debug = FALSE ;
+
+        # Platform-specific setup.
+ if $(target-os) in windows cygwin
+ {
+ .have_separate_debug = TRUE ;
+
+ # On NT, the libs have "d" suffix in debug builds.
+ .suffix_debug = "d" ;
+
+ .infix_version = "5" ;
+
+ # On Windows we must link against the qtmain library
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ ;
+
+ lib qtmain
+ : # sources
+ : # requirements
+ <name>qtmain
+ $(target-requirements)
+ ;
+ }
+ else if $(target-os) = darwin
+ {
+ # On MacOS X, both debug and release libraries are available.
+ .suffix_debug = "_debug" ;
+
+ .have_separate_debug = TRUE ;
+
+ alias qtmain ;
+ }
+ else
+ {
+ alias qtmain : : $(target-requirements) ;
+ .infix_version = "5" ;
+ }
+
+ lib QtCore : qtmain
+ : # requirements
+ <name>Qt$(.infix_version)Core
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <define>QT_NO_DEBUG
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
+
+            lib QtCore : qtmain
+ : # requirements
+ <name>Qt$(.infix_version)Core$(.suffix_debug)
+ <variant>debug
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_CORE_LIB
+ <include>$(.incprefix)/QtCore
+ $(usage-requirements)
+ ;
+ }
+ }
+
+ if [ glob $(.incprefix)/QtAngle ]
+ {
+ # Setup support of ANGLE builds.
+ alias QtAngle
+ : # sources
+ : # requirements
+ $(target-requirements)
+ : # default-build
+ : # usage-requirements
+ <define>QT_OPENGL_ES_2
+ <define>QT_OPENGL_ES_2_ANGLE
+ <include>$(.incprefix)/QtAngle
+ $(usage-requirements)
+ ;
+ }
+ else
+ {
+ alias QtAngle
+ : # sources
+ : # requirements
+ $(target-requirements)
+ ;
+ }
+
+        # Initialising the remaining libraries follows a canonical pattern:
+        # parameters are 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include',
+        # where 'include' is only needed for non-canonical include paths.
+ add-shared-library QtGui : QtCore QtAngle : QT_GUI_LIB : $(target-requirements) ;
+ add-shared-library QtWidgets : QtGui : QT_WIDGETS_LIB : $(target-requirements) ;
+ add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
+ add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
+ add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
+ add-shared-library QtPrintSupport : QtGui : QT_PRINTSUPPORT_LIB : $(target-requirements) ;
+ add-shared-library QtConcurrent : QtCore : QT_CONCURRENT_LIB : $(target-requirements) ;
+
+ add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
+ add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
+
+ add-shared-library QtTest : QtCore : : $(target-requirements) ;
+
+        # Qt designer library et al.
+ add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
+ add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ;
+ add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
+
+ # DBus-Support
+ add-shared-library QtDBus : QtXml : : $(target-requirements) ;
+
+ # Script-Engine and Tools
+ add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
+ add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
+
+ # WebKit
+ add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
+ add-shared-library QtWebKitWidgets : QtGui : QT_WEBKITWIDGETS_LIB : $(target-requirements) ;
+
+ # Multimedia engine
+ add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
+ add-shared-library QtMultimediaWidgets : QtMultimedia : QT_MULTIMEDIAWIDGETS_LIB : $(target-requirements) ;
+
+        # XmlPatterns engine
+ add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
+
+ # Help-Engine
+ add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
+        add-shared-library QtCLucene : QtCore QtSql QtXml : : $(target-requirements) ;
+
+ # QtQuick
+ add-shared-library QtQml : QtCore QtNetwork QtGui : QT_QML_LIB : $(target-requirements) ;
+ add-shared-library QtQuick : QtQml : QT_QUICK_LIB : $(target-requirements) ;
+ add-shared-library QtQuickParticles : QtQml : : $(target-requirements) ;
+ add-shared-library QtQuickTest : QtQml : : $(target-requirements) ;
+
+        # V8 JavaScript engine (used by the QML engine)
+ add-shared-library QtV8 : QtCore : : $(target-requirements) ;
+
+ # QML-Engine version1
+ add-shared-library QtDeclarative : QtXml : : $(target-requirements) ;
+
+ debug-message "==== Configured Qt-$(version) ====" ;
+
+ project.pop-current ;
+}
+
+rule initialized ( )
+{
+ return $(.initialized) ;
+}
+
+
+
+# This custom generator is needed because in QT5, UI files are translated only
+# into H files, and no C++ files are created. Further, the H files need not be
+# passed via MOC. The header is used only via inclusion. If we define a standard
+# UI -> H generator, Boost.Build will run MOC on H, and then compile the
+# resulting cpp. It will give a warning, since output from moc will be empty.
+#
+# This generator is declared with a UI -> OBJ signature, so it gets invoked when
+# the linking generator tries to convert sources to OBJ, but it produces a target
+# of type H. This is non-standard, but allowed. That header won't be mocced.
+#
+class uic-5-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(name)
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+ }
+
+ local a = [ new action $(sources[1]) : qt5.uic : $(property-set) ] ;
+
+ # The 'ui_' prefix is to match qmake's default behavior.
+ local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+        # Since this generator will return an H target, the linking generator
+        # won't use it at all, and won't set any dependency on it. However, we
+        # need the target to be seen by bjam, so that the dependency from sources
+        # to this generated header is detected -- if jam does not know about this
+        # target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+}
+
+
+class moc-h-5-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE5_CPP
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt5.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target $(name) : MOC : $(project) : $(a)
+ ] ;
+
+ local r = [ virtual-target.register $(target) ] ;
+
+            # Since this generator will return an H target, the linking generator
+            # won't use it at all, and won't set any dependency on it. However,
+            # we need the target to be seen by bjam, so that the dependency from
+            # sources to this generated header is detected -- if jam does not
+            # know about this target, it won't do anything.
+ DEPENDS all : [ $(r).actualize ] ;
+
+ return $(r) ;
+ }
+ }
+}
+
+
+class moc-inc-5-generator : generator
+{
+ rule __init__ ( * : * )
+ {
+ generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
+ }
+
+ rule run ( project name ? : property-set : sources * )
+ {
+ if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE5_H
+ {
+ name = [ $(sources[0]).name ] ;
+ name = $(name:B) ;
+
+ local a = [ new action $(sources[1]) : qt5.moc.inc :
+ $(property-set) ] ;
+
+ local target = [ new file-target moc_$(name) : CPP : $(project) :
+ $(a) ] ;
+
+            # Since this generator will return an H target, the linking generator
+            # won't use it at all, and won't set any dependency on it. However,
+            # we need the target to be seen by bjam, so that the dependency from
+            # sources to this generated header is detected -- if jam does not
+            # know about this target, it won't do anything.
+ DEPENDS all : [ $(target).actualize ] ;
+
+ return [ virtual-target.register $(target) ] ;
+ }
+ }
+}
+
+
+# Query the installation directory. This is needed in at least two scenarios.
+# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
+# plugins to the Qt-Tree.
+#
+rule directory
+{
+ return $(.PREFIX) ;
+}
+
+# Add a shared Qt library.
+rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.infix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a static Qt library.
+rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
+{
+ add-library $(lib-name) : $(.infix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
+}
+
+# Add a Qt library.
+# Both shared and static libs carry the Qt major number as an infix (e.g. Qt5Core).
+# Creates both release and debug versions on platforms where both are enabled by Qt configure.
+# Parameters:
+# - lib-name Qt library Name
+# - version Qt major number used as library name infix (e.g. libQt5Core.so)
+# - depends-on other Qt libraries
+# - usage-defines those are set by qmake, so set them when using this library
+# - requirements additional requirements
+# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
+rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
+{
+ if $(.bjam-qt)
+ {
+        # Import the Qt module.
+        # Everything will be set up there.
+ alias $(lib-name)
+ : $(.prefix)//$(lib-name)
+ :
+ :
+ : <allow>qt5 ;
+ }
+ else
+ {
+ local real_include ;
+ real_include ?= $(include) ;
+ real_include ?= $(lib-name) ;
+
+ local real_name = [ MATCH ^Qt(.*) : $(lib-name) ] ;
+
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>Qt$(version)$(real_name)
+ $(requirements)
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+
+ if $(.have_separate_debug) = TRUE
+ {
+ lib $(lib-name)
+ : # sources
+ $(depends-on)
+ : # requirements
+ <name>Qt$(version)$(real_name)$(.suffix_debug)
+ $(requirements)
+ <variant>debug
+ : # default-build
+ : # usage-requirements
+ <define>$(usage-defines)
+ <include>$(.incprefix)/$(real_include)
+ ;
+ }
+ }
+
+    # Make the library explicit so that a simple <use>qt5 will not bring in
+    # everything, and because some components like QtDBus/Phonon may not be
+    # available on all platforms.
+ explicit $(lib-name) ;
+}
+
+# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
+# The exact match is the last one.
+
+# Get <include> and <defines> from current toolset.
+flags qt5.moc INCLUDES <include> ;
+flags qt5.moc DEFINES <define> ;
+
+# need a newline for expansion of DEFINES and INCLUDES in the response file.
+.nl = "
+" ;
+
+# Processes headers to create Qt MetaObject information. Qt5-moc has its own
+# C++ parser, so pass INCLUDES and DEFINES.
+# We use a response file with one INCLUDE/DEFINE per line.
+#
+actions moc
+{
+ $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+# When moccing files for include only, we don't need -f, otherwise the generated
+# code will include the .cpp and we'll get duplicated symbols.
+#
+actions moc.inc
+{
+ $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
+}
+
+
+# Get extra options for RCC
+flags qt5.rcc RCC_OPTIONS <rccflags> ;
+
+# Generates source files from resource files.
+#
+actions rcc
+{
+ $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) $(RCC_OPTIONS) -o $(<)
+}
+
+
+# Generates user-interface source from .ui files.
+#
+actions uic
+{
+ $(.BINPREFIX[-1])/uic $(>) -o $(<)
+}
+
+
+# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
+# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
+# detailed documentation of the Qt Resource System.
+#
+class qrc-5-scanner : common-scanner
+{
+ rule pattern ( )
+ {
+ return "<file.*>(.*)</file>" ;
+ }
+}
+
+
+# Wrapped files are "included".
+scanner.register qrc-5-scanner : include ;
diff --git a/tools/build/v2/tools/quickbook-config.jam b/tools/build/src/tools/quickbook-config.jam
index e983a78a8e..e983a78a8e 100644
--- a/tools/build/v2/tools/quickbook-config.jam
+++ b/tools/build/src/tools/quickbook-config.jam
diff --git a/tools/build/v2/tools/quickbook.jam b/tools/build/src/tools/quickbook.jam
index 6de2d42f84..6de2d42f84 100644
--- a/tools/build/v2/tools/quickbook.jam
+++ b/tools/build/src/tools/quickbook.jam
diff --git a/tools/build/src/tools/rc.jam b/tools/build/src/tools/rc.jam
new file mode 100644
index 0000000000..de4071f294
--- /dev/null
+++ b/tools/build/src/tools/rc.jam
@@ -0,0 +1,155 @@
+# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+#
+# Copyright (c) 2006 Rene Rivera.
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import generators ;
+import feature ;
+import scanner ;
+import toolset : flags ;
+import type ;
+
+if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
+{
+ .debug-configuration = true ;
+}
+
+type.register RC : rc ;
+
+rule init ( )
+{
+}
+
+# Configures a new resource compilation command specific to a condition,
+# usually a toolset selection condition. The possible options are:
+#
+# * <rc-type>(rc|windres) - Indicates the type of options the command
+# accepts.
+#
+# Even though the arguments are all optional, the command is configured only
+# when a command, a condition and, at minimum, the rc-type option are given.
+# This way callers don't have to check auto-configuration values before
+# calling this, and still get build failures when the resource compiler
+# cannot be found.
+#
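+# A toolset would typically call this while configuring itself, e.g.
+# (a sketch; the command and condition are illustrative):
+#
+#   rc.configure windres : <toolset>gcc : <rc-type>windres ;
+#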
+rule configure ( command ? : condition ? : options * )
+{
+ local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
+
+ if $(command) && $(condition) && $(rc-type)
+ {
+ flags rc.compile.resource .RC $(condition) : $(command) ;
+ flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
+ flags rc.compile.resource DEFINES <define> ;
+ flags rc.compile.resource INCLUDES <include> ;
+ if $(.debug-configuration)
+ {
+ ECHO notice: using rc compiler :: $(condition) :: $(command) ;
+ }
+ }
+}
+
+rule compile.resource ( target : sources * : properties * )
+{
+ local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
+ rc-type ?= null ;
+ compile.resource.$(rc-type) $(target) : $(sources[1]) ;
+}
+
+actions compile.resource.rc
+{
+ "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
+}
+
+actions compile.resource.windres
+{
+ "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
+}
+
+actions quietly compile.resource.null
+{
+ as /dev/null -o "$(<)"
+}
+
+# Since it is common practice to write
+# exe hello : hello.cpp hello.rc
+# we change the name of object created from RC file, to avoid conflict with
+# hello.cpp. The reason we generate OBJ and not RES, is that gcc does not seem
+# to like RES files, but works OK with OBJ (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/5643).
+#
+# Using 'register-c-compiler' adds the build directory to INCLUDES
+generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
+
+# Register scanner for resources
+class res-scanner : scanner
+{
+ import path ;
+ import regex ;
+ import scanner ;
+ import virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+ self.includes = $(includes) ;
+ }
+
+ rule pattern ( )
+ {
+ return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
+ local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
+ local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
+
+ # Icons and other includes may be referenced as
+ #
+ # IDR_MAINFRAME ICON "res\\icon.ico"
+ #
+ # so we have to replace double backslashes with single ones.
+ res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach binding of including file to included targets. When a target is
+ # directly created from a virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on the including file (we can not get literal "." in the
+ # include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+ res = $(res:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) $(res) ;
+
+ INCLUDES $(target) : $(all) ;
+ NOCARE $(all) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) $(res) = $(b) $(self.includes:G=) ;
+
+        # Just propagate the current scanner to includes, in the hope that
+        # includes do not change scanners.
+ scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
+
+ ISFILE $(all) ;
+ }
+}
+
+scanner.register res-scanner : include ;
+type.set-scanner RC : res-scanner ;
diff --git a/tools/build/src/tools/rc.py b/tools/build/src/tools/rc.py
new file mode 100644
index 0000000000..d026480d85
--- /dev/null
+++ b/tools/build/src/tools/rc.py
@@ -0,0 +1,196 @@
+# Status: being ported by Steven Watanabe
+# Base revision: 47077
+#
+# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+#
+# Copyright (c) 2006 Rene Rivera.
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+##import type ;
+##import generators ;
+##import feature ;
+##import errors ;
+##import scanner ;
+##import toolset : flags ;
+
+import os.path
+import re
+
+import bjam
+
+from b2.build import type, toolset, generators, scanner, feature
+from b2.exceptions import AlreadyDefined
+from b2.tools import builtin
+from b2.util import regex
+from b2.build.toolset import flags
+from b2.manager import get_manager
+from b2.util import utility
+
+__debug = None
+
+def debug():
+ global __debug
+ if __debug is None:
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
+ return __debug
+
+type.register('RC', ['rc'])
+
+def init():
+ pass
+
+def configure (command = None, condition = None, options = None):
+ """
+ Configures a new resource compilation command specific to a condition,
+ usually a toolset selection condition. The possible options are:
+
+ * <rc-type>(rc|windres) - Indicates the type of options the command
+ accepts.
+
+    Even though the arguments are all optional, the command is configured only
+    when a command, a condition and, at minimum, the rc-type option are given.
+    This way callers don't have to check auto-configuration values before
+    calling this, and still get build failures when the resource compiler
+    cannot be found.
+ """
+ rc_type = feature.get_values('<rc-type>', options)
+ if rc_type:
+ assert(len(rc_type) == 1)
+ rc_type = rc_type[0]
+
+ if command and condition and rc_type:
+ flags('rc.compile.resource', '.RC', condition, command)
+ flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower())
+ flags('rc.compile.resource', 'DEFINES', [], ['<define>'])
+ flags('rc.compile.resource', 'INCLUDES', [], ['<include>'])
+ if debug():
+ print 'notice: using rc compiler ::', condition, '::', command
+
+engine = get_manager().engine()
+
+class RCAction:
+ """Class representing bjam action defined from Python.
+ The function must register the action to execute."""
+
+ def __init__(self, action_name, function):
+ self.action_name = action_name
+ self.function = function
+
+ def __call__(self, targets, sources, property_set):
+ if self.function:
+ self.function(targets, sources, property_set)
+
+# FIXME: What is the proper way to dispatch actions?
+def rc_register_action(action_name, function = None):
+ global engine
+ if engine.actions.has_key(action_name):
+ raise AlreadyDefined("Bjam action %s is already defined" % action_name)
+ engine.actions[action_name] = RCAction(action_name, function)
+
+def rc_compile_resource(targets, sources, properties):
+ rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE')
+ global engine
+ engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties)
+
+rc_register_action('rc.compile.resource', rc_compile_resource)
+
+
+engine.register_action(
+ 'rc.compile.resource.rc',
+ '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"')
+
+engine.register_action(
+ 'rc.compile.resource.windres',
+ '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"')
+
+# FIXME: this was originally declared quietly
+engine.register_action(
+ 'compile.resource.null',
+ 'as /dev/null -o "$(<)"')
+
+# Since it's a common practice to write
+# exe hello : hello.cpp hello.rc
+# we change the name of object created from RC file, to
+# avoid conflict with hello.cpp.
+# The reason we generate OBJ and not RES, is that gcc does not
+# seem to like RES files, but works OK with OBJ.
+# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
+#
+# Using 'register-c-compiler' adds the build directory to INCLUDES
+# FIXME: switch to generators
+builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], [])
+
+__angle_include_re = "#include[ ]*<([^<]+)>"
+
+# Register scanner for resources
+class ResScanner(scanner.Scanner):
+
+ def __init__(self, includes):
+        scanner.Scanner.__init__(self)
+ self.includes = includes
+
+ def pattern(self):
+ return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
+ "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
+
+ def process(self, target, matches, binding):
+ binding = binding[0]
+ angle = regex.transform(matches, "#include[ ]*<([^<]+)>")
+ quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"")
+ res = regex.transform(matches,
+ "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
+ "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])
+
+        # Icons and other includes may be referenced as
+ #
+ # IDR_MAINFRAME ICON "res\\icon.ico"
+ #
+        # so we have to replace double backslashes with single ones.
+ res = [ re.sub(r'\\\\', '/', match) for match in res if match is not None ]
+
+        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ g = bjam.call('get-target-variable', target, 'HDRGRIST')[0]
+ b = os.path.normpath(os.path.dirname(binding))
+
+        # Attach the binding of the including file to the included targets.
+        # When a target is directly created from a virtual target this extra
+        # information is unnecessary. But in other cases, it allows us to
+        # distinguish between two headers of the same name included from
+        # different places. We don't need this extra information for angle
+        # includes, since they should not depend on the including file (we
+        # can't get a literal "." in the include path).
+ g2 = g + "#" + b
+
+ g = "<" + g + ">"
+ g2 = "<" + g2 + ">"
+ angle = [g + x for x in angle]
+ quoted = [g2 + x for x in quoted]
+ res = [g2 + x for x in res]
+
+ all = angle + quoted
+
+ bjam.call('mark-included', target, all)
+
+ engine = get_manager().engine()
+
+ engine.add_dependency(target, res)
+ bjam.call('NOCARE', all + res)
+ engine.set_target_variable(angle, 'SEARCH', [utility.get_value(inc) for inc in self.includes])
+ engine.set_target_variable(quoted, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
+ engine.set_target_variable(res, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes])
+
+        # Just propagate the current scanner to includes, in the hope that
+        # includes do not change scanners.
+ get_manager().scanners().propagate(self, angle + quoted)
+
+scanner.register(ResScanner, 'include')
+type.set_scanner('RC', ResScanner)
diff --git a/tools/build/src/tools/stage.jam b/tools/build/src/tools/stage.jam
new file mode 100644
index 0000000000..8d005ae02e
--- /dev/null
+++ b/tools/build/src/tools/stage.jam
@@ -0,0 +1,519 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location.
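+#
+# A minimal example (a sketch; the target names are illustrative):
+#
+#   install dist : myapp : <location>bin ;
+#
+# copies the built 'myapp' executable into the 'bin' directory.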
+
+import "class" : new ;
+import feature ;
+import generators ;
+import path ;
+import project ;
+import targets ;
+import type ;
+import types/register ;
+import virtual-target ;
+
+
+feature.feature <install-dependencies> : off on : incidental ;
+feature.feature <install-type> : : free incidental ;
+feature.feature <install-source-root> : : free path ;
+feature.feature <so-version> : : free incidental ;
+
+# If 'on', version symlinks for shared libraries will not be created. Affects
+# Unix builds only.
+feature.feature <install-no-version-symlinks> : on : optional incidental ;
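+
+# These features are meant to be used on 'install' targets. For example, to
+# also install the shared libraries an executable depends on (a sketch; the
+# names are illustrative):
+#
+#   install dist : myapp
+#       : <install-dependencies>on <install-type>EXE <install-type>SHARED_LIB
+#         <location>dist ;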
+
+
+class install-target-class : basic-target
+{
+ import "class" : new ;
+ import feature ;
+ import generators ;
+ import path ;
+ import project ;
+ import property ;
+ import property-set ;
+ import stage ;
+ import type ;
+
+ rule __init__ ( name-and-dir : project : sources * : requirements * :
+ default-build * : usage-requirements * )
+ {
+        # The usage-requirements specified here are ignored but accepted as a
+        # parameter so that this metatarget class has the same standard
+        # instantiation interface as all the other Boost Build metatarget
+        # classes.
+ basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
+ $(requirements) : $(default-build) ;
+ }
+
+ # If <location> is not set, sets it based on the project data.
+ #
+ rule update-location ( property-set )
+ {
+ local loc = [ $(property-set).get <location> ] ;
+ if ! $(loc)
+ {
+ loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
+ property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
+ }
+
+ return $(property-set) ;
+ }
+
+ # Takes a target that is installed and a property set which is used when
+ # installing.
+ #
+ rule adjust-properties ( target : build-property-set )
+ {
+ local ps-raw ;
+ local a = [ $(target).action ] ;
+ if $(a)
+ {
+ local ps = [ $(a).properties ] ;
+ ps-raw = [ $(ps).raw ] ;
+
+ # Unless <hardcode-dll-paths>true is in properties, which can happen
+ # only if the user has explicitly requested it, nuke all <dll-path>
+ # properties.
+ if [ $(build-property-set).get <hardcode-dll-paths> ] != true
+ {
+ ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
+ }
+
+ # If any <dll-path> properties were specified for installing, add
+ # them.
+ local l = [ $(build-property-set).get <dll-path> ] ;
+ ps-raw += $(l:G=<dll-path>) ;
+
+ # Also copy <linkflags> feature from current build set, to be used
+ # for relinking.
+ local l = [ $(build-property-set).get <linkflags> ] ;
+ ps-raw += $(l:G=<linkflags>) ;
+
+ # Remove the <tag> feature on original targets.
+ ps-raw = [ property.change $(ps-raw) : <tag> ] ;
+
+            # And <location>. If the stage target has another stage target in
+            # its sources, then we shall get virtual targets with the
+            # <location> property set.
+ ps-raw = [ property.change $(ps-raw) : <location> ] ;
+ }
+
+ local d = [ $(build-property-set).get <dependency> ] ;
+ ps-raw += $(d:G=<dependency>) ;
+
+ local d = [ $(build-property-set).get <location> ] ;
+ ps-raw += $(d:G=<location>) ;
+
+ local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
+ ps-raw += $(ns:G=<install-no-version-symlinks>) ;
+
+ local d = [ $(build-property-set).get <install-source-root> ] ;
+ # Make the path absolute: we shall use it to compute relative paths and
+ # making the path absolute will help.
+ if $(d)
+ {
+ d = [ path.root $(d) [ path.pwd ] ] ;
+ ps-raw += $(d:G=<install-source-root>) ;
+ }
+
+ if $(ps-raw)
+ {
+ return [ property-set.create $(ps-raw) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ source-targets = [ targets-to-stage $(source-targets) :
+ $(property-set) ] ;
+
+ property-set = [ update-location $(property-set) ] ;
+
+ local ename = [ $(property-set).get <name> ] ;
+
+ if $(ename) && $(source-targets[2])
+ {
+ import errors : error : $(__name__) : errors.error ;
+ errors.error When <name> property is used "in" 'install', only one
+ source is allowed. ;
+ }
+
+ local result ;
+ for local i in $(source-targets)
+ {
+ local staged-targets ;
+
+ local new-properties = [ adjust-properties $(i) :
+ $(property-set) ] ;
+
+ # See if something special should be done when staging this type. It
+ # is indicated by the presence of a special "INSTALLED_" type.
+ local t = [ $(i).type ] ;
+ if $(t) && [ type.registered INSTALLED_$(t) ]
+ {
+ if $(ename)
+ {
+ import errors : error : $(__name__) : errors.error ;
+ errors.error In 'install': <name> property specified with
+ target that requires relinking. ;
+ }
+ else
+ {
+ local targets = [ generators.construct $(self.project)
+ $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
+ staged-targets += $(targets[2-]) ;
+ }
+ }
+ else
+ {
+ staged-targets = [ stage.copy-file $(self.project) $(ename) :
+ $(i) : $(new-properties) ] ;
+ }
+
+ if ! $(staged-targets)
+ {
+ import errors : error : $(__name__) : errors.error ;
+                errors.error Unable to generate staged version of
+                    [ $(i).str ] ;
+ }
+
+ for t in $(staged-targets)
+ {
+ result += [ virtual-target.register $(t) ] ;
+ }
+ }
+
+ return [ property-set.empty ] $(result) ;
+ }
+
+ # Given the list of source targets explicitly passed to 'stage', returns the
+ # list of targets which must be staged.
+ #
+ rule targets-to-stage ( source-targets * : property-set )
+ {
+ local result ;
+
+ # Traverse the dependencies, if needed.
+ if [ $(property-set).get <install-dependencies> ] = "on"
+ {
+ source-targets = [ collect-targets $(source-targets) ] ;
+ }
+
+ # Filter the target types, if needed.
+ local included-types = [ $(property-set).get <install-type> ] ;
+ for local r in $(source-targets)
+ {
+ local ty = [ $(r).type ] ;
+ if $(ty)
+ {
+ # Do not stage searched libs.
+ if $(ty) != SEARCHED_LIB
+ {
+ if $(included-types)
+ {
+ if [ include-type $(ty) : $(included-types) ]
+ {
+ result += $(r) ;
+ }
+ }
+ else
+ {
+ result += $(r) ;
+ }
+ }
+ }
+ else if ! $(included-types)
+ {
+ # Do not install typeless targets if there is an explicit list
+ # of allowed types.
+ result += $(r) ;
+ }
+ }
+
+ return $(result) ;
+ }
+
+ # CONSIDER: figure out why we can not use virtual-target.traverse here.
+ #
+ rule collect-targets ( targets * )
+ {
+ # Find subvariants
+ local s ;
+ for local t in $(targets)
+ {
+ s += [ $(t).creating-subvariant ] ;
+ }
+ s = [ sequence.unique $(s) ] ;
+
+ local result = [ new set ] ;
+ $(result).add $(targets) ;
+
+ for local i in $(s)
+ {
+ $(i).all-referenced-targets $(result) ;
+ }
+ local result2 ;
+ for local r in [ $(result).list ]
+ {
+ if $(r:G) != <use>
+ {
+ result2 += $(r:G=) ;
+ }
+ }
+ DELETE_MODULE $(result) ;
+ return [ sequence.unique $(result2) ] ;
+ }
+
+ # Returns true iff 'type' is subtype of some element of 'types-to-include'.
+ #
+ local rule include-type ( type : types-to-include * )
+ {
+ local found ;
+ while $(types-to-include) && ! $(found)
+ {
+ if [ type.is-subtype $(type) $(types-to-include[1]) ]
+ {
+ found = true ;
+ }
+ types-to-include = $(types-to-include[2-]) ;
+ }
+
+ return $(found) ;
+ }
+}
+
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
+#
+rule copy-file ( project name ? : source : properties )
+{
+ name ?= [ $(source).name ] ;
+ local relative ;
+
+ local new-a = [ new non-scanning-action $(source) : common.copy :
+ $(properties) ] ;
+ local source-root = [ $(properties).get <install-source-root> ] ;
+ if $(source-root)
+ {
+ # Get the real path of the target. We probably need to strip relative
+ # path from the target name at construction.
+ local path = [ $(source).path ] ;
+ path = [ path.root $(name:D) $(path) ] ;
+ # Make the path absolute. Otherwise, it would be hard to compute the
+ # relative path. The 'source-root' is already absolute, see the
+ # 'adjust-properties' method above.
+ path = [ path.root $(path) [ path.pwd ] ] ;
+
+ relative = [ path.relative-to $(source-root) $(path) ] ;
+ }
+
+ # Note: Using $(name:D=$(relative)) might be faster here, but then we would
+ # need to explicitly check that relative is not ".", otherwise we might get
+ # paths like '<prefix>/boost/.', try to create it and mkdir would obviously
+ # fail.
+ name = [ path.join $(relative) $(name:D=) ] ;
+
+ return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
+ $(new-a) ] ;
+}
+
+
+rule symlink ( name : project : source : properties )
+{
+ local a = [ new action $(source) : symlink.ln : $(properties) ] ;
+ local t = [ new file-target $(name) exact : [ $(source).type ] : $(project)
+ : $(a) ] ;
+ return [ virtual-target.register $(t) ] ;
+}
+
+
+rule relink-file ( project : source : property-set )
+{
+ local action = [ $(source).action ] ;
+ local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
+ "" : $(property-set) ] ;
+ return [ $(cloned-action).targets ] ;
+}
+
+
+# Declare installed version of the EXE type. Generator for this type will cause
+# relinking to the new location.
+type.register INSTALLED_EXE : : EXE ;
+
+
+class installed-exe-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-exe : EXE : INSTALLED_EXE ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ local stage-rule = stage.copy-file ;
+
+ if ! [ $(property-set).get <os> ] in NT CYGWIN &&
+ ! [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ # If dll-path properties have been changed for the stage target,
+ # relink instead of copying.
+ local a = [ $(source).action ] ;
+ local p = [ $(a).properties ] ;
+ local original = [ $(p).get <dll-path> ] ;
+ local current = [ $(property-set).get <dll-path> ] ;
+
+ if $(current) != $(original)
+ {
+ stage-rule = stage.relink-file ;
+ }
+ }
+
+ return [ $(stage-rule) $(project) : $(source) : $(property-set) ] ;
+ }
+}
+
+
+generators.register [ new installed-exe-generator ] ;
+
+
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
+type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
+
+
+class installed-shared-lib-generator : generator
+{
+ import type ;
+ import property-set ;
+ import modules ;
+ import stage ;
+
+ rule __init__ ( )
+ {
+ generator.__init__ install-shared-lib : SHARED_LIB :
+ INSTALLED_SHARED_LIB ;
+ }
+
+ rule run ( project name ? : property-set : source : multiple ? )
+ {
+ if [ $(property-set).get <os> ] in NT CYGWIN ||
+ [ $(property-set).get <target-os> ] in windows cygwin
+ {
+ local copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ return [ virtual-target.register $(copied) ] ;
+ }
+ else
+ {
+ local a = [ $(source).action ] ;
+ local copied ;
+ if ! $(a)
+ {
+ # Non-derived file, just copy.
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ local cp = [ $(a).properties ] ;
+ local current-dll-path = [ $(cp).get <dll-path> ] ;
+ local new-dll-path = [ $(property-set).get <dll-path> ] ;
+
+ if $(current-dll-path) != $(new-dll-path)
+ {
+ # Rpath changed, need to relink.
+ copied = [ stage.relink-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ else
+ {
+ copied = [ stage.copy-file $(project) : $(source) :
+ $(property-set) ] ;
+ }
+ }
+
+ copied = [ virtual-target.register $(copied) ] ;
+
+ local result = $(copied) ;
+ # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
+ # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
+ # symbolic links.
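+            # For example (an illustrative name, not taken from this code):
+            # copying libfoo.so.1.2.3 produces the symlinks libfoo.so,
+            # libfoo.so.1 and libfoo.so.1.2 below.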
+ local m = [ MATCH
+ (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$ :
+ [ $(copied).name ] ] ;
+ if $(m)
+ {
+ # Symlink without version at all is used to make
+ # -lsome_library work.
+ result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
+ $(property-set) ] ;
+
+                # Symlinks to libfoo.N and libfoo.N.M are used so that the
+                # library can be found at runtime, if libfoo.N.M.X has a
+                # soname of libfoo.N. That happens when the library makes
+                # some binary compatibility guarantees. If not, it is possible
+                # to skip those symlinks.
+ local suppress = [ $(property-set).get
+ <install-no-version-symlinks> ] ;
+
+ if $(suppress) != "on"
+ {
+ result += [ stage.symlink $(m[1]).$(m[2]) : $(project) :
+ $(copied) : $(property-set) ] ;
+ result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) :
+ $(project) : $(copied) : $(property-set) ] ;
+ }
+ }
+
+ return $(result) ;
+ }
+ }
+}
+
+generators.register [ new installed-shared-lib-generator ] ;
+
+
+# Main target rule for 'install'.
+#
+rule install ( name : sources * : requirements * : default-build * )
+{
+ local project = [ project.current ] ;
+
+ # Unless the user has explicitly asked us to hardcode dll paths, add
+ # <hardcode-dll-paths>false in requirements, to override default value.
+ if ! <hardcode-dll-paths>true in $(requirements)
+ {
+ requirements += <hardcode-dll-paths>false ;
+ }
+
+ if <tag> in $(requirements:G)
+ {
+ import errors ;
+ errors.user-error The <tag> property is not allowed for the 'install'
+ rule. ;
+ }
+
+ targets.create-metatarget install-target-class : $(project) : $(name) :
+ $(sources) : $(requirements) : $(default-build) ;
+}
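+
+# A minimal usage sketch (illustrative only; the target and source names below
+# are hypothetical, not taken from this module):
+#
+#   install dist
+#       : hello
+#       : <location>dist/bin <install-dependencies>on <install-type>EXE
+#       ;
+#
+# This would copy the 'hello' executable, together with the executables among
+# its dependencies, into the dist/bin directory.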
+
+
+IMPORT $(__name__) : install : : install ;
+IMPORT $(__name__) : install : : stage ;
diff --git a/tools/build/src/tools/stage.py b/tools/build/src/tools/stage.py
new file mode 100644
index 0000000000..8eda4e2585
--- /dev/null
+++ b/tools/build/src/tools/stage.py
@@ -0,0 +1,350 @@
+# Status: ported.
+# Base revision 64444.
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2005, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module defines the 'install' rule, used to copy a set of targets to a
+# single location.
+
+import b2.build.feature as feature
+import b2.build.targets as targets
+import b2.build.property as property
+import b2.build.property_set as property_set
+import b2.build.generators as generators
+import b2.build.virtual_target as virtual_target
+
+from b2.manager import get_manager
+from b2.util.sequence import unique
+from b2.util import bjam_signature
+
+import b2.build.type
+
+import os.path
+import re
+import types
+
+feature.feature('install-dependencies', ['off', 'on'], ['incidental'])
+feature.feature('install-type', [], ['free', 'incidental'])
+feature.feature('install-source-root', [], ['free', 'path'])
+feature.feature('so-version', [], ['free', 'incidental'])
+
+# If 'on', version symlinks for shared libraries will not be created. Affects
+# Unix builds only.
+feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental'])
+
+class InstallTargetClass(targets.BasicTarget):
+
+ def update_location(self, ps):
+ """If <location> is not set, sets it based on the project data."""
+
+ loc = ps.get('location')
+ if not loc:
+ loc = os.path.join(self.project().get('location'), self.name())
+ ps = ps.add_raw(["<location>" + loc])
+
+ return ps
+
+ def adjust_properties(self, target, build_ps):
+ a = target.action()
+ properties = []
+ if a:
+ ps = a.properties()
+ properties = ps.all()
+
+ # Unless <hardcode-dll-paths>true is in properties, which can happen
+ # only if the user has explicitly requested it, nuke all <dll-path>
+ # properties.
+
+ if build_ps.get('hardcode-dll-paths') != ['true']:
+ properties = [p for p in properties if p.feature().name() != 'dll-path']
+
+ # If any <dll-path> properties were specified for installing, add
+ # them.
+ properties.extend(build_ps.get_properties('dll-path'))
+
+ # Also copy <linkflags> feature from current build set, to be used
+ # for relinking.
+ properties.extend(build_ps.get_properties('linkflags'))
+
+ # Remove the <tag> feature on original targets.
+ # And <location>. If stage target has another stage target in
+ # sources, then we shall get virtual targets with the <location>
+ # property set.
+ properties = [p for p in properties
+ if not p.feature().name() in ['tag', 'location']]
+
+ properties.extend(build_ps.get_properties('dependency'))
+
+ properties.extend(build_ps.get_properties('location'))
+
+
+ properties.extend(build_ps.get_properties('install-no-version-symlinks'))
+
+ d = build_ps.get_properties('install-source-root')
+
+        # Make the path absolute: we will use it later to compute relative
+        # paths, which requires an absolute base path.
+ if d:
+ p = d[0]
+ properties.append(property.Property(p.feature(), os.path.abspath(p.value())))
+
+ return property_set.create(properties)
+
+
+ def construct(self, name, source_targets, ps):
+
+ source_targets = self.targets_to_stage(source_targets, ps)
+ ps = self.update_location(ps)
+
+ ename = ps.get('name')
+ if ename:
+ ename = ename[0]
+ if ename and len(source_targets) > 1:
+ get_manager().errors()("When <name> property is used in 'install', only one source is allowed")
+
+ result = []
+
+ for i in source_targets:
+
+ staged_targets = []
+ new_ps = self.adjust_properties(i, ps)
+
+ # See if something special should be done when staging this type. It
+ # is indicated by the presence of a special "INSTALLED_" type.
+ t = i.type()
+ if t and b2.build.type.registered("INSTALLED_" + t):
+
+ if ename:
+ get_manager().errors()("In 'install': <name> property specified with target that requires relinking.")
+ else:
+ (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t,
+ new_ps, [i])
+ assert isinstance(r, property_set.PropertySet)
+ staged_targets.extend(targets)
+
+ else:
+ staged_targets.append(copy_file(self.project(), ename, i, new_ps))
+
+ if not staged_targets:
+                get_manager().errors()("Unable to generate staged version of " + str(i))
+
+ result.extend(get_manager().virtual_targets().register(t) for t in staged_targets)
+
+ return (property_set.empty(), result)
+
+ def targets_to_stage(self, source_targets, ps):
+ """Given the list of source targets explicitly passed to 'stage', returns the
+ list of targets which must be staged."""
+
+ result = []
+
+ # Traverse the dependencies, if needed.
+ if ps.get('install-dependencies') == ['on']:
+ source_targets = self.collect_targets(source_targets)
+
+ # Filter the target types, if needed.
+ included_types = ps.get('install-type')
+ for r in source_targets:
+ ty = r.type()
+ if ty:
+ # Do not stage searched libs.
+ if ty != "SEARCHED_LIB":
+ if included_types:
+ if self.include_type(ty, included_types):
+ result.append(r)
+ else:
+ result.append(r)
+ elif not included_types:
+ # Don't install typeless target if there is an explicit list of
+ # allowed types.
+ result.append(r)
+
+ return result
+
+ # CONSIDER: figure out why we can not use virtual-target.traverse here.
+ #
+ def collect_targets(self, targets):
+
+ s = [t.creating_subvariant() for t in targets]
+        s = unique([x for x in s if x is not None])
+
+ result = set(targets)
+ for i in s:
+ i.all_referenced_targets(result)
+
+ result2 = []
+ for r in result:
+ if isinstance(r, property.Property):
+
+ if r.feature().name() != 'use':
+ result2.append(r.value())
+ else:
+ result2.append(r)
+ result2 = unique(result2)
+ return result2
+
+ # Returns true iff 'type' is subtype of some element of 'types-to-include'.
+ #
+ def include_type(self, type, types_to_include):
+ return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include)
+
+# Creates a copy of target 'source'. The 'properties' object should have a
+# <location> property which specifies where the target must be placed.
+#
+def copy_file(project, name, source, ps):
+
+ if not name:
+ name = source.name()
+
+ relative = ""
+
+ new_a = virtual_target.NonScanningAction([source], "common.copy", ps)
+ source_root = ps.get('install-source-root')
+ if source_root:
+ source_root = source_root[0]
+ # Get the real path of the target. We probably need to strip relative
+ # path from the target name at construction.
+ path = os.path.join(source.path(), os.path.dirname(name))
+ # Make the path absolute. Otherwise, it would be hard to compute the
+ # relative path. The 'source-root' is already absolute, see the
+ # 'adjust-properties' method above.
+ path = os.path.abspath(path)
+
+ relative = os.path.relpath(path, source_root)
+
+ name = os.path.join(relative, os.path.basename(name))
+ return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True)
+
+def symlink(name, project, source, ps):
+ a = virtual_target.Action([source], "symlink.ln", ps)
+ return virtual_target.FileTarget(name, source.type(), project, a, exact=True)
+
+def relink_file(project, source, ps):
+ action = source[0].action()
+ cloned_action = virtual_target.clone_action(action, project, "", ps)
+ targets = cloned_action.targets()
+ # We relink only on Unix, where exe or shared lib is always a single file.
+ assert len(targets) == 1
+ return targets[0]
+
+
+# Declare installed version of the EXE type. Generator for this type will cause
+# relinking to the new location.
+b2.build.type.register('INSTALLED_EXE', [], 'EXE')
+
+class InstalledExeGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE'])
+
+ def run(self, project, name, ps, source):
+
+ need_relink = False;
+
+ if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
+ # Never relink
+ pass
+ else:
+ # See if the dll-path properties are not changed during
+ # install. If so, copy, don't relink.
+ need_relink = source[0].action() and ps.get('dll-path') != source[0].action().properties().get('dll-path')
+
+ if need_relink:
+ return [relink_file(project, source, ps)]
+ else:
+ return [copy_file(project, None, source[0], ps)]
+
+generators.register(InstalledExeGenerator())
+
+
+# Installing a shared library on Unix might cause the creation of versioned
+# symbolic links.
+b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB')
+
+class InstalledSharedLibGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB'])
+
+ def run(self, project, name, ps, source):
+
+ source = source[0]
+ if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
+ copied = copy_file(project, None, source, ps)
+ return [get_manager().virtual_targets().register(copied)]
+ else:
+ a = source.action()
+ if not a:
+ # Non-derived file, just copy.
+ copied = copy_file(project, None, source, ps)
+ else:
+
+ need_relink = ps.get('dll-path') != source.action().properties().get('dll-path')
+
+ if need_relink:
+ # Rpath changed, need to relink.
+                    # relink_file expects a list of sources.
+                    copied = relink_file(project, [source], ps)
+ else:
+ copied = copy_file(project, None, source, ps)
+
+ result = [get_manager().virtual_targets().register(copied)]
+ # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
+ # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
+ # symbolic links.
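+            # For example (an illustrative name, not taken from this code):
+            # copying libfoo.so.1.2.3 produces the symlinks libfoo.so,
+            # libfoo.so.1 and libfoo.so.1.2 below.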
+ m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$",
+ copied.name());
+ if m:
+ # Symlink without version at all is used to make
+ # -lsome_library work.
+ result.append(symlink(m.group(1), project, copied, ps))
+
+                # Symlinks to libfoo.N and libfoo.N.M are used so that the
+                # library can be found at runtime, if libfoo.N.M.X has a
+                # soname of libfoo.N. That happens when the library makes some
+                # binary compatibility guarantees. If not, it is possible to
+                # skip those symlinks.
+ if ps.get('install-no-version-symlinks') != ['on']:
+
+ result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps))
+ result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3),
+ project, copied, ps))
+
+ return result
+
+generators.register(InstalledSharedLibGenerator())
+
+
+# Main target rule for 'install'.
+#
+@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
+ ["default_build", "*"], ["usage_requirements", "*"]))
+def install(name, sources, requirements=[], default_build=[], usage_requirements=[]):
+
+ requirements = requirements[:]
+ # Unless the user has explicitly asked us to hardcode dll paths, add
+ # <hardcode-dll-paths>false in requirements, to override default value.
+ if not '<hardcode-dll-paths>true' in requirements:
+ requirements.append('<hardcode-dll-paths>false')
+
+ if any(r.startswith('<tag>') for r in requirements):
+ get_manager().errors()("The <tag> property is not allowed for the 'install' rule")
+
+ from b2.manager import get_manager
+ t = get_manager().targets()
+
+ project = get_manager().projects().current()
+
+ return t.main_target_alternative(
+ InstallTargetClass(name, project,
+ t.main_target_sources(sources, name),
+ t.main_target_requirements(requirements, project),
+ t.main_target_default_build(default_build, project),
+ t.main_target_usage_requirements(usage_requirements, project)))
+
+get_manager().projects().add_rule("install", install)
+get_manager().projects().add_rule("stage", install)
+
diff --git a/tools/build/src/tools/stlport.jam b/tools/build/src/tools/stlport.jam
new file mode 100644
index 0000000000..ed0947ca56
--- /dev/null
+++ b/tools/build/src/tools/stlport.jam
@@ -0,0 +1,309 @@
+# Copyright Gennadiy Rozental
+# Copyright 2006 Rene Rivera
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# STLport is made available through the 'stdlib' feature. When stdlib=stlport
+# is specified, the default version of STLport is used, while
+# stdlib=stlport-4.5 selects a specific version.
+# The subfeature value 'hostios' means using the host compiler's iostreams.
+#
+# The specific STLport library is selected by features:
+# the <runtime-link> feature selects between the static and shared library,
+# and <runtime-debugging>on selects STLport with debug symbols and STL
+# debugging. There is no way to use STLport with debug symbols but without
+# STL debugging.
+
+# TODO: we must implement selection among different STLport installations
+# based on the toolset in use.
+# Also, finish various flags:
+#
+# This is copied from V1 toolset, "+" means "implemented"
+#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
+# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
+# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
+#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
+
+
+import feature : feature subfeature ;
+import project ;
+import "class" : new ;
+import targets ;
+import property-set ;
+import common ;
+import type ;
+
+# Make this module into a project.
+project.initialize $(__name__) ;
+project stlport ;
+
+# The problem: how do we request the use of the host compiler's iostreams?
+#
+# Solution 1: A global 'stlport-iostream' feature.
+# That is ugly; a subfeature makes more sense for an stlport-specific setting.
+# Solution 2: A subfeature with two values, one of which ("use STLport iostreams")
+# is the default.
+# The problem is that such a subfeature would appear in target paths, which is ugly.
+# Solution 3: An optional subfeature with only one value.
+
+feature.extend stdlib : stlport ;
+feature.compose <stdlib>stlport : <library>/stlport//stlport ;
+
+# STLport iostreams or native iostreams
+subfeature stdlib stlport : iostream : hostios : optional propagated ;
+
+# STLport extensions
+subfeature stdlib stlport : extensions : noext : optional propagated ;
+
+# STLport anachronisms -- NOT YET SUPPORTED
+# subfeature stdlib stlport : anachronisms : on off ;
+
+# STLport debug allocation -- NOT YET SUPPORTED
+#subfeature stdlib stlport : debug-alloc : off on ;
+
+# Declare a special target class to handle the creation of search-lib-target
+# instances for STLport. We need a special class because otherwise we would
+# have to:
+# - declare prebuilt targets for all possible toolsets, and by the time 'init'
+#   is called we do not even know the list of registered toolsets;
+# - produce nothing when host iostreams are used, which would be hard or
+#   impossible to achieve using prebuilt targets.
+
+class stlport-target-class : basic-target
+{
+ import feature project type errors generators ;
+ import set : difference ;
+
+ rule __init__ ( project : headers ? : libraries * : version ? )
+ {
+ basic-target.__init__ stlport : $(project) ;
+ self.headers = $(headers) ;
+ self.libraries = $(libraries) ;
+ self.version = $(version) ;
+ self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
+
+ local requirements ;
+ requirements += <stdlib-stlport:version>$(self.version) ;
+ self.requirements = [ property-set.create $(requirements) ] ;
+ }
+
+ rule generate ( property-set )
+ {
+ # Since this target is built with <stdlib>stlport, it will also
+ # have <library>/stlport//stlport in requirements, which will
+ # cause a loop in main target references. Remove that property
+ # manually.
+
+ property-set = [ property-set.create
+ [ difference
+ [ $(property-set).raw ] :
+ <library>/stlport//stlport
+ <stdlib>stlport
+ ]
+ ] ;
+ return [ basic-target.generate $(property-set) ] ;
+ }
+
+ rule construct ( name : source-targets * : property-set )
+ {
+ # Deduce the name of stlport library, based on toolset and
+ # debug setting.
+ local raw = [ $(property-set).raw ] ;
+ local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
+ local toolset = [ feature.get-values <toolset> : $(raw) ] ;
+
+ if $(self.version.5)
+ {
+ # Version 5.x
+
+            # STLport host IO streams are no longer supported, so we always
+            # need libraries.
+
+ # name: stlport(stl)?[dg]?(_static)?.M.R
+ local name = stlport ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name += stl ;
+ switch $(toolset)
+ {
+ case gcc* : name += g ;
+ case darwin* : name += g ;
+ case * : name += d ;
+ }
+ }
+
+ if [ feature.get-values <runtime-link> : $(raw) ] = "static"
+ {
+ name += _static ;
+ }
+
+            # Starting with version 5.2.0, the STLport static libraries no
+            # longer include a version number in their name.
+            local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(self.version) ] ;
+ if $(version.pre.5.2) || [ feature.get-values <runtime-link> :
+ $(raw) ] != "static"
+ {
+ name += .$(self.version.5) ;
+ }
+
+ name = $(name:J=) ;
+
+ if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
+ {
+ #~ Allow explicitly asking to install the STLport lib by
+ #~ referring to it directly:
+ #~ /stlport//stlport/<install-dependencies>on
+ #~ This allows for install packaging of all libs one might need
+ #~ for a standalone distribution.
+ import path : make : path-make ;
+ local runtime-link
+ = [ feature.get-values <runtime-link> : $(raw) ] ;
+ local lib-file.props
+ = [ property-set.create $(raw) <link>$(runtime-link) ] ;
+ local lib-file.prefix
+ = [ type.generated-target-prefix $(runtime-link:U)_LIB :
+ $(lib-file.props) ] ;
+ local lib-file.suffix
+ = [ type.generated-target-suffix $(runtime-link:U)_LIB :
+ $(lib-file.props) ] ;
+ lib-file.prefix
+ ?= "" "lib" ;
+ lib-file.suffix
+ ?= "" ;
+ local lib-file
+ = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
+ $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
+ lib-file
+ = [ new file-reference [ path-make $(lib-file[1]) ] :
+ $(self.project) ] ;
+ lib-file
+ = [ $(lib-file).generate "" ] ;
+ local lib-file.requirements
+ = [ targets.main-target-requirements
+ [ $(lib-file.props).raw ] <file>$(lib-file[-1])
+ : $(self.project) ] ;
+ return [ generators.construct $(self.project) $(name) : LIB :
+ $(lib-file.requirements) ] ;
+ }
+ else
+ {
+ #~ Otherwise, it is just regular library usage.
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ }
+ else if ! $(hostios) && $(toolset) != msvc
+ {
+            # We do not need libraries if host iostreams are used. For msvc,
+            # automatic library selection will be used.
+
+ # name: stlport_<toolset>(_stldebug)?
+ local name = stlport ;
+ name = $(name)_$(toolset) ;
+ if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
+ {
+ name = $(name)_stldebug ;
+ }
+
+ return [ generators.construct
+ $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
+ }
+ else
+ {
+ return [ property-set.empty ] ;
+ }
+ }
+
+ rule compute-usage-requirements ( subvariant )
+ {
+ local usage-requirements =
+ <include>$(self.headers)
+ <dll-path>$(self.libraries)
+ <library-path>$(self.libraries)
+ ;
+
+ local rproperties = [ $(subvariant).build-properties ] ;
+ # CONSIDER: should this "if" sequence be replaced with
+ # some use of 'property-map' class?
+ if [ $(rproperties).get <runtime-debugging> ] = "on"
+ {
+ usage-requirements +=
+ <define>_STLP_DEBUG=1
+ <define>_STLP_DEBUG_UNINITIALIZED=1 ;
+ }
+ if [ $(rproperties).get <runtime-link> ] = "shared"
+ {
+ usage-requirements +=
+ <define>_STLP_USE_DYNAMIC_LIB=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
+ {
+ usage-requirements +=
+ <define>_STLP_NO_EXTENSIONS=1 ;
+ }
+ if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
+ {
+ usage-requirements +=
+ <define>_STLP_NO_OWN_IOSTREAMS=1
+ <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
+ }
+ if $(self.version.5)
+ {
+ # Version 5.x
+ if [ $(rproperties).get <threading> ] = "single"
+ {
+                # Since STLport 5 does not normally support single-threaded
+                # builds, we force it into multi-threaded mode. The result is
+                # single-threaded code linked against a multi-threaded library,
+                # which is what other libraries provide anyway.
+ usage-requirements +=
+ <define>_STLP_THREADS=1 ;
+ }
+ }
+
+ return [ property-set.create $(usage-requirements) ] ;
+ }
+}
+
+rule stlport-target ( headers ? : libraries * : version ? )
+{
+ local project = [ project.current ] ;
+
+ targets.main-target-alternative
+ [ new stlport-target-class $(project) : $(headers) : $(libraries)
+ : $(version)
+ ] ;
+}
+
+local .version-subfeature-defined ;
+
+# Initialize stlport support.
+rule init (
+ version ? :
+ headers : # Location of header files
+ libraries * # Location of libraries, lib and bin subdirs of STLport.
+ )
+{
+ # FIXME: need to use common.check-init-parameters here.
+ # At the moment, that rule always tries to define subfeature
+ # of the 'toolset' feature, while we need to define subfeature
+ # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
+ if $(version)
+ {
+ if ! $(.version-subfeature-defined)
+ {
+ feature.subfeature stdlib stlport : version : : propagated ;
+ .version-subfeature-defined = true ;
+ }
+ feature.extend-subfeature stdlib stlport : version : $(version) ;
+ }
+
+ # Declare the main target for this STLPort version.
+ stlport-target $(headers) : $(libraries) : $(version) ;
+}
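+
+# A minimal configuration sketch (illustrative only; the version and paths are
+# hypothetical):
+#
+#   using stlport : 5.2 : /usr/include/stlport : /usr/lib ;
+#
+# After such an 'init' call, a build request like 'stdlib=stlport-5.2' selects
+# this installation.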
+
diff --git a/tools/build/v2/tools/sun.jam b/tools/build/src/tools/sun.jam
index 0ca927d3e4..0ca927d3e4 100644
--- a/tools/build/v2/tools/sun.jam
+++ b/tools/build/src/tools/sun.jam
diff --git a/tools/build/v2/tools/symlink.jam b/tools/build/src/tools/symlink.jam
index b33e8260c2..b33e8260c2 100644
--- a/tools/build/v2/tools/symlink.jam
+++ b/tools/build/src/tools/symlink.jam
diff --git a/tools/build/v2/tools/symlink.py b/tools/build/src/tools/symlink.py
index 6345ded6d3..6345ded6d3 100644
--- a/tools/build/v2/tools/symlink.py
+++ b/tools/build/src/tools/symlink.py
diff --git a/tools/build/src/tools/testing-aux.jam b/tools/build/src/tools/testing-aux.jam
new file mode 100644
index 0000000000..64ba003874
--- /dev/null
+++ b/tools/build/src/tools/testing-aux.jam
@@ -0,0 +1,220 @@
+# This module is imported by testing.py. The definitions here are
+# too tricky to do in Python
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+    **passed** $(target) : $(dependency) ;
+}
+IMPORT testing : expect-success : : testing.expect-success ;
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+IMPORT testing : expect-failure : : testing.expect-failure ;
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+# Runs the executable 'source' and stores its stdout in the file 'target'.
+# Unless the --preserve-test-targets command line option has been specified,
+# removes the executable. The 'targets-to-remove' parameter controls what
+# should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+ # The INCLUDES kill a warning about independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into dependency graph, so we must tell jam it is
+ # OK if it cannot find the target or updating rule.
+ NOCARE $(target:S=.output) ;
+
+    # This has a two-fold effect. First, it adds input files to the dependency
+ # graph, preventing a warning. Second, it causes input files to be bound
+ # before target is created. Therefore, they are bound using SEARCH setting
+ # on them and not LOCATE setting of $(target), as in other case (due to jam
+ # bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
+ {
+ TEMPORARY $(targets-to-remove) ;
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we do not detect a case where removing files
+ # fails, but it is not likely to happen.
+ RmTemps $(target) : $(targets-to-remove) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .STATUS_0 = "%status% EQU 0 (" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ .VERBOSE = "%verbose% EQU 1 (" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .STATUS_0 = "test $status -eq 0 ; then" ;
+ .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ .VERBOSE = "test $verbose -eq 1 ; then" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+IMPORT testing : capture-output : : testing.capture-output ;
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)"
+}
+
+# Note that this rule may be called multiple times for a single target in case
+# there are multiple actions operating on the same target in sequence. One such
+# example is msvc exe targets, first created by a linker action and then
+# updated with an embedded manifest file by a separate action.
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+
+ # We need the following variables because attempting to perform such
+ # variable expansion in actions would not work due to quotes getting treated
+ # as regular characters.
+ USER_TIME_SECONDS on $(target) += $(src-string)$(user)" seconds" ;
+ SYSTEM_TIME_SECONDS on $(target) += $(src-string)$(system)" seconds" ;
+}
+
+# Calling this rule requests that Boost.Build time how long it takes to build
+# the 'sources' targets and display the results both on the standard output
+# and in the 'target' file.
+#
+rule time ( target : sources + : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(sources) = testing.record-time $(target) ;
+
+ # Make sure the sources get rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(sources) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME_SECONDS) > "$(<)"
+ echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)"
+}
diff --git a/tools/build/src/tools/testing.jam b/tools/build/src/tools/testing.jam
new file mode 100644
index 0000000000..e62c378ab9
--- /dev/null
+++ b/tools/build/src/tools/testing.jam
@@ -0,0 +1,673 @@
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2014 Rene Rivera
+# Copyright 2014 Microsoft Corporation
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements a regression testing framework. It declares a number
+# of main target rules which perform some action and, if the results are OK,
+# create an output file.
+#
+# The exact list of rules is:
+# 'compile' -- creates .test file if compilation of sources was
+# successful.
+# 'compile-fail' -- creates .test file if compilation of sources failed.
+# 'run' -- creates .test file if running of the executable produced from
+# sources was successful. Also leaves behind .output file
+# with the output from program run.
+# 'run-fail' -- same as above, but .test file is created if running fails.
+#
+# In all cases, presence of .test file is an indication that the test passed.
+# For more convenient reporting, you might want to use C++ Boost regression
+# testing utilities (see http://www.boost.org/more/regression.html).
+#
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
+
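+# A minimal usage sketch (illustrative only; the file names are hypothetical):
+#
+#   run hello_test.cpp ;
+#   compile-fail must_not_compile.cpp ;
+#
+# Each rule creates its .test file only when the expected outcome is observed.
+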
+# Things to do:
+# - Teach compiler_status to handle Jamfile.v2.
+# Notes:
+# - <no-warn> is not implemented, since it is Como-specific, and it is not
+# clear how to implement it
+# - std::locale-support is not implemented (it is used in one test).
+
+
+import alias ;
+import "class" ;
+import common ;
+import errors ;
+import feature ;
+import generators ;
+import os ;
+import path ;
+import project ;
+import property ;
+import property-set ;
+import regex ;
+import sequence ;
+import targets ;
+import toolset ;
+import type ;
+import virtual-target ;
+
+
+rule init ( )
+{
+}
+
+
+# Feature controlling the command used to launch test programs.
+feature.feature testing.launcher : : free optional ;
+
+feature.feature test-info : : free incidental ;
+feature.feature testing.arg : : free incidental ;
+feature.feature testing.input-file : : free dependency ;
+
+feature.feature preserve-test-targets : on off : incidental propagated ;
+
+# Feature to control whether executable binaries are run as part of test.
+# This can be used to just compile test cases in cross compilation situations.
+feature.feature testing.execute : on off : incidental propagated ;
+feature.set-default testing.execute : on ;
+
+# Register target types.
+type.register TEST : test ;
+type.register COMPILE : : TEST ;
+type.register COMPILE_FAIL : : TEST ;
+type.register RUN_OUTPUT : run ;
+type.register RUN : : TEST ;
+type.register RUN_FAIL : : TEST ;
+type.register LINK_FAIL : : TEST ;
+type.register LINK : : TEST ;
+type.register UNIT_TEST : passed : TEST ;
+
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: default
+# name of main target, so write our own versions.
+
+# Helper rule. Create a test target, using basename of first source if no target
+# name is explicitly passed. Remembers the created target in a global variable.
+#
+rule make-test ( target-type : sources + : requirements * : target-name ? )
+{
+ target-name ?= $(sources[1]:D=:S=) ;
+
+ # Having periods (".") in the target name is problematic because the typed
+ # generator will strip the suffix and use the bare name for the file
+ # targets. Even though the location-prefix averts problems most times it
+ # does not prevent ambiguity issues when referring to the test targets. For
+ # example when using the XML log output. So we rename the target to remove
+ # the periods, and provide an alias for users.
+ local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
+
+ local project = [ project.current ] ;
+    # The <location-prefix> forces the build system to generate paths in the
+    # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
+    # post-processing tools to work.
+ local t = [ targets.create-typed-target [ type.type-from-rule-name
+ $(target-type) ] : $(project) : $(real-name) : $(sources) :
+ $(requirements) <location-prefix>$(real-name).test ] ;
+
+ # The alias to the real target, per period replacement above.
+ if $(real-name) != $(target-name)
+ {
+ alias $(target-name) : $(t) ;
+ }
+
+    # Remember the test (for --dump-tests). A better way would be to collect
+    # all tests for a given project, but this has some technical problems: e.g.
+    # we cannot perform this dump from a Jamfile since projects referred to by
+    # 'build-project' are not available until the whole Jamfile has been
+    # loaded.
+ .all-tests += $(t) ;
+ return $(t) ;
+}
+
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp
+# file and a library target works.
+#
+rule compile ( sources + : requirements * : target-name ? )
+{
+ return [ make-test compile : $(sources) : $(requirements) : $(target-name) ]
+ ;
+}
+
+
+rule compile-fail ( sources + : requirements * : target-name ? )
+{
+ return [ make-test compile-fail : $(sources) : $(requirements) :
+ $(target-name) ] ;
+}
+
+
+rule link ( sources + : requirements * : target-name ? )
+{
+ return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule link-fail ( sources + : requirements * : target-name ? )
+{
+ return [ make-test link-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+rule handle-input-files ( input-files * )
+{
+ if $(input-files[2])
+ {
+ # Check that sorting made when creating property-set instance will not
+ # change the ordering.
+ if [ sequence.insertion-sort $(input-files) ] != $(input-files)
+ {
+ errors.user-error "Names of input files must be sorted alphabetically"
+ : "due to internal limitations" ;
+ }
+ }
+ return <testing.input-file>$(input-files) ;
+}
+
+
+rule run ( sources + : args * : input-files * : requirements * : target-name ? :
+ default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
+}
+
+
+rule run-fail ( sources + : args * : input-files * : requirements * :
+ target-name ? : default-build * )
+{
+ requirements += <testing.arg>$(args:J=" ") ;
+ requirements += [ handle-input-files $(input-files) ] ;
+ return [ make-test run-fail : $(sources) : $(requirements) : $(target-name)
+ ] ;
+}
+
+
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
+IMPORT : alias : : test-suite ;
+
+
+# For all recorded test targets, which are typed targets with a type derived
+# from 'TEST', produce some interesting information.
+#
+rule dump-tests
+{
+ for local t in $(.all-tests)
+ {
+ dump-test $(t) ;
+ }
+}
+
+
+# Given a project location in normalized form (slashes are forward), compute the
+# name of the Boost library.
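+# For example (illustrative), a location ending in /libs/regex/test would
+# yield "regex".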
+#
+local rule get-library-name ( path )
+{
+ # Path is in normalized form, so all slashes are forward.
+ local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
+ local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
+ local match3 = [ MATCH (/status$) : $(path) ] ;
+
+ if $(match1) { return $(match1[2]) ; }
+ else if $(match2) { return $(match2[2]) ; }
+ else if $(match3) { return "" ; }
+ else if --dump-tests in [ modules.peek : ARGV ]
+ {
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
+ return $(path) ;
+ }
+}
+
+
+# Was an XML dump requested?
+.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
+
+
+# Takes a target (instance of 'basic-target') and prints
+# - its type
+# - its name
+# - comments specified via the <test-info> property
+# - relative location of all source from the project root.
+#
+rule dump-test ( target )
+{
+ local type = [ $(target).type ] ;
+ local name = [ $(target).name ] ;
+ local project = [ $(target).project ] ;
+
+ local project-root = [ $(project).get project-root ] ;
+ local library = [ get-library-name [ path.root [ $(project).get location ]
+ [ path.pwd ] ] ] ;
+ if $(library)
+ {
+ name = $(library)/$(name) ;
+ }
+
+ local sources = [ $(target).sources ] ;
+ local source-files ;
+ for local s in $(sources)
+ {
+ if [ class.is-a $(s) : file-reference ]
+ {
+ local location = [ path.root [ path.root [ $(s).name ]
+ [ $(s).location ] ] [ path.pwd ] ] ;
+
+ source-files += [ path.relative-to [ path.root $(project-root)
+ [ path.pwd ] ] $(location) ] ;
+ }
+ }
+
+ local target-name = [ $(project).get location ] // [ $(target).name ] .test
+ ;
+ target-name = $(target-name:J=) ;
+
+ local r = [ $(target).requirements ] ;
+ # Extract values of the <test-info> feature.
+ local test-info = [ $(r).get <test-info> ] ;
+
+ # If the user requested XML output on the command-line, add the test info to
+ # that XML file rather than dumping them to stdout.
+ if $(.out-xml)
+ {
+ local nl = "
+" ;
+ .contents on $(.out-xml) +=
+ "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
+ "$(nl) <target><![CDATA[$(target-name)]]></target>"
+ "$(nl) <info><![CDATA[$(test-info)]]></info>"
+ "$(nl) <source><![CDATA[$(source-files)]]></source>"
+ "$(nl) </test>"
+ ;
+ }
+ else
+ {
+ # Format them into a single string of quoted strings.
+ test-info = \"$(test-info:J=\"\ \")\" ;
+
+ ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":"
+ \"$(source-files)\" ;
+ }
+}
+
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register-standard testing.expect-success : OBJ : COMPILE ;
+generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
+generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
+generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
+generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
+generators.register-standard testing.expect-success : EXE : LINK ;
+
+# Generator which runs an EXE and captures output.
+generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
+
+# Generator which creates a target if sources run successfully. Differs from RUN
+# in that run output is not captured. The reason why it exists is that the 'run'
+# rule is much better for automated testing, but is not user-friendly (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
+generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
+
+
+# The action rules called by generators.
+
+# Causes the 'target' to exist after bjam invocation if and only if all the
+# dependencies were successfully built.
+#
+rule expect-success ( target : dependency + : requirements * )
+{
+ **passed** $(target) : $(dependency) ;
+}
+
+
+# Causes the 'target' to exist after bjam invocation if and only if some of
+# the dependencies were not successfully built.
+#
+rule expect-failure ( target : dependency + : properties * )
+{
+ local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
+ local marker = $(dependency:G=$(grist)*fail) ;
+ (failed-as-expected) $(marker) ;
+ FAIL_EXPECTED $(dependency) ;
+ LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
+ RMOLD $(marker) ;
+ DEPENDS $(marker) : $(dependency) ;
+ DEPENDS $(target) : $(marker) ;
+ **passed** $(target) : $(marker) ;
+}
+
+
+# The rule/action combination used to report successful passing of a test.
+#
+rule **passed**
+{
+ remove-test-targets $(<) ;
+
+ # Dump all the tests, if needed. We do it here, since dump should happen
+ # only after all Jamfiles have been read, and there is no such place
+ # currently defined (but there should be).
+ if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
+ {
+ .dumped-tests = true ;
+ dump-tests ;
+ }
+
+ # Force deletion of the target, in case any dependencies failed to build.
+ RMOLD $(<) ;
+}
+
+
+# Used to create test files signifying passed tests.
+#
+actions **passed**
+{
+ echo passed > "$(<)"
+}
+
+
+# Used to create replacement object files that do not get created during tests
+# that are expected to fail.
+#
+actions (failed-as-expected)
+{
+ echo failed as expected > "$(<)"
+}
+
+
+rule run-path-setup ( target : source : properties * )
+{
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via
+ # xdll-path property) and add whatever explicit dll-path user has specified.
+ # The resulting paths are added to the environment on each test invocation.
+ local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
+ dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
+ dll-paths += [ on $(source) return $(RUN_PATH) ] ;
+ dll-paths = [ sequence.unique $(dll-paths) ] ;
+ if $(dll-paths)
+ {
+ dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
+ PATH_SETUP on $(target) = [ common.prepend-path-variable-command
+ [ os.shared-library-path-variable ] : $(dll-paths) ] ;
+ }
+}
+
+
+local argv = [ modules.peek : ARGV ] ;
+
+toolset.flags testing.capture-output ARGS <testing.arg> ;
+toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
+toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
+
+.preserve-test-targets = on ;
+if --remove-test-targets in [ modules.peek : ARGV ]
+{
+ .preserve-test-targets = off ;
+}
+
+
+# Runs the executable 'source' and stores its stdout in the file 'target'.
+# Unless the --preserve-test-targets command line option has been specified,
+# removes the executable. The 'targets-to-remove' parameter controls what
+# should be removed:
+# - if 'none', does not remove anything, ever
+# - if empty, removes 'source'
+# - if non-empty and not 'none', contains a list of sources to remove.
+#
+rule capture-output ( target : source : properties * : targets-to-remove * )
+{
+ output-file on $(target) = $(target:S=.output) ;
+ LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
+
+ # The INCLUDES kill a warning about independent target...
+ INCLUDES $(target) : $(target:S=.output) ;
+ # but it also puts .output into dependency graph, so we must tell jam it is
+ # OK if it cannot find the target or updating rule.
+ NOCARE $(target:S=.output) ;
+
+    # This has a two-fold effect. First, it adds input files to the dependency
+ # graph, preventing a warning. Second, it causes input files to be bound
+ # before target is created. Therefore, they are bound using SEARCH setting
+ # on them and not LOCATE setting of $(target), as in other case (due to jam
+ # bug).
+ DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
+
+ if $(targets-to-remove) = none
+ {
+ targets-to-remove = ;
+ }
+ else if ! $(targets-to-remove)
+ {
+ targets-to-remove = $(source) ;
+ }
+
+ run-path-setup $(target) : $(source) : $(properties) ;
+
+ DISABLE_TEST_EXECUTION on $(target) = 0 ;
+ if [ feature.get-values testing.execute : $(properties) ] = off
+ {
+ DISABLE_TEST_EXECUTION on $(target) = 1 ;
+ }
+
+ if [ feature.get-values preserve-test-targets : $(properties) ] = off
+ || $(.preserve-test-targets) = off
+ {
+ rmtemp-sources $(target) : $(targets-to-remove) ;
+ for local to-remove in $(targets-to-remove)
+ {
+ rmtemp-all-sources $(to-remove) ;
+ }
+ }
+}
+
+.types-to-remove = EXE OBJ ;
+
+local rule remove-test-targets ( targets + )
+{
+ if $(.preserve-test-targets) = off
+ {
+        rmtemp-all-sources $(targets) ;
+ }
+}
+
+local rule rmtemp-all-sources ( target )
+{
+ local sources ;
+ local action = [ on $(target) return $(.action) ] ;
+ if $(action)
+ {
+ local action-sources = [ $(action).sources ] ;
+ for local source in $(action-sources)
+ {
+ local source-type = [ $(source).type ] ;
+ if $(source-type) in $(.types-to-remove)
+ {
+ sources += [ $(source).actual-name ] ;
+ }
+ else
+ {
+ # ECHO IGNORED: $(source) :: $(source-type) ;
+ }
+ }
+ if $(sources)
+ {
+ rmtemp-sources $(target) : $(sources) ;
+ for local source in $(sources)
+ {
+ rmtemp-all-sources $(source) ;
+ }
+ }
+ }
+}
+
+local rule rmtemp-sources ( target : sources * )
+{
+ if $(sources)
+ {
+ TEMPORARY $(sources) ;
+ # Set a second action on target that will be executed after capture
+ # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
+ # always considered succeeded. This is needed for 'run-fail' test. For
+ # that test the target will be marked with FAIL_EXPECTED, and without
+ # 'ignore' successful execution will be negated and be reported as
+ # failure. With 'ignore' we do not detect a case where removing files
+ # fails, but it is not likely to happen.
+ RmTemps $(target) : $(sources) ;
+ }
+}
+
+
+if [ os.name ] = NT
+{
+ .STATUS = %status% ;
+ .SET_STATUS = "set status=%ERRORLEVEL%" ;
+ .RUN_OUTPUT_NL = "echo." ;
+ .STATUS_0 = "%status% EQU 0 (" ;
+ .STATUS_NOT_0 = "%status% NEQ 0 (" ;
+ .VERBOSE = "%verbose% EQU 1 (" ;
+ .ENDIF = ")" ;
+ .SHELL_SET = "set " ;
+ .CATENATE = type ;
+ .CP = copy ;
+}
+else
+{
+ .STATUS = "$status" ;
+ .SET_STATUS = "status=$?" ;
+ .RUN_OUTPUT_NL = "echo" ;
+ .STATUS_0 = "test $status -eq 0 ; then" ;
+ .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
+ .VERBOSE = "test $verbose -eq 1 ; then" ;
+ .ENDIF = "fi" ;
+ .SHELL_SET = "" ;
+ .CATENATE = cat ;
+ .CP = cp ;
+}
+
+
+.VERBOSE_TEST = 0 ;
+if --verbose-test in [ modules.peek : ARGV ]
+{
+ .VERBOSE_TEST = 1 ;
+}
+
+
+.RM = [ common.rm-command ] ;
+
+
+actions capture-output bind INPUT_FILES output-file
+{
+ $(PATH_SETUP)
+ $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION)
+ if $(.STATUS_NOT_0)
+ echo Skipping test execution due to testing.execute=off
+ exit 0
+ $(.ENDIF)
+ $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
+ $(.SET_STATUS)
+ $(.RUN_OUTPUT_NL) >> "$(output-file)"
+ echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
+ if $(.STATUS_0)
+ $(.CP) "$(output-file)" "$(<)"
+ $(.ENDIF)
+ $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
+ if $(.STATUS_NOT_0)
+ $(.SHELL_SET)verbose=1
+ $(.ENDIF)
+ if $(.VERBOSE)
+ echo ====== BEGIN OUTPUT ======
+ $(.CATENATE) "$(output-file)"
+ echo ====== END OUTPUT ======
+ $(.ENDIF)
+ exit $(.STATUS)
+}
+
+
+actions quietly updated ignore piecemeal together RmTemps
+{
+ $(.RM) "$(>)"
+}
+
+
+.MAKE_FILE = [ common.file-creation-command ] ;
+
+toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
+toolset.flags testing.unit-test ARGS <testing.arg> ;
+
+
+rule unit-test ( target : source : properties * )
+{
+ run-path-setup $(target) : $(source) : $(properties) ;
+}
+
+
+actions unit-test
+{
+ $(PATH_SETUP)
+ $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)"
+}
+
+
+IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
+ : : compile compile-fail run run-fail link link-fail ;
+
+
+# This is a composing generator to support cases where a generator for the
+# specified target constructs other targets as well. One such example is msvc's
+# exe generator that constructs both EXE and PDB targets.
+type.register TIME : time ;
+generators.register-composing testing.time : : TIME ;
+
+
+# Note that this rule may be called multiple times for a single target in case
+# there are multiple actions operating on the same target in sequence. One such
+# example is msvc exe targets, first created by a linker action and then
+# updated with an embedded manifest file by a separate action.
+rule record-time ( target : source : start end user system )
+{
+ local src-string = [$(source:G=:J=",")"] " ;
+ USER_TIME on $(target) += $(src-string)$(user) ;
+ SYSTEM_TIME on $(target) += $(src-string)$(system) ;
+
+ # We need the following variables because attempting to perform such
+ # variable expansion in actions would not work due to quotes getting treated
+ # as regular characters.
+ USER_TIME_SECONDS on $(target) += $(src-string)$(user)" seconds" ;
+ SYSTEM_TIME_SECONDS on $(target) += $(src-string)$(system)" seconds" ;
+}
+
+
+IMPORT testing : record-time : : testing.record-time ;
+
+
+# Calling this rule requests that Boost.Build time how long it takes to build
+# the 'sources' targets and display the results both on standard output and in
+# the 'target' file.
+#
+rule time ( target : sources + : properties * )
+{
+ # Set up rule for recording timing information.
+ __TIMING_RULE__ on $(sources) = testing.record-time $(target) ;
+
+ # Make sure the sources get rebuilt any time we need to retrieve that
+ # information.
+ REBUILDS $(target) : $(sources) ;
+}
+
+
+actions time
+{
+ echo user: $(USER_TIME)
+ echo system: $(SYSTEM_TIME)
+
+ echo user: $(USER_TIME_SECONDS) > "$(<)"
+ echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)"
+}
diff --git a/tools/build/src/tools/testing.py b/tools/build/src/tools/testing.py
new file mode 100644
index 0000000000..a3b3f01174
--- /dev/null
+++ b/tools/build/src/tools/testing.py
@@ -0,0 +1,345 @@
+# Status: ported, except for --out-xml
+# Base revision: 64488
+#
+# Copyright 2005 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This module implements a regression testing framework. It declares a number
+# of main target rules which perform some action and, if the results are OK,
+# create an output file.
+#
+# The exact list of rules is:
+# 'compile'      -- creates a .test file if compilation of the sources was
+#                   successful.
+# 'compile-fail' -- creates a .test file if compilation of the sources failed.
+# 'run'          -- creates a .test file if running the executable produced
+#                   from the sources was successful. Also leaves behind a
+#                   .output file with the output from the program run.
+# 'run-fail'     -- same as above, but the .test file is created if running
+#                   fails.
+#
+# In all cases, the presence of the .test file is an indication that the test
+# passed. For more convenient reporting, you might want to use the C++ Boost
+# regression testing utilities (see http://www.boost.org/more/regression.html).
+#
+# For historical reasons, a 'unit-test' rule is available which has the same
+# syntax as 'exe' and behaves just like 'run'.
+
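+# A minimal Jamfile sketch using these rules (the file names are illustrative,
+# not part of this module):
+#
+#   import testing ;
+#
+#   run hello_test.cpp ;            # build and run, expect exit status 0
+#   run-fail must_abort.cpp ;       # build and run, expect a non-zero status
+#   compile config_check.cpp ;      # expect compilation to succeed
+#   compile-fail ill_formed.cpp ;   # expect compilation to fail
+#   unit-test quick : quick.cpp ;   # like 'run', but output is not captured
+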
+# Things to do:
+# - Teach compiler_status to handle Jamfile.v2.
+# Notes:
+# - <no-warn> is not implemented, since it is Como-specific, and it is not
+# clear how to implement it.
+# - std::locale-support is not implemented (it is used in one test).
+
+import b2.build.feature as feature
+import b2.build.type as type
+import b2.build.targets as targets
+import b2.build.generators as generators
+import b2.build.toolset as toolset
+import b2.tools.common as common
+import b2.util.option as option
+import b2.build_system as build_system
+
+
+
+from b2.manager import get_manager
+from b2.util import stem, bjam_signature
+from b2.util.sequence import unique
+
+import bjam
+
+import re
+import os.path
+import sys
+
+def init():
+ pass
+
+# Feature controlling the command used to launch test programs.
+feature.feature("testing.launcher", [], ["free", "optional"])
+
+feature.feature("test-info", [], ["free", "incidental"])
+feature.feature("testing.arg", [], ["free", "incidental"])
+feature.feature("testing.input-file", [], ["free", "dependency"])
+
+feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"])
+
+# Register target types.
+type.register("TEST", ["test"])
+type.register("COMPILE", [], "TEST")
+type.register("COMPILE_FAIL", [], "TEST")
+
+type.register("RUN_OUTPUT", ["run"])
+type.register("RUN", [], "TEST")
+type.register("RUN_FAIL", [], "TEST")
+
+type.register("LINK", [], "TEST")
+type.register("LINK_FAIL", [], "TEST")
+type.register("UNIT_TEST", ["passed"], "TEST")
+
+__all_tests = []
+
+# Declare the rules which create main targets. While the 'type' module already
+# creates rules with the same names for us, we need extra convenience: a
+# default name for the main target, so we write our own versions.
+
+# Helper rule. Creates a test target, using the basename of the first source if
+# no target name is explicitly passed. Remembers the created target in a global
+# variable.
+def make_test(target_type, sources, requirements, target_name=None):
+
+ if not target_name:
+ target_name = stem(os.path.basename(sources[0]))
+
+ # Having periods (".") in the target name is problematic because the typed
+ # generator will strip the suffix and use the bare name for the file targets.
+ # Even though the location-prefix averts problems most of the time, it does
+ # not prevent ambiguity issues when referring to the test targets, for
+ # example when using the XML log output. So we rename the target to remove
+ # the periods and provide an alias for users.
+ real_name = target_name.replace(".", "~")
+
+ project = get_manager().projects().current()
+ # The <location-prefix> forces the build system to generate paths in the
+ # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
+ # post-processing tools to work.
+ t = get_manager().targets().create_typed_target(
+ type.type_from_rule_name(target_type), project, real_name, sources,
+ requirements + ["<location-prefix>" + real_name + ".test"], [], [])
+
+ # The alias to the real target, per period replacement above.
+ if real_name != target_name:
+ get_manager().projects().project_rules().all_names_["alias"](
+ target_name, [t])
+
+ # Remember the test (for --dump-tests). A better approach would be to
+ # collect all tests for a given project. This has some technical problems:
+ # e.g. we cannot call this dump from a Jamfile, since projects referred to by
+ # 'build-project' are not available until the whole Jamfile has been loaded.
+ __all_tests.append(t)
+ return t
+
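+# For example (an illustrative sketch, not a name used by the build system
+# itself): a test declared with the explicit name "array.1" becomes the typed
+# target "array~1", built under the location-prefix "array~1.test", with an
+# alias "array.1" added so the test can still be referenced by its natural
+# name.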
+
+# Note: passing more than one cpp file here is known to fail. Passing a cpp
+# file and a library target works.
+#
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def compile(sources, requirements, target_name=None):
+ return make_test("compile", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def compile_fail(sources, requirements, target_name=None):
+ return make_test("compile-fail", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def link(sources, requirements, target_name=None):
+ return make_test("link", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
+def link_fail(sources, requirements, target_name=None):
+ return make_test("link-fail", sources, requirements, target_name)
+
+def handle_input_files(input_files):
+ if len(input_files) > 1:
+ # Check that the sorting done when creating the property-set instance
+ # will not change the ordering.
+ if sorted(input_files) != input_files:
+ get_manager().errors()("Names of input files must be sorted alphabetically\n" +
+ "due to internal limitations")
+ return ["<testing.input-file>" + f for f in input_files]
+
+@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
+ ["requirements", "*"], ["target_name", "?"],
+ ["default_build", "*"]))
+def run(sources, args, input_files, requirements, target_name=None, default_build=[]):
+ if args:
+ requirements.append("<testing.arg>" + " ".join(args))
+ requirements.extend(handle_input_files(input_files))
+ return make_test("run", sources, requirements, target_name)
+
+@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
+ ["requirements", "*"], ["target_name", "?"],
+ ["default_build", "*"]))
+def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]):
+ if args:
+ requirements.append("<testing.arg>" + " ".join(args))
+ requirements.extend(handle_input_files(input_files))
+ return make_test("run-fail", sources, requirements, target_name)
+
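+# Sketch of the Jamfile-level call these wrappers accept (the names below are
+# illustrative):
+#
+#   run parser_test.cpp : --verbose : data1.txt data2.txt ;
+#
+# arrives here as sources=["parser_test.cpp"], args=["--verbose"] and
+# input_files=["data1.txt", "data2.txt"]. Note that the input file names must
+# already be sorted alphabetically (see handle_input_files above).
+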
+# Register all the rules
+for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]:
+ get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_")))
+
+# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
+from b2.build.alias import alias
+get_manager().projects().add_rule("test-suite", alias)
+
+# For all recorded test targets (typed targets with a type derived from
+# 'TEST'), produce some interesting information.
+#
+def dump_tests():
+ for t in __all_tests:
+ dump_test(t)
+
+# Given a project location in normalized form (with forward slashes), compute
+# the name of the Boost library.
+#
+__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)")
+__ln2 = re.compile("/(tools|libs)/(.*)$")
+__ln3 = re.compile("(/status$)")
+def get_library_name(path):
+
+ path = path.replace("\\", "/")
+ match1 = __ln1.match(path)
+ match2 = __ln2.match(path)
+ match3 = __ln3.match(path)
+
+ if match1:
+ return match1.group(2)
+ elif match2:
+ return match2.group(2)
+ elif match3:
+ return ""
+ elif option.get("dump-tests", False, True):
+ # The 'run' rule and others might be used outside boost. In that case,
+ # just return the path, since the 'library name' makes no sense.
+ return path
+
+# Was an XML dump requested?
+__out_xml = option.get("out-xml", False, True)
+
+# Takes a target (an instance of 'basic-target') and prints
+# - its type
+# - its name
+# - comments specified via the <test-info> property
+# - the relative locations of all sources from the project root.
+#
+def dump_test(target):
+ type = target.type()
+ name = target.name()
+ project = target.project()
+
+ project_root = project.get('project-root')
+ library = get_library_name(os.path.abspath(project.get('location')))
+ if library:
+ name = library + "/" + name
+
+ sources = target.sources()
+ source_files = []
+ for s in sources:
+ if isinstance(s, targets.FileReference):
+ location = os.path.abspath(os.path.join(s.location(), s.name()))
+ source_files.append(os.path.relpath(location, os.path.abspath(project_root)))
+
+ target_name = project.get('location') + "//" + target.name() + ".test"
+
+ test_info = target.requirements().get('test-info')
+ test_info = " ".join('"' + ti + '"' for ti in test_info)
+
+ # If the user requested XML output on the command line, add the test info to
+ # that XML file rather than dumping it to stdout.
+ #if $(.out-xml)
+ #{
+# local nl = "
+#" ;
+# .contents on $(.out-xml) +=
+# "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
+# "$(nl) <target><![CDATA[$(target-name)]]></target>"
+# "$(nl) <info><![CDATA[$(test-info)]]></info>"
+# "$(nl) <source><![CDATA[$(source-files)]]></source>"
+# "$(nl) </test>"
+# ;
+# }
+# else
+
+ source_files = " ".join('"' + s + '"' for s in source_files)
+ if test_info:
+ print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files)
+ else:
+ print 'boost-test(%s) "%s" : %s' % (type, name, source_files)
+
+# Register generators. Depending on target type, either 'expect-success' or
+# 'expect-failure' rule will be used.
+generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"])
+generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"])
+generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"])
+generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"])
+generators.register_standard("testing.expect-success", ["EXE"], ["LINK"])
+generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"])
+
+# Generator which runs an EXE and captures output.
+generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"])
+
+# Generator which creates a target if sources run successfully. Differs from RUN
+# in that run output is not captured. The reason why it exists is that the 'run'
+# rule is much better for automated testing, but is not user-friendly (see
+# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
+generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"])
+
+# FIXME: if those calls are after bjam.call, then bjam will crash
+# when toolset.flags calls bjam.caller.
+toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"])
+toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"])
+toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"])
+
+toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"])
+toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"])
+
+# This is a composing generator to support cases where a generator for the
+# specified target constructs other targets as well. One such example is msvc's
+# exe generator that constructs both EXE and PDB targets.
+type.register("TIME", ["time"])
+generators.register_composing("testing.time", [], ["TIME"])
+
+
+# The following code sets up actions for this module. It is pretty convoluted,
+# but the basic point is that most of the actions are defined by Jam code
+# contained in testing-aux.jam, which we load into a Jam module named 'testing'.
+
+def run_path_setup(target, sources, ps):
+
+ # For testing, we need to make sure that all dynamic libraries needed by the
+ # test are found. So, we collect all paths from dependency libraries (via the
+ # xdll-path property) and add whatever explicit dll-path the user has
+ # specified. The resulting paths are added to the environment on each test
+ # invocation.
+ dll_paths = ps.get('dll-path')
+ dll_paths.extend(ps.get('xdll-path'))
+ dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH"))
+ dll_paths = unique(dll_paths)
+ if dll_paths:
+ bjam.call("set-target-variable", target, "PATH_SETUP",
+ common.prepend_path_variable_command(
+ common.shared_library_path_variable(), dll_paths))
+
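+# Illustrative effect (assuming a Linux host and a single dependency library
+# directory "/tmp/build/lib"): PATH_SETUP on the target ends up holding a
+# shell fragment along the lines of
+#
+#   LD_LIBRARY_PATH="/tmp/build/lib:$LD_LIBRARY_PATH"
+#   export LD_LIBRARY_PATH
+#
+# with the exact variable name and syntax supplied by
+# common.shared_library_path_variable() and
+# common.prepend_path_variable_command() for the current platform.
+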
+def capture_output_setup(target, sources, ps):
+ run_path_setup(target, sources, ps)
+
+ if ps.get('preserve-test-targets') == ['off']:
+ bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1")
+
+get_manager().engine().register_bjam_action("testing.capture-output",
+ capture_output_setup)
+
+
+path = os.path.dirname(__file__)
+import b2.util.os_j
+get_manager().projects().project_rules()._import_rule("testing", "os.name",
+ b2.util.os_j.name)
+import b2.tools.common
+get_manager().projects().project_rules()._import_rule("testing", "common.rm-command",
+ b2.tools.common.rm_command)
+get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command",
+ b2.tools.common.file_creation_command)
+
+bjam.call("load", "testing", os.path.join(path, "testing-aux.jam"))
+
+
+for name in ["expect-success", "expect-failure", "time"]:
+ get_manager().engine().register_bjam_action("testing." + name)
+
+get_manager().engine().register_bjam_action("testing.unit-test",
+ run_path_setup)
+
+if option.get("dump-tests", False, True):
+ build_system.add_pre_build_hook(dump_tests)
diff --git a/tools/build/v2/tools/types/__init__.py b/tools/build/src/tools/types/__init__.py
index 9ee31d13a3..9ee31d13a3 100644
--- a/tools/build/v2/tools/types/__init__.py
+++ b/tools/build/src/tools/types/__init__.py
diff --git a/tools/build/v2/tools/types/asm.jam b/tools/build/src/tools/types/asm.jam
index a340db36a3..a340db36a3 100644
--- a/tools/build/v2/tools/types/asm.jam
+++ b/tools/build/src/tools/types/asm.jam
diff --git a/tools/build/src/tools/types/asm.py b/tools/build/src/tools/types/asm.py
new file mode 100644
index 0000000000..a4b4aee61e
--- /dev/null
+++ b/tools/build/src/tools/types/asm.py
@@ -0,0 +1,33 @@
+# Copyright Craig Rodrigues 2005.
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+from b2.build import type as type_
+from b2.manager import get_manager
+from b2.tools.cast import cast
+from b2.util import bjam_signature
+
+
+MANAGER = get_manager()
+PROJECT_REGISTRY = MANAGER.projects()
+
+# maps project.name() + type to type
+_project_types = {}
+
+type_.register_type('ASM', ['s', 'S', 'asm'])
+
+
+@bjam_signature((['type_'], ['sources', '*'], ['name', '?']))
+def set_asm_type(type_, sources, name=''):
+ project = PROJECT_REGISTRY.current()
+ _project_types[project.name() + type_] = _project_types.get(
+ project.name() + type_, type_) + '_'
+
+ name = name if name else _project_types[project.name() + type_]
+ type_ += '.asm'
+ cast(name, type_.upper(), sources, [], [], [])
+
+
+PROJECT_REGISTRY.add_rule("set-asm-type", set_asm_type)
diff --git a/tools/build/src/tools/types/cpp.jam b/tools/build/src/tools/types/cpp.jam
new file mode 100644
index 0000000000..3fcf449a2f
--- /dev/null
+++ b/tools/build/src/tools/types/cpp.jam
@@ -0,0 +1,90 @@
+# Copyright 2004 David Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Copyright 2010 Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import scanner ;
+import type ;
+
+
+class c-scanner : scanner
+{
+ import path ;
+ import regex ;
+ import scanner ;
+ import sequence ;
+ import toolset ;
+ import virtual-target ;
+
+ rule __init__ ( includes * )
+ {
+ scanner.__init__ ;
+
+ # toolset.handle-flag-value is a bit of overkill, but it
+ # does correctly handle the topological sort of && separated
+ # include paths
+ self.includes = [ toolset.handle-flag-value <include> : $(includes) ] ;
+ }
+
+ rule pattern ( )
+ {
+ return "#[ \t]*include[ \t]*(<(.*)>|\"(.*)\")" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
+ angle = [ sequence.transform path.native : $(angle) ] ;
+ local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
+ quoted = [ sequence.transform path.native : $(quoted) ] ;
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ local g = [ on $(target) return $(HDRGRIST) ] ;
+ local b = [ NORMALIZE_PATH $(binding:D) ] ;
+
+ # Attach binding of including file to included targets. When a target is
+ # directly created from a virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on the including file (we can not get literal "." in the
+ # include path).
+ local g2 = $(g)"#"$(b) ;
+
+ angle = $(angle:G=$(g)) ;
+ quoted = $(quoted:G=$(g2)) ;
+
+ local all = $(angle) $(quoted) ;
+
+ INCLUDES $(target) : $(all) ;
+ NOCARE $(all) ;
+ SEARCH on $(angle) = $(self.includes:G=) ;
+ SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
+
+ # Just propagate the current scanner to the includes, in the hope that the
+ # includes do not change scanners.
+ scanner.propagate $(__name__) : $(all) : $(target) ;
+
+ ISFILE $(all) ;
+ }
+}
+
+scanner.register c-scanner : include ;
+
+type.register CPP : cpp cxx cc ;
+type.register H : h ;
+type.register HPP : hpp : H ;
+type.register C : c ;
+
+# In most cases where a CPP file or an H file is a source of some action, we
+# should rebuild the result if any of the files included by the CPP/H file
+# change. One case where this is not needed is installation, which is handled
+# specially.
+type.set-scanner CPP : c-scanner ;
+type.set-scanner C : c-scanner ;
+# One case where scanning of H/HPP files is necessary is PCH generation -- if
+# any header included by the HPP being precompiled changes, we need to
+# recompile the header.
+type.set-scanner H : c-scanner ;
+type.set-scanner HPP : c-scanner ;
diff --git a/tools/build/src/tools/types/cpp.py b/tools/build/src/tools/types/cpp.py
new file mode 100644
index 0000000000..22f4dece45
--- /dev/null
+++ b/tools/build/src/tools/types/cpp.py
@@ -0,0 +1,84 @@
+# Copyright David Abrahams 2004. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+import os
+import re
+
+import bjam
+
+from b2.build import type as type_, scanner
+from b2.manager import get_manager
+from b2.util.utility import replace_grist
+
+
+MANAGER = get_manager()
+ENGINE = MANAGER.engine()
+SCANNERS = MANAGER.scanners()
+
+
+class CScanner(scanner.Scanner):
+ def __init__(self, includes):
+ scanner.Scanner.__init__(self)
+ self.includes = []
+ for include in includes:
+ self.includes.extend(replace_grist(include, '').split('&&'))
+
+ def pattern(self):
+ return '#\s*include\s*(<(.*)>|"(.*)")'
+
+ def process(self, target, matches, binding):
+ # create a single string so that findall
+ # can be used since it returns a list of
+ # all grouped matches
+ match_str = ' '.join(matches)
+ # the question mark makes the regexes non-greedy
+ angles = re.findall(r'<(.*?)>', match_str)
+ quoted = re.findall(r'"(.*?)"', match_str)
+
+ # CONSIDER: the new scoping rules seem to defeat "on target" variables.
+ g = ENGINE.get_target_variable(target, 'HDRGRIST')
+ b = os.path.normpath(os.path.dirname(binding))
+
+ # Attach binding of including file to included targets. When a target is
+ # directly created from a virtual target this extra information is
+ # unnecessary. But in other cases, it allows us to distinguish between
+ # two headers of the same name included from different places. We do not
+ # need this extra information for angle includes, since they should not
+ # depend on the including file (we can not get literal "." in the
+ # include path).
+ # local g2 = $(g)"#"$(b) ;
+ g2 = g + '#' + b
+
+ angles = [replace_grist(angle, g) for angle in angles]
+ quoted = [replace_grist(quote, g2) for quote in quoted]
+
+ includes = angles + quoted
+
+ bjam.call('INCLUDES', target, includes)
+ bjam.call('NOCARE', includes)
+ ENGINE.set_target_variable(angles, 'SEARCH', self.includes)
+ ENGINE.set_target_variable(quoted, 'SEARCH', [b] + self.includes)
+
+ # Just propagate the current scanner to the includes, in the hope that the
+ # includes do not change scanners.
+ SCANNERS.propagate(self, includes)
+
+ bjam.call('ISFILE', includes)
+
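+# For instance (illustrative), if the scanner is handed the matches for lines
+# containing '#include <vector>' and '#include "util/helper.hpp"', the regular
+# expressions above extract angles == ['vector'] and
+# quoted == ['util/helper.hpp'] from the joined match string, and only the
+# quoted include is later searched relative to the including file's directory.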
+
+scanner.register(CScanner, 'include')
+
+type_.register_type('CPP', ['cpp', 'cxx', 'cc'])
+type_.register_type('H', ['h'])
+type_.register_type('HPP', ['hpp'], 'H')
+type_.register_type('C', ['c'])
+# In most cases where a CPP file or an H file is a source of some action, we
+# should rebuild the result if any of the files included by the CPP/H file
+# change. One case where this is not needed is installation, which is handled
+# specially.
+type_.set_scanner('CPP', CScanner)
+type_.set_scanner('C', CScanner)
+# One case where scanning of H/HPP files is necessary is PCH generation -- if
+# any header included by the HPP being precompiled changes, we need to
+# recompile the header.
+type_.set_scanner('H', CScanner)
+type_.set_scanner('HPP', CScanner)
diff --git a/tools/build/v2/tools/types/exe.jam b/tools/build/src/tools/types/exe.jam
index 47109513a4..47109513a4 100644
--- a/tools/build/v2/tools/types/exe.jam
+++ b/tools/build/src/tools/types/exe.jam
diff --git a/tools/build/v2/tools/types/exe.py b/tools/build/src/tools/types/exe.py
index a4935e24ed..a4935e24ed 100644
--- a/tools/build/v2/tools/types/exe.py
+++ b/tools/build/src/tools/types/exe.py
diff --git a/tools/build/v2/tools/types/html.jam b/tools/build/src/tools/types/html.jam
index 5cd337d094..5cd337d094 100644
--- a/tools/build/v2/tools/types/html.jam
+++ b/tools/build/src/tools/types/html.jam
diff --git a/tools/build/v2/tools/types/html.py b/tools/build/src/tools/types/html.py
index 63af4d9078..63af4d9078 100644
--- a/tools/build/v2/tools/types/html.py
+++ b/tools/build/src/tools/types/html.py
diff --git a/tools/build/v2/tools/types/lib.jam b/tools/build/src/tools/types/lib.jam
index 854ab8fd5c..854ab8fd5c 100644
--- a/tools/build/v2/tools/types/lib.jam
+++ b/tools/build/src/tools/types/lib.jam
diff --git a/tools/build/v2/tools/types/lib.py b/tools/build/src/tools/types/lib.py
index d0ec1fb52a..d0ec1fb52a 100644
--- a/tools/build/v2/tools/types/lib.py
+++ b/tools/build/src/tools/types/lib.py
diff --git a/tools/build/v2/tools/types/obj.jam b/tools/build/src/tools/types/obj.jam
index 6afbcaa6f3..6afbcaa6f3 100644
--- a/tools/build/v2/tools/types/obj.jam
+++ b/tools/build/src/tools/types/obj.jam
diff --git a/tools/build/v2/tools/types/obj.py b/tools/build/src/tools/types/obj.py
index e61e99a81c..e61e99a81c 100644
--- a/tools/build/v2/tools/types/obj.py
+++ b/tools/build/src/tools/types/obj.py
diff --git a/tools/build/v2/tools/types/objc.jam b/tools/build/src/tools/types/objc.jam
index 709cbd0c7d..709cbd0c7d 100644
--- a/tools/build/v2/tools/types/objc.jam
+++ b/tools/build/src/tools/types/objc.jam
diff --git a/tools/build/v2/tools/types/preprocessed.jam b/tools/build/src/tools/types/preprocessed.jam
index c9187ba671..c9187ba671 100644
--- a/tools/build/v2/tools/types/preprocessed.jam
+++ b/tools/build/src/tools/types/preprocessed.jam
diff --git a/tools/build/v2/tools/types/preprocessed.py b/tools/build/src/tools/types/preprocessed.py
index f591043347..f591043347 100644
--- a/tools/build/v2/tools/types/preprocessed.py
+++ b/tools/build/src/tools/types/preprocessed.py
diff --git a/tools/build/src/tools/types/qt.jam b/tools/build/src/tools/types/qt.jam
new file mode 100644
index 0000000000..4951063e3e
--- /dev/null
+++ b/tools/build/src/tools/types/qt.jam
@@ -0,0 +1,12 @@
+# Copyright Vladimir Prus 2005. Distributed under the Boost
+# Software License, Version 1.0. (See accompanying
+# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+type UI : ui ;
+type QRC : qrc ;
+type MOCCABLE_CPP ;
+type MOCCABLE_H ;
+type MOCCABLE5_CPP ;
+type MOCCABLE5_H ;
+# Result of running moc.
+type MOC : moc : H ;
diff --git a/tools/build/v2/tools/types/register.jam b/tools/build/src/tools/types/register.jam
index 203992ca92..203992ca92 100644
--- a/tools/build/v2/tools/types/register.jam
+++ b/tools/build/src/tools/types/register.jam
diff --git a/tools/build/v2/tools/types/rsp.jam b/tools/build/src/tools/types/rsp.jam
index bdf8a7c989..bdf8a7c989 100644
--- a/tools/build/v2/tools/types/rsp.jam
+++ b/tools/build/src/tools/types/rsp.jam
diff --git a/tools/build/v2/tools/types/rsp.py b/tools/build/src/tools/types/rsp.py
index ccb379e951..ccb379e951 100644
--- a/tools/build/v2/tools/types/rsp.py
+++ b/tools/build/src/tools/types/rsp.py
diff --git a/tools/build/v2/tools/unix.jam b/tools/build/src/tools/unix.jam
index 75949851a0..75949851a0 100644
--- a/tools/build/v2/tools/unix.jam
+++ b/tools/build/src/tools/unix.jam
diff --git a/tools/build/v2/tools/unix.py b/tools/build/src/tools/unix.py
index 34758f57b5..34758f57b5 100644
--- a/tools/build/v2/tools/unix.py
+++ b/tools/build/src/tools/unix.py
diff --git a/tools/build/v2/tools/vacpp.jam b/tools/build/src/tools/vacpp.jam
index f4080fc04d..f4080fc04d 100644
--- a/tools/build/v2/tools/vacpp.jam
+++ b/tools/build/src/tools/vacpp.jam
diff --git a/tools/build/v2/tools/whale.jam b/tools/build/src/tools/whale.jam
index 9335ff0c0c..9335ff0c0c 100644
--- a/tools/build/v2/tools/whale.jam
+++ b/tools/build/src/tools/whale.jam
diff --git a/tools/build/v2/tools/xlf.jam b/tools/build/src/tools/xlf.jam
index e7fcc6086a..e7fcc6086a 100644
--- a/tools/build/v2/tools/xlf.jam
+++ b/tools/build/src/tools/xlf.jam
diff --git a/tools/build/src/tools/xsltproc-config.jam b/tools/build/src/tools/xsltproc-config.jam
new file mode 100644
index 0000000000..d1be25fa85
--- /dev/null
+++ b/tools/build/src/tools/xsltproc-config.jam
@@ -0,0 +1,36 @@
+#~ Copyright 2005 Rene Rivera.
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Automatic configuration for the xsltproc toolset. To use, just import this
+# module.
+
+import os ;
+import toolset : using ;
+
+
+local rule locate-executable ( name )
+{
+ local path = [ modules.peek : PATH ] ;
+ local exe ;
+ if [ os.name ] = NT
+ {
+ exe = [ GLOB $(path) "C:\\Boost\\bin" : $(name)\.exe ] ;
+ }
+ else
+ {
+ exe = [ GLOB $(path) : $(name) ] ;
+ }
+ return $(exe[1]) ;
+}
+
+
+local xsltproc-exe = [ locate-executable xsltproc ] ;
+if $(xsltproc-exe)
+{
+ if --debug-configuration in [ modules.peek : ARGV ]
+ {
+ ECHO notice: using xsltproc ":" $(xsltproc-exe) ;
+ }
+ using xsltproc : $(xsltproc-exe) ;
+}
diff --git a/tools/build/src/tools/xsltproc.jam b/tools/build/src/tools/xsltproc.jam
new file mode 100644
index 0000000000..d8476461c3
--- /dev/null
+++ b/tools/build/src/tools/xsltproc.jam
@@ -0,0 +1,205 @@
+# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# This module defines rules to apply an XSLT stylesheet to an XML file using the
+# xsltproc driver, part of libxslt.
+
+import common ;
+import feature ;
+import modules ;
+import os ;
+import path ;
+import regex ;
+import sequence ;
+
+
+feature.feature xsl:param : : free ;
+feature.feature xsl:path : : free ;
+feature.feature catalog : : free ;
+
+
+# Initialize xsltproc support. The parameters are:
+# xsltproc: The xsltproc executable
+#
+rule init ( xsltproc ? )
+{
+ if $(xsltproc)
+ {
+ modify-config ;
+ .xsltproc = $(xsltproc) ;
+ check-xsltproc ;
+ }
+}
+
+
+rule freeze-config ( )
+{
+ if ! $(.config-frozen)
+ {
+ .config-frozen = true ;
+ .xsltproc ?= [ modules.peek : XSLTPROC ] ;
+ .xsltproc ?= xsltproc ;
+ check-xsltproc ;
+ .is-cygwin = [ .is-cygwin $(.xsltproc) ] ;
+ }
+}
+
+
+rule modify-config ( )
+{
+ if $(.config-frozen)
+ {
+ import errors ;
+ errors.user-error
+ "xsltproc: Cannot change xsltproc command after it has been used." ;
+ }
+}
+
+
+rule check-xsltproc ( )
+{
+ if $(.xsltproc)
+ {
+ local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ]
+ ;
+ if $(status[2]) != 0
+ {
+ import errors ;
+ errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ;
+ }
+ }
+}
+
+
+# Returns a non-empty string if a cygwin xsltproc binary was specified.
+#
+rule is-cygwin ( )
+{
+ freeze-config ;
+ return $(.is-cygwin) ;
+}
+
+
+rule .is-cygwin ( xsltproc )
+{
+ if [ os.on-windows ]
+ {
+ local file = [ path.make [ modules.binding $(__name__) ] ] ;
+ local dir = [ path.native [ path.join [ path.parent $(file) ] xsltproc ]
+ ] ;
+ if [ os.name ] = CYGWIN
+ {
+ dir = $(dir:W) ;
+ }
+ local command =
+ "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ;
+ local status = [ SHELL $(command) : no-output : exit-status ] ;
+ if $(status[2]) != "0"
+ {
+ return true ;
+ }
+ }
+}
+
+
+rule compute-xslt-flags ( target : properties * )
+{
+ # Raw flags.
+ local flags = [ feature.get-values <flags> : $(properties) ] ;
+
+ # Translate <xsl:param> into command line flags.
+ for local param in [ feature.get-values <xsl:param> : $(properties) ]
+ {
+ local namevalue = [ regex.split $(param) "=" ] ;
+ flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
+ }
+
+ # Translate <xsl:path>.
+ for local path in [ feature.get-values <xsl:path> : $(properties) ]
+ {
+ flags += --path \"$(path:G=)\" ;
+ }
+
+ # Take care of implicit dependencies.
+ local other-deps ;
+ for local dep in [ feature.get-values <implicit-dependency> : $(properties)
+ ]
+ {
+ other-deps += [ $(dep:G=).creating-subvariant ] ;
+ }
+
+ local implicit-target-directories ;
+ for local dep in [ sequence.unique $(other-deps) ]
+ {
+ implicit-target-directories += [ $(dep).all-target-directories ] ;
+ }
+
+ for local dir in $(implicit-target-directories)
+ {
+ flags += --path \"$(dir:T)\" ;
+ }
+
+ return $(flags) ;
+}
+
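+# For example (an illustrative sketch): the properties
+# <xsl:param>boost.root=../.. and <xsl:path>images would be translated by the
+# rule above into roughly
+#
+#   --stringparam boost.root "../.." --path "images"
+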
+
+local rule .xsltproc ( target : source stylesheet : properties * : dirname ? :
+ action )
+{
+ freeze-config ;
+ STYLESHEET on $(target) = $(stylesheet) ;
+ FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ;
+ NAME on $(target) = $(.xsltproc) ;
+
+ for local catalog in [ feature.get-values <catalog> : $(properties) ]
+ {
+ CATALOG = [ common.variable-setting-command XML_CATALOG_FILES :
+ $(catalog:T) ] ;
+ }
+
+ if [ os.on-windows ] && ! [ is-cygwin ]
+ {
+ action = $(action).windows ;
+ }
+
+ $(action) $(target) : $(source) ;
+}
+
+
+rule xslt ( target : source stylesheet : properties * )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : :
+ xslt-xsltproc ] ;
+}
+
+
+rule xslt-dir ( target : source stylesheet : properties * : dirname )
+{
+ return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) :
+ $(dirname) : xslt-xsltproc-dir ] ;
+}
+
+actions xslt-xsltproc.windows
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
+}
+
+
+actions xslt-xsltproc-dir.windows bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
+}
+
+
+actions xslt-xsltproc-dir bind STYLESHEET
+{
+ $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
+}
diff --git a/tools/build/v2/tools/xsltproc/included.xsl b/tools/build/src/tools/xsltproc/included.xsl
index ef86394a99..ef86394a99 100644
--- a/tools/build/v2/tools/xsltproc/included.xsl
+++ b/tools/build/src/tools/xsltproc/included.xsl
diff --git a/tools/build/v2/tools/xsltproc/test.xml b/tools/build/src/tools/xsltproc/test.xml
index 57c8ba187e..57c8ba187e 100644
--- a/tools/build/v2/tools/xsltproc/test.xml
+++ b/tools/build/src/tools/xsltproc/test.xml
diff --git a/tools/build/v2/tools/xsltproc/test.xsl b/tools/build/src/tools/xsltproc/test.xsl
index a142c91dd6..a142c91dd6 100644
--- a/tools/build/v2/tools/xsltproc/test.xsl
+++ b/tools/build/src/tools/xsltproc/test.xsl
diff --git a/tools/build/src/tools/zlib.jam b/tools/build/src/tools/zlib.jam
new file mode 100644
index 0000000000..8095eeeeef
--- /dev/null
+++ b/tools/build/src/tools/zlib.jam
@@ -0,0 +1,227 @@
+# Copyright (c) 2010 Vladimir Prus.
+# Copyright (c) 2013 Steven Watanabe
+#
+# Use, modification and distribution is subject to the Boost Software
+# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Supports the zlib library
+#
+# After 'using zlib', the following targets are available:
+#
+# /zlib//zlib -- The zlib library
+
+import project ;
+import ac ;
+import errors ;
+import "class" : new ;
+import targets ;
+import path ;
+import modules ;
+import errors ;
+import indirect ;
+import property ;
+import property-set ;
+
+header = zlib.h ;
+names = z zlib zll zdll ;
+
+sources = adler32.c compress.c
+ crc32.c deflate.c gzclose.c gzio.c gzlib.c gzread.c gzwrite.c
+ infback.c inffast.c inflate.c inftrees.c trees.c uncompr.c zutil.c ;
+
+library-id = 0 ;
+
+if --debug-configuration in [ modules.peek : ARGV ]
+{
+ .debug = true ;
+}
+
+# Initializes the zlib library.
+#
+# zlib can be configured either to use pre-existing binaries
+# or to build the library from source.
+#
+# Options for configuring a prebuilt zlib::
+#
+# <search>
+# The directory containing the zlib binaries.
+# <name>
+# Overrides the default library name.
+# <include>
+# The directory containing the zlib headers.
+#
+# If none of these options is specified, then the environment
+# variables ZLIB_LIBRARY_PATH, ZLIB_NAME, and ZLIB_INCLUDE will
+# be used instead.
+#
+# Options for building zlib from source::
+#
+# <source>
+# The zlib source directory. Defaults to the environment variable
+# ZLIB_SOURCE.
+# <tag>
+# A rule which computes the actual name of the compiled
+# libraries based on the build properties. Ignored
+# when using precompiled binaries.
+# <build-name>
+# The base name to use for the compiled library. Ignored
+# when using precompiled binaries.
+#
+# Examples::
+#
+# # Find zlib in the default system location
+# using zlib ;
+# # Build zlib from source
+# using zlib : 1.2.7 : <source>/home/steven/zlib-1.2.7 ;
+# # Find zlib in /usr/local
+# using zlib : 1.2.7
+# : <include>/usr/local/include <search>/usr/local/lib ;
+# # Build zlib from source for msvc and find
+# # prebuilt binaries for gcc.
+# using zlib : 1.2.7 : <source>C:/Devel/src/zlib-1.2.7 : <toolset>msvc ;
+# using zlib : 1.2.7 : : <toolset>gcc ;
+#
+rule init (
+ version ?
+ # The zlib version (currently ignored)
+
+ : options *
+ # A list of the options to use
+
+ : requirements *
+ # The requirements for the zlib target
+
+ : is-default ?
+ # Default configurations are only used when zlib
+ # has not yet been configured.
+ )
+{
+ local caller = [ project.current ] ;
+
+ if ! $(.initialized)
+ {
+ .initialized = true ;
+
+ project.initialize $(__name__) ;
+ .project = [ project.current ] ;
+ project zlib ;
+ }
+
+ local library-path = [ property.select <search> : $(options) ] ;
+ library-path = $(library-path:G=) ;
+ local include-path = [ property.select <include> : $(options) ] ;
+ include-path = $(include-path:G=) ;
+ local source-path = [ property.select <source> : $(options) ] ;
+ source-path = $(source-path:G=) ;
+ local library-name = [ property.select <name> : $(options) ] ;
+ library-name = $(library-name:G=) ;
+ local tag = [ property.select <tag> : $(options) ] ;
+ tag = $(tag:G=) ;
+ local build-name = [ property.select <build-name> : $(options) ] ;
+ build-name = $(build-name:G=) ;
+
+ condition = [ property-set.create $(requirements) ] ;
+ condition = [ property-set.create [ $(condition).base ] ] ;
+
+ local no-build-from-source ;
+ # Ignore the ZLIB_SOURCE environment variable if this initialization
+ # requested a search for a specific pre-built library.
+ if $(library-path) || $(include-path) || $(library-name)
+ {
+ if $(source-path) || $(tag) || $(build-name)
+ {
+ errors.user-error "incompatible options for zlib:"
+ [ property.select <search> <include> <name> : $(options) ] "and"
+ [ property.select <source> <tag> <build-name> : $(options) ] ;
+ }
+ else
+ {
+ no-build-from-source = true ;
+ }
+ }
+
+ source-path ?= [ modules.peek : ZLIB_SOURCE ] ;
+
+ if $(.configured.$(condition))
+ {
+ if $(is-default)
+ {
+ if $(.debug)
+ {
+ ECHO "notice: [zlib] zlib is already configured" ;
+ }
+ }
+ else
+ {
+ errors.user-error "zlib is already configured" ;
+ }
+ return ;
+ }
+ else if $(source-path) && ! $(no-build-from-source)
+ {
+ build-name ?= z ;
+ library-id = [ CALC $(library-id) + 1 ] ;
+ tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
+ if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
+ {
+ tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
+ }
+ sources = [ path.glob $(source-path) : $(sources) ] ;
+ if $(.debug)
+ {
+ ECHO "notice: [zlib] Building zlib from source as $(build-name)" ;
+ if $(condition)
+ {
+ ECHO "notice: [zlib] Condition" [ $(condition).raw ] ;
+ }
+ if $(sources)
+ {
+ ECHO "notice: [zlib] found zlib source in $(source-path)" ;
+ }
+ else
+ {
+ ECHO "warning: [zlib] could not find zlib source in $(source-path)" ;
+ }
+ }
+ local target ;
+ if $(sources) {
+ target = [ targets.create-typed-target LIB : $(.project)
+ : $(build-name).$(library-id)
+ : $(sources)
+ : $(requirements)
+ <tag>@$(tag)
+ <include>$(source-path)
+ <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
+ <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
+ <link>shared:<define>ZLIB_DLL
+ :
+ : <include>$(source-path) ] ;
+ }
+
+ local mt = [ new ac-library zlib : $(.project) : $(condition) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ if $(target)
+ {
+ $(mt).set-target $(target) ;
+ }
+ targets.main-target-alternative $(mt) ;
+ } else {
+ if $(.debug)
+ {
+ ECHO "notice: [zlib] Using pre-installed library" ;
+ if $(condition)
+ {
+ ECHO "notice: [zlib] Condition" [ $(condition).raw ] ;
+ }
+ }
+
+ local mt = [ new ac-library zlib : $(.project) : $(condition) :
+ $(include-path) : $(library-path) : $(library-name) : $(root) ] ;
+ $(mt).set-header $(header) ;
+ $(mt).set-default-names $(names) ;
+ targets.main-target-alternative $(mt) ;
+ }
+ .configured.$(condition) = true ;
+}
diff --git a/tools/build/v2/util/__init__.py b/tools/build/src/util/__init__.py
index f80fe70e90..f80fe70e90 100644
--- a/tools/build/v2/util/__init__.py
+++ b/tools/build/src/util/__init__.py
diff --git a/tools/build/src/util/assert.jam b/tools/build/src/util/assert.jam
new file mode 100644
index 0000000000..65e880f43b
--- /dev/null
+++ b/tools/build/src/util/assert.jam
@@ -0,0 +1,346 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import errors ;
+import modules ;
+
+
+################################################################################
+#
+# Private implementation details.
+#
+################################################################################
+
+# Rule added as a replacement for the regular Jam = operator but which does not
+# ignore trailing empty string elements.
+#
+local rule exact-equal-test ( lhs * : rhs * )
+{
+ local lhs_extended = $(lhs) xxx ;
+ local rhs_extended = $(rhs) xxx ;
+ if $(lhs_extended) = $(rhs_extended)
+ {
+ return true ;
+ }
+}
+
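+# For example (illustrative): plain Jam '=' treats the lists "a" and "a" ""
+# as equal because the trailing empty string is ignored, while
+# [ exact-equal-test a : a "" ] returns nothing (false), since the appended
+# sentinel turns the comparison into "a" "xxx" vs. "a" "" "xxx".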
+
+# Two lists are considered set-equal if they contain the same elements, ignoring
+# duplicates and ordering.
+#
+local rule set-equal-test ( set1 * : set2 * )
+{
+ if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) )
+ {
+ return true ;
+ }
+}
+
+
+################################################################################
+#
+# Public interface.
+#
+################################################################################
+
+# Assert the equality of A and B, ignoring trailing empty string elements.
+#
+rule equal ( a * : b * )
+{
+ if $(a) != $(b)
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a false
+# logical value (is either an empty list or all empty strings).
+#
+rule false ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15)
+ : $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if $(result)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected false result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) :
+ $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert that ELEMENT is present in LIST.
+#
+rule "in" ( element : list * )
+{
+ if ! $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected \"$(element)\" in
+ "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B, ignoring trailing empty string elements.
+#
+rule not-equal ( a * : b * )
+{
+ if $(a) = $(b)
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\"
+ (ignoring trailing empty strings) ;
+ }
+}
+
+
+# Assert that ELEMENT is not present in LIST.
+#
+rule not-in ( element : list * )
+{
+ if $(element) in $(list)
+ {
+ errors.error-skip-frames 3 assertion failure: Did not expect
+ \"$(element)\" in "[" \"$(list)\" "]" ;
+ }
+}
+
+
+# Assert the inequality of A and B as sets.
+#
+rule not-set-equal ( a * : b * )
+{
+ if [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to not be equal as sets ;
+ }
+}
+
+
+# Assert that A and B are not exactly equal, not ignoring trailing empty string
+# elements.
+#
+rule not-exact-equal ( a * : b * )
+{
+ if [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" ;
+ }
+}
+
+
+# Assert that EXPECTED is the result of calling RULE-NAME with the given
+# arguments.
+#
+rule result ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
+ $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if ! [ exact-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
+
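+# Typical use from a module's __test__ rule (an illustrative sketch; 'my-join'
+# is a hypothetical rule, not part of this module):
+#
+#   assert.result a-b-c : my-join a b c : - ;
+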
+
+# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored)
+# to the result of calling RULE-NAME with the given arguments. Note that rules
+# called this way may accept at most 18 parameters.
+#
+rule result-set-equal ( expected * : rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
+ : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
+ $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if ! [ set-equal-test $(result) : $(expected) ]
+ {
+ errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
+ errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
+ $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17)
+ : $(18) : $(19) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
+ \"$(result)\" "]" ;
+ }
+}
+
+
+# Assert the equality of A and B as sets.
+#
+rule set-equal ( a * : b * )
+{
+ if ! [ set-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
+ and "[" \"$(b)\" "]" to be equal as sets ;
+ }
+}
+
+
+# Assert that the result of calling RULE-NAME on the given arguments has a true
+# logical value (is neither an empty list nor all empty strings).
+#
+rule true ( rule-name args * : * )
+{
+ local result ;
+ module [ CALLER_MODULE ]
+ {
+ modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
+ : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) : $(14) : $(15)
+ : $(16) : $(17) : $(18) : $(19) ] ;
+ }
+
+ if ! $(result)
+ {
+ errors.error-skip-frames 3 assertion failure: Expected true result from
+ "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
+ $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) : $(12) : $(13) :
+ $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] "]" ;
+ }
+}
+
+
+# Assert the exact equality of A and B, not ignoring trailing empty string
+# elements.
+#
+rule exact-equal ( a * : b * )
+{
+ if ! [ exact-equal-test $(a) : $(b) ]
+ {
+ errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" ;
+ }
+}
+
+
+# Assert that the given variable is not an empty list.
+#
+rule variable-not-empty ( name )
+{
+ local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ;
+ if ! $(value)-is-not-empty
+ {
+ errors.error-skip-frames 3 assertion failure: Expected variable
+ \"$(name)\" not to be an empty list ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ # Helper rule used to avoid test duplication related to different list
+ # equality test rules.
+ #
+ local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? )
+ {
+ local not-equality-assert = not-$(equality-assert) ;
+
+ # When the given equality test is expected to ignore trailing empty
+ # strings some of the test results should be inverted.
+ local not-equality-assert-i = not-$(equality-assert) ;
+ if $(ignore-trailing-empty-strings)
+ {
+ not-equality-assert-i = $(equality-assert) ;
+ }
+
+ $(equality-assert) : ;
+ $(equality-assert) "" "" : "" "" ;
+ $(not-equality-assert-i) : "" "" ;
+ $(equality-assert) x : x ;
+ $(not-equality-assert) : x ;
+ $(not-equality-assert) "" : x ;
+ $(not-equality-assert) "" "" : x ;
+ $(not-equality-assert-i) x : x "" ;
+ $(equality-assert) x "" : x "" ;
+ $(not-equality-assert) x : "" x ;
+ $(equality-assert) "" x : "" x ;
+
+ $(equality-assert) 1 2 3 : 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 3 2 1 ;
+ $(not-equality-assert) 1 2 3 : 1 5 3 ;
+ $(not-equality-assert) 1 2 3 : 1 "" 3 ;
+ $(not-equality-assert) 1 2 3 : 1 1 2 3 ;
+ $(not-equality-assert) 1 2 3 : 1 2 2 3 ;
+ $(not-equality-assert) 1 2 3 : 5 6 7 ;
+
+ # Extra variables used here just to make sure Boost Jam or Boost Build
+ # do not handle lists with empty strings differently depending on
+ # whether they are literals or stored in variables.
+
+ local empty = ;
+ local empty-strings = "" "" ;
+ local x-empty-strings = x "" "" ;
+ local empty-strings-x = "" "" x ;
+
+ $(equality-assert) : $(empty) ;
+ $(not-equality-assert-i) "" : $(empty) ;
+ $(not-equality-assert-i) "" "" : $(empty) ;
+ $(not-equality-assert-i) : $(empty-strings) ;
+ $(not-equality-assert-i) "" : $(empty-strings) ;
+ $(equality-assert) "" "" : $(empty-strings) ;
+ $(equality-assert) $(empty) : $(empty) ;
+ $(equality-assert) $(empty-strings) : $(empty-strings) ;
+ $(not-equality-assert-i) $(empty) : $(empty-strings) ;
+ $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ;
+ $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ;
+ $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert-i) x : $(x-empty-strings) ;
+ $(not-equality-assert-i) x "" : $(x-empty-strings) ;
+ $(equality-assert) x "" "" : $(x-empty-strings) ;
+ $(not-equality-assert) x : $(empty-strings-x) ;
+ $(not-equality-assert) "" x : $(empty-strings-x) ;
+ $(equality-assert) "" "" x : $(empty-strings-x) ;
+ }
+
+
+ # ---------------
+ # Equality tests.
+ # ---------------
+
+ run-equality-test equal : ignore-trailing-empty-strings ;
+ run-equality-test exact-equal ;
+
+
+ # -------------------------
+ # assert.set-equal() tests.
+ # -------------------------
+
+ set-equal : ;
+ not-set-equal "" "" : ;
+ set-equal "" "" : "" ;
+ set-equal "" "" : "" "" ;
+ set-equal a b c : a b c ;
+ set-equal a b c : b c a ;
+ set-equal a b c a : a b c ;
+ set-equal a b c : a b c a ;
+ not-set-equal a b c : a b c d ;
+ not-set-equal a b c d : a b c ;
+}
diff --git a/tools/build/v2/util/container.jam b/tools/build/src/util/container.jam
index dd4963938e..dd4963938e 100644
--- a/tools/build/v2/util/container.jam
+++ b/tools/build/src/util/container.jam
diff --git a/tools/build/src/util/doc.jam b/tools/build/src/util/doc.jam
new file mode 100644
index 0000000000..702cab4b5a
--- /dev/null
+++ b/tools/build/src/util/doc.jam
@@ -0,0 +1,1009 @@
+# Copyright 2002, 2005 Dave Abrahams
+# Copyright 2002, 2003, 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Documentation system, handles --help requests.
+# It defines rules that attach documentation to modules, rules, and variables.
+# Collects and generates documentation for the various parts of the build
+# system. The documentation is collected from comments integrated into the code.
+
+import modules ;
+import print ;
+import set ;
+import container ;
+import "class" ;
+import sequence ;
+import path ;
+
+
+# The type of output to generate.
+# "console" is formatted text echoed to the console (the default);
+# "text" is formatted text appended to the output file;
+# "html" is HTML output to the file.
+#
+help-output = console ;
+
+
+# The file to output documentation to when generating "text" or "html" help.
+# This is given without an extension, as the extension is determined by the
+# type of output.
+#
+help-output-file = help ;
+
+# Whether to include local rules in help output.
+#
+.option.show-locals ?= ;
+
+# When showing documentation for a module, whether to also automatically
+# generate the detailed docs for each item in the module.
+#
+.option.detailed ?= ;
+
+# Generate debug output as the help is generated and modules are parsed.
+#
+.option.debug ?= ;
+
+# These are all the options that can be enabled or disabled to control the
+# help system in various ways. Options are enabled with
+# '--help-enable-<option>' and disabled with '--help-disable-<option>'.
+#
+.option-description = Help Options ;
+
+# Enable or disable a documentation option.
+#
+local rule set-option (
+ option # The option name.
+ : value ? # Enabled (non-empty), or disabled (empty)
+)
+{
+ .option.$(option) = $(value) ;
+}
+
+
+# Set the type of output.
+#
+local rule set-output ( type )
+{
+ help-output = $(type) ;
+}
+
+
+# Set the output to a file.
+#
+local rule set-output-file ( file )
+{
+ help-output-file = $(file) ;
+}
+
+
+# Extracts the brief comment from a complete comment. The brief comment is the
+# first sentence.
+#
+local rule brief-comment (
+ docs * # The comment documentation.
+)
+{
+ local d = $(docs:J=" ") ;
+ local p = [ MATCH ".*([.])$" : $(d) ] ;
+ if ! $(p) { d = $(d)"." ; }
+ d = $(d)" " ;
+ local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ;
+ local brief = $(m[1]) ;
+ while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ]
+ {
+ m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ;
+ brief += $(m[1]) ;
+ }
+ return $(brief:J="") ;
+}
+
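+# For instance (illustrative): given the comment "Builds the docs. Also
+# installs them.", the rule above returns the brief comment "Builds the docs."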
+
+# Specifies the documentation for the current module.
+#
+local rule set-module-doc (
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).docs = $(docs) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the copyright for the current module.
+#
+local rule set-module-copyright (
+ module-name ? # The name of the module to document.
+ : copyright * # The copyright for the module.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).copy-brief = [ brief-comment $(copyright) ] ;
+ $(module-name).copy-docs = $(copyright) ;
+
+ if ! $(module-name) in $(documented-modules)
+ {
+ documented-modules += $(module-name) ;
+ }
+}
+
+
+# Specifies the documentation for a rule in the current module. If called in the
+# global module, this documents a global rule.
+#
+local rule set-rule-doc (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ is-local ? # Whether the rule is local to the module.
+ : docs * # The documentation for the rule.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).docs = $(docs) ;
+ $(module-name).$(name).is-local = $(is-local) ;
+
+ if ! $(name) in $($(module-name).rules)
+ {
+ $(module-name).rules += $(name) ;
+ }
+}
+
+
+# Specifies a class; this will turn a rule into a class.
+#
+local rule set-class-doc (
+ name # The name of the class.
+ module-name ? # The name of the module to document.
+ : super-name ? # The super class name.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).is-class = true ;
+ $(module-name).$(name).super-name = $(super-name) ;
+ $(module-name).$(name).class-rules =
+ [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ;
+ $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ;
+
+ $(module-name).classes += $(name) ;
+ $(module-name).class-rules += $($(module-name).$(name).class-rules) ;
+ $(module-name).rules =
+ [ set.difference $($(module-name).rules) :
+ $(name) $($(module-name).$(name).class-rules) ] ;
+}
+
+
+# Set the argument call signature of a rule.
+#
+local rule set-rule-arguments-signature (
+ name # The name of the rule.
+ module-name ? # The name of the module to document.
+ : signature * # The arguments signature.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).signature = $(signature) ;
+}
+
+
+# Specifies the documentation for an argument of a rule.
+#
+local rule set-argument-doc (
+ name # The name of the argument.
+ qualifier # Argument syntax qualifier, "*", "+", etc.
+ rule-name # The name of the rule.
+ module-name ? # The optional name of the module.
+ : docs * # The documentation.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ;
+ $(module-name).$(rule-name).args.$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).$(rule-name).args)
+ {
+ $(module-name).$(rule-name).args += $(name) ;
+ }
+}
+
+
+# Specifies the documentation for a variable in the current module. If called in
+# the global module, the global variable is documented.
+#
+local rule set-variable-doc (
+ name # The name of the variable.
+ default # The default value.
+ initial # The initial value.
+ module-name ? # The name of the module to document.
+ : docs * # The documentation for the variable.
+)
+{
+ module-name ?= * ;
+
+ $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
+ $(module-name).$(name).default = $(default) ;
+ $(module-name).$(name).initial = $(initial) ;
+ $(module-name).$(name).docs = $(docs) ;
+
+ if ! $(name) in $($(module-name).variables)
+ {
+ $(module-name).variables += $(name) ;
+ }
+}
+
+
+# Generates a general description of the documentation and help system.
+#
+local rule print-help-top ( )
+{
+ print.section "General command line usage" ;
+
+ print.text " b2 [options] [properties] [targets]
+
+ Options, properties and targets can be specified in any order.
+ " ;
+
+ print.section "Important Options" ;
+
+ print.list-start ;
+ print.list-item "--clean Remove targets instead of building" ;
+ print.list-item "-a Rebuild everything" ;
+ print.list-item "-n Don't execute the commands, only print them" ;
+ print.list-item "-d+2 Show commands as they are executed" ;
+    print.list-item "-d0 Suppress all informational messages" ;
+ print.list-item "-q Stop at first error" ;
+ print.list-item "--reconfigure Rerun all configuration checks" ;
+ print.list-item "--debug-configuration Diagnose configuration" ;
+ print.list-item "--debug-building Report which targets are built with what properties" ;
+ print.list-item "--debug-generator Diagnose generator search/execution" ;
+ print.list-end ;
+
+ print.section "Further Help"
+ The following options can be used to obtain additional documentation.
+ ;
+
+ print.list-start ;
+ print.list-item "--help-options Print more obscure command line options." ;
+ print.list-item "--help-internal Boost.Build implementation details." ;
+ print.list-item "--help-doc-options Implementation details doc formatting." ;
+ print.list-end ;
+}
+
+
+# Generate Jam/Boost.Jam command usage information.
+#
+local rule print-help-usage ( )
+{
+ print.section "Boost.Build Usage"
+ "b2 [ options... ] targets..."
+ ;
+ print.list-start ;
+ print.list-item -a;
+ Build all targets, even if they are current. ;
+ print.list-item -fx;
+ Read '"x"' as the Jamfile for building instead of searching for the
+ Boost.Build system. ;
+ print.list-item -jx;
+ Run up to '"x"' commands concurrently. ;
+ print.list-item -n;
+ Do not execute build commands. Instead print out the commands as they
+ would be executed if building. ;
+ print.list-item -ox;
+ Output the used build commands to file '"x"'. ;
+ print.list-item -q;
+ Quit as soon as a build failure is encountered. Without this option
+ Boost.Jam will continue building as many targets as it can. ;
+ print.list-item -sx=y;
+ Sets a Jam variable '"x"' to the value '"y"', overriding any value that
+ variable would have from the environment. ;
+ print.list-item -tx;
+ Rebuild the target '"x"', even if it is up-to-date. ;
+ print.list-item -v;
+ Display the version of b2. ;
+ print.list-item --x;
+ Any option not explicitly handled by Boost.Build remains available to
+ build scripts using the '"ARGV"' variable. ;
+ print.list-item --abbreviate-paths;
+ Use abbreviated paths for targets. ;
+ print.list-item --hash;
+ Shorten target paths by using an MD5 hash. ;
+ print.list-item -dn;
+ Enables output of diagnostic messages. The debug level '"n"' and all
+ below it are enabled by this option. ;
+ print.list-item -d+n;
+ Enables output of diagnostic messages. Only the output for debug level
+ '"n"' is enabled. ;
+ print.list-end ;
+ print.section "Debug Levels"
+        Each debug level shows a different set of information, with higher
+        levels usually producing more verbose output. The following levels
+        are supported: ;
+ print.list-start ;
+ print.list-item 0;
+ Turn off all diagnostic output. Only errors are reported. ;
+ print.list-item 1;
+ Show the actions taken for building targets, as they are executed. ;
+ print.list-item 2;
+ Show "quiet" actions and display all action text, as they are executed. ;
+ print.list-item 3;
+ Show dependency analysis, and target/source timestamps/paths. ;
+ print.list-item 4;
+ Show arguments of shell invocations. ;
+ print.list-item 5;
+ Show rule invocations and variable expansions. ;
+ print.list-item 6;
+ Show directory/header file/archive scans, and attempts at binding to targets. ;
+ print.list-item 7;
+ Show variable settings. ;
+ print.list-item 8;
+ Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ;
+ print.list-item 9;
+ Show variable manipulation, scanner tokens, and memory usage. ;
+ print.list-item 10;
+ Show execution times for rules. ;
+ print.list-item 11;
+ Show parsing progress of Jamfiles. ;
+ print.list-item 12;
+ Show graph for target dependencies. ;
+ print.list-item 13;
+ Show changes in target status (fate). ;
+ print.list-end ;
+}
+
+# Generates a description of the options controlling the help system. The
+# options are read automatically from all variables of the form ".option.*"
+# defined in the module named by `module-name`.
+#
+local rule print-help-options (
+ module-name
+)
+{
+ local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ;
+ if $(options-to-list)
+ {
+ local option-title = $($(module-name)..option-description.initial) ;
+ if ! $(option-title) || $(option-title) = "(empty)"
+ {
+ option-title = "$(module-name) Options" ;
+ }
+ local option-description = $(option-title)
+ $($(module-name)..option-description.docs) ;
+ print.section $(option-description) ;
+ print.list-start ;
+ for local option in [ sequence.insertion-sort $(options-to-list) ]
+ {
+ local def = disabled ;
+ if $($(module-name)..option.$(option).default) != "(empty)"
+ {
+ def = $($(module-name)..option.$(option).default) ;
+ }
+ print.list-item $(option): $($(module-name)..option.$(option).docs)
+ Default is $(def). ;
+ }
+ print.list-end ;
+ }
+}
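+
+# For illustration only (not part of the original sources): an option listed by
+# the rule above is simply a documented variable in the scanned module whose
+# name starts with ".option.", for example:
+#
+#   # Show local rules and variables in the generated help output.
+#   .option.show-locals ?= ;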
+
+
+# Generate brief documentation for all the known items in the section for a
+# module. Typical sections are "classes", "rules" and "variables".
+#
+local rule print-help-module-section (
+ module # The module name.
+ section # rules or variables.
+ : section-head # The title of the section.
+ section-description * # The detailed description of the section.
+)
+{
+ if $($(module).$(section))
+ {
+ print.section $(section-head) $(section-description) ;
+ print.list-start ;
+ for local item in [ sequence.insertion-sort $($(module).$(section)) ]
+ {
+ local show = ;
+ if ! $($(module).$(item).is-local)
+ {
+ show = yes ;
+ }
+ if $(.option.show-locals)
+ {
+ show = yes ;
+ }
+ if $(show)
+ {
+ print.list-item $(item): $($(module).$(item).brief) ;
+ }
+ }
+ print.list-end ;
+ }
+}
+
+
+# Generate documentation for all possible modules. We attempt to list all known
+# modules together with a brief description of each.
+#
+local rule print-help-all (
+ ignored # Usually the module name, but is ignored here.
+)
+{
+ print.section "Modules"
+ "These are all the known modules. Use --help <module> to get more"
+ "detailed information."
+ ;
+ if $(documented-modules)
+ {
+ print.list-start ;
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print.list-item $(module-name): $($(module-name).brief) ;
+ }
+ print.list-end ;
+ }
+ # The documentation for each module when details are requested.
+ if $(documented-modules) && $(.option.detailed)
+ {
+ for local module-name in [ sequence.insertion-sort $(documented-modules) ]
+ {
+ # The brief docs for each module.
+ print-help-module $(module-name) ;
+ }
+ }
+}
+
+
+# Generate documentation for a module. Basic information about the module is
+# generated.
+#
+local rule print-help-module (
+ module-name # The module to generate docs for.
+)
+{
+ # Print the docs.
+ print.section "Module '$(module-name)'" $($(module-name).docs) ;
+
+ # Print out the documented classes.
+ print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
+ Use --help $(module-name).<class-name> to get more information. ;
+
+ # Print out the documented rules.
+ print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out the documented variables.
+ print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
+ Use --help $(module-name).<variable-name> to get more information. ;
+
+    # Print out all of the same information, but in detailed form.
+ if $(.option.detailed)
+ {
+ print-help-classes $(module-name) ;
+ print-help-rules $(module-name) ;
+ print-help-variables $(module-name) ;
+ }
+}
+
+
+# Generate documentation for a set of rules in a module.
+#
+local rule print-help-rules (
+ module-name # Module of the rules.
+ : name * # Optional list of rules to describe.
+)
+{
+ name ?= $($(module-name).rules) ;
+ if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ]
+ {
+ # Print out the given rules.
+ for local rule-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local)
+ {
+ local signature = $($(module-name).$(rule-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'"
+ $($(module-name).$(rule-name).docs) ;
+ if $($(module-name).$(rule-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(rule-name).args)
+ {
+ print.list-item $(arg-name): $($(module-name).$(rule-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of classes in a module.
+#
+local rule print-help-classes (
+ module-name # Module of the classes.
+ : name * # Optional list of classes to describe.
+)
+{
+ name ?= $($(module-name).classes) ;
+ if [ set.intersection $(name) : $($(module-name).classes) ]
+ {
+ # Print out the given classes.
+ for local class-name in [ sequence.insertion-sort $(name) ]
+ {
+ if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local)
+ {
+ local signature = $($(module-name).$(class-name).signature:J=" ") ;
+ signature ?= "" ;
+ print.section "Class '$(module-name).$(class-name) ( $(signature) )'"
+ $($(module-name).$(class-name).docs)
+ "Inherits from '"$($(module-name).$(class-name).super-name)"'." ;
+ if $($(module-name).$(class-name).args)
+ {
+ print.list-start ;
+ for local arg-name in $($(module-name).$(class-name).args)
+ {
+ print.list-item $(arg-name): $($(module-name).$(class-name).args.$(arg-name).docs) ;
+ }
+ print.list-end ;
+ }
+ }
+
+ # Print out the documented rules of the class.
+ print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
+ Use --help $(module-name).<rule-name> to get more information. ;
+
+ # Print out all the rules if details are requested.
+ if $(.option.detailed)
+ {
+ print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a set of variables in a module.
+#
+local rule print-help-variables (
+ module-name ? # Module of the variables.
+ : name * # Optional list of variables to describe.
+)
+{
+ name ?= $($(module-name).variables) ;
+ if [ set.intersection $(name) : $($(module-name).variables) ]
+ {
+ # Print out the given variables.
+ for local variable-name in [ sequence.insertion-sort $(name) ]
+ {
+ print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ;
+ if $($(module-name).$(variable-name).default) ||
+ $($(module-name).$(variable-name).initial)
+ {
+ print.list-start ;
+ if $($(module-name).$(variable-name).default)
+ {
+ print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ;
+ }
+ if $($(module-name).$(variable-name).initial)
+ {
+ print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ;
+ }
+ print.list-end ;
+ }
+ }
+ }
+}
+
+
+# Generate documentation for a project.
+#
+local rule print-help-project (
+ unused ?
+ : jamfile * # The project Jamfile.
+)
+{
+ if $(jamfile<$(jamfile)>.docs)
+ {
+ # Print the docs.
+ print.section "Project-specific help"
+ Project has jamfile at $(jamfile) ;
+
+ print.lines $(jamfile<$(jamfile)>.docs) "" ;
+ }
+}
+
+
+# Generate documentation for a config file.
+#
+local rule print-help-config (
+ unused ?
+ : type # The type of configuration file user or site.
+ config-file # The configuration Jamfile.
+)
+{
+ if $(jamfile<$(config-file)>.docs)
+ {
+ # Print the docs.
+ print.section "Configuration help"
+ Configuration file at $(config-file) ;
+
+ print.lines $(jamfile<$(config-file)>.docs) "" ;
+ }
+}
+
+
+ws = " " ;
+
+# Extract the text from a block of comments.
+#
+local rule extract-comment (
+ var # The name of the variable to extract from.
+)
+{
+ local comment = ;
+ local line = $($(var)[1]) ;
+ local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ while $(l[1]) && $($(var))
+ {
+ if $(l[2]) { comment += [ MATCH "^[$(ws)]?(.*)$" : $(l[2]) ] ; }
+ else { comment += "" ; }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
+ }
+ return $(comment) ;
+}
+
+
+# Extract a single line of Jam syntax, ignoring any comments.
+#
+local rule extract-syntax (
+ var # The name of the variable to extract from.
+)
+{
+ local syntax = ;
+ local line = $($(var)[1]) ;
+ while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var))
+ {
+ local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ;
+ if $(m)
+ {
+ syntax = $(m) ;
+ }
+ $(var) = $($(var)[2-]) ;
+ line = $($(var)[1]) ;
+ }
+ return $(syntax) ;
+}
+
+
+# Extract the next token; this is either a single Jam construct or a whole
+# comment treated as a single token.
+#
+local rule extract-token (
+ var # The name of the variable to extract from.
+)
+{
+ local parts = ;
+ while ! $(parts)
+ {
+ parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ;
+ if ! $(parts)
+ {
+ $(var) = $($(var)[2-]) ;
+ }
+ }
+ local token = ;
+ if [ MATCH "^(#)" : $(parts[1]) ]
+ {
+ token = $(parts:J=" ") ;
+ $(var) = $($(var)[2-]) ;
+ }
+ else
+ {
+ token = $(parts[1]) ;
+ $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ;
+ }
+ return $(token) ;
+}
+
+
+# Scan for a rule declaration as the next item in the variable.
+#
+local rule scan-rule (
+ syntax ? # The first part of the text which contains the rule declaration.
+ : var # The name of the variable to extract from.
+)
+{
+ local rule-parts =
+ [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ;
+ if $(rule-parts[1])
+ {
+ # Mark as doc for rule.
+ local rule-name = $(rule-parts[2]) ;
+ if $(scope-name)
+ {
+ rule-name = $(scope-name).$(rule-name) ;
+ }
+ local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ;
+ if $(comment-block)
+ {
+ set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ;
+ }
+ # Parse args of rule.
+ $(var) = $(rule-parts[3-]) $($(var)) ;
+ set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ;
+ # Scan within this rules scope.
+ local scope-level = [ extract-token $(var) ] ;
+ local scope-name = $(rule-name) ;
+ while $(scope-level) && $($(var))
+ {
+ local comment-block = [ extract-comment $(var) ] ;
+ local syntax-block = [ extract-syntax $(var) ] ;
+ if [ scan-rule $(syntax-block) : $(var) ]
+ {
+ }
+ else if [ MATCH "^(\\{)" : $(syntax-block) ]
+ {
+ scope-level += "{" ;
+ }
+ else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
+ {
+ scope-level = $(scope-level[2-]) ;
+ }
+ }
+
+ return true ;
+ }
+}
+
+
+# Scan the arguments of a rule.
+#
+local rule scan-rule-arguments (
+ var # The name of the variable to extract from.
+)
+{
+ local arg-syntax = ;
+ local token = [ extract-token $(var) ] ;
+ while $(token) != "(" && $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ local arg-signature = ;
+ while $(token) != ")" && $(token) != "{"
+ {
+ local arg-name = ;
+ local arg-qualifier = " " ;
+ local arg-doc = ;
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ arg-name = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ if [ MATCH "^([\\*\\+\\?])" : $(token) ]
+ {
+ arg-qualifier = $(token) ;
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if $(token) = ":"
+ {
+ arg-signature += $(token) ;
+ token = [ extract-token $(var) ] ;
+ }
+ if [ MATCH "^(#)" : $(token) ]
+ {
+ $(var) = $(token) $($(var)) ;
+ arg-doc = [ extract-comment $(var) ] ;
+ token = [ extract-token $(var) ] ;
+ }
+ set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ;
+ }
+ while $(token) != "{"
+ {
+ token = [ extract-token $(var) ] ;
+ }
+ $(var) = "{" $($(var)) ;
+ arg-signature ?= "" ;
+ return $(arg-signature) ;
+}
+
+
+# Scan for a variable declaration.
+#
+local rule scan-variable (
+ syntax ? # The first part of the text which contains the variable declaration.
+ : var # The name of the variable to extract from.
+)
+{
+    # [1] = name, [2] = assignment operator, [3] = value(s)
+ local var-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ;
+ if $(var-parts)
+ {
+ local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ;
+ local default-value = "" ;
+        local initial-value = "" ;
+ if $(var-parts[2]) = "?="
+ {
+ default-value = $(value) ;
+ default-value ?= "(empty)" ;
+ }
+ else
+ {
+ initial-value = $(value) ;
+ initial-value ?= "(empty)" ;
+ }
+ if $(comment-block)
+ {
+ set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ;
+ }
+ return true ;
+ }
+}
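+
+# For illustration only (a hypothetical fragment of a scanned module): the rule
+# above records a declaration such as the following, together with the comment
+# block preceding it:
+#
+#   # The verbosity level used when generating the documentation.
+#   doc-verbosity ?= 1 ;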
+
+
+# Scan a class declaration.
+#
+local rule scan-class (
+ syntax ? # The syntax text for the class declaration.
+)
+{
+ # [1] = class?, [2] = name, [3] = superclass
+ local class-parts =
+ [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]+:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ;
+ if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class"
+ {
+ set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ;
+ }
+}
+
+
+# Scan a module file for documentation comments. This also invokes any actions
+# assigned to the module. The actions are the rules that do the actual output of
+# the documentation. This rule is invoked as the header scan rule for the module
+# file.
+#
+rule scan-module (
+ target # The module file.
+ : text * # The text in the file, one item per line.
+ : action * # Rule to call to output docs for the module.
+)
+{
+ if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; }
+ local module-name = $(target:B) ;
+ local module-documented = ;
+ local comment-block = ;
+ local syntax-block = ;
+    # This is a hack: we cannot get the last line of a file if it happens to
+    # lack a trailing newline.
+ text += "}" ;
+ while $(text)
+ {
+ comment-block = [ extract-comment text ] ;
+ syntax-block = [ extract-syntax text ] ;
+ if $(.option.debug)
+ {
+ ECHO "HELP:" comment block; '$(comment-block)' ;
+ ECHO "HELP:" syntax block; '$(syntax-block)' ;
+ }
+ if [ scan-rule $(syntax-block) : text ] { }
+ else if [ scan-variable $(syntax-block) : text ] { }
+ else if [ scan-class $(syntax-block) ] { }
+ else if [ MATCH .*([cC]opyright).* : $(comment-block:J=" ") ]
+ {
+            # mark as the copyright notice for the module.
+ set-module-copyright $(module-name) : $(comment-block) ;
+ }
+ else if $(action[1]) in "print-help-project" "print-help-config"
+ && ! $(jamfile<$(target)>.docs)
+ {
+ # special module docs for the project jamfile.
+ jamfile<$(target)>.docs = $(comment-block) ;
+ }
+ else if ! $(module-documented)
+ {
+ # document the module.
+ set-module-doc $(module-name) : $(comment-block) ;
+ module-documented = true ;
+ }
+ }
+ if $(action)
+ {
+ $(action[1]) $(module-name) : $(action[2-]) ;
+ }
+}
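+
+# For illustration only (a hypothetical fragment of a scanned module): the
+# scanner above attaches the comment block immediately preceding a rule,
+# variable or class declaration to that item, with the lines up to the first
+# trailing period forming its brief description, e.g.:
+#
+#   # Compiles the given sources into objects.
+#   #
+#   rule compile ( sources +  # The sources to compile.
+#       : requirements *      # Additional build requirements.
+#   )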
+
+
+# Import scan-module to global scope, so that it is available during header
+# scanning phase.
+#
+IMPORT $(__name__) : scan-module : : doc.scan-module ;
+
+
+# Read in a file using the SHELL builtin and return the individual lines as
+# would be done for header scanning.
+#
+local rule read-file (
+ file # The file to read in.
+)
+{
+ file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ;
+ if ! $(.file<$(file)>.lines)
+ {
+ local content ;
+ switch [ modules.peek : OS ]
+ {
+ case NT :
+ content = [ SHELL "TYPE \"$(file)\"" ] ;
+
+ case * :
+ content = [ SHELL "cat \"$(file)\"" ] ;
+ }
+ local lines ;
+ local nl = "
+" ;
+ local << = "([^$(nl)]*)[$(nl)](.*)" ;
+ local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
+ while $(line+)
+ {
+ lines += $(line+[1]) ;
+ line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
+ }
+ .file<$(file)>.lines = $(lines) ;
+ }
+ return $(.file<$(file)>.lines) ;
+}
+
+
+# Add a scan action to perform to generate the help documentation. The action
+# rule is passed the name of the module as the first argument. The second
+# argument(s) are optional and passed directly as specified here.
+#
+local rule do-scan (
+ modules + # The modules to scan and perform the action on.
+ : action * # The action rule, plus the secondary arguments to pass to the action rule.
+)
+{
+ if $(help-output) = text
+ {
+ print.output $(help-output-file).txt plain ;
+ ALWAYS $(help-output-file).txt ;
+ DEPENDS all : $(help-output-file).txt ;
+ }
+ if $(help-output) = html
+ {
+ print.output $(help-output-file).html html ;
+ ALWAYS $(help-output-file).html ;
+ DEPENDS all : $(help-output-file).html ;
+ }
+ for local module-file in $(modules[1--2])
+ {
+ scan-module $(module-file) : [ read-file $(module-file) ] ;
+ }
+ scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ;
+}
diff --git a/tools/build/src/util/indirect.jam b/tools/build/src/util/indirect.jam
new file mode 100644
index 0000000000..40884da96d
--- /dev/null
+++ b/tools/build/src/util/indirect.jam
@@ -0,0 +1,117 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import modules ;
+import numbers ;
+
+
+# The pattern that indirect rules must match: module%rule
+.pattern = ^([^%]*)%([^%]+)$ ;
+
+
+#
+# Type checking rules.
+#
+local rule indirect-rule ( x )
+{
+ if ! [ MATCH $(.pattern) : $(x) ]
+ {
+ return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
+ }
+}
+
+
+# Make an indirect rule which calls the given rule. If context is supplied it is
+# expected to be the module in which to invoke the rule by the 'call' rule
+# below. Otherwise, the rule will be invoked in the module of this rule's
+# caller.
+#
+rule make ( rulename bound-args * : context ? )
+{
+ context ?= [ CALLER_MODULE ] ;
+ context ?= "" ;
+ return $(context)%$(rulename) $(bound-args) ;
+}
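+
+# Illustrative usage only (assuming a hypothetical rule "do-something" defined
+# in a module "m" from which this is called):
+#
+#   local r = [ indirect.make do-something ] ;  # yields "m%do-something"
+#   indirect.call $(r) arg1 ;                   # invokes m.do-something arg1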
+
+
+# Make an indirect rule which calls the given rule. 'rulename' may be a
+# qualified rule; if so it is returned unchanged. Otherwise, if frames is not
+# supplied, the result will be invoked (by 'call', below) in the module of the
+# caller. Otherwise, frames > 1 specifies additional call frames to back up in
+# order to find the module context.
+#
+rule make-qualified ( rulename bound-args * : frames ? )
+{
+ if [ MATCH $(.pattern) : $(rulename) ]
+ {
+ return $(rulename) $(bound-args) ;
+ }
+ else
+ {
+ frames ?= 1 ;
+ # If the rule name includes a Jamfile module, grab it.
+ local module-context = [ MATCH ^(Jamfile<[^>]*>)\\..* : $(rulename) ] ;
+
+ if ! $(module-context)
+ {
+ # Take the first dot-separated element as module name. This disallows
+ # module names with dots, but allows rule names with dots.
+ module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ;
+ }
+ module-context ?= [ CALLER_MODULE $(frames) ] ;
+ return [ make $(rulename) $(bound-args) : $(module-context) ] ;
+ }
+}
+
+
+# Returns the module name in which the given indirect rule will be invoked.
+#
+rule get-module ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ if ! $(m[1])
+ {
+ m = ;
+ }
+ return $(m[1]) ;
+}
+
+
+# Returns the rulename that will be called when x is invoked.
+#
+rule get-rule ( [indirect-rule] x )
+{
+ local m = [ MATCH $(.pattern) : $(x) ] ;
+ return $(m[2]) ;
+}
+
+
+# Invoke the given indirect-rule.
+#
+rule call ( [indirect-rule] r args * : * )
+{
+ return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args) :
+ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) : $(11) :
+ $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) : $(19) ] ;
+}
+
+
+rule __test__
+{
+ import assert ;
+
+ rule foo-barr! ( x )
+ {
+ assert.equal $(x) : x ;
+ }
+
+ assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
+ assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
+
+ call [ make foo-barr! ] x ;
+ call [ make foo-barr! x ] ;
+ call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
+}
diff --git a/tools/build/v2/util/indirect.py b/tools/build/src/util/indirect.py
index 78fa89946b..78fa89946b 100644
--- a/tools/build/v2/util/indirect.py
+++ b/tools/build/src/util/indirect.py
diff --git a/tools/build/v2/util/logger.py b/tools/build/src/util/logger.py
index de6521290a..de6521290a 100644
--- a/tools/build/v2/util/logger.py
+++ b/tools/build/src/util/logger.py
diff --git a/tools/build/v2/util/numbers.jam b/tools/build/src/util/numbers.jam
index 665347d318..665347d318 100644
--- a/tools/build/v2/util/numbers.jam
+++ b/tools/build/src/util/numbers.jam
diff --git a/tools/build/v2/util/option.jam b/tools/build/src/util/option.jam
index f6dc375223..f6dc375223 100644
--- a/tools/build/v2/util/option.jam
+++ b/tools/build/src/util/option.jam
diff --git a/tools/build/v2/util/option.py b/tools/build/src/util/option.py
index 47d6abdff6..47d6abdff6 100644
--- a/tools/build/v2/util/option.py
+++ b/tools/build/src/util/option.py
diff --git a/tools/build/v2/util/order.jam b/tools/build/src/util/order.jam
index a74fc8c849..a74fc8c849 100644
--- a/tools/build/v2/util/order.jam
+++ b/tools/build/src/util/order.jam
diff --git a/tools/build/v2/util/order.py b/tools/build/src/util/order.py
index 4e67b3f1a1..4e67b3f1a1 100644
--- a/tools/build/v2/util/order.py
+++ b/tools/build/src/util/order.py
diff --git a/tools/build/v2/util/os.jam b/tools/build/src/util/os.jam
index daef27f77f..daef27f77f 100644
--- a/tools/build/v2/util/os.jam
+++ b/tools/build/src/util/os.jam
diff --git a/tools/build/v2/util/os_j.py b/tools/build/src/util/os_j.py
index f44cca6201..f44cca6201 100644
--- a/tools/build/v2/util/os_j.py
+++ b/tools/build/src/util/os_j.py
diff --git a/tools/build/src/util/path.jam b/tools/build/src/util/path.jam
new file mode 100644
index 0000000000..545d83c84a
--- /dev/null
+++ b/tools/build/src/util/path.jam
@@ -0,0 +1,910 @@
+# Copyright 2002-2006. Vladimir Prus
+# Copyright 2003-2004. Dave Abrahams
+# Copyright 2003-2006. Rene Rivera
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Performs various path manipulations. Paths are always in a 'normalized'
+# representation. In it, a path may be either:
+#
+# - '.', or
+#
+# - ['/'] [ ( '..' '/' )* (token '/')* token ]
+#
+# In plain English, a path can be rooted, '..' elements are allowed only at the
+# beginning, and it never ends in a slash, except for the path consisting of a
+# slash only.
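+#
+# For example (illustrative), "foo/bar", "../../baz", "/usr/include" and "." are
+# all in normalized form, while "foo/bar/" and "foo/../bar" are not.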
+
+import modules ;
+import regex ;
+import sequence ;
+import set ;
+
+
+os = [ modules.peek : OS ] ;
+if [ modules.peek : UNIX ]
+{
+ local uname = [ modules.peek : JAMUNAME ] ;
+ switch $(uname)
+ {
+ case CYGWIN* : os = CYGWIN ;
+ case * : os = UNIX ;
+ }
+}
+
+
+# Converts the native path into normalized form.
+#
+rule make ( native )
+{
+ return [ make-$(os) $(native) ] ;
+}
+
+
+# Builds native representation of the path.
+#
+rule native ( path )
+{
+ return [ native-$(os) $(path) ] ;
+}
+
+
+# Tests if a path is rooted.
+#
+rule is-rooted ( path )
+{
+ return [ MATCH "^(/)" : $(path) ] ;
+}
+
+
+# Tests if a path has a parent.
+#
+rule has-parent ( path )
+{
+ if $(path) != /
+ {
+ return 1 ;
+ }
+ else
+ {
+ return ;
+ }
+}
+
+
+# Returns the path without any directory components.
+#
+rule basename ( path )
+{
+ return [ MATCH "([^/]+)$" : $(path) ] ;
+}
+
+
+# Returns parent directory of the path. If no parent exists, error is issued.
+#
+rule parent ( path )
+{
+ if [ has-parent $(path) ]
+ {
+ if $(path) = .
+ {
+ return .. ;
+ }
+ else
+ {
+ # Strip everything at the end of path up to and including the last
+ # slash.
+ local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
+
+ # Did we strip what we shouldn't?
+ if $(result[2]) = ".."
+ {
+ return $(path)/.. ;
+ }
+ else
+ {
+ if ! $(result[1])
+ {
+ if [ is-rooted $(path) ]
+ {
+ result = / ;
+ }
+ else
+ {
+ result = . ;
+ }
+ }
+ return $(result[1]) ;
+ }
+ }
+ }
+ else
+ {
+ import errors ;
+ errors.error "Path '$(path)' has no parent" ;
+ }
+}
+
+
+# Returns path2 such that "[ join path path2 ] = .". The path may not contain
+# ".." element or be rooted.
+#
+rule reverse ( path )
+{
+ if $(path) = .
+ {
+ return $(path) ;
+ }
+ else
+ {
+ local tokens = [ regex.split $(path) / ] ;
+ local tokens2 ;
+ for local i in $(tokens)
+ {
+ tokens2 += .. ;
+ }
+ return [ sequence.join $(tokens2) : / ] ;
+ }
+}
+
+
+# Concatenates the passed path elements. Generates an error if any element other
+# than the first one is rooted. Skips any empty or undefined path elements.
+#
+rule join ( elements + )
+{
+ if ! $(elements[2-])
+ {
+ return $(elements[1]) ;
+ }
+ else
+ {
+ for local e in $(elements[2-])
+ {
+ if [ is-rooted $(e) ]
+ {
+ import errors ;
+ errors.error only the first element may be rooted ;
+ }
+ }
+ return [ NORMALIZE_PATH "$(elements)" ] ;
+ }
+}
+
+
+# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged.
+#
+rule root ( path root )
+{
+ if [ is-rooted $(path) ]
+ {
+ return $(path) ;
+ }
+ else
+ {
+ return [ join $(root) $(path) ] ;
+ }
+}
+
+
+# Returns the current working directory.
+#
+rule pwd ( )
+{
+ if ! $(.pwd)
+ {
+ .pwd = [ make [ PWD ] ] ;
+ }
+ return $(.pwd) ;
+}
+
+
+# Returns the list of files matching the given pattern in the specified
+# directory. Both directories and patterns are supplied as portable paths. Each
+# pattern should be a non-absolute path and can not contain "." or ".."
+# elements. Each slash-separated element of a pattern may contain the following
+# special characters:
+# - '?', which matches any single character
+# - '*', which matches an arbitrary number of characters.
+# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and
+# only if e1 matches p1, e2 matches p2 and so on.
+#
+# For example:
+# [ glob . : *.cpp ]
+# [ glob . : */build/Jamfile ]
+#
+rule glob ( dirs * : patterns + : exclude-patterns * )
+{
+ local result ;
+ local real-patterns ;
+ local real-exclude-patterns ;
+ for local d in $(dirs)
+ {
+ for local p in $(patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-patterns += [ path.native $(pattern) ] ;
+ }
+
+ for local p in $(exclude-patterns)
+ {
+ local pattern = [ path.root $(p) $(d) ] ;
+ real-exclude-patterns += [ path.native $(pattern) ] ;
+ }
+ }
+
+ local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
+ inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
+ local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
+ exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
+
+ return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ]
+ ;
+}
+
+
+# Recursive version of GLOB. Builds the glob of files while also searching in
+# the subdirectories of the given roots. An optional set of exclusion patterns
+# will filter out the matching entries from the result. The exclusions also
+# apply to the subdirectory scanning, such that directories that match the
+# exclusion patterns will not be searched.
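+#
+# For example (illustrative):
+#
+#   [ glob-tree src : *.cpp *.hpp : *test* ]
+#
+# collects all .cpp and .hpp files anywhere under "src" while skipping any file
+# or directory whose name matches *test*.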
+#
+rule glob-tree ( roots * : patterns + : exclude-patterns * )
+{
+ return [ sequence.transform path.make : [ .glob-tree [ sequence.transform
+ path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ;
+}
+
+
+local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
+{
+ local excluded ;
+ if $(exclude-patterns)
+ {
+ excluded = [ GLOB $(roots) : $(exclude-patterns) ] ;
+ }
+ local result = [ set.difference [ GLOB $(roots) : $(patterns) ] :
+ $(excluded) ] ;
+ local subdirs ;
+ for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ]
+ {
+ if ! ( $(d:D=) in . .. ) && ! [ CHECK_IF_FILE $(d) ]
+ {
+ subdirs += $(d) ;
+ }
+ }
+ if $(subdirs)
+ {
+ result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ]
+ ;
+ }
+ return $(result) ;
+}
+
+
+# Returns true if the specified file exists.
+#
+rule exists ( file )
+{
+ return [ path.glob $(file:D) : $(file:D=) ] ;
+}
+NATIVE_RULE path : exists ;
+
+
+# Finds the absolute name of the path and returns the list of all the parents,
+# starting with the immediate one. Parents are returned as relative names. If
+# 'upper_limit' is specified, directories above it will be pruned.
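+#
+# For example (values taken from the tests below):
+#
+#   [ all-parents "foo/Jamfile" : "" : "/home/ghost/build" ]
+#
+# returns "foo . .. ../.. ../../.." .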
+#
+rule all-parents ( path : upper_limit ? : cwd ? )
+{
+ cwd ?= [ pwd ] ;
+ local path_ele = [ regex.split [ root $(path) $(cwd) ] / ] ;
+
+ if ! $(upper_limit)
+ {
+ upper_limit = / ;
+ }
+ local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] / ] ;
+
+ # Leave only elements in 'path_ele' below 'upper_ele'.
+ while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) )
+ {
+ upper_ele = $(upper_ele[2-]) ;
+ path_ele = $(path_ele[2-]) ;
+ }
+
+ # Have all upper elements been removed ?
+ if $(upper_ele)
+ {
+ import errors ;
+ errors.error "$(upper_limit) is not prefix of $(path)" ;
+ }
+
+    # Create the relative paths to the parents, one per element in 'path_ele'.
+ local result ;
+ for local i in $(path_ele)
+ {
+ path = [ parent $(path) ] ;
+ result += $(path) ;
+ }
+ return $(result) ;
+}
+
+
+# Search for 'pattern' in parent directories of 'dir', up to and including
+# 'upper_limit', if it is specified, or up to the filesystem root otherwise.
+#
+rule glob-in-parents ( dir : patterns + : upper-limit ? )
+{
+ local result ;
+ local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
+
+ while $(parent-dirs) && ! $(result)
+ {
+ result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
+ parent-dirs = $(parent-dirs[2-]) ;
+ }
+ return $(result) ;
+}
+
+
+# Assuming 'child' is a subdirectory of 'parent', return the relative path from
+# 'parent' to 'child'.
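+#
+# For example (values taken from the tests below):
+#
+#   [ relative "a/b/c/d" "a/b" ]   # returns "c/d"
+#   [ relative "foo" "." ]         # returns "foo"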
+#
+rule relative ( child parent : no-error ? )
+{
+ local not-a-child ;
+ if $(parent) = "."
+ {
+ return $(child) ;
+ }
+ else
+ {
+ local split1 = [ regex.split $(parent) / ] ;
+ local split2 = [ regex.split $(child) / ] ;
+
+ while $(split1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ not-a-child = true ;
+ split1 = ;
+ }
+ }
+ if $(split2)
+ {
+ if $(not-a-child)
+ {
+ if $(no-error)
+ {
+ return not-a-child ;
+ }
+ else
+ {
+ import errors ;
+ errors.error $(child) is not a subdir of $(parent) ;
+ }
+ }
+ else
+ {
+ return [ join $(split2) ] ;
+ }
+ }
+ else
+ {
+ return "." ;
+ }
+ }
+}
+
+
+# Returns the minimal path to path2 that is relative to path1.
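+#
+# For example (illustrative):
+#
+#   [ relative-to "a/b" "a/c" ]   # returns "../c"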
+#
+rule relative-to ( path1 path2 )
+{
+ local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
+ local split1 = [ regex.split $(path1) / ] ;
+ local split2 = [ regex.split $(path2) / ] ;
+
+ while $(split1) && $(root_1)
+ {
+ if $(split1[1]) = $(split2[1])
+ {
+ root_1 = $(root_1[2-]) ;
+ split1 = $(split1[2-]) ;
+ split2 = $(split2[2-]) ;
+ }
+ else
+ {
+ split1 = ;
+ }
+ }
+ return [ join . $(root_1) $(split2) ] ;
+}
+
+
+# Returns the list of paths used by the operating system for looking up
+# programs.
+#
+rule programs-path ( )
+{
+ local result ;
+ local raw = [ modules.peek : PATH Path path ] ;
+ for local p in $(raw)
+ {
+ if $(p)
+ {
+ result += [ path.make $(p) ] ;
+ }
+ }
+ return $(result) ;
+}
+
+
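+# Creates the given directory, also creating any missing parent directories
+# along the way.
+#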
+rule makedirs ( path )
+{
+ local result = true ;
+ local native = [ native $(path) ] ;
+ if ! [ exists $(native) ]
+ {
+ if [ makedirs [ parent $(path) ] ]
+ {
+ if ! [ MAKEDIR $(native) ]
+ {
+ import errors ;
+ errors.error "Could not create directory '$(path)'" ;
+ result = ;
+ }
+ }
+ }
+ return $(result) ;
+}
+
+
+# Converts native Windows paths into our internal canonical path representation.
+# Supports 'invalid' paths containing multiple successive path separator
+# characters.
+#
+# TODO: Check and if needed add support for Windows 'X:file' path format where
+# the file is located in the current folder on drive X.
+#
+rule make-NT ( native )
+{
+ local result = [ NORMALIZE_PATH $(native) ] ;
+
+ # We need to add an extra '/' in front in case this is a rooted Windows path
+ # starting with a drive letter and not a path separator character since the
+ # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter
+ # and treats it as a regular folder name.
+ if [ regex.match "(^.:)" : $(native) ]
+ {
+ result = /$(result) ;
+ }
+
+ return $(result) ;
+}
+
+
+rule native-NT ( path )
+{
+ local remove-slash = [ MATCH "^/(.:.*)" : $(path) ] ;
+ if $(remove-slash)
+ {
+ path = $(remove-slash) ;
+ }
+ return [ regex.replace $(path) / \\ ] ;
+}
+
+
+rule make-UNIX ( native )
+{
+    # VP: I have no idea how 'native' can be empty here! But it can!
+ if ! $(native)
+ {
+ import errors ;
+ errors.error "Empty path passed to 'make-UNIX'" ;
+ }
+ else
+ {
+ return [ NORMALIZE_PATH $(native:T) ] ;
+ }
+}
+
+
+rule native-UNIX ( path )
+{
+ return $(path) ;
+}
+
+
+rule make-CYGWIN ( path )
+{
+ return [ make-NT $(path) ] ;
+}
+
+
+rule native-CYGWIN ( path )
+{
+ local result = $(path) ;
+ if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path.
+ {
+ result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'.
+ }
+ return [ native-UNIX $(result) ] ;
+}
+
+
+# split-path-VMS: splits an input native path into device, dir and file parts
+# (each part is optional).
+#
+# example:
+#
+# dev:[dir]file.c => dev: [dir] file.c
+#
+rule split-path-VMS ( native )
+{
+ local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native)
+ ] ;
+ local device = $(matches[1]) ;
+ local dir = $(matches[2]) ;
+ local file = $(matches[3]) ;
+
+ return $(device) $(dir) $(file) ;
+}
+
+
+# Converts a native VMS path into a portable path spec.
+#
+# Does not handle current-device absolute paths such as "[dir]File.c" as it is
+# not clear how to represent them in the portable path notation.
+#
+# Adds a trailing dot (".") to the file part if no extension is present (helps
+# when converting it back into native path).
+#
+rule make-VMS ( native )
+{
+ if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
+ {
+ import errors ;
+ errors.error "Can't handle default-device absolute paths: " $(native) ;
+ }
+
+ local parts = [ split-path-VMS $(native) ] ;
+ local device = $(parts[1]) ;
+ local dir = $(parts[2]) ;
+ local file = $(parts[3]) ;
+ local elems ;
+
+ if $(device)
+ {
+ #
+ # rooted
+ #
+ elems = /$(device) ;
+ }
+
+ if $(dir) = "[]"
+ {
+ #
+ # Special case: current directory
+ #
+ elems = $(elems) "." ;
+ }
+ else if $(dir)
+ {
+ dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
+ local dir_parts = [ regex.split $(dir) \\. ] ;
+
+ if $(dir_parts[1]) = ""
+ {
+ #
+ # Relative path
+ #
+ dir_parts = $(dir_parts[2--1]) ;
+ }
+
+ #
+ # replace "parent-directory" parts (- => ..)
+ #
+ dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
+
+ elems = $(elems) $(dir_parts) ;
+ }
+
+ if $(file)
+ {
+ if ! [ MATCH (\\.) : $(file) ]
+ {
+ #
+ # Always add "." to end of non-extension file.
+ #
+ file = $(file). ;
+ }
+ elems = $(elems) $(file) ;
+ }
+
+ local portable = [ path.join $(elems) ] ;
+
+ return $(portable) ;
+}
+
+
+# Converts a portable path spec into a native VMS path.
+#
+# Relies on having at least one dot (".") included in the file name to be able
+# to differentiate it from the directory part.
+#
+rule native-VMS ( path )
+{
+ local device = "" ;
+ local dir = $(path) ;
+ local file = "" ;
+ local native ;
+ local split ;
+
+ #
+ # Has device ?
+ #
+ if [ is-rooted $(dir) ]
+ {
+ split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
+ device = $(split[1]) ;
+ dir = $(split[2]) ;
+ }
+
+ #
+ # Has file ?
+ #
+ # This is no exact science, just guess work:
+ #
+ # If the last part of the current path spec includes some chars, followed by
+ # a dot, optionally followed by more chars - then it is a file (keep your
+ # fingers crossed).
+ #
+ split = [ regex.split $(dir) / ] ;
+ local maybe_file = $(split[-1]) ;
+
+ if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
+ {
+ file = $(maybe_file) ;
+ dir = [ sequence.join $(split[1--2]) : / ] ;
+ }
+
+ #
+ # Has dir spec ?
+ #
+ if $(dir) = "."
+ {
+ dir = "[]" ;
+ }
+ else if $(dir)
+ {
+ dir = [ regex.replace $(dir) \\.\\. - ] ;
+ dir = [ regex.replace $(dir) / . ] ;
+
+ if $(device) = ""
+ {
+ #
+ # Relative directory
+ #
+ dir = "."$(dir) ;
+ }
+ dir = "["$(dir)"]" ;
+ }
+
+ native = [ sequence.join $(device) $(dir) $(file) ] ;
+
+ return $(native) ;
+}
+
+# Remove one level of indirection
+IMPORT $(__name__) : make-$(os) native-$(os) : $(__name__) : make native ;
+EXPORT $(__name__) : make native ;
+
+rule __test__ ( )
+{
+ import assert ;
+ import errors : try catch ;
+
+ assert.true is-rooted "/" ;
+ assert.true is-rooted "/foo" ;
+ assert.true is-rooted "/foo/bar" ;
+ assert.result : is-rooted "." ;
+ assert.result : is-rooted "foo" ;
+ assert.result : is-rooted "foo/bar" ;
+
+ assert.true has-parent "foo" ;
+ assert.true has-parent "foo/bar" ;
+ assert.true has-parent "." ;
+ assert.result : has-parent "/" ;
+
+ assert.result "." : basename "." ;
+ assert.result ".." : basename ".." ;
+ assert.result "foo" : basename "foo" ;
+ assert.result "foo" : basename "bar/foo" ;
+ assert.result "foo" : basename "gaz/bar/foo" ;
+ assert.result "foo" : basename "/gaz/bar/foo" ;
+
+ assert.result "." : parent "foo" ;
+ assert.result "/" : parent "/foo" ;
+ assert.result "foo/bar" : parent "foo/bar/giz" ;
+ assert.result ".." : parent "." ;
+ assert.result ".." : parent "../foo" ;
+ assert.result "../../foo" : parent "../../foo/bar" ;
+
+ assert.result "." : reverse "." ;
+ assert.result ".." : reverse "foo" ;
+ assert.result "../../.." : reverse "foo/bar/giz" ;
+
+ assert.result "foo" : join "foo" ;
+ assert.result "/foo" : join "/" "foo" ;
+ assert.result "foo/bar" : join "foo" "bar" ;
+ assert.result "foo/bar" : join "foo/giz" "../bar" ;
+ assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
+ assert.result ".." : join "." ".." ;
+ assert.result ".." : join "foo" "../.." ;
+ assert.result "../.." : join "../foo" "../.." ;
+ assert.result "/foo" : join "/bar" "../foo" ;
+ assert.result "foo/giz" : join "foo/giz" "." ;
+ assert.result "." : join lib2 ".." ;
+ assert.result "/" : join "/a" ".." ;
+
+ assert.result /a/b : join /a/b/c .. ;
+
+ assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
+ assert.result "giz" : join "foo" ".." "giz" ;
+ assert.result "foo/giz" : join "foo" "." "giz" ;
+
+ try ;
+ {
+ join "a" "/b" ;
+ }
+ catch only first element may be rooted ;
+
+ local CWD = "/home/ghost/build" ;
+ assert.result : all-parents . : . : $(CWD) ;
+ assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
+ assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" :
+ $(CWD) ;
+ assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : ""
+ : $(CWD) ;
+
+ local CWD = "/home/ghost" ;
+ assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
+ assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
+
+ assert.result "c/d" : relative "a/b/c/d" "a/b" ;
+ assert.result "foo" : relative "foo" "." ;
+
+ local save-os = [ modules.peek path : os ] ;
+ modules.poke path : os : NT ;
+
+ assert.result "foo/bar/giz" : make-NT "foo/bar/giz" ;
+ assert.result "foo/bar/giz" : make-NT "foo\\bar\\giz" ;
+ assert.result "foo" : make-NT "foo/" ;
+ assert.result "foo" : make-NT "foo\\" ;
+ assert.result "foo" : make-NT "foo/." ;
+ assert.result "foo" : make-NT "foo/bar/.." ;
+ assert.result "foo" : make-NT "foo/bar/../" ;
+ assert.result "foo" : make-NT "foo/bar/..\\" ;
+ assert.result "foo/bar" : make-NT "foo/././././bar" ;
+ assert.result "/foo" : make-NT "\\foo" ;
+ assert.result "/D:/My Documents" : make-NT "D:\\My Documents" ;
+ assert.result "/c:/boost/tools/build/new/project.jam" : make-NT
+ "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
+
+ # Test processing 'invalid' paths containing multiple successive path
+ # separators.
+ assert.result "foo" : make-NT "foo//" ;
+ assert.result "foo" : make-NT "foo///" ;
+ assert.result "foo" : make-NT "foo\\\\" ;
+ assert.result "foo" : make-NT "foo\\\\\\" ;
+ assert.result "/foo" : make-NT "//foo" ;
+ assert.result "/foo" : make-NT "///foo" ;
+ assert.result "/foo" : make-NT "\\\\foo" ;
+ assert.result "/foo" : make-NT "\\\\\\foo" ;
+ assert.result "/foo" : make-NT "\\/\\/foo" ;
+ assert.result "foo/bar" : make-NT "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ;
+ assert.result "foo" : make-NT "foo/bar//.." ;
+ assert.result "foo/bar" : make-NT "foo/bar/giz//.." ;
+ assert.result "foo/giz" : make-NT
+ "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ;
+ assert.result "../../../foo" : make-NT "..///.//..///.//..////foo///" ;
+
+ # Test processing 'invalid' rooted paths with too many '..' path elements
+ # that would place them before the root.
+ assert.result : make-NT "/.." ;
+ assert.result : make-NT "/../" ;
+ assert.result : make-NT "/../." ;
+ assert.result : make-NT "/.././" ;
+ assert.result : make-NT "/foo/../bar/giz/.././././../../." ;
+ assert.result : make-NT "/foo/../bar/giz/.././././../.././" ;
+ assert.result : make-NT "//foo/../bar/giz/.././././../../." ;
+ assert.result : make-NT "//foo/../bar/giz/.././././../.././" ;
+ assert.result : make-NT "\\\\foo/../bar/giz/.././././../../." ;
+ assert.result : make-NT "\\\\foo/../bar/giz/.././././../.././" ;
+ assert.result : make-NT "/..///.//..///.//..////foo///" ;
+
+ assert.result "foo\\bar\\giz" : native-NT "foo/bar/giz" ;
+ assert.result "foo" : native-NT "foo" ;
+ assert.result "\\foo" : native-NT "/foo" ;
+ assert.result "D:\\My Documents\\Work" : native-NT "/D:/My Documents/Work" ;
+
+ modules.poke path : os : UNIX ;
+
+ assert.result "foo/bar/giz" : make-UNIX "foo/bar/giz" ;
+ assert.result "/sub1" : make-UNIX "/sub1/." ;
+ assert.result "/sub1" : make-UNIX "/sub1/sub2/.." ;
+ assert.result "sub1" : make-UNIX "sub1/." ;
+ assert.result "sub1" : make-UNIX "sub1/sub2/.." ;
+ assert.result "/foo/bar" : native-UNIX "/foo/bar" ;
+
+ modules.poke path : os : VMS ;
+
+ #
+ # Do not really need to poke os before these
+ #
+ assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
+ assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
+ assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+ assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+ assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
+ assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
+ assert.result "" "" "file" : split-path-VMS "file" ;
+ assert.result "" "" "" : split-path-VMS "" ;
+
+ #
+ # Special case: current directory
+ #
+ assert.result "" "[]" "" : split-path-VMS "[]" ;
+ assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
+ assert.result "" "[]" "file" : split-path-VMS "[]file" ;
+ assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
+
+ #
+ # Make portable paths
+ #
+ assert.result "/disk:" : make-VMS "disk:" ;
+ assert.result "foo/bar/giz" : make-VMS "[.foo.bar.giz]" ;
+ assert.result "foo" : make-VMS "[.foo]" ;
+ assert.result "foo" : make-VMS "[.foo.bar.-]" ;
+ assert.result ".." : make-VMS "[.-]" ;
+ assert.result ".." : make-VMS "[-]" ;
+ assert.result "." : make-VMS "[]" ;
+ assert.result "giz.h" : make-VMS "giz.h" ;
+ assert.result "foo/bar/giz.h" : make-VMS "[.foo.bar]giz.h" ;
+ assert.result "/disk:/my_docs" : make-VMS "disk:[my_docs]" ;
+ assert.result "/disk:/boost/tools/build/new/project.jam" : make-VMS
+ "disk:[boost.tools.build.test.-.new]project.jam" ;
+
+ #
+ # Special case (adds '.' to end of file w/o extension to disambiguate from
+ # directory in portable path spec)
+ #
+ assert.result "Jamfile." : make-VMS "Jamfile" ;
+ assert.result "dir/Jamfile." : make-VMS "[.dir]Jamfile" ;
+ assert.result "/disk:/dir/Jamfile." : make-VMS "disk:[dir]Jamfile" ;
+
+ #
+ # Make native paths
+ #
+ assert.result "disk:" : native-VMS "/disk:" ;
+ assert.result "[.foo.bar.giz]" : native-VMS "foo/bar/giz" ;
+ assert.result "[.foo]" : native-VMS "foo" ;
+ assert.result "[.-]" : native-VMS ".." ;
+ assert.result "[.foo.-]" : native-VMS "foo/.." ;
+ assert.result "[]" : native-VMS "." ;
+ assert.result "disk:[my_docs.work]" : native-VMS "/disk:/my_docs/work" ;
+ assert.result "giz.h" : native-VMS "giz.h" ;
+ assert.result "disk:Jamfile." : native-VMS "/disk:Jamfile." ;
+ assert.result "disk:[my_docs.work]Jamfile." : native-VMS
+ "/disk:/my_docs/work/Jamfile." ;
+
+ modules.poke path : os : $(save-os) ;
+}
diff --git a/tools/build/src/util/path.py b/tools/build/src/util/path.py
new file mode 100644
index 0000000000..d602598c97
--- /dev/null
+++ b/tools/build/src/util/path.py
@@ -0,0 +1,936 @@
+# Status: this module is ported on demand by whoever needs something
+# from it. Functionality that is not needed by the Python port will
+# be dropped.
+
+# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+# Performs various path manipulations. Paths are always in a 'normalized'
+# representation. In it, a path may be either:
+#
+# - '.', or
+#
+# - ['/'] [ ( '..' '/' )* (token '/')* token ]
+#
+# In plain English, a path can be rooted, '..' elements are allowed only
+# at the beginning, and it never ends in a slash, except for the path
+# consisting of a slash only.
+
+import os.path
+from utility import to_seq
+from glob import glob as builtin_glob
+
+from b2.util import bjam_signature
+
+@bjam_signature((["path", "root"],))
+def root (path, root):
+ """ If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
+ """
+ if os.path.isabs (path):
+ return path
+ else:
+ return os.path.join (root, path)
+
+@bjam_signature((["native"],))
+def make (native):
+ """ Converts the native path into normalized form.
+ """
+ # TODO: make os selection here.
+ return make_UNIX (native)
+
+def make_UNIX (native):
+
+    # VP: I have no idea how 'native' can be empty here! But it can!
+ assert (native)
+
+ return os.path.normpath (native)
+
+@bjam_signature((["path"],))
+def native (path):
+ """ Builds a native representation of the path.
+ """
+ # TODO: make os selection here.
+ return native_UNIX (path)
+
+def native_UNIX (path):
+ return path
+
+
+def pwd ():
+ """ Returns the current working directory.
+
+    TODO: is it a good idea to use the current dir? Some use-cases
+    may not allow us to depend on the current dir.
+ """
+ return make (os.getcwd ())
+
+def is_rooted (path):
+ """ Tests if a path is rooted.
+ """
+ return path and path [0] == '/'
+
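+# Illustrative usage of the helpers above (not part of the original module):
+#
+#   make('src//lib/')           # -> 'src/lib'
+#   root('util', '/opt/build')  # -> '/opt/build/util'
+#   is_rooted('/opt/build')     # -> True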
+
+###################################################################
+# Still to port.
+# Original lines are prefixed with "# "
+#
+# # Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
+# # distribute this software is granted provided this copyright notice appears in
+# # all copies. This software is provided "as is" without express or implied
+# # warranty, and with no claim as to its suitability for any purpose.
+#
+# # Performs various path manipulations. Path are always in a 'normilized'
+# # representation. In it, a path may be either:
+# #
+# # - '.', or
+# #
+# # - ['/'] [ ( '..' '/' )* (token '/')* token ]
+# #
+# # In plain english, path can be rooted, '..' elements are allowed only
+# # at the beginning, and it never ends in slash, except for path consisting
+# # of slash only.
+#
+# import modules ;
+# import sequence ;
+# import regex ;
+# import errors : error ;
+#
+#
+# os = [ modules.peek : OS ] ;
+# if [ modules.peek : UNIX ]
+# {
+# local uname = [ modules.peek : JAMUNAME ] ;
+# switch $(uname)
+# {
+# case CYGWIN* :
+# os = CYGWIN ;
+#
+# case * :
+# os = UNIX ;
+# }
+# }
+#
+# #
+# # Tests if a path is rooted.
+# #
+# rule is-rooted ( path )
+# {
+# return [ MATCH "^(/)" : $(path) ] ;
+# }
+#
+# #
+# # Tests if a path has a parent.
+# #
+# rule has-parent ( path )
+# {
+# if $(path) != / {
+# return 1 ;
+# } else {
+# return ;
+# }
+# }
+#
+# #
+# # Returns the path without any directory components.
+# #
+# rule basename ( path )
+# {
+# return [ MATCH "([^/]+)$" : $(path) ] ;
+# }
+#
+# #
+# # Returns parent directory of the path. If no parent exists, error is issued.
+# #
+# rule parent ( path )
+# {
+# if [ has-parent $(path) ] {
+#
+# if $(path) = . {
+# return .. ;
+# } else {
+#
+# # Strip everything at the end of path up to and including
+# # the last slash
+# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
+#
+# # Did we strip what we shouldn't?
+# if $(result[2]) = ".." {
+# return $(path)/.. ;
+# } else {
+# if ! $(result[1]) {
+# if [ is-rooted $(path) ] {
+# result = / ;
+# } else {
+# result = . ;
+# }
+# }
+# return $(result[1]) ;
+# }
+# }
+# } else {
+# error "Path '$(path)' has no parent" ;
+# }
+# }
+#
+# #
+# # Returns path2 such that "[ join path path2 ] = .".
+# # The path may not contain ".." element or be rooted.
+# #
+# rule reverse ( path )
+# {
+# if $(path) = .
+# {
+# return $(path) ;
+# }
+# else
+# {
+# local tokens = [ regex.split $(path) "/" ] ;
+# local tokens2 ;
+# for local i in $(tokens) {
+# tokens2 += .. ;
+# }
+# return [ sequence.join $(tokens2) : "/" ] ;
+# }
+# }
+def reverse(path):
+ """Returns path2 such that `os.path.join(path, path2) == '.'`.
+ `path` may not contain '..' or be rooted.
+
+ Args:
+ path (str): the path to reverse
+
+ Returns:
+ the string of the reversed path
+
+ Example:
+
+ >>> p1 = 'path/to/somewhere'
+ >>> p2 = reverse('path/to/somewhere')
+ >>> p2
+ '../../..'
+ >>> os.path.normpath(os.path.join(p1, p2))
+ '.'
+ """
+ if is_rooted(path) or '..' in path:
+ from b2.manager import get_manager
+ get_manager().errors()(
+ 'reverse(path): path is either rooted or contains ".." in the path')
+ if path == '.':
+ return path
+ path = os.path.normpath(path)
+ # os.sep.join() is being used over os.path.join() due
+ # to an extra '..' that is created by os.path.join()
+ return os.sep.join('..' for t in path.split(os.sep))
+# #
+# # Auxillary rule: does all the semantic of 'join', except for error cheching.
+# # The error checking is separated because this rule is recursive, and I don't
+# # like the idea of checking the same input over and over.
+# #
+# local rule join-imp ( elements + )
+# {
+# return [ NORMALIZE_PATH $(elements:J="/") ] ;
+# }
+#
+# #
+# # Contanenates the passed path elements. Generates an error if
+# # any element other than the first one is rooted.
+# #
+# rule join ( elements + )
+# {
+# if ! $(elements[2])
+# {
+# return $(elements[1]) ;
+# }
+# else
+# {
+# for local e in $(elements[2-])
+# {
+# if [ is-rooted $(e) ]
+# {
+# error only first element may be rooted ;
+# }
+# }
+# return [ join-imp $(elements) ] ;
+# }
+# }
+
+
+def glob (dirs, patterns):
+ """ Returns the list of files matching the given pattern in the
+ specified directory. Both directories and patterns are
+ supplied as portable paths. Each pattern should be non-absolute
+ path, and can't contain "." or ".." elements. Each slash separated
+ element of pattern can contain the following special characters:
+ - '?', which match any character
+ - '*', which matches arbitrary number of characters.
+ A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
+ if and only if e1 matches p1, e2 matches p2 and so on.
+
+ For example:
+ [ glob . : *.cpp ]
+ [ glob . : */build/Jamfile ]
+ """
+# {
+# local result ;
+# if $(patterns:D)
+# {
+# # When a pattern has a directory element, we first glob for
+# # directory, and then glob for file name is the found directories.
+# for local p in $(patterns)
+# {
+# # First glob for directory part.
+# local globbed-dirs = [ glob $(dirs) : $(p:D) ] ;
+# result += [ glob $(globbed-dirs) : $(p:D="") ] ;
+# }
+# }
+# else
+# {
+# # When a pattern has not directory, we glob directly.
+# # Take care of special ".." value. The "GLOB" rule simply ignores
+# # the ".." element (and ".") element in directory listings. This is
+# # needed so that
+# #
+# # [ glob libs/*/Jamfile ]
+# #
+# # don't return
+# #
+# # libs/../Jamfile (which is the same as ./Jamfile)
+# #
+# # On the other hand, when ".." is explicitly present in the pattern
+# # we need to return it.
+# #
+# for local dir in $(dirs)
+# {
+# for local p in $(patterns)
+# {
+# if $(p) != ".."
+# {
+# result += [ sequence.transform make
+# : [ GLOB [ native $(dir) ] : $(p) ] ] ;
+# }
+# else
+# {
+# result += [ path.join $(dir) .. ] ;
+# }
+# }
+# }
+# }
+# return $(result) ;
+# }
+#
+
+# TODO: (PF) I replaced the code above by this. I think it should work but needs to be tested.
+    import glob as glob_module  # hoisted out of the inner loop; aliased to avoid clashing with this function's name
+
+    result = []
+    dirs = to_seq(dirs)
+    patterns = to_seq(patterns)
+
+    splitdirs = []
+    for dir in dirs:
+        splitdirs += dir.split(os.pathsep)
+
+    for dir in splitdirs:
+        for pattern in patterns:
+            p = os.path.join(dir, pattern)
+            result.extend(glob_module.glob(p))
+ return result
+
+#
+# Finds the absolute name of 'path' and returns the list of all its parents,
+# starting with the immediate one. Parents are returned as relative names.
+# If 'upper_limit' is specified, directories above it will be pruned.
+#
+def all_parents(path, upper_limit=None, cwd=None):
+
+ if not cwd:
+ cwd = os.getcwd()
+
+ path_abs = os.path.join(cwd, path)
+
+ if upper_limit:
+ upper_limit = os.path.join(cwd, upper_limit)
+
+ result = []
+ while path_abs and path_abs != upper_limit:
+ (head, tail) = os.path.split(path)
+ path = os.path.join(path, "..")
+ result.append(path)
+ path_abs = head
+
+ if upper_limit and path_abs != upper_limit:
+ raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path))
+
+ return result
+
+# Searches for 'patterns' in the parent directories of 'dir', up to and
+# including 'upper_limit' if it is specified, or up to the filesystem root
+# otherwise.
+#
+def glob_in_parents(dir, patterns, upper_limit=None):
+
+ result = []
+ parent_dirs = all_parents(dir, upper_limit)
+
+ for p in parent_dirs:
+ result = glob(p, patterns)
+ if result: break
+
+ return result
+
+#
+# #
+# # Assuming 'child' is a subdirectory of 'parent', return the relative
+# # path from 'parent' to 'child'
+# #
+# rule relative ( child parent )
+# {
+# if $(parent) = "."
+# {
+# return $(child) ;
+# }
+# else
+# {
+# local split1 = [ regex.split $(parent) / ] ;
+# local split2 = [ regex.split $(child) / ] ;
+#
+# while $(split1)
+# {
+# if $(split1[1]) = $(split2[1])
+# {
+# split1 = $(split1[2-]) ;
+# split2 = $(split2[2-]) ;
+# }
+# else
+# {
+# errors.error $(child) is not a subdir of $(parent) ;
+# }
+# }
+# return [ join $(split2) ] ;
+# }
+# }
+#
+# # Returns the minimal path to path2 that is relative path1.
+# #
+# rule relative-to ( path1 path2 )
+# {
+# local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
+# local split1 = [ regex.split $(path1) / ] ;
+# local split2 = [ regex.split $(path2) / ] ;
+#
+# while $(split1) && $(root_1)
+# {
+# if $(split1[1]) = $(split2[1])
+# {
+# root_1 = $(root_1[2-]) ;
+# split1 = $(split1[2-]) ;
+# split2 = $(split2[2-]) ;
+# }
+# else
+# {
+# split1 = ;
+# }
+# }
+# return [ join . $(root_1) $(split2) ] ;
+# }
+
+# Returns the list of paths which are used by the operating system
+# for looking up programs
+def programs_path ():
+ raw = []
+ names = ['PATH', 'Path', 'path']
+
+ for name in names:
+ raw.append(os.environ.get (name, ''))
+
+ result = []
+ for elem in raw:
+ if elem:
+ for p in elem.split(os.path.pathsep):
+ # it's possible that the user's Path has
+ # double path separators, thus it is possible
+ # for p to be an empty string.
+ if p:
+ result.append(make(p))
+
+ return result
+
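+# Illustrative only (an assumption, not part of the original module; results
+# depend on the environment and on how make() normalizes paths): with
+# PATH=/usr/local/bin:/usr/bin on a POSIX system, and assuming make() leaves
+# POSIX paths unchanged, programs_path() would be expected to return
+# something like ['/usr/local/bin', '/usr/bin'].
+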
+# rule make-NT ( native )
+# {
+# local tokens = [ regex.split $(native) "[/\\]" ] ;
+# local result ;
+#
+# # Handle paths ending with slashes
+# if $(tokens[-1]) = ""
+# {
+# tokens = $(tokens[1--2]) ; # discard the empty element
+# }
+#
+# result = [ path.join $(tokens) ] ;
+#
+# if [ regex.match "(^.:)" : $(native) ]
+# {
+# result = /$(result) ;
+# }
+#
+# if $(native) = ""
+# {
+# result = "." ;
+# }
+#
+# return $(result) ;
+# }
+#
+# rule native-NT ( path )
+# {
+# local result = [ MATCH "^/?(.*)" : $(path) ] ;
+# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
+# return $(result) ;
+# }
+#
+# rule make-CYGWIN ( path )
+# {
+# return [ make-NT $(path) ] ;
+# }
+#
+# rule native-CYGWIN ( path )
+# {
+# local result = $(path) ;
+# if [ regex.match "(^/.:)" : $(path) ] # win absolute
+# {
+# result = [ MATCH "^/?(.*)" : $(path) ] ; # remove leading '/'
+# }
+# return [ native-UNIX $(result) ] ;
+# }
+#
+# #
+# # split-path-VMS: splits an input native path into
+# # device dir file (each part is optional),
+# # example:
+# #
+# # dev:[dir]file.c => dev: [dir] file.c
+# #
+# rule split-path-VMS ( native )
+# {
+# local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
+# local device = $(matches[1]) ;
+# local dir = $(matches[2]) ;
+# local file = $(matches[3]) ;
+#
+# return $(device) $(dir) $(file) ;
+# }
+#
+# #
+# # Converts a native VMS path into a portable path spec.
+# #
+# # Does not handle current-device absolute paths such
+# # as "[dir]File.c" as it is not clear how to represent
+# # them in the portable path notation.
+# #
+# # Adds a trailing dot (".") to the file part if no extension
+# # is present (helps when converting it back into native path).
+# #
+# rule make-VMS ( native )
+# {
+# if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
+# {
+# errors.error "Can't handle default-device absolute paths: " $(native) ;
+# }
+#
+# local parts = [ split-path-VMS $(native) ] ;
+# local device = $(parts[1]) ;
+# local dir = $(parts[2]) ;
+# local file = $(parts[3]) ;
+# local elems ;
+#
+# if $(device)
+# {
+# #
+# # rooted
+# #
+# elems = /$(device) ;
+# }
+#
+# if $(dir) = "[]"
+# {
+# #
+# # Special case: current directory
+# #
+# elems = $(elems) "." ;
+# }
+# else if $(dir)
+# {
+# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
+# local dir_parts = [ regex.split $(dir) \\. ] ;
+#
+# if $(dir_parts[1]) = ""
+# {
+# #
+# # Relative path
+# #
+# dir_parts = $(dir_parts[2--1]) ;
+# }
+#
+# #
+# # replace "parent-directory" parts (- => ..)
+# #
+# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
+#
+# elems = $(elems) $(dir_parts) ;
+# }
+#
+# if $(file)
+# {
+# if ! [ MATCH (\\.) : $(file) ]
+# {
+# #
+# # Always add "." to end of non-extension file
+# #
+# file = $(file). ;
+# }
+# elems = $(elems) $(file) ;
+# }
+#
+# local portable = [ path.join $(elems) ] ;
+#
+# return $(portable) ;
+# }
+#
+# #
+# # Converts a portable path spec into a native VMS path.
+# #
+# # Relies on having at least one dot (".") included in the file
+# # name to be able to differentiate it from the directory part.
+# #
+# rule native-VMS ( path )
+# {
+# local device = "" ;
+# local dir = $(path) ;
+# local file = "" ;
+# local native ;
+# local split ;
+#
+# #
+# # Has device ?
+# #
+# if [ is-rooted $(dir) ]
+# {
+# split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
+# device = $(split[1]) ;
+# dir = $(split[2]) ;
+# }
+#
+# #
+# # Has file ?
+# #
+# # This is no exact science, just guess work:
+# #
+# # If the last part of the current path spec
+# # includes some chars, followed by a dot,
+# # optionally followed by more chars -
+# # then it is a file (keep your fingers crossed).
+# #
+# split = [ regex.split $(dir) / ] ;
+# local maybe_file = $(split[-1]) ;
+#
+# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
+# {
+# file = $(maybe_file) ;
+# dir = [ sequence.join $(split[1--2]) : / ] ;
+# }
+#
+# #
+# # Has dir spec ?
+# #
+# if $(dir) = "."
+# {
+# dir = "[]" ;
+# }
+# else if $(dir)
+# {
+# dir = [ regex.replace $(dir) \\.\\. - ] ;
+# dir = [ regex.replace $(dir) / . ] ;
+#
+# if $(device) = ""
+# {
+# #
+# # Relative directory
+# #
+# dir = "."$(dir) ;
+# }
+# dir = "["$(dir)"]" ;
+# }
+#
+# native = [ sequence.join $(device) $(dir) $(file) ] ;
+#
+# return $(native) ;
+# }
+#
+#
+# rule __test__ ( ) {
+#
+# import assert ;
+# import errors : try catch ;
+#
+# assert.true is-rooted "/" ;
+# assert.true is-rooted "/foo" ;
+# assert.true is-rooted "/foo/bar" ;
+# assert.result : is-rooted "." ;
+# assert.result : is-rooted "foo" ;
+# assert.result : is-rooted "foo/bar" ;
+#
+# assert.true has-parent "foo" ;
+# assert.true has-parent "foo/bar" ;
+# assert.true has-parent "." ;
+# assert.result : has-parent "/" ;
+#
+# assert.result "." : basename "." ;
+# assert.result ".." : basename ".." ;
+# assert.result "foo" : basename "foo" ;
+# assert.result "foo" : basename "bar/foo" ;
+# assert.result "foo" : basename "gaz/bar/foo" ;
+# assert.result "foo" : basename "/gaz/bar/foo" ;
+#
+# assert.result "." : parent "foo" ;
+# assert.result "/" : parent "/foo" ;
+# assert.result "foo/bar" : parent "foo/bar/giz" ;
+# assert.result ".." : parent "." ;
+# assert.result ".." : parent "../foo" ;
+# assert.result "../../foo" : parent "../../foo/bar" ;
+#
+#
+# assert.result "." : reverse "." ;
+# assert.result ".." : reverse "foo" ;
+# assert.result "../../.." : reverse "foo/bar/giz" ;
+#
+# assert.result "foo" : join "foo" ;
+# assert.result "/foo" : join "/" "foo" ;
+# assert.result "foo/bar" : join "foo" "bar" ;
+# assert.result "foo/bar" : join "foo/giz" "../bar" ;
+# assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
+# assert.result ".." : join "." ".." ;
+# assert.result ".." : join "foo" "../.." ;
+# assert.result "../.." : join "../foo" "../.." ;
+# assert.result "/foo" : join "/bar" "../foo" ;
+# assert.result "foo/giz" : join "foo/giz" "." ;
+# assert.result "." : join lib2 ".." ;
+# assert.result "/" : join "/a" ".." ;
+#
+# assert.result /a/b : join /a/b/c .. ;
+#
+# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
+# assert.result "giz" : join "foo" ".." "giz" ;
+# assert.result "foo/giz" : join "foo" "." "giz" ;
+#
+# try ;
+# {
+# join "a" "/b" ;
+# }
+# catch only first element may be rooted ;
+#
+# local CWD = "/home/ghost/build" ;
+# assert.result : all-parents . : . : $(CWD) ;
+# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
+# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
+# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
+#
+# local CWD = "/home/ghost" ;
+# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
+# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
+#
+# assert.result "c/d" : relative "a/b/c/d" "a/b" ;
+# assert.result "foo" : relative "foo" "." ;
+#
+# local save-os = [ modules.peek path : os ] ;
+# modules.poke path : os : NT ;
+#
+# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
+# assert.result "foo" : make "foo/." ;
+# assert.result "foo" : make "foo/bar/.." ;
+# assert.result "/D:/My Documents" : make "D:\\My Documents" ;
+# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
+#
+# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
+# assert.result "foo" : native "foo" ;
+# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
+#
+# modules.poke path : os : UNIX ;
+#
+# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
+# assert.result "/sub1" : make "/sub1/." ;
+# assert.result "/sub1" : make "/sub1/sub2/.." ;
+# assert.result "sub1" : make "sub1/." ;
+# assert.result "sub1" : make "sub1/sub2/.." ;
+# assert.result "/foo/bar" : native "/foo/bar" ;
+#
+# modules.poke path : os : VMS ;
+#
+# #
+# # Don't really need to poke os before these
+# #
+# assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
+# assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
+# assert.result "disk:" "" "" : split-path-VMS "disk:" ;
+# assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
+# assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
+# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
+# assert.result "" "" "file" : split-path-VMS "file" ;
+# assert.result "" "" "" : split-path-VMS "" ;
+#
+# #
+# # Special case: current directory
+# #
+# assert.result "" "[]" "" : split-path-VMS "[]" ;
+# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
+# assert.result "" "[]" "file" : split-path-VMS "[]file" ;
+# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
+#
+# #
+# # Make portable paths
+# #
+# assert.result "/disk:" : make "disk:" ;
+# assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
+# assert.result "foo" : make "[.foo]" ;
+# assert.result "foo" : make "[.foo.bar.-]" ;
+# assert.result ".." : make "[.-]" ;
+# assert.result ".." : make "[-]" ;
+# assert.result "." : make "[]" ;
+# assert.result "giz.h" : make "giz.h" ;
+# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
+# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
+# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
+#
+# #
+# # Special case (adds '.' to end of file w/o extension to
+# # disambiguate from directory in portable path spec).
+# #
+# assert.result "Jamfile." : make "Jamfile" ;
+# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
+# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
+#
+# #
+# # Make native paths
+# #
+# assert.result "disk:" : native "/disk:" ;
+# assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
+# assert.result "[.foo]" : native "foo" ;
+# assert.result "[.-]" : native ".." ;
+# assert.result "[.foo.-]" : native "foo/.." ;
+# assert.result "[]" : native "." ;
+# assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
+# assert.result "giz.h" : native "giz.h" ;
+# assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
+# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
+#
+# modules.poke path : os : $(save-os) ;
+#
+# }
+
+#
+
+
+#def glob(dir, patterns):
+# result = []
+# for pattern in patterns:
+# result.extend(builtin_glob(os.path.join(dir, pattern)))
+# return result
+
+def glob(dirs, patterns, exclude_patterns=None):
+ """Returns the list of files matching the given pattern in the
+ specified directory. Both directories and patterns are
+ supplied as portable paths. Each pattern should be non-absolute
+ path, and can't contain '.' or '..' elements. Each slash separated
+ element of pattern can contain the following special characters:
+ - '?', which match any character
+ - '*', which matches arbitrary number of characters.
+ A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
+ if and only if e1 matches p1, e2 matches p2 and so on.
+ For example:
+ [ glob . : *.cpp ]
+ [ glob . : */build/Jamfile ]
+ """
+
+ assert(isinstance(patterns, list))
+ assert(isinstance(dirs, list))
+
+ if not exclude_patterns:
+ exclude_patterns = []
+ else:
+ assert(isinstance(exclude_patterns, list))
+
+ real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
+ real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns
+ for d in dirs]
+
+ inc = [os.path.normpath(name) for p in real_patterns
+ for name in builtin_glob(p)]
+ exc = [os.path.normpath(name) for p in real_exclude_patterns
+ for name in builtin_glob(p)]
+ return [x for x in inc if x not in exc]
+
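+# A minimal usage sketch (the file layout here is hypothetical, not part of
+# the module): with a directory 'src' containing 'a.cpp' and 'b.cpp', on a
+# POSIX system,
+#
+#   glob(['src'], ['*.cpp'], exclude_patterns=['b.cpp'])
+#
+# would be expected to return ['src/a.cpp'] (the order follows the underlying
+# glob module and is not guaranteed).
+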
+def glob_tree(roots, patterns, exclude_patterns=None):
+ """Recursive version of GLOB. Builds the glob of files while
+ also searching in the subdirectories of the given roots. An
+ optional set of exclusion patterns will filter out the
+ matching entries from the result. The exclusions also apply
+ to the subdirectory scanning, such that directories that
+ match the exclusion patterns will not be searched."""
+
+ if not exclude_patterns:
+ exclude_patterns = []
+
+ result = glob(roots, patterns, exclude_patterns)
+    # Pass the exclusions along so that excluded directories are not searched,
+    # as documented above.
+    subdirs = [s for s in glob(roots, ["*"], exclude_patterns)
+               if s != "." and s != ".." and os.path.isdir(s)]
+ if subdirs:
+ result.extend(glob_tree(subdirs, patterns, exclude_patterns))
+
+ return result
+
+def glob_in_parents(dir, patterns, upper_limit=None):
+ """Recursive version of GLOB which glob sall parent directories
+ of dir until the first match is found. Returns an empty result if no match
+ is found"""
+
+ assert(isinstance(dir, str))
+ assert(isinstance(patterns, list))
+
+ result = []
+
+ absolute_dir = os.path.join(os.getcwd(), dir)
+ absolute_dir = os.path.normpath(absolute_dir)
+ while absolute_dir:
+ new_dir = os.path.split(absolute_dir)[0]
+ if new_dir == absolute_dir:
+ break
+ result = glob([new_dir], patterns)
+ if result:
+ break
+ absolute_dir = new_dir
+
+ return result
+
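+# Sketch of the intended use (hypothetical layout, illustrative only): calling
+# glob_in_parents('src/util', ['project-root.jam']) starts at the parent of
+# 'src/util' and walks towards the filesystem root, returning the matches from
+# the first parent directory that contains the file, or [] if none does.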
+
+# The relpath functionality below was written by Cimarron Taylor.
+def split(p, rest=[]):
+ (h,t) = os.path.split(p)
+ if len(h) < 1: return [t]+rest
+ if len(t) < 1: return [h]+rest
+ return split(h,[t]+rest)
+
+def commonpath(l1, l2, common=[]):
+ if len(l1) < 1: return (common, l1, l2)
+ if len(l2) < 1: return (common, l1, l2)
+ if l1[0] != l2[0]: return (common, l1, l2)
+ return commonpath(l1[1:], l2[1:], common+[l1[0]])
+
+def relpath(p1, p2):
+ (common,l1,l2) = commonpath(split(p1), split(p2))
+ p = []
+ if len(l1) > 0:
+ p = [ '../' * len(l1) ]
+ p = p + l2
+ if p:
+ return os.path.join( *p )
+ else:
+ return "."
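+
+# Illustrative behaviour of the helpers above on a POSIX system (a doctest-style
+# sketch, not part of the original module):
+#
+#   >>> relpath('a/b/c', 'a/d')
+#   '../../d'
+#   >>> relpath('a/b', 'a/b')
+#   '.'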
diff --git a/tools/build/src/util/print.jam b/tools/build/src/util/print.jam
new file mode 100644
index 0000000000..c867e4e1b5
--- /dev/null
+++ b/tools/build/src/util/print.jam
@@ -0,0 +1,488 @@
+# Copyright 2003 Douglas Gregor
+# Copyright 2002, 2003, 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Utilities for generating format-independent output. Using these helps in
+# generating documentation for, at minimum, plain/console and HTML output.
+
+import modules ;
+import numbers ;
+import string ;
+import regex ;
+import "class" ;
+import scanner ;
+import path ;
+
+# The current output target. Defaults to console.
+output-target = console ;
+
+# The current output type. Defaults to plain. Other possible values are "html".
+output-type = plain ;
+
+# Whitespace.
+.whitespace = [ string.whitespace ] ;
+
+
+# Set the target and type of output to generate. This sets both the destination
+# output and the type of docs to generate to that output. The target can be
+# either a file or "console" for echoing to the console. If the type of output
+# is not specified it defaults to plain text.
+#
+rule output (
+ target # The target file or device; file or "console".
+ type ? # The type of output; "plain" or "html".
+)
+{
+ type ?= plain ;
+ if $(output-target) != $(target)
+ {
+ output-target = $(target) ;
+ output-type = $(type) ;
+ if $(output-type) = html
+ {
+ text
+ "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">"
+ "<html>"
+ "<head>"
+ "</head>"
+ "<body link=\"#0000ff\" vlink=\"#800080\">"
+ : true
+ : prefix ;
+ text
+ "</body>"
+ "</html>"
+ :
+ : suffix ;
+ }
+ }
+}
+
+
+# Generate a section with a description. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule section (
+ name # The name of the section.
+ description * # A number of description lines.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words $(name): ] ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ name = [ escape-html $(name) ] ;
+ text <h3>$(name)</h3> <p> ;
+ }
+ local pre = ;
+ while $(description)
+ {
+ local paragraph = ;
+ while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; }
+ if $(pre)
+ {
+ while $(description) && (
+ $(pre) = " $(description[1])" ||
+ ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] )
+ )
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; }
+ pre = ;
+ if $(output-type) = plain
+ {
+ lines $(paragraph) "" : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ while $(description) && ! [ string.is-whitespace $(description[1]) ]
+ { paragraph += $(description[1]) ; description = $(description[2-]) ; }
+ if $(paragraph[1]) = :: && ! $(paragraph[2])
+ {
+ pre = " " ;
+ }
+ if $(paragraph[1]) = ::
+ {
+ if $(output-type) = plain
+ {
+ lines $(paragraph[2-]) "" : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text <blockquote> ;
+ lines $(paragraph[2-]) ;
+ text </blockquote> ;
+ }
+ }
+ else
+ {
+ local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ;
+ local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ;
+ p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ;
+ if $(p[3]) = ::
+ {
+ pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ;
+ if ! $(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) $(p[1]): ; }
+ else { paragraph = $(paragraph[1--2]) $(p[1]) ; }
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ else
+ {
+ if $(output-type) = plain
+ {
+ lines [ split-at-words " " $(paragraph) ] : " " " " ;
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </p> <p> [ escape-html $(paragraph) ] ;
+ }
+ }
+ }
+ }
+ }
+ if $(output-type) = html
+ {
+ text </p> ;
+ }
+}
+
+
+# Generate the start of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-start ( )
+{
+ if $(output-type) = plain
+ {
+ }
+ else if $(output-type) = html
+ {
+ text <ul> ;
+ }
+}
+
+
+# Generate an item in a list. The type of output can be controlled by the value
+# of the 'output-type' variable.
+#
+rule list-item (
+ item + # The item to list.
+)
+{
+ if $(output-type) = plain
+ {
+ lines [ split-at-words "*" $(item) ] : " " " " ;
+ }
+ else if $(output-type) = html
+ {
+ text <li> [ escape-html $(item) ] </li> ;
+ }
+}
+
+
+# Generate the end of a list of items. The type of output can be controlled by
+# the value of the 'output-type' variable.
+#
+rule list-end ( )
+{
+ if $(output-type) = plain
+ {
+ lines ;
+ }
+ else if $(output-type) = html
+ {
+ text </ul> ;
+ }
+}
+
+
+# Split the given text into separate lines, word-wrapping to a margin. The
+# default margin is 78 characters.
+#
+rule split-at-words (
+ text + # The text to split.
+ : margin ? # An optional margin, default is 78.
+)
+{
+ local lines = ;
+ text = [ string.words $(text:J=" ") ] ;
+ text = $(text:J=" ") ;
+ margin ?= 78 ;
+ local char-match-1 = ".?" ;
+ local char-match = "" ;
+ while $(margin) != 0
+ {
+ char-match = $(char-match)$(char-match-1) ;
+ margin = [ numbers.decrement $(margin) ] ;
+ }
+ while $(text)
+ {
+ local s = "" ;
+ local t = "" ;
+ # divide s into the first X characters and the rest
+ s = [ MATCH "^($(char-match))(.*)" : $(text) ] ;
+
+ if $(s[2])
+ {
+ # split the first half at a space
+ t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ;
+ }
+ else
+ {
+ t = $(s) ;
+ }
+
+ if ! $(t[2])
+ {
+ t += "" ;
+ }
+
+ text = $(t[2])$(s[2]) ;
+ lines += $(t[1]) ;
+ }
+ return $(lines) ;
+}
+
+
+# Generate a set of fixed lines. Each single item passed in is output on a
+# separate line. For console this just echos each line, but for html this will
+# split them with <br>.
+#
+rule lines (
+ text * # The lines of text.
+ : indent ? # Optional indentation prepended to each line after the first.
+ outdent ? # Optional indentation to prepend to the first line.
+)
+{
+ text ?= "" ;
+ indent ?= "" ;
+ outdent ?= "" ;
+ if $(output-type) = plain
+ {
+ text $(outdent)$(text[1]) $(indent)$(text[2-]) ;
+ }
+ else if $(output-type) = html
+ {
+ local indent-chars = [ string.chars $(indent) ] ;
+ indent = "" ;
+ for local c in $(indent-chars)
+ {
+ if $(c) = " " { c = "&nbsp;" ; }
+ else if $(c) = " " { c = "&nbsp;&nbsp;&nbsp;&nbsp;" ; }
+ indent = $(indent)$(c) ;
+ }
+ local html-text = [ escape-html $(text) : "&nbsp;" ] ;
+ text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ;
+ }
+}
+
+
+# Output text directly to the current target. When outputting to a file, one
+# can indicate whether the text should form the "prefix", the "body" (default),
+# or the "suffix" of the file. This is independent of the actual execution
+# order of the text rule. This rule invokes a single action, only once, which
+# builds the file. Therefore actions on the target outside of this rule will
+# happen entirely before and/or after all output done using this rule.
+#
+rule text (
+ strings * # The strings of text to output.
+ : overwrite ? # True to overwrite the output (if it is a file).
+ : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for
+ # a file).
+)
+{
+ prefix-body-suffix ?= body ;
+ if $(output-target) = console
+ {
+ if ! $(strings)
+ {
+ ECHO ;
+ }
+ else
+ {
+ for local s in $(strings)
+ {
+ ECHO $(s) ;
+ }
+ }
+ }
+ if ! $($(output-target).did-action)
+ {
+ $(output-target).did-action = yes ;
+ $(output-target).text-prefix = ;
+ $(output-target).text-body = ;
+ $(output-target).text-suffix = ;
+
+ nl on $(output-target) = "
+" ;
+ text-redirect on $(output-target) = ">>" ;
+ if $(overwrite)
+ {
+ text-redirect on $(output-target) = ">" ;
+ }
+ text-content on $(output-target) = ;
+
+ text-action $(output-target) ;
+
+ if $(overwrite) && $(output-target) != console
+ {
+ check-for-update $(output-target) ;
+ }
+ }
+ $(output-target).text-$(prefix-body-suffix) += $(strings) ;
+ text-content on $(output-target) =
+ $($(output-target).text-prefix)
+ $($(output-target).text-body)
+ $($(output-target).text-suffix) ;
+}
+
+
+# Outputs the text to the current targets, after word-wrapping it.
+#
+rule wrapped-text ( text + )
+{
+ local lines = [ split-at-words $(text) ] ;
+ text $(lines) ;
+}
+
+
+# Escapes text into HTML/XML printable equivalents. It does not know about
+# tags, so tags fed into it will also be escaped. Currently escapes "&", "<",
+# ">", and, optionally, spaces.
+#
+rule escape-html (
+ text + # The text to escape.
+ : space ? # What to replace spaces with, defaults to " ".
+)
+{
+ local html-text = ;
+ while $(text)
+ {
+ local html = $(text[1]) ;
+ text = $(text[2-]) ;
+ html = [ regex.replace $(html) "&" "&amp;" ] ;
+ html = [ regex.replace $(html) "<" "&lt;" ] ;
+ html = [ regex.replace $(html) ">" "&gt;" ] ;
+ if $(space)
+ {
+ html = [ regex.replace $(html) " " "$(space)" ] ;
+ }
+ html-text += $(html) ;
+ }
+ return $(html-text) ;
+}
+
+
+# Outputs the text strings collected by the text rule to the output file.
+#
+actions quietly text-action
+{
+ @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)"
+}
+
+
+rule get-scanner ( )
+{
+ if ! $(.scanner)
+ {
+ .scanner = [ class.new print-scanner ] ;
+ }
+ return $(.scanner) ;
+}
+
+
+# The following code to update print targets when their contents change is a
+# horrible hack. It basically creates a target which binds to this file
+# (print.jam) and installs a scanner on it which reads the target and compares
+# its contents to the new contents that we are writing.
+#
+rule check-for-update ( target )
+{
+ local scanner = [ get-scanner ] ;
+ local file = [ path.native [ modules.binding $(__name__) ] ] ;
+ local g = [ MATCH <(.*)> : $(target:G) ] ;
+ local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ;
+ DEPENDS $(target) : $(dependency-target) ;
+ SEARCH on $(dependency-target) = $(file:D) ;
+ ISFILE $(dependency-target) ;
+ NOUPDATE $(dependency-target) ;
+ base on $(dependency-target) = $(target) ;
+ scanner.install $(scanner) : $(dependency-target) ;
+ return $(dependency-target) ;
+}
+
+
+class print-scanner : scanner
+{
+ import path ;
+ import os ;
+
+ rule pattern ( )
+ {
+ return "(One match...)" ;
+ }
+
+ rule process ( target : matches * : binding )
+ {
+ local base = [ on $(target) return $(base) ] ;
+ local nl = [ on $(base) return $(nl) ] ;
+ local text-content = [ on $(base) return $(text-content) ] ;
+ local dir = [ on $(base) return $(LOCATE) ] ;
+ if $(dir)
+ {
+ dir = [ path.make $(dir) ] ;
+ }
+ local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ;
+ local actual-content ;
+ if [ os.name ] = NT
+ {
+ actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ;
+ }
+ else
+ {
+ actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ;
+ }
+ if $(text-content:J=$(nl)) != $(actual-content)
+ {
+ ALWAYS $(base) ;
+ }
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result one two three : split-at-words one two three : 5 ;
+ assert.result "one two" three : split-at-words one two three : 8 ;
+ assert.result "one two" three : split-at-words one two three : 9 ;
+ assert.result "one two three" : split-at-words one two three ;
+
+ # VP, 2004-12-03 The following test fails for some reason, so commenting it
+ # out.
+ #assert.result "one&nbsp;two&nbsp;three" "&amp;&lt;&gt;" :
+ # escape-html "one two three" "&<>" ;
+}
diff --git a/tools/build/src/util/regex.jam b/tools/build/src/util/regex.jam
new file mode 100644
index 0000000000..be8b3cfd12
--- /dev/null
+++ b/tools/build/src/util/regex.jam
@@ -0,0 +1,203 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2003 Douglas Gregor
+# Copyright 2003 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+#
+# Returns a list of the following substrings:
+# 1) from the beginning till the first occurrence of 'separator' or till the end,
+# 2) between each occurrence of 'separator' and the next occurrence,
+# 3) from the last occurrence of 'separator' till the end.
+# If no separator is present, the result will contain only one element.
+#
+
+rule split ( string separator )
+{
+ local result ;
+ local s = $(string) ;
+
+    # Break pieces off 's' until it has no separators left.
+ local match = 1 ;
+ while $(match)
+ {
+ match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
+ if $(match)
+ {
+ match += "" ; # in case 3rd item was empty - works around MATCH bug
+ result = $(match[3]) $(result) ;
+ s = $(match[1]) ;
+ }
+ }
+ # Combine the remaining part at the beginning, which does not have
+ # separators, with the pieces broken off. Note that the rule's signature
+ # does not allow the initial s to be empty.
+ return $(s) $(result) ;
+}
+
+if [ HAS_NATIVE_RULE regex : split : 1 ]
+{
+ NATIVE_RULE regex : split ;
+}
+
+# Returns the concatenated results of applying regex.split to every element of
+# the list using the separator pattern.
+#
+rule split-list ( list * : separator )
+{
+ local result ;
+ for s in $(list)
+ {
+ result += [ split $(s) $(separator) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Match string against pattern, and return the elements indicated by indices.
+#
+rule match ( pattern : string : indices * )
+{
+ indices ?= 1 2 3 4 5 6 7 8 9 ;
+ local x = [ MATCH $(pattern) : $(string) ] ;
+ return $(x[$(indices)]) ;
+}
+
+
+# Matches all elements of 'list' against the 'pattern' and returns a list of
+# the elements indicated by 'indices' for all successful matches. If 'indices'
+# is omitted, returns a list of the first parenthesised groups of all
+# successful matches.
+#
+rule transform ( list * : pattern : indices * )
+{
+ indices ?= 1 ;
+ local result ;
+ for local e in $(list)
+ {
+ local m = [ MATCH $(pattern) : $(e) ] ;
+ if $(m)
+ {
+ result += $(m[$(indices)]) ;
+ }
+ }
+ return $(result) ;
+}
+
+NATIVE_RULE regex : transform ;
+
+
+# Escapes all of the characters in symbols using the escape symbol escape-symbol
+# for the given string, and returns the escaped string.
+#
+rule escape ( string : symbols : escape-symbol )
+{
+ local result = "" ;
+ local m = 1 ;
+ while $(m)
+ {
+ m = [ MATCH ^([^$(symbols)]*)([$(symbols)])(.*) : $(string) ] ;
+ if $(m)
+ {
+ m += "" ; # Supposedly a bug fix; borrowed from regex.split
+ result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ;
+ string = $(m[3]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(result)$(string)" ;
+ return $(result) ;
+}
+
+
+# Replaces occurrences of a match string in a given string and returns the new
+# string. The match string can be a regex expression.
+#
+rule replace (
+ string # The string to modify.
+ match # The characters to replace.
+ replacement # The string to replace with.
+ )
+{
+ local result = "" ;
+ local parts = 1 ;
+ while $(parts)
+ {
+ parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
+ if $(parts)
+ {
+ parts += "" ;
+ result = "$(replacement)$(parts[3])$(result)" ;
+ string = $(parts[1]) ;
+ }
+ }
+ string ?= "" ;
+ result = "$(string)$(result)" ;
+ return $(result) ;
+}
+
+if [ HAS_NATIVE_RULE regex : replace : 1 ]
+{
+ NATIVE_RULE regex : replace ;
+}
+
+
+# Replaces occurrences of a match string in a given list of strings and returns
+# a list of new strings. The match string can be a regex expression.
+#
+# list - the list of strings to modify.
+# match - the search expression.
+# replacement - the string to replace with.
+#
+rule replace-list ( list * : match : replacement )
+{
+ local result ;
+ for local e in $(list)
+ {
+ result += [ replace $(e) $(match) $(replacement) ] ;
+ }
+ return $(result) ;
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+
+ assert.result a b c : split "a/b/c" / ;
+ assert.result "" a b c : split "/a/b/c" / ;
+ assert.result "" "" a b c : split "//a/b/c" / ;
+ assert.result "" a "" b c : split "/a//b/c" / ;
+ assert.result "" a "" b c "" : split "/a//b/c/" / ;
+ assert.result "" a "" b c "" "" : split "/a//b/c//" / ;
+
+ assert.result a c b d
+ : match (.)(.)(.)(.) : abcd : 1 3 2 4 ;
+
+ assert.result a b c d
+ : match (.)(.)(.)(.) : abcd ;
+
+ assert.result ababab cddc
+ : match ((ab)*)([cd]+) : abababcddc : 1 3 ;
+
+ assert.result a.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <(.*)> ;
+
+ assert.result a.h b.h c.h
+ : transform <a.h> \"b.h\" <c.h> : <([^>]*)>|\"([^\"]*)\" : 1 2 ;
+
+ assert.result "^<?xml version=\"1.0\"^>"
+ : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ;
+
+ assert.result "<?xml version=\\\"1.0\\\">"
+ : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ;
+
+ assert.result "string&nbsp;string&nbsp;" : replace "string string " " " "&nbsp;" ;
+ assert.result "&nbsp;string&nbsp;string" : replace " string string" " " "&nbsp;" ;
+ assert.result "string&nbsp;&nbsp;string" : replace "string string" " " "&nbsp;" ;
+ assert.result "-" : replace "&" "&" "-" ;
+
+ assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ;
+}
diff --git a/tools/build/src/util/regex.py b/tools/build/src/util/regex.py
new file mode 100644
index 0000000000..6348c6fb19
--- /dev/null
+++ b/tools/build/src/util/regex.py
@@ -0,0 +1,54 @@
+# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
+# distribute this software is granted provided this copyright notice appears in
+# all copies. This software is provided "as is" without express or implied
+# warranty, and with no claim as to its suitability for any purpose.
+
+import re
+
+from b2.util import bjam_signature
+
+
+def transform (list, pattern, indices = [1]):
+ """ Matches all elements of 'list' agains the 'pattern'
+ and returns a list of the elements indicated by indices of
+ all successfull matches. If 'indices' is omitted returns
+ a list of first paranthethised groups of all successfull
+ matches.
+ """
+ result = []
+
+ for e in list:
+ m = re.match (pattern, e)
+
+ if m:
+ for i in indices:
+ result.append (m.group (i))
+
+ return result
+
+
+@bjam_signature([['s', 'pattern', 'replacement']])
+def replace(s, pattern, replacement):
+ """Replaces occurrences of a match string in a given
+ string and returns the new string. The match string
+ can be a regex expression.
+
+ Args:
+ s (str): the string to modify
+ pattern (str): the search expression
+ replacement (str): the string to replace each match with
+ """
+ return re.sub(pattern, replacement, s)
+
+
+@bjam_signature((['items', '*'], ['match'], ['replacement']))
+def replace_list(items, match, replacement):
+ """Replaces occurrences of a match string in a given list of strings and returns
+ a list of new strings. The match string can be a regex expression.
+
+ Args:
+ items (list): the list of strings to modify.
+ match (str): the search expression.
+ replacement (str): the string to replace with.
+ """
+ return [replace(item, match, replacement) for item in items]
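+
+
+# Illustrative calls mirroring the Jam-side __test__ cases (a sketch, assuming
+# direct invocation from Python):
+#
+#   transform(['<a.h>', '"b.h"', '<c.h>'], '<(.*)>')   # -> ['a.h', 'c.h']
+#   replace('string string ', ' ', '&nbsp;')           # -> 'string&nbsp;string&nbsp;'
+#   replace_list(['&', 'a&b'], '&', '-')               # -> ['-', 'a-b']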
diff --git a/tools/build/src/util/sequence.jam b/tools/build/src/util/sequence.jam
new file mode 100644
index 0000000000..97ddfe1539
--- /dev/null
+++ b/tools/build/src/util/sequence.jam
@@ -0,0 +1,342 @@
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import assert ;
+import numbers ;
+import modules ;
+
+
+# Note that algorithms in this module execute largely in the caller's module
+# namespace, so that local rules can be used as function objects. Also note that
+# most predicates can be multi-element lists. In that case, all but the first
+# element are prepended to the first argument which is passed to the rule named
+# by the first element.
+
+
+# Return the elements e of $(sequence) for which [ $(predicate) e ] has a
+# non-null value.
+#
+rule filter ( predicate + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ if [ modules.call-in $(caller) : $(predicate) $(e) ]
+ {
+ result += $(e) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Return a new sequence consisting of [ $(function) $(e) ] for each element e of
+# $(sequence).
+#
+rule transform ( function + : sequence * )
+{
+ local caller = [ CALLER_MODULE ] ;
+ local result ;
+
+ for local e in $(sequence)
+ {
+ result += [ modules.call-in $(caller) : $(function) $(e) ] ;
+ }
+ return $(result) ;
+}
+
+if [ HAS_NATIVE_RULE sequence : transform : 1 ]
+{
+ NATIVE_RULE sequence : transform ;
+}
+
+# Returns the elements of 's' in reverse order.
+rule reverse ( s * )
+{
+ local r ;
+ for local x in $(s)
+ {
+ r = $(x) $(r) ;
+ }
+ return $(r) ;
+}
+
+
+rule less ( a b )
+{
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+}
+
+
+# Insertion-sort s using the BinaryPredicate ordered.
+#
+rule insertion-sort ( s * : ordered * )
+{
+ if ! $(ordered)
+ {
+ return [ SORT $(s) ] ;
+ }
+ else
+ {
+ local caller = [ CALLER_MODULE ] ;
+ ordered ?= sequence.less ;
+ local result = $(s[1]) ;
+ if $(ordered) = sequence.less
+ {
+ local head tail ;
+ for local x in $(s[2-])
+ {
+ head = ;
+ tail = $(result) ;
+ while $(tail) && ( $(tail[1]) < $(x) )
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+ else
+ {
+ for local x in $(s[2-])
+ {
+ local head tail ;
+ tail = $(result) ;
+ while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ]
+ {
+ head += $(tail[1]) ;
+ tail = $(tail[2-]) ;
+ }
+ result = $(head) $(x) $(tail) ;
+ }
+ }
+
+ return $(result) ;
+ }
+}
+
+
+# Merge two ordered sequences using the BinaryPredicate ordered.
+#
+rule merge ( s1 * : s2 * : ordered * )
+{
+ ordered ?= sequence.less ;
+ local result__ ;
+ local caller = [ CALLER_MODULE ] ;
+
+ while $(s1) && $(s2)
+ {
+ if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ]
+ {
+ result__ += $(s1[1]) ;
+ s1 = $(s1[2-]) ;
+ }
+ else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ]
+ {
+ result__ += $(s2[1]) ;
+ s2 = $(s2[2-]) ;
+ }
+ else
+ {
+ s2 = $(s2[2-]) ;
+ }
+
+ }
+ result__ += $(s1) ;
+ result__ += $(s2) ;
+
+ return $(result__) ;
+}
+
+
+# Join the elements of s into one long string. If joint is supplied, it is used
+# as a separator.
+#
+rule join ( s * : joint ? )
+{
+ joint ?= "" ;
+ return $(s:J=$(joint)) ;
+}
+
+
+# Find the length of any sequence.
+#
+rule length ( s * )
+{
+ local result = 0 ;
+ for local i in $(s)
+ {
+ result = [ CALC $(result) + 1 ] ;
+ }
+ return $(result) ;
+}
+
+# Removes duplicates from 'list'. If 'stable' is
+# passed, then the order of the elements will
+# be unchanged.
+rule unique ( list * : stable ? )
+{
+ local result ;
+ local prev ;
+ if $(stable)
+ {
+ for local f in $(list)
+ {
+ if ! $(f) in $(result)
+ {
+ result += $(f) ;
+ }
+ }
+ }
+ else
+ {
+ for local i in [ SORT $(list) ]
+ {
+ if $(i) != $(prev)
+ {
+ result += $(i) ;
+ }
+ prev = $(i) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# Returns the maximum number in 'elements'. Uses 'ordered' for comparisons or
+# 'numbers.less' if none is provided.
+#
+rule max-element ( elements + : ordered ? )
+{
+ ordered ?= numbers.less ;
+
+ local max = $(elements[1]) ;
+ for local e in $(elements[2-])
+ {
+ if [ $(ordered) $(max) $(e) ]
+ {
+ max = $(e) ;
+ }
+ }
+ return $(max) ;
+}
+
+
+# Returns all of 'elements' for which the corresponding element in the parallel
+# list 'ranks' is equal to the maximum value in 'ranks'.
+#
+rule select-highest-ranked ( elements * : ranks * )
+{
+ if $(elements)
+ {
+ local max-rank = [ max-element $(ranks) ] ;
+ local result ;
+ while $(elements)
+ {
+ if $(ranks[1]) = $(max-rank)
+ {
+ result += $(elements[1]) ;
+ }
+ elements = $(elements[2-]) ;
+ ranks = $(ranks[2-]) ;
+ }
+ return $(result) ;
+ }
+}
+NATIVE_RULE sequence : select-highest-ranked ;
+
+
+rule __test__ ( )
+{
+ # Use a unique module so we can test the use of local rules.
+ module sequence.__test__
+ {
+ import assert ;
+ import sequence ;
+
+ local rule is-even ( n )
+ {
+ if $(n) in 0 2 4 6 8
+ {
+ return true ;
+ }
+ }
+
+ assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ;
+
+ # Test that argument binding works.
+ local rule is-equal-test ( x y )
+ {
+ if $(x) = $(y)
+ {
+ return true ;
+ }
+ }
+
+ assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ;
+
+ local rule append-x ( n )
+ {
+ return $(n)x ;
+ }
+
+ assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ;
+
+ local rule repeat2 ( x )
+ {
+ return $(x) $(x) ;
+ }
+
+ assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ;
+
+ local rule test-greater ( a b )
+ {
+ if $(a) > $(b)
+ {
+ return true ;
+ }
+ }
+ assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ;
+ assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ;
+ assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ;
+ assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ;
+ assert.result 1 2 3 : sequence.merge 1 2 3 : ;
+ assert.result 1 : sequence.merge 1 : 1 ;
+
+ assert.result foo-bar-baz : sequence.join foo bar baz : - ;
+ assert.result substandard : sequence.join sub stan dard ;
+ assert.result 3.0.1 : sequence.join 3.0.1 : - ;
+
+ assert.result 0 : sequence.length ;
+ assert.result 3 : sequence.length a b c ;
+ assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ;
+
+ assert.result 1 : sequence.length a ;
+ assert.result 10 : sequence.length a b c d e f g h i j ;
+ assert.result 11 : sequence.length a b c d e f g h i j k ;
+ assert.result 12 : sequence.length a b c d e f g h i j k l ;
+
+ local p2 = x ;
+ for local i in 1 2 3 4 5 6 7 8
+ {
+ p2 = $(p2) $(p2) ;
+ }
+ assert.result 256 : sequence.length $(p2) ;
+
+ assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ;
+
+ assert.result 5 : sequence.max-element 1 3 5 0 4 ;
+
+ assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ;
+
+ assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ;
+ }
+}
diff --git a/tools/build/v2/util/sequence.py b/tools/build/src/util/sequence.py
index 1d32efd2e3..1d32efd2e3 100644
--- a/tools/build/v2/util/sequence.py
+++ b/tools/build/src/util/sequence.py
diff --git a/tools/build/v2/util/set.jam b/tools/build/src/util/set.jam
index fc179134f3..fc179134f3 100644
--- a/tools/build/v2/util/set.jam
+++ b/tools/build/src/util/set.jam
diff --git a/tools/build/v2/util/set.py b/tools/build/src/util/set.py
index dc7cf32822..dc7cf32822 100644
--- a/tools/build/v2/util/set.py
+++ b/tools/build/src/util/set.py
diff --git a/tools/build/v2/util/string.jam b/tools/build/src/util/string.jam
index a39ed119e2..a39ed119e2 100644
--- a/tools/build/v2/util/string.jam
+++ b/tools/build/src/util/string.jam
diff --git a/tools/build/src/util/utility.jam b/tools/build/src/util/utility.jam
new file mode 100644
index 0000000000..26981c5480
--- /dev/null
+++ b/tools/build/src/util/utility.jam
@@ -0,0 +1,235 @@
+# Copyright 2001, 2002 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import "class" : is-instance ;
+
+
+# For all elements of 'list' which do not already have 'suffix', add 'suffix'.
+#
+rule apply-default-suffix ( suffix : list * )
+{
+ local result ;
+ for local i in $(list)
+ {
+ if $(i:S) = $(suffix)
+ {
+ result += $(i) ;
+ }
+ else
+ {
+ result += $(i)$(suffix) ;
+ }
+ }
+ return $(result) ;
+}
+
+
+# If 'name' contains a dot, returns the part before the last dot. If 'name'
+# contains no dot, returns it unmodified.
+#
+rule basename ( name )
+{
+ if $(name:S)
+ {
+ name = $(name:B) ;
+ }
+ return $(name) ;
+}
+
+
+# Return the file of the caller of the rule that called caller-file.
+#
+rule caller-file ( )
+{
+ local bt = [ BACKTRACE ] ;
+ return $(bt[9]) ;
+}
+
+
+# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal'
+# method. Otherwise, uses ordinary Jam comparison.
+#
+rule equal ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).equal $(b) ] ;
+ }
+ else
+ {
+ if $(a) = $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less'
+# method. Otherwise, uses ordinary Jam comparison.
+#
+rule less ( a b )
+{
+ if [ is-instance $(a) ]
+ {
+ return [ $(a).less $(b) ] ;
+ }
+ else
+ {
+ if $(a) < $(b)
+ {
+ return true ;
+ }
+ }
+}
+
+
+# Returns the textual representation of the argument. If it is a class
+# instance, calls its 'str' method. Otherwise, returns the argument.
+#
+rule str ( value )
+{
+ if [ is-instance $(value) ]
+ {
+ return [ $(value).str ] ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+# Accepts a list of gristed values and returns them ungristed. Reports an error
+# in case any of the passed parameters is not gristed, i.e. surrounded in angle
+# brackets < and >.
+#
+rule ungrist ( names * )
+{
+ local result ;
+ for local name in $(names)
+ {
+ local stripped = [ MATCH ^<(.*)>$ : $(name) ] ;
+ if ! $(stripped)-defined
+ {
+ import errors ;
+ local quoted-names = \"$(names)\" ;
+ errors.error "in" ungrist $(quoted-names:J=" "): \"$(name)\" is not
+ of the form <.*> ;
+ }
+ result += $(stripped) ;
+ }
+ return $(result) ;
+}
+
+
+# If the passed value is quoted, unquotes it. Otherwise returns the value
+# unchanged.
+#
+rule unquote ( value ? )
+{
+ local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ;
+ if $(match-result)
+ {
+ return $(match-result[2]) ;
+ }
+ else
+ {
+ return $(value) ;
+ }
+}
+
+
+rule __test__ ( )
+{
+ import assert ;
+ import "class" : new ;
+ import errors : try catch ;
+
+ assert.result 123 : str 123 ;
+
+ class test-class__
+ {
+ rule __init__ ( ) { }
+ rule str ( ) { return "str-test-class" ; }
+ rule less ( a ) { return "yes, of course!" ; }
+ rule equal ( a ) { return "not sure" ; }
+ }
+
+ assert.result "str-test-class" : str [ new test-class__ ] ;
+ assert.true less 1 2 ;
+ assert.false less 2 1 ;
+ assert.result "yes, of course!" : less [ new test-class__ ] 1 ;
+ assert.true equal 1 1 ;
+ assert.false equal 1 2 ;
+ assert.result "not sure" : equal [ new test-class__ ] 1 ;
+
+ assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib
+ ;
+
+ assert.result foo : basename foo ;
+ assert.result foo : basename foo.so ;
+ assert.result foo.so : basename foo.so.1 ;
+
+ assert.result : unquote ;
+ assert.result "" : unquote "" ;
+ assert.result "" : unquote \"\" ;
+ assert.result \" : unquote \"\"\" ;
+ assert.result \"\" : unquote \"\"\"\" ;
+ assert.result foo : unquote foo ;
+ assert.result \"foo : unquote \"foo ;
+ assert.result foo\" : unquote foo\" ;
+ assert.result foo : unquote \"foo\" ;
+ assert.result \"foo\" : unquote \"\"foo\"\" ;
+
+ assert.result : ungrist ;
+ assert.result "" : ungrist <> ;
+ assert.result foo : ungrist <foo> ;
+ assert.result <foo> : ungrist <<foo>> ;
+ assert.result foo bar : ungrist <foo> <bar> ;
+
+ try ;
+ {
+ ungrist "" ;
+ }
+ catch "in" ungrist \"\": \"\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo ;
+ }
+ catch "in" ungrist \"foo\": \"foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist <foo ;
+ }
+ catch "in" ungrist \"<foo\": \"<foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo> ;
+ }
+ catch "in" ungrist \"foo>\": \"foo>\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo bar ;
+ }
+ catch "in" ungrist "\"foo\" \"bar\"": \"foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist foo <bar> ;
+ }
+ catch "in" ungrist "\"foo\" \"<bar>\"": \"foo\" is not of the form <.*> ;
+
+ try ;
+ {
+ ungrist <foo> bar ;
+ }
+ catch "in" ungrist "\"<foo>\" \"bar\"": \"bar\" is not of the form <.*> ;
+}
diff --git a/tools/build/v2/util/utility.py b/tools/build/src/util/utility.py
index afea765b97..afea765b97 100644
--- a/tools/build/v2/util/utility.py
+++ b/tools/build/src/util/utility.py
diff --git a/tools/build/test/BoostBuild.py b/tools/build/test/BoostBuild.py
new file mode 100644
index 0000000000..540830e346
--- /dev/null
+++ b/tools/build/test/BoostBuild.py
@@ -0,0 +1,1317 @@
+# Copyright 2002-2005 Vladimir Prus.
+# Copyright 2002-2003 Dave Abrahams.
+# Copyright 2006 Rene Rivera.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import TestCmd
+
+import copy
+import fnmatch
+import glob
+import math
+import os
+import os.path
+import re
+import shutil
+import StringIO
+import subprocess
+import sys
+import tempfile
+import time
+import traceback
+import tree
+import types
+
+from xml.sax.saxutils import escape
+
+
+class TestEnvironmentError(Exception):
+ pass
+
+
+annotations = []
+
+
+def print_annotation(name, value, xml):
+ """Writes some named bits of information about the current test run."""
+ if xml:
+ print escape(name) + " {{{"
+ print escape(value)
+ print "}}}"
+ else:
+ print name + " {{{"
+ print value
+ print "}}}"
+
+
+def flush_annotations(xml=0):
+ global annotations
+ for ann in annotations:
+ print_annotation(ann[0], ann[1], xml)
+ annotations = []
+
+
+def clear_annotations():
+ global annotations
+ annotations = []
+
+
+defer_annotations = 0
+
+def set_defer_annotations(n):
+ global defer_annotations
+ defer_annotations = n
+
+
+def annotate_stack_trace(tb=None):
+ if tb:
+ trace = TestCmd.caller(traceback.extract_tb(tb), 0)
+ else:
+ trace = TestCmd.caller(traceback.extract_stack(), 1)
+ annotation("stacktrace", trace)
+
+
+def annotation(name, value):
+ """Records an annotation about the test run."""
+ annotations.append((name, value))
+ if not defer_annotations:
+ flush_annotations()
+
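+# For example, a test script could record extra context for a failure report
+# with something like (illustrative only):
+#
+#   annotation("note", "rebuilt after touching the header")
+#
+# which is printed immediately unless deferral was enabled via
+# set_defer_annotations().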
+
+def get_toolset():
+ toolset = None
+ for arg in sys.argv[1:]:
+ if not arg.startswith("-"):
+ toolset = arg
+ return toolset or "gcc"
+
+
+# Detect the host OS.
+cygwin = hasattr(os, "uname") and os.uname()[0].lower().startswith("cygwin")
+windows = cygwin or os.environ.get("OS", "").lower().startswith("windows")
+
+
+def prepare_prefixes_and_suffixes(toolset):
+ prepare_suffix_map(toolset)
+ prepare_library_prefix(toolset)
+
+
+def prepare_suffix_map(toolset):
+ """
+ Set up suffix translation performed by the Boost Build testing framework
+ to accomodate different toolsets generating targets of the same type using
+ different filename extensions (suffixes).
+
+ """
+ global suffixes
+ suffixes = {}
+ if windows:
+ if toolset == "gcc":
+ suffixes[".lib"] = ".a" # mingw static libs use suffix ".a".
+ suffixes[".obj"] = ".o"
+ if cygwin:
+ suffixes[".implib"] = ".lib.a"
+ else:
+ suffixes[".implib"] = ".lib"
+ else:
+ suffixes[".exe"] = ""
+ suffixes[".dll"] = ".so"
+ suffixes[".lib"] = ".a"
+ suffixes[".obj"] = ".o"
+ suffixes[".implib"] = ".no_implib_files_on_this_platform"
+
+ if hasattr(os, "uname") and os.uname()[0] == "Darwin":
+ suffixes[".dll"] = ".dylib"
+
+
+def prepare_library_prefix(toolset):
+ """
+    Set up whether Boost Build is expected to automatically prepend prefixes
+ to its built library targets.
+
+ """
+ global lib_prefix
+ lib_prefix = "lib"
+
+ global dll_prefix
+ if cygwin:
+ dll_prefix = "cyg"
+ elif windows and toolset != "gcc":
+ dll_prefix = None
+ else:
+ dll_prefix = "lib"
+
+
+def re_remove(sequence, regex):
+ me = re.compile(regex)
+ result = filter(lambda x: me.match(x), sequence)
+ if not result:
+ raise ValueError()
+ for r in result:
+ sequence.remove(r)
+
+
+def glob_remove(sequence, pattern):
+ result = fnmatch.filter(sequence, pattern)
+ if not result:
+ raise ValueError()
+ for r in result:
+ sequence.remove(r)
+
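+# For instance (hypothetical values), given names = ['bin/a.obj', 'bin/b.obj'],
+# glob_remove(names, '*a.obj') removes the first entry in place, and raises
+# ValueError if nothing matched.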
+
+class Tester(TestCmd.TestCmd):
+ """Main tester class for Boost Build.
+
+ Optional arguments:
+
+ `arguments` - Arguments passed to the run executable.
+ `executable` - Name of the executable to invoke.
+      `match`                         - Function to use for comparing actual and
+ expected file contents.
+ `boost_build_path` - Boost build path to be passed to the run
+ executable.
+      `translate_suffixes`            - Whether to update suffixes on the file
+ names passed from the test script so they
+ match those actually created by the current
+ toolset. For example, static library files
+ are specified by using the .lib suffix but
+ when the "gcc" toolset is used it actually
+ creates them using the .a suffix.
+ `pass_toolset` - Whether the test system should pass the
+ specified toolset to the run executable.
+ `use_test_config` - Whether the test system should tell the run
+ executable to read in the test_config.jam
+ configuration file.
+ `ignore_toolset_requirements` - Whether the test system should tell the run
+ executable to ignore toolset requirements.
+ `workdir` - Absolute directory where the test will be
+ run from.
+ `pass_d0` - If set, when tests are not explicitly run
+ in verbose mode, they are run as silent
+ (-d0 & --quiet Boost Jam options).
+
+ Optional arguments inherited from the base class:
+
+ `description` - Test description string displayed in case
+ of a failed test.
+ `subdir` - List of subdirectories to automatically
+ create under the working directory. Each
+ subdirectory needs to be specified
+ separately, parent coming before its child.
+ `verbose` - Flag that may be used to enable more
+ verbose test system output. Note that it
+ does not also enable more verbose build
+ system output like the --verbose command
+ line option does.
+ """
+ def __init__(self, arguments=None, executable="bjam",
+ match=TestCmd.match_exact, boost_build_path=None,
+ translate_suffixes=True, pass_toolset=True, use_test_config=True,
+ ignore_toolset_requirements=True, workdir="", pass_d0=True,
+ **keywords):
+
+ assert arguments.__class__ is not str
+ self.original_workdir = os.getcwd()
+ if workdir and not os.path.isabs(workdir):
+ raise ("Parameter workdir <%s> must point to an absolute "
+ "directory: " % workdir)
+
+ self.last_build_timestamp = 0
+ self.translate_suffixes = translate_suffixes
+ self.use_test_config = use_test_config
+
+ self.toolset = get_toolset()
+ self.pass_toolset = pass_toolset
+ self.ignore_toolset_requirements = ignore_toolset_requirements
+
+ prepare_prefixes_and_suffixes(pass_toolset and self.toolset or "gcc")
+
+ use_default_bjam = "--default-bjam" in sys.argv
+
+ if not use_default_bjam:
+ jam_build_dir = ""
+ if os.name == "nt":
+ jam_build_dir = "bin.ntx86"
+ elif (os.name == "posix") and os.__dict__.has_key("uname"):
+ if os.uname()[0].lower().startswith("cygwin"):
+ jam_build_dir = "bin.cygwinx86"
+ if ("TMP" in os.environ and
+ os.environ["TMP"].find("~") != -1):
+ print("Setting $TMP to /tmp to get around problem "
+ "with short path names")
+ os.environ["TMP"] = "/tmp"
+ elif os.uname()[0] == "Linux":
+ cpu = os.uname()[4]
+ if re.match("i.86", cpu):
+ jam_build_dir = "bin.linuxx86"
+ else:
+ jam_build_dir = "bin.linux" + os.uname()[4]
+ elif os.uname()[0] == "SunOS":
+ jam_build_dir = "bin.solaris"
+ elif os.uname()[0] == "Darwin":
+ if os.uname()[4] == "i386":
+ jam_build_dir = "bin.macosxx86"
+ else:
+ jam_build_dir = "bin.macosxppc"
+ elif os.uname()[0] == "AIX":
+ jam_build_dir = "bin.aix"
+ elif os.uname()[0] == "IRIX64":
+ jam_build_dir = "bin.irix"
+ elif os.uname()[0] == "FreeBSD":
+ jam_build_dir = "bin.freebsd"
+ elif os.uname()[0] == "OSF1":
+ jam_build_dir = "bin.osf"
+ else:
+                    raise TestEnvironmentError(
+                        "Do not know directory where Jam is built for this "
+                        "system: %s/%s" % (os.name, os.uname()[0]))
+ else:
+                raise TestEnvironmentError(
+                    "Do not know directory where Jam is built for this "
+                    "system: %s" % os.name)
+
+ # Find where jam_src is located. Try for the debug version if it is
+ # lying around.
+ dirs = [os.path.join("..", "src", "engine", jam_build_dir + ".debug"),
+ os.path.join("..", "src", "engine", jam_build_dir)]
+ for d in dirs:
+ if os.path.exists(d):
+ jam_build_dir = d
+ break
+ else:
+ print("Cannot find built Boost.Jam")
+ sys.exit(1)
+
+ verbosity = ["-d0", "--quiet"]
+ if not pass_d0:
+ verbosity = []
+ if "--verbose" in sys.argv:
+ keywords["verbose"] = True
+ verbosity = ["-d+2"]
+
+ if boost_build_path is None:
+ boost_build_path = self.original_workdir + "/.."
+
+ program_list = []
+ if use_default_bjam:
+ program_list.append(executable)
+ else:
+ program_list.append(os.path.join(jam_build_dir, executable))
+ program_list.append('-sBOOST_BUILD_PATH="' + boost_build_path + '"')
+ if verbosity:
+ program_list += verbosity
+ if arguments:
+ program_list += arguments
+
+ TestCmd.TestCmd.__init__(self, program=program_list, match=match,
+ workdir=workdir, inpath=use_default_bjam, **keywords)
+
+ os.chdir(self.workdir)
+
+ def cleanup(self):
+ try:
+ TestCmd.TestCmd.cleanup(self)
+ os.chdir(self.original_workdir)
+ except AttributeError:
+ # When this is called during TestCmd.TestCmd.__del__ we can have
+ # both 'TestCmd' and 'os' unavailable in our scope. Do nothing in
+ # this case.
+ pass
+
+ #
+ # Methods that change the working directory's content.
+ #
+ def set_tree(self, tree_location):
+ # It is not possible to remove the current directory.
+ d = os.getcwd()
+ os.chdir(os.path.dirname(self.workdir))
+ shutil.rmtree(self.workdir, ignore_errors=False)
+
+ if not os.path.isabs(tree_location):
+ tree_location = os.path.join(self.original_workdir, tree_location)
+ shutil.copytree(tree_location, self.workdir)
+
+ os.chdir(d)
+ def make_writable(unused, dir, entries):
+ for e in entries:
+ name = os.path.join(dir, e)
+ os.chmod(name, os.stat(name).st_mode | 0222)
+ os.path.walk(".", make_writable, None)
+
+ def write(self, file, content, wait=True):
+ nfile = self.native_file_name(file)
+ self.__makedirs(os.path.dirname(nfile), wait)
+ f = open(nfile, "wb")
+ try:
+ f.write(content)
+ finally:
+ f.close()
+ self.__ensure_newer_than_last_build(nfile)
+
+ def copy(self, src, dst):
+ try:
+ self.write(dst, self.read(src, 1))
+ except:
+ self.fail_test(1)
+
+ def copy_preserving_timestamp(self, src, dst):
+ src_name = self.native_file_name(src)
+ dst_name = self.native_file_name(dst)
+ stats = os.stat(src_name)
+ self.write(dst, self.read(src, 1))
+ os.utime(dst_name, (stats.st_atime, stats.st_mtime))
+
+ def touch(self, names, wait=True):
+ if names.__class__ is str:
+ names = [names]
+ for name in names:
+ path = self.native_file_name(name)
+ if wait:
+ self.__ensure_newer_than_last_build(path)
+ else:
+ os.utime(path, None)
+
+ def rm(self, names):
+ if not type(names) == types.ListType:
+ names = [names]
+
+ if names == ["."]:
+ # If we are deleting the entire workspace, there is no need to wait
+ # for a clock tick.
+ self.last_build_timestamp = 0
+
+ # Avoid attempts to remove the current directory.
+ os.chdir(self.original_workdir)
+ for name in names:
+ n = glob.glob(self.native_file_name(name))
+ if n: n = n[0]
+ if not n:
+ n = self.glob_file(name.replace("$toolset", self.toolset + "*")
+ )
+ if n:
+ if os.path.isdir(n):
+ shutil.rmtree(n, ignore_errors=False)
+ else:
+ os.unlink(n)
+
+ # Create working dir root again in case we removed it.
+ if not os.path.exists(self.workdir):
+ os.mkdir(self.workdir)
+ os.chdir(self.workdir)
+
+ def expand_toolset(self, name):
+ """
+ Expands $toolset placeholder in the given file to the name of the
+ toolset currently being tested.
+
+ """
+ self.write(name, self.read(name).replace("$toolset", self.toolset))
+
+ def dump_stdio(self):
+ annotation("STDOUT", self.stdout())
+ annotation("STDERR", self.stderr())
+
+ def run_build_system(self, extra_args=None, subdir="", stdout=None,
+ stderr="", status=0, match=None, pass_toolset=None,
+ use_test_config=None, ignore_toolset_requirements=None,
+ expected_duration=None, **kw):
+
+ assert extra_args.__class__ is not str
+
+ if os.path.isabs(subdir):
+ print("You must pass a relative directory to subdir <%s>." % subdir
+ )
+ return
+
+ self.previous_tree, dummy = tree.build_tree(self.workdir)
+
+ if match is None:
+ match = self.match
+
+ if pass_toolset is None:
+ pass_toolset = self.pass_toolset
+
+ if use_test_config is None:
+ use_test_config = self.use_test_config
+
+ if ignore_toolset_requirements is None:
+ ignore_toolset_requirements = self.ignore_toolset_requirements
+
+ try:
+ kw["program"] = []
+ kw["program"] += self.program
+ if extra_args:
+ kw["program"] += extra_args
+ if pass_toolset:
+ kw["program"].append("toolset=" + self.toolset)
+ if use_test_config:
+ kw["program"].append('--test-config="%s"' % os.path.join(
+ self.original_workdir, "test-config.jam"))
+ if ignore_toolset_requirements:
+ kw["program"].append("--ignore-toolset-requirements")
+ if "--python" in sys.argv:
+ kw["program"].append("--python")
+ kw["chdir"] = subdir
+ self.last_program_invocation = kw["program"]
+ build_time_start = time.time()
+ apply(TestCmd.TestCmd.run, [self], kw)
+ build_time_finish = time.time()
+ except:
+ self.dump_stdio()
+ raise
+
+ old_last_build_timestamp = self.last_build_timestamp
+ self.tree, self.last_build_timestamp = tree.build_tree(self.workdir)
+ self.difference = tree.tree_difference(self.previous_tree, self.tree)
+ if self.difference.empty():
+ # If nothing has been changed by this build and sufficient time has
+ # passed since the last build that actually changed something,
+ # there is no need to wait for touched or newly created files to
+ # start getting newer timestamps than the currently existing ones.
+ self.last_build_timestamp = old_last_build_timestamp
+
+ self.difference.ignore_directories()
+ self.unexpected_difference = copy.deepcopy(self.difference)
+
+ if (status and self.status) is not None and self.status != status:
+ expect = ""
+ if status != 0:
+ expect = " (expected %d)" % status
+
+ annotation("failure", '"%s" returned %d%s' % (kw["program"],
+ self.status, expect))
+
+ annotation("reason", "unexpected status returned by bjam")
+ self.fail_test(1)
+
+ if stdout is not None and not match(self.stdout(), stdout):
+ annotation("failure", "Unexpected stdout")
+ annotation("Expected STDOUT", stdout)
+ annotation("Actual STDOUT", self.stdout())
+ stderr = self.stderr()
+ if stderr:
+ annotation("STDERR", stderr)
+ self.maybe_do_diff(self.stdout(), stdout)
+ self.fail_test(1, dump_stdio=False)
+
+ # Intel tends to produce some messages to stderr which make tests fail.
+ intel_workaround = re.compile("^xi(link|lib): executing.*\n", re.M)
+ actual_stderr = re.sub(intel_workaround, "", self.stderr())
+
+ if stderr is not None and not match(actual_stderr, stderr):
+ annotation("failure", "Unexpected stderr")
+ annotation("Expected STDERR", stderr)
+ annotation("Actual STDERR", self.stderr())
+ annotation("STDOUT", self.stdout())
+ self.maybe_do_diff(actual_stderr, stderr)
+ self.fail_test(1, dump_stdio=False)
+
+ if expected_duration is not None:
+ actual_duration = build_time_finish - build_time_start
+ if actual_duration > expected_duration:
+ print("Test run lasted %f seconds while it was expected to "
+ "finish in under %f seconds." % (actual_duration,
+ expected_duration))
+ self.fail_test(1, dump_stdio=False)
+
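+    # Illustrative sketch (not part of the framework's own control flow): a
+    # typical test script is expected to drive run_build_system() and the
+    # expect_*() helpers below roughly like this, with the Jamroot contents
+    # and target names being hypothetical examples:
+    #
+    #   t = Tester()
+    #   t.write("jamroot.jam", "exe hello : hello.cpp ;")
+    #   t.write("hello.cpp", "int main() {}\n")
+    #   t.run_build_system()
+    #   t.expect_addition("bin/$toolset/debug/hello.exe")
+    #   t.expect_nothing_more()
+    #   t.cleanup()
+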
+ def glob_file(self, name):
+ result = None
+ if hasattr(self, "difference"):
+ for f in (self.difference.added_files +
+ self.difference.modified_files +
+ self.difference.touched_files):
+ if fnmatch.fnmatch(f, name):
+ result = self.native_file_name(f)
+ break
+ if not result:
+ result = glob.glob(self.native_file_name(name))
+ if result:
+ result = result[0]
+ return result
+
+ def read(self, name, binary=False):
+ try:
+ if self.toolset:
+ name = name.replace("$toolset", self.toolset + "*")
+ name = self.glob_file(name)
+ openMode = "r"
+ if binary:
+ openMode += "b"
+ else:
+ openMode += "U"
+ f = open(name, openMode)
+ result = f.read()
+ f.close()
+ return result
+ except:
+ annotation("failure", "Could not open '%s'" % name)
+ self.fail_test(1)
+ return ""
+
+ def read_and_strip(self, name):
+ if not self.glob_file(name):
+ return ""
+ f = open(self.glob_file(name), "rb")
+ lines = f.readlines()
+ f.close()
+ result = "\n".join(x.rstrip() for x in lines)
+ if lines and lines[-1][-1] != "\n":
+ return result + "\n"
+ return result
+
+ def fail_test(self, condition, dump_difference=True, dump_stdio=True,
+ dump_stack=True):
+ if not condition:
+ return
+
+ if dump_difference and hasattr(self, "difference"):
+ f = StringIO.StringIO()
+ self.difference.pprint(f)
+ annotation("changes caused by the last build command",
+ f.getvalue())
+
+ if dump_stdio:
+ self.dump_stdio()
+
+ if "--preserve" in sys.argv:
+ print
+ print "*** Copying the state of working dir into 'failed_test' ***"
+ print
+ path = os.path.join(self.original_workdir, "failed_test")
+ if os.path.isdir(path):
+ shutil.rmtree(path, ignore_errors=False)
+ elif os.path.exists(path):
+                raise ValueError("Path " + path + " already exists and is "
+                    "not a directory")
+ shutil.copytree(self.workdir, path)
+ print "The failed command was:"
+ print " ".join(self.last_program_invocation)
+
+ if dump_stack:
+ annotate_stack_trace()
+ sys.exit(1)
+
+    # A number of methods below check expectations against the actual
+    # difference between the directory trees before and after a build. All the
+    # 'expect*' methods require exact names to be passed. All the 'ignore*'
+    # methods allow wildcards.
+
+ # All names can be either a string or a list of strings.
+ def expect_addition(self, names):
+ for name in self.adjust_names(names):
+ try:
+ glob_remove(self.unexpected_difference.added_files, name)
+ except:
+ annotation("failure", "File %s not added as expected" % name)
+ self.fail_test(1)
+
+ def ignore_addition(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.added_files,
+ wildcard)
+
+ def expect_removal(self, names):
+ for name in self.adjust_names(names):
+ try:
+ glob_remove(self.unexpected_difference.removed_files, name)
+ except:
+ annotation("failure", "File %s not removed as expected" % name)
+ self.fail_test(1)
+
+ def ignore_removal(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.removed_files,
+ wildcard)
+
+ def expect_modification(self, names):
+ for name in self.adjust_names(names):
+ try:
+ glob_remove(self.unexpected_difference.modified_files, name)
+ except:
+ annotation("failure", "File %s not modified as expected" %
+ name)
+ self.fail_test(1)
+
+ def ignore_modification(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.modified_files,
+ wildcard)
+
+ def expect_touch(self, names):
+ d = self.unexpected_difference
+ for name in self.adjust_names(names):
+ # We need to check both touched and modified files. The reason is
+ # that:
+ # (1) Windows binaries such as obj, exe or dll files have slight
+ # differences even with identical inputs due to Windows PE
+ # format headers containing an internal timestamp.
+ # (2) Intel's compiler for Linux has the same behaviour.
+ filesets = [d.modified_files, d.touched_files]
+
+ while filesets:
+ try:
+ glob_remove(filesets[-1], name)
+ break
+ except ValueError:
+ filesets.pop()
+
+ if not filesets:
+ annotation("failure", "File %s not touched as expected" % name)
+ self.fail_test(1)
+
+ def ignore_touch(self, wildcard):
+ self.__ignore_elements(self.unexpected_difference.touched_files,
+ wildcard)
+
+ def ignore(self, wildcard):
+ self.ignore_addition(wildcard)
+ self.ignore_removal(wildcard)
+ self.ignore_modification(wildcard)
+ self.ignore_touch(wildcard)
+
+ def expect_nothing(self, names):
+ for name in self.adjust_names(names):
+ if name in self.difference.added_files:
+ annotation("failure",
+ "File %s added, but no action was expected" % name)
+ self.fail_test(1)
+ if name in self.difference.removed_files:
+ annotation("failure",
+ "File %s removed, but no action was expected" % name)
+ self.fail_test(1)
+ if name in self.difference.modified_files:
+ annotation("failure",
+ "File %s modified, but no action was expected" % name)
+ self.fail_test(1)
+ if name in self.difference.touched_files:
+ annotation("failure",
+ "File %s touched, but no action was expected" % name)
+ self.fail_test(1)
+
+ def expect_nothing_more(self):
+ # Not totally sure about this change, but I do not see a good
+ # alternative.
+ if windows:
+ self.ignore("*.ilk") # MSVC incremental linking files.
+ self.ignore("*.pdb") # MSVC program database files.
+ self.ignore("*.rsp") # Response files.
+ self.ignore("*.tds") # Borland debug symbols.
+ self.ignore("*.manifest") # MSVC DLL manifests.
+
+ # Debug builds of bjam built with gcc produce this profiling data.
+ self.ignore("gmon.out")
+ self.ignore("*/gmon.out")
+
+ # Boost Build's 'configure' functionality (unfinished at the time)
+ # produces this file.
+ self.ignore("bin/config.log")
+ self.ignore("bin/project-cache.jam")
+
+ # Compiled Python files created when running Python based Boost Build.
+ self.ignore("*.pyc")
+
+ if not self.unexpected_difference.empty():
+ annotation("failure", "Unexpected changes found")
+ output = StringIO.StringIO()
+ self.unexpected_difference.pprint(output)
+ annotation("unexpected changes", output.getvalue())
+ self.fail_test(1)
+
+ def expect_output_lines(self, lines, expected=True):
+ self.__expect_lines(self.stdout(), lines, expected)
+
+ def expect_content_lines(self, filename, line, expected=True):
+ self.__expect_lines(self.__read_file(filename), line, expected)
+
+ def expect_content(self, name, content, exact=False):
+ actual = self.__read_file(name, exact)
+ content = content.replace("$toolset", self.toolset + "*")
+
+ matched = False
+ if exact:
+ matched = fnmatch.fnmatch(actual, content)
+ else:
+ def sorted_(x):
+ x.sort()
+ return x
+ actual_ = map(lambda x: sorted_(x.split()), actual.splitlines())
+ content_ = map(lambda x: sorted_(x.split()), content.splitlines())
+ if len(actual_) == len(content_):
+ matched = map(
+ lambda x, y: map(lambda n, p: fnmatch.fnmatch(n, p), x, y),
+ actual_, content_)
+ matched = reduce(
+ lambda x, y: x and reduce(
+ lambda a, b: a and b,
+ y),
+ matched)
+
+ if not matched:
+ print "Expected:\n"
+ print content
+ print "Got:\n"
+ print actual
+ self.fail_test(1)
+
+ def maybe_do_diff(self, actual, expected):
+ if os.environ.get("DO_DIFF"):
+ e = tempfile.mktemp("expected")
+ a = tempfile.mktemp("actual")
+ f = open(e, "w")
+ f.write(expected)
+ f.close()
+ f = open(a, "w")
+ f.write(actual)
+ f.close()
+ print("DIFFERENCE")
+ # Current diff should return 1 to indicate 'different input files'
+ # but some older diff versions may return 0 and depending on the
+ # exact Python/OS platform version, os.system() call may gobble up
+ # the external process's return code and return 0 itself.
+ if os.system('diff -u "%s" "%s"' % (e, a)) not in [0, 1]:
+ print('Unable to compute difference: diff -u "%s" "%s"' % (e, a
+ ))
+ os.unlink(e)
+ os.unlink(a)
+ else:
+            print("Set the environment variable 'DO_DIFF' to examine the "
+                "difference.")
+
+ # Internal methods.
+ def adjust_lib_name(self, name):
+ global lib_prefix
+ global dll_prefix
+ result = name
+
+ pos = name.rfind(".")
+ if pos != -1:
+ suffix = name[pos:]
+ if suffix == ".lib":
+ (head, tail) = os.path.split(name)
+ if lib_prefix:
+ tail = lib_prefix + tail
+ result = os.path.join(head, tail)
+ elif suffix == ".dll":
+ (head, tail) = os.path.split(name)
+ if dll_prefix:
+ tail = dll_prefix + tail
+ result = os.path.join(head, tail)
+ # If we want to use this name in a Jamfile, we better convert \ to /,
+ # as otherwise we would have to quote \.
+ result = result.replace("\\", "/")
+ return result
+
+ def adjust_suffix(self, name):
+ if not self.translate_suffixes:
+ return name
+ pos = name.rfind(".")
+ if pos == -1:
+ return name
+ suffix = name[pos:]
+ return name[:pos] + suffixes.get(suffix, suffix)
+
+    # Accepts either a string or a list of strings and returns a list of
+ # strings. Adjusts suffixes on all names.
+ def adjust_names(self, names):
+ if names.__class__ is str:
+ names = [names]
+ r = map(self.adjust_lib_name, names)
+ r = map(self.adjust_suffix, r)
+ r = map(lambda x, t=self.toolset: x.replace("$toolset", t + "*"), r)
+ return r
+
+ def native_file_name(self, name):
+ name = self.adjust_names(name)[0]
+ return os.path.normpath(os.path.join(self.workdir, *name.split("/")))
+
+ def wait_for_time_change(self, path, touch):
+ """
+ Wait for newly assigned file system modification timestamps for the
+ given path to become large enough for the timestamp difference to be
+ correctly recognized by both this Python based testing framework and
+ the Boost Jam executable being tested. May optionally touch the given
+ path to set its modification timestamp to the new value.
+
+ """
+ self.__wait_for_time_change(path, touch, last_build_time=False)
+
+ def __build_timestamp_resolution(self):
+ """
+ Returns the minimum path modification timestamp resolution supported
+ by the used Boost Jam executable.
+
+ """
+ dir = tempfile.mkdtemp("bjam_version_info")
+ try:
+ jam_script = "timestamp_resolution.jam"
+ f = open(os.path.join(dir, jam_script), "w")
+ try:
+ f.write("EXIT $(JAM_TIMESTAMP_RESOLUTION) : 0 ;")
+ finally:
+ f.close()
+ p = subprocess.Popen([self.program[0], "-d0", "-f%s" % jam_script],
+ stdout=subprocess.PIPE, cwd=dir, universal_newlines=True)
+ out, err = p.communicate()
+ finally:
+ shutil.rmtree(dir, ignore_errors=False)
+
+ if p.returncode != 0:
+ raise TestEnvironmentError("Unexpected return code (%s) when "
+ "detecting Boost Jam's minimum supported path modification "
+ "timestamp resolution version information." % p.returncode)
+ if err:
+ raise TestEnvironmentError("Unexpected error output (%s) when "
+ "detecting Boost Jam's minimum supported path modification "
+ "timestamp resolution version information." % err)
+
+ r = re.match("([0-9]{2}):([0-9]{2}):([0-9]{2}\\.[0-9]{9})$", out)
+ if not r:
+ # Older Boost Jam versions did not report their minimum supported
+ # path modification timestamp resolution and did not actually
+ # support path modification timestamp resolutions finer than 1
+ # second.
+ # TODO: Phase this support out to avoid such fallback code from
+ # possibly covering up other problems.
+ return 1
+ if r.group(1) != "00" or r.group(2) != "00": # hours, minutes
+ raise TestEnvironmentError("Boost Jam with too coarse minimum "
+ "supported path modification timestamp resolution (%s:%s:%s)."
+ % (r.group(1), r.group(2), r.group(3)))
+ return float(r.group(3)) # seconds.nanoseconds
+
+ def __ensure_newer_than_last_build(self, path):
+ """
+ Updates the given path's modification timestamp after waiting for the
+ newly assigned file system modification timestamp to become large
+ enough for the timestamp difference between it and the last build
+ timestamp to be correctly recognized by both this Python based testing
+ framework and the Boost Jam executable being tested. Does nothing if
+ there is no 'last build' information available.
+
+ """
+ if self.last_build_timestamp:
+ self.__wait_for_time_change(path, touch=True, last_build_time=True)
+
+ def __expect_lines(self, data, lines, expected):
+ """
+ Checks whether the given data contains the given lines.
+
+ Data may be specified as a single string containing text lines
+ separated by newline characters.
+
+ Lines may be specified in any of the following forms:
+ * Single string containing text lines separated by newlines - the
+ given lines are searched for in the given data without any extra
+ data lines between them.
+ * Container of strings containing text lines separated by newlines
+ - the given lines are searched for in the given data with extra
+ data lines allowed between lines belonging to different strings.
+ * Container of strings containing text lines separated by newlines
+ and containers containing strings - the same as above with the
+ internal containers containing strings being interpreted as if
+ all their content was joined together into a single string
+ separated by newlines.
+
+ A newline at the end of any multi-line lines string is interpreted as
+        an expected extra trailing empty line.
+ """
+ # str.splitlines() trims at most one trailing newline while we want the
+ # trailing newline to indicate that there should be an extra empty line
+ # at the end.
+ splitlines = lambda x : (x + "\n").splitlines()
+
+ if data is None:
+ data = []
+ elif data.__class__ is str:
+ data = splitlines(data)
+
+ if lines.__class__ is str:
+ lines = [splitlines(lines)]
+ else:
+ expanded = []
+ for x in lines:
+ if x.__class__ is str:
+ x = splitlines(x)
+ expanded.append(x)
+ lines = expanded
+
+ if _contains_lines(data, lines) != bool(expected):
+ output = []
+ if expected:
+ output = ["Did not find expected lines:"]
+ else:
+ output = ["Found unexpected lines:"]
+ first = True
+ for line_sequence in lines:
+ if line_sequence:
+ if first:
+ first = False
+ else:
+ output.append("...")
+ output.extend(" > " + line for line in line_sequence)
+ output.append("in output:")
+ output.extend(" > " + line for line in data)
+ annotation("failure", "\n".join(output))
+ self.fail_test(1)
+
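+    # Illustrative examples (with made-up output text) of the accepted 'lines'
+    # forms described above, as passed to expect_output_lines() /
+    # expect_content_lines(); lines are matched using fnmatch-style wildcards:
+    #
+    #   t.expect_output_lines("warning: foo")              # single line
+    #   t.expect_output_lines("line one\nline two")        # consecutive lines
+    #   t.expect_output_lines(["*skipping*", "*error*"])   # gaps allowed between
+    #   t.expect_output_lines("*rebuilding*", False)       # must not appear
+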
+ def __ignore_elements(self, list, wildcard):
+ """Removes in-place 'list' elements matching the given 'wildcard'."""
+ list[:] = filter(lambda x, w=wildcard: not fnmatch.fnmatch(x, w), list)
+
+ def __makedirs(self, path, wait):
+ """
+ Creates a folder with the given path, together with any missing
+ parent folders. If WAIT is set, makes sure any newly created folders
+ have modification timestamps newer than the ones left behind by the
+ last build run.
+
+ """
+ try:
+ if wait:
+ stack = []
+ while path and path not in stack and not os.path.isdir(path):
+ stack.append(path)
+ path = os.path.dirname(path)
+ while stack:
+ path = stack.pop()
+ os.mkdir(path)
+ self.__ensure_newer_than_last_build(path)
+ else:
+ os.makedirs(path)
+ except Exception:
+ pass
+
+ def __python_timestamp_resolution(self, path, minimum_resolution):
+ """
+ Returns the modification timestamp resolution for the given path
+ supported by the used Python interpreter/OS/filesystem combination.
+ Will not check for resolutions less than the given minimum value. Will
+ change the path's modification timestamp in the process.
+
+ Return values:
+ 0 - nanosecond resolution supported
+ positive decimal - timestamp resolution in seconds
+
+ """
+ # Note on Python's floating point timestamp support:
+ # Python interpreter versions prior to Python 2.3 did not support
+ # floating point timestamps. Versions 2.3 through 3.3 may or may not
+ # support it depending on the configuration (may be toggled by calling
+ # os.stat_float_times(True/False) at program startup, disabled by
+ # default prior to Python 2.5 and enabled by default since). Python 3.3
+ # deprecated this configuration and 3.4 removed support for it after
+ # which floating point timestamps are always supported.
+ ver = sys.version_info[0:2]
+ python_nanosecond_support = ver >= (3, 4) or (ver >= (2, 3) and
+ os.stat_float_times())
+
+ # Minimal expected floating point difference used to account for
+ # possible imprecise floating point number representations. We want
+ # this number to be small (at least smaller than 0.0001) but still
+ # large enough that we can be sure that increasing a floating point
+ # value by 2 * eta guarantees the value read back will be increased by
+ # at least eta.
+ eta = 0.00005
+
+ stats_orig = os.stat(path)
+ def test_time(diff):
+ """Returns whether a timestamp difference is detectable."""
+ os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime + diff))
+ return os.stat(path).st_mtime > stats_orig.st_mtime + eta
+
+ # Test for nanosecond timestamp resolution support.
+ if not minimum_resolution and python_nanosecond_support:
+ if test_time(2 * eta):
+ return 0
+
+        # Detect the filesystem timestamp resolution. Note that there is no
+        # need to make this code 'as fast as possible': it runs before we have
+        # to sleep until the next detectable modification timestamp value and
+        # that sleep, since we already know nanosecond resolution is not
+        # supported, will surely take longer than whatever we do here to
+        # detect the minimal detectable modification timestamp resolution.
+ step = 0.1
+ if not python_nanosecond_support:
+ # If Python does not support nanosecond timestamp resolution we
+ # know the minimum possible supported timestamp resolution is 1
+ # second.
+ minimum_resolution = max(1, minimum_resolution)
+ index = max(1, int(minimum_resolution / step))
+ while step * index < minimum_resolution:
+ # Floating point number representation errors may cause our
+ # initially calculated start index to be too small if calculated
+ # directly.
+ index += 1
+ while True:
+ # Do not simply add up the steps to avoid cumulative floating point
+ # number representation errors.
+ next = step * index
+ if next > 10:
+ raise TestEnvironmentError("File systems with too coarse "
+ "modification timestamp resolutions not supported.")
+ if test_time(next):
+ return next
+ index += 1
+
+ def __read_file(self, name, exact=False):
+ name = self.adjust_names(name)[0]
+ result = ""
+ try:
+ if exact:
+ result = self.read(name)
+ else:
+ result = self.read_and_strip(name).replace("\\", "/")
+ except (IOError, IndexError):
+ print "Note: could not open file", name
+ self.fail_test(1)
+ return result
+
+ def __wait_for_time_change(self, path, touch, last_build_time):
+ """
+ Wait until a newly assigned file system modification timestamp for
+ the given path is large enough for the timestamp difference between it
+ and the last build timestamp or the path's original file system
+ modification timestamp (depending on the last_build_time flag) to be
+ correctly recognized by both this Python based testing framework and
+ the Boost Jam executable being tested. May optionally touch the given
+ path to set its modification timestamp to the new value.
+
+ """
+ assert self.last_build_timestamp or not last_build_time
+ stats_orig = os.stat(path)
+
+ if last_build_time:
+ start_time = self.last_build_timestamp
+ else:
+ start_time = stats_orig.st_mtime
+
+ build_resolution = self.__build_timestamp_resolution()
+ assert build_resolution >= 0
+
+ # Check whether the current timestamp is already new enough.
+ if stats_orig.st_mtime > start_time and (not build_resolution or
+ stats_orig.st_mtime >= start_time + build_resolution):
+ return
+
+ resolution = self.__python_timestamp_resolution(path, build_resolution)
+ assert resolution >= build_resolution
+
+ # Implementation notes:
+ # * Theoretically time.sleep() API might get interrupted too soon
+ # (never actually encountered).
+ # * We encountered cases where we sleep just long enough for the
+        #   filesystem's modification timestamp to change to the desired value,
+ # but after waking up, the read timestamp is still just a tiny bit
+ # too small (encountered on Windows). This is most likely caused by
+ # imprecise floating point timestamp & sleep interval representation
+ # used by Python. Note though that we never encountered a case where
+ # more than one additional tiny sleep() call was needed to remedy
+ # the situation.
+ # * We try to wait long enough for the timestamp to change, but do not
+ # want to waste processing time by waiting too long. The main
+ # problem is that when we have a coarse resolution, the actual times
+ # get rounded and we do not know the exact sleep time needed for the
+ # difference between two such times to pass. E.g. if we have a 1
+ # second resolution and the original and the current file timestamps
+ # are both 10 seconds then it could be that the current time is
+ # 10.99 seconds and that we can wait for just one hundredth of a
+ # second for the current file timestamp to reach its next value, and
+ # using a longer sleep interval than that would just be wasting
+ # time.
+ while True:
+ os.utime(path, None)
+ c = os.stat(path).st_mtime
+ if resolution:
+ if c > start_time and (not build_resolution or c >= start_time
+ + build_resolution):
+ break
+ if c <= start_time - resolution:
+ # Move close to the desired timestamp in one sleep, but not
+ # close enough for timestamp rounding to potentially cause
+ # us to wait too long.
+ if start_time - c > 5:
+ if last_build_time:
+ error_message = ("Last build time recorded as "
+ "being a future event, causing a too long "
+ "wait period. Something must have played "
+ "around with the system clock.")
+ else:
+ error_message = ("Original path modification "
+ "timestamp set to far into the future or "
+ "something must have played around with the "
+ "system clock, causing a too long wait "
+ "period.\nPath: '%s'" % path)
+                        raise TestEnvironmentError(error_message)
+ _sleep(start_time - c)
+ else:
+ # We are close to the desired timestamp so take baby sleeps
+ # to avoid sleeping too long.
+ _sleep(max(0.01, resolution / 10))
+ else:
+ if c > start_time:
+ break
+ _sleep(max(0.01, start_time - c))
+
+ if not touch:
+ os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime))
+
+
+class List:
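+    """
+    Helper class modelling a whitespace-separated list of string elements.
+    Backslash-escaped spaces in the input are kept as part of an element, and
+    the '*' operator builds a cross-product of two lists, e.g.
+    List("foo bar") * "/baz" -> ['foo/baz', 'bar/baz'].
+    """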
+ def __init__(self, s=""):
+ elements = []
+ if s.__class__ is str:
+ # Have to handle escaped spaces correctly.
+ elements = s.replace("\ ", "\001").split()
+ else:
+ elements = s
+ self.l = [e.replace("\001", " ") for e in elements]
+
+ def __len__(self):
+ return len(self.l)
+
+ def __getitem__(self, key):
+ return self.l[key]
+
+ def __setitem__(self, key, value):
+ self.l[key] = value
+
+ def __delitem__(self, key):
+ del self.l[key]
+
+ def __str__(self):
+ return str(self.l)
+
+ def __repr__(self):
+ return "%s.List(%r)" % (self.__module__, " ".join(self.l))
+
+ def __mul__(self, other):
+ result = List()
+ if not isinstance(other, List):
+ other = List(other)
+ for f in self:
+ for s in other:
+ result.l.append(f + s)
+ return result
+
+ def __rmul__(self, other):
+ if not isinstance(other, List):
+ other = List(other)
+ return List.__mul__(other, self)
+
+ def __add__(self, other):
+ result = List()
+ result.l = self.l[:] + other.l[:]
+ return result
+
+
+def _contains_lines(data, lines):
+ data_line_count = len(data)
+ expected_line_count = reduce(lambda x, y: x + len(y), lines, 0)
+ index = 0
+ for expected in lines:
+ if expected_line_count > data_line_count - index:
+ return False
+ expected_line_count -= len(expected)
+ index = _match_line_sequence(data, index, data_line_count -
+ expected_line_count, expected)
+ if index < 0:
+ return False
+ return True
+
+
+def _match_line_sequence(data, start, end, lines):
+ if not lines:
+ return start
+ for index in xrange(start, end - len(lines) + 1):
+ data_index = index
+ for expected in lines:
+ if not fnmatch.fnmatch(data[data_index], expected):
+                break
+ data_index += 1
+ else:
+ return data_index
+ return -1
+
+
+def _sleep(delay):
+ if delay > 5:
+ raise TestEnvironmentError("Test environment error: sleep period of "
+ "more than 5 seconds requested. Most likely caused by a file with "
+ "its modification timestamp set to sometime in the future.")
+ time.sleep(delay)
+
+
+###############################################################################
+#
+# Initialization.
+#
+###############################################################################
+
+# Make os.stat() return file modification times as floats instead of integers
+# to get the best possible file timestamp resolution available. The exact
+# resolution depends on the underlying file system and the Python os.stat()
+# implementation. The better the resolution we achieve, the shorter we need to
+# wait for files we create to start getting new timestamps.
+#
+# Additional notes:
+#  * The os.stat_float_times() function was first introduced in Python 2.3 and
+# suggested for deprecation in Python 3.3.
+# * On Python versions 2.5+ we do not need to do this as there os.stat()
+# returns floating point file modification times by default.
+# * Windows CPython implementations prior to version 2.5 do not support file
+# modification timestamp resolutions of less than 1 second no matter whether
+# these timestamps are returned as integer or floating point values.
+# * Python documentation states that this should be set in a program's
+# __main__ module to avoid affecting other libraries that might not be ready
+# to support floating point timestamps. Since we use no such external
+# libraries, we ignore this warning to make it easier to enable this feature
+# in both our single & multiple-test scripts.
+if (2, 3) <= sys.version_info < (2, 5) and not os.stat_float_times():
+ os.stat_float_times(True)
+
+
+# Quickie tests. Should use doctest instead.
+if __name__ == "__main__":
+ assert str(List("foo bar") * "/baz") == "['foo/baz', 'bar/baz']"
+ assert repr("foo/" * List("bar baz")) == "__main__.List('foo/bar foo/baz')"
+
+ assert _contains_lines([], [])
+ assert _contains_lines([], [[]])
+ assert _contains_lines([], [[], []])
+ assert _contains_lines([], [[], [], []])
+ assert not _contains_lines([], [[""]])
+ assert not _contains_lines([], [["a"]])
+
+ assert _contains_lines([""], [])
+ assert _contains_lines(["a"], [])
+ assert _contains_lines(["a", "b"], [])
+ assert _contains_lines(["a", "b"], [[], [], []])
+
+ assert _contains_lines([""], [[""]])
+ assert not _contains_lines([""], [["a"]])
+ assert not _contains_lines(["a"], [[""]])
+ assert _contains_lines(["a", "", "b", ""], [["a"]])
+ assert _contains_lines(["a", "", "b", ""], [[""]])
+ assert _contains_lines(["a", "", "b"], [["b"]])
+ assert not _contains_lines(["a", "b"], [[""]])
+ assert not _contains_lines(["a", "", "b", ""], [["c"]])
+ assert _contains_lines(["a", "", "b", "x"], [["x"]])
+
+ data = ["1", "2", "3", "4", "5", "6", "7", "8", "9"]
+ assert _contains_lines(data, [["1", "2"]])
+ assert not _contains_lines(data, [["2", "1"]])
+ assert not _contains_lines(data, [["1", "3"]])
+ assert not _contains_lines(data, [["1", "3"]])
+ assert _contains_lines(data, [["1"], ["2"]])
+ assert _contains_lines(data, [["1"], [], [], [], ["2"]])
+ assert _contains_lines(data, [["1"], ["3"]])
+ assert not _contains_lines(data, [["3"], ["1"]])
+ assert _contains_lines(data, [["3"], ["7"], ["8"]])
+ assert not _contains_lines(data, [["1"], ["3", "5"]])
+ assert not _contains_lines(data, [["1"], [""], ["5"]])
+ assert not _contains_lines(data, [["1"], ["5"], ["3"]])
+ assert not _contains_lines(data, [["1"], ["5", "3"]])
+
+ assert not _contains_lines(data, [[" 3"]])
+ assert not _contains_lines(data, [["3 "]])
+ assert not _contains_lines(data, [["3", ""]])
+ assert not _contains_lines(data, [["", "3"]])
+
+ print("tests passed")
diff --git a/tools/build/test/MockToolset.py b/tools/build/test/MockToolset.py
new file mode 100755
index 0000000000..741959e50e
--- /dev/null
+++ b/tools/build/test/MockToolset.py
@@ -0,0 +1,250 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import sys
+
+def create(t):
+ t.write('''mockinfo.py''', '''
+import re
+import optparse
+import os
+
+parser = optparse.OptionParser()
+parser.add_option('-o', dest="output_file")
+parser.add_option('-x', dest="language")
+parser.add_option('-c', dest="compile", action="store_true")
+parser.add_option('-I', dest="includes", action="append")
+parser.add_option('-L', dest="library_path", action="append")
+parser.add_option('--dll', dest="dll", action="store_true")
+parser.add_option('--archive', dest="archive", action="store_true")
+parser.add_option('--static-lib', dest="static_libraries", action="append")
+parser.add_option('--shared-lib', dest="shared_libraries", action="append")
+
+cwd = os.environ["JAM_CWD"]
+
+class MockInfo(object):
+ def __init__(self, verbose=False):
+ self.files = dict()
+ self.commands = list()
+ self.verbose = verbose
+ def source_file(self, name, pattern):
+ self.files[name] = pattern
+ def action(self, command, status=0):
+ self.commands.append((command, status))
+ def check(self, command):
+ print "Testing command", command
+ for (raw, status) in self.commands:
+ if self.matches(raw, command):
+ return status
+ def matches(self, raw, command):
+ (expected_options, expected_args) = parser.parse_args(raw.split())
+ options = command[0]
+ input_files = list(command[1])
+ if self.verbose:
+ print " - matching against", (expected_options, expected_args)
+ if len(expected_args) != len(input_files):
+ if self.verbose:
+ print " argument list sizes differ"
+ return False
+ for arg in expected_args:
+ if arg.startswith('$'):
+ fileid = arg[1:]
+ pattern = self.files[fileid] if fileid in self.files else fileid
+ matching_file = None
+ for input_file in input_files:
+ with open(input_file, 'r') as f:
+ contents = f.read()
+ if pattern == contents:
+ matching_file = input_file
+ break
+ if matching_file is not None:
+ input_files.remove(matching_file)
+ else:
+ if self.verbose:
+ print " Failed to match input file contents: %s" % arg
+ return False
+ else:
+ if arg in input_files:
+ input_files.remove(arg)
+ else:
+ if self.verbose:
+ print " Failed to match input file: %s" % arg
+ return False
+
+ if options.language != expected_options.language:
+ if self.verbose:
+        print " Failed to match -x"
+ return False
+
+ if options.compile != expected_options.compile:
+ if self.verbose:
+        print " Failed to match -c"
+ return False
+
+ # Normalize a path for comparison purposes
+ def adjust_path(p):
+ return os.path.normcase(os.path.normpath(os.path.join(cwd, p)))
+
+ # order matters
+ if options.includes is None:
+ options.includes = []
+ if expected_options.includes is None:
+ expected_options.includes = []
+ if map(adjust_path, options.includes) != \
+ map(adjust_path, expected_options.includes):
+ if self.verbose:
+ print " Failed to match -I ", map(adjust_path, options.includes), \
+ " != ", map(adjust_path, expected_options.includes)
+ return False
+
+ if options.library_path is None:
+ options.library_path = []
+ if expected_options.library_path is None:
+ expected_options.library_path = []
+ if map(adjust_path, options.library_path) != \
+ map(adjust_path, expected_options.library_path):
+ if self.verbose:
+ print " Failed to match -L ", map(adjust_path, options.library_path), \
+ " != ", map(adjust_path, expected_options.library_path)
+ return False
+
+ if options.static_libraries != expected_options.static_libraries:
+ if self.verbose:
+ print " Failed to match --static-lib"
+ return False
+
+ if options.shared_libraries != expected_options.shared_libraries:
+ if self.verbose:
+ print " Failed to match --shared-lib"
+ return False
+
+ if options.dll != expected_options.dll:
+ if self.verbose:
+ print " Failed to match --dll"
+ return False
+
+ if options.archive != expected_options.archive:
+ if self.verbose:
+ print " Failed to match --archive"
+ return False
+
+ # The output must be handled after everything else
+ # is validated
+ if expected_options.output_file is not None:
+ if options.output_file is not None:
+ if expected_options.output_file.startswith('$'):
+ fileid = expected_options.output_file[1:]
+ if fileid not in self.files:
+ self.files[fileid] = fileid
+ else:
+ assert(self.files[fileid] == fileid)
+ with open(options.output_file, 'w') as output:
+ output.write(fileid)
+ else:
+ if self.verbose:
+ print "Failed to match -o"
+ return False
+ elif options.output_file is not None:
+ if self.verbose:
+ print "Failed to match -o"
+ return False
+
+ # if we've gotten here, then everything matched
+ if self.verbose:
+ print " Matched"
+ return True
+''')
+
+ t.write('mock.py', '''
+import mockinfo
+import markup
+import sys
+
+status = markup.info.check(mockinfo.parser.parse_args())
+if status is not None:
+ exit(status)
+else:
+ print("Unrecognized command: " + ' '.join(sys.argv))
+ exit(1)
+''')
+
+ t.write('mock.jam', '''
+import feature ;
+import toolset ;
+import path ;
+import modules ;
+import common ;
+import type ;
+
+.python-cmd = "\"%s\"" ;
+
+# Behave the same as gcc on Windows, because that's what
+# the test system expects
+type.set-generated-target-prefix SHARED_LIB : <toolset>mock <target-os>windows : lib ;
+type.set-generated-target-suffix STATIC_LIB : <toolset>mock <target-os>windows : a ;
+
+rule init ( )
+{
+ local here = [ path.make [ modules.binding $(__name__) ] ] ;
+ here = [ path.native [ path.root [ path.parent $(here) ] [ path.pwd ] ] ] ;
+ .config-cmd = [ common.variable-setting-command JAM_CWD : $(here) ] $(.python-cmd) -B ;
+}
+
+feature.extend toolset : mock ;
+
+generators.register-c-compiler mock.compile.c++ : CPP : OBJ : <toolset>mock ;
+generators.register-c-compiler mock.compile.c : C : OBJ : <toolset>mock ;
+
+generators.register-linker mock.link : LIB OBJ : EXE : <toolset>mock ;
+generators.register-linker mock.link.dll : LIB OBJ : SHARED_LIB : <toolset>mock ;
+generators.register-archiver mock.archive : OBJ : STATIC_LIB : <toolset>mock ;
+
+toolset.flags mock.compile INCLUDES <include> ;
+
+actions compile.c
+{
+ $(.config-cmd) mock.py -c -x c -I"$(INCLUDES)" "$(>)" -o "$(<)"
+}
+
+actions compile.c++
+{
+ $(.config-cmd) mock.py -c -x c++ -I"$(INCLUDES)" "$(>)" -o "$(<)"
+}
+
+toolset.flags mock.link USER_OPTIONS <linkflags> ;
+toolset.flags mock.link FINDLIBS-STATIC <find-static-library> ;
+toolset.flags mock.link FINDLIBS-SHARED <find-shared-library> ;
+toolset.flags mock.link LINK_PATH <library-path> ;
+toolset.flags mock.link LIBRARIES <library-file> ;
+
+actions link
+{
+ $(.config-cmd) mock.py "$(>)" -o "$(<)" $(USER_OPTIONS) -L"$(LINK_PATH)" --static-lib=$(FINDLIBS-STATIC) --shared-lib=$(FINDLIBS-SHARED)
+}
+
+actions archive
+{
+ $(.config-cmd) mock.py --archive "$(>)" -o "$(<)" $(USER_OPTIONS)
+}
+
+actions link.dll
+{
+ $(.config-cmd) mock.py --dll "$(>)" -o "$(<)" $(USER_OPTIONS) -L"$(LINK_PATH)" --static-lib=$(FINDLIBS-STATIC) --shared-lib=$(FINDLIBS-SHARED)
+}
+
+''' % sys.executable.replace('\\', '\\\\'))
+
+def set_expected(t, markup):
+ verbose = "True" if t.verbose else "False"
+ t.write('markup.py', '''
+import mockinfo
+info = mockinfo.MockInfo(%s)
+def source_file(name, contents):
+ info.source_file(name, contents)
+def action(command, status=0):
+ info.action(command, status)
+''' % verbose + markup)
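+
+# Illustrative sketch of how a test script is expected to drive this module
+# (the BoostBuild.Tester instance 't' and the file names, contents and command
+# lines below are hypothetical examples):
+#
+#   t = BoostBuild.Tester(arguments=['toolset=mock'], pass_toolset=False)
+#   MockToolset.create(t)
+#   MockToolset.set_expected(t, '''
+#   source_file('main.cpp', 'int main() {}')
+#   action('-c -x c++ $main.cpp -o $main.o')
+#   action('$main.o -o $main.exe')
+#   ''')
+#   t.write('main.cpp', 'int main() {}')
+#   t.write('jamroot.jam', 'exe main : main.cpp ;')
+#   t.run_build_system()
+#   t.cleanup()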
diff --git a/tools/build/test/TestCmd.py b/tools/build/test/TestCmd.py
new file mode 100644
index 0000000000..5993df7ff9
--- /dev/null
+++ b/tools/build/test/TestCmd.py
@@ -0,0 +1,589 @@
+"""
+TestCmd.py: a testing framework for commands and scripts.
+
+The TestCmd module provides a framework for portable automated testing of
+executable commands and scripts (in any language, not just Python), especially
+commands and scripts that require file system interaction.
+
+In addition to running tests and evaluating conditions, the TestCmd module
+manages and cleans up one or more temporary workspace directories, and provides
+methods for creating files and directories in those workspace directories from
+in-line data (here-documents), allowing tests to be completely self-contained.
+
+A TestCmd environment object is created via the usual invocation:
+
+ test = TestCmd()
+
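+A hypothetical minimal session (file names and arguments are only examples)
+might look like:
+
+    test = TestCmd(program=['my_program'], workdir='')
+    test.write('input.txt', 'sample input\n')
+    test.run(arguments='input.txt')
+    test.fail_test(test.status != 0)
+    test.pass_test()
+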
+The TestCmd module provides pass_test(), fail_test(), and no_result() unbound
+methods that report test results for use with the Aegis change management
+system. These methods terminate the test immediately, reporting PASSED, FAILED
+or NO RESULT respectively and exiting with status 0 (success), 1 or 2
+respectively. This allows for a distinction between an actual failed test and a
+test that could not be properly evaluated because of an external condition (such
+as a full file system or incorrect permissions).
+
+"""
+
+# Copyright 2000 Steven Knight
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
+#
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
+#
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+
+# Copyright 2002-2003 Vladimir Prus.
+# Copyright 2002-2003 Dave Abrahams.
+# Copyright 2006 Rene Rivera.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+
+from string import join, split
+
+__author__ = "Steven Knight <knight@baldmt.com>"
+__revision__ = "TestCmd.py 0.D002 2001/08/31 14:56:12 software"
+__version__ = "0.02"
+
+from types import *
+
+import os
+import os.path
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import traceback
+
+
+tempfile.template = 'testcmd.'
+
+_Cleanup = []
+
+def _clean():
+ global _Cleanup
+ list = _Cleanup[:]
+ _Cleanup = []
+ list.reverse()
+ for test in list:
+ test.cleanup()
+
+sys.exitfunc = _clean
+
+
+def caller(tblist, skip):
+ string = ""
+ arr = []
+ for file, line, name, text in tblist:
+ if file[-10:] == "TestCmd.py":
+ break
+ arr = [(file, line, name, text)] + arr
+ atfrom = "at"
+ for file, line, name, text in arr[skip:]:
+ if name == "?":
+ name = ""
+ else:
+ name = " (" + name + ")"
+ string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
+ atfrom = "\tfrom"
+ return string
+
+
+def fail_test(self=None, condition=True, function=None, skip=0):
+ """Cause the test to fail.
+
+ By default, the fail_test() method reports that the test FAILED and exits
+ with a status of 1. If a condition argument is supplied, the test fails
+ only if the condition is true.
+
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+ of = " of " + join(self.program, " ")
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = caller(traceback.extract_stack(), skip)
+
+ sys.stderr.write("FAILED test" + of + desc + sep + at + """
+in directory: """ + os.getcwd() )
+ sys.exit(1)
+
+
+def no_result(self=None, condition=True, function=None, skip=0):
+ """Causes a test to exit with no valid result.
+
+ By default, the no_result() method reports NO RESULT for the test and
+    exits with a status of 2. If a condition argument is supplied, NO RESULT
+    is reported only if the condition is true.
+
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ of = ""
+ desc = ""
+ sep = " "
+ if not self is None:
+ if self.program:
+            of = " of " + join(self.program, " ")
+ sep = "\n\t"
+ if self.description:
+ desc = " [" + self.description + "]"
+ sep = "\n\t"
+
+ at = caller(traceback.extract_stack(), skip)
+ sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
+ sys.exit(2)
+
+
+def pass_test(self=None, condition=True, function=None):
+ """Causes a test to pass.
+
+ By default, the pass_test() method reports PASSED for the test and exits
+ with a status of 0. If a condition argument is supplied, the test passes
+ only if the condition is true.
+
+ """
+ if not condition:
+ return
+ if not function is None:
+ function()
+ sys.stderr.write("PASSED\n")
+ sys.exit(0)
+
+
+def match_exact(lines=None, matches=None):
+ """
+ Returns whether the given lists or strings containing lines separated
+ using newline characters contain exactly the same data.
+
+ """
+ if not type(lines) is ListType:
+ lines = split(lines, "\n")
+ if not type(matches) is ListType:
+ matches = split(matches, "\n")
+ if len(lines) != len(matches):
+ return
+ for i in range(len(lines)):
+ if lines[i] != matches[i]:
+ return
+ return 1
+
+
+def match_re(lines=None, res=None):
+ """
+    Given lists or strings containing lines separated using newline
+    characters, matches those lines one by one, interpreting the lines in the
+    res parameter as regular expressions.
+
+ """
+ if not type(lines) is ListType:
+ lines = split(lines, "\n")
+ if not type(res) is ListType:
+ res = split(res, "\n")
+ if len(lines) != len(res):
+ return
+ for i in range(len(lines)):
+ if not re.compile("^" + res[i] + "$").search(lines[i]):
+ return
+ return 1
+
+
+class TestCmd:
+ def __init__(self, description=None, program=None, workdir=None,
+ subdir=None, verbose=False, match=None, inpath=None):
+
+ self._cwd = os.getcwd()
+ self.description_set(description)
+ self.program_set(program, inpath)
+ self.verbose_set(verbose)
+ if match is None:
+ self.match_func = match_re
+ else:
+ self.match_func = match
+ self._dirlist = []
+ self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
+ env = os.environ.get('PRESERVE')
+ if env:
+ self._preserve['pass_test'] = env
+ self._preserve['fail_test'] = env
+ self._preserve['no_result'] = env
+ else:
+ env = os.environ.get('PRESERVE_PASS')
+ if env is not None:
+ self._preserve['pass_test'] = env
+ env = os.environ.get('PRESERVE_FAIL')
+ if env is not None:
+ self._preserve['fail_test'] = env
+            env = os.environ.get('PRESERVE_NO_RESULT')
+            if env is not None:
+                self._preserve['no_result'] = env
+ self._stdout = []
+ self._stderr = []
+ self.status = None
+ self.condition = 'no_result'
+ self.workdir_set(workdir)
+ self.subdir(subdir)
+
+ def __del__(self):
+ self.cleanup()
+
+ def __repr__(self):
+ return "%x" % id(self)
+
+ def cleanup(self, condition=None):
+ """
+ Removes any temporary working directories for the specified TestCmd
+ environment. If the environment variable PRESERVE was set when the
+ TestCmd environment was created, temporary working directories are not
+ removed. If any of the environment variables PRESERVE_PASS,
+ PRESERVE_FAIL or PRESERVE_NO_RESULT were set when the TestCmd
+ environment was created, then temporary working directories are not
+ removed if the test passed, failed or had no result, respectively.
+ Temporary working directories are also preserved for conditions
+ specified via the preserve method.
+
+ Typically, this method is not called directly, but is used when the
+ script exits to clean up temporary working directories as appropriate
+ for the exit status.
+
+ """
+ if not self._dirlist:
+ return
+ if condition is None:
+ condition = self.condition
+ if self._preserve[condition]:
+ for dir in self._dirlist:
+ print("Preserved directory %s" % dir)
+ else:
+ list = self._dirlist[:]
+ list.reverse()
+ for dir in list:
+ self.writable(dir, 1)
+ shutil.rmtree(dir, ignore_errors=1)
+
+ self._dirlist = []
+ self.workdir = None
+ os.chdir(self._cwd)
+ try:
+ global _Cleanup
+ _Cleanup.remove(self)
+ except (AttributeError, ValueError):
+ pass
+
+ def description_set(self, description):
+ """Set the description of the functionality being tested."""
+ self.description = description
+
+ def fail_test(self, condition=True, function=None, skip=0):
+ """Cause the test to fail."""
+ if not condition:
+ return
+ self.condition = 'fail_test'
+ fail_test(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def match(self, lines, matches):
+ """Compare actual and expected file contents."""
+ return self.match_func(lines, matches)
+
+ def match_exact(self, lines, matches):
+ """Compare actual and expected file content exactly."""
+ return match_exact(lines, matches)
+
+ def match_re(self, lines, res):
+ """Compare file content with a regular expression."""
+ return match_re(lines, res)
+
+ def no_result(self, condition=True, function=None, skip=0):
+ """Report that the test could not be run."""
+ if not condition:
+ return
+ self.condition = 'no_result'
+ no_result(self = self,
+ condition = condition,
+ function = function,
+ skip = skip)
+
+ def pass_test(self, condition=True, function=None):
+ """Cause the test to pass."""
+ if not condition:
+ return
+ self.condition = 'pass_test'
+ pass_test(self, condition, function)
+
+ def preserve(self, *conditions):
+ """
+ Arrange for the temporary working directories for the specified
+ TestCmd environment to be preserved for one or more conditions. If no
+ conditions are specified, arranges for the temporary working
+ directories to be preserved for all conditions.
+
+ """
+        if not conditions:
+ conditions = ('pass_test', 'fail_test', 'no_result')
+ for cond in conditions:
+ self._preserve[cond] = 1
+
+ def program_set(self, program, inpath):
+ """Set the executable program or script to be tested."""
+ if not inpath and program and not os.path.isabs(program[0]):
+ program[0] = os.path.join(self._cwd, program[0])
+ self.program = program
+
+ def read(self, file, mode='rb'):
+ """
+ Reads and returns the contents of the specified file name. The file
+ name may be a list, in which case the elements are concatenated with
+ the os.path.join() method. The file is assumed to be under the
+ temporary working directory unless it is an absolute path name. The I/O
+ mode for the file may be specified and must begin with an 'r'. The
+ default is 'rb' (binary read).
+
+ """
+ if type(file) is ListType:
+ file = apply(os.path.join, tuple(file))
+ if not os.path.isabs(file):
+ file = os.path.join(self.workdir, file)
+ if mode[0] != 'r':
+ raise ValueError, "mode must begin with 'r'"
+ return open(file, mode).read()
+
+ def run(self, program=None, arguments=None, chdir=None, stdin=None,
+ universal_newlines=True):
+ """
+ Runs a test of the program or script for the test environment.
+ Standard output and error output are saved for future retrieval via the
+ stdout() and stderr() methods.
+
+        The 'universal_newlines' parameter controls how the child process
+        input/output streams are opened, as defined for the identically named
+        Python subprocess.Popen constructor parameter.
+
+ """
+ if chdir:
+ if not os.path.isabs(chdir):
+                chdir = self.workpath(chdir)
+ if self.verbose:
+ sys.stderr.write("chdir(" + chdir + ")\n")
+ else:
+ chdir = self.workdir
+
+ cmd = []
+ if program and program[0]:
+ if program[0] != self.program[0] and not os.path.isabs(program[0]):
+ program[0] = os.path.join(self._cwd, program[0])
+ cmd += program
+ else:
+ cmd += self.program
+ if arguments:
+ cmd += arguments.split(" ")
+ if self.verbose:
+ sys.stderr.write(join(cmd, " ") + "\n")
+ p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=chdir,
+ universal_newlines=universal_newlines)
+
+ if stdin:
+ if type(stdin) is ListType:
+ for line in stdin:
+                    p.stdin.write(line)
+            else:
+                p.stdin.write(stdin)
+ out, err = p.communicate()
+ self._stdout.append(out)
+ self._stderr.append(err)
+ self.status = p.returncode
+
+ if self.verbose:
+ sys.stdout.write(self._stdout[-1])
+ sys.stderr.write(self._stderr[-1])
+
+ def stderr(self, run=None):
+ """
+ Returns the error output from the specified run number. If there is
+ no specified run number, then returns the error output of the last run.
+ If the run number is less than zero, then returns the error output from
+ that many runs back from the current run.
+
+ """
+ if not run:
+ run = len(self._stderr)
+ elif run < 0:
+ run = len(self._stderr) + run
+ run -= 1
+ if run < 0:
+ return ''
+ return self._stderr[run]
+
+ def stdout(self, run=None):
+ """
+ Returns the standard output from the specified run number. If there
+ is no specified run number, then returns the standard output of the
+ last run. If the run number is less than zero, then returns the
+ standard output from that many runs back from the current run.
+
+ """
+ if not run:
+ run = len(self._stdout)
+ elif run < 0:
+ run = len(self._stdout) + run
+ run -= 1
+ if run < 0:
+ return ''
+ return self._stdout[run]
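+
+    # For example (hypothetical): after three run() calls, stdout() returns the
+    # output of the third run, stdout(run=3) does the same, and stdout(run=-1)
+    # returns the output of the second run (one run back from the current one).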
+
+ def subdir(self, *subdirs):
+ """
+ Create new subdirectories under the temporary working directory, one
+ for each argument. An argument may be a list, in which case the list
+ elements are concatenated using the os.path.join() method.
+ Subdirectories multiple levels deep must be created using a separate
+ argument for each level:
+
+ test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
+
+ Returns the number of subdirectories actually created.
+
+ """
+ count = 0
+ for sub in subdirs:
+ if sub is None:
+ continue
+ if type(sub) is ListType:
+ sub = apply(os.path.join, tuple(sub))
+ new = os.path.join(self.workdir, sub)
+ try:
+ os.mkdir(new)
+ except:
+ pass
+ else:
+ count += 1
+ return count
+
+ def unlink(self, file):
+ """
+ Unlinks the specified file name. The file name may be a list, in
+ which case the elements are concatenated using the os.path.join()
+ method. The file is assumed to be under the temporary working directory
+ unless it is an absolute path name.
+
+ """
+ if type(file) is ListType:
+ file = apply(os.path.join, tuple(file))
+ if not os.path.isabs(file):
+ file = os.path.join(self.workdir, file)
+ os.unlink(file)
+
+ def verbose_set(self, verbose):
+ """Set the verbose level."""
+ self.verbose = verbose
+
+ def workdir_set(self, path):
+ """
+ Creates a temporary working directory with the specified path name.
+ If the path is a null string (''), a unique directory name is created.
+
+ """
+ if os.path.isabs(path):
+ self.workdir = path
+ else:
+ if path != None:
+ if path == '':
+ path = tempfile.mktemp()
+ if path != None:
+ os.mkdir(path)
+ self._dirlist.append(path)
+ global _Cleanup
+ try:
+ _Cleanup.index(self)
+ except ValueError:
+ _Cleanup.append(self)
+ # We would like to set self.workdir like this:
+ # self.workdir = path
+ # But symlinks in the path will report things differently from
+ # os.getcwd(), so chdir there and back to fetch the canonical
+ # path.
+ cwd = os.getcwd()
+ os.chdir(path)
+ self.workdir = os.getcwd()
+ os.chdir(cwd)
+ else:
+ self.workdir = None
+
+ def workpath(self, *args):
+ """
+ Returns the absolute path name to a subdirectory or file within the
+ current temporary working directory. Concatenates the temporary working
+ directory name with the specified arguments using os.path.join().
+
+ """
+ return apply(os.path.join, (self.workdir,) + tuple(args))
+
+ def writable(self, top, write):
+ """
+ Make the specified directory tree writable (write == 1) or not
+ (write == None).
+
+ """
+ def _walk_chmod(arg, dirname, names):
+ st = os.stat(dirname)
+ os.chmod(dirname, arg(st[stat.ST_MODE]))
+ for name in names:
+ fullname = os.path.join(dirname, name)
+ st = os.stat(fullname)
+ os.chmod(fullname, arg(st[stat.ST_MODE]))
+
+ _mode_writable = lambda mode: stat.S_IMODE(mode|0200)
+ _mode_non_writable = lambda mode: stat.S_IMODE(mode&~0200)
+
+ if write:
+ f = _mode_writable
+ else:
+ f = _mode_non_writable
+ try:
+ os.path.walk(top, _walk_chmod, f)
+ except:
+ pass # Ignore any problems changing modes.
+
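+    # Usage sketch (an added illustration, not part of the original module):
+    # drop write permission on the whole work tree before a step that must not
+    # modify it, then restore it so cleanup can still remove the files.
+    #
+    #   tester.writable(tester.workdir, None)   # make the tree read-only
+    #   tester.writable(tester.workdir, 1)      # make it writable again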
+ def write(self, file, content, mode='wb'):
+ """
+ Writes the specified content text (second argument) to the specified
+ file name (first argument). The file name may be a list, in which case
+ the elements are concatenated using the os.path.join() method. The file
+ is created under the temporary working directory. Any subdirectories in
+ the path must already exist. The I/O mode for the file may be specified
+ and must begin with a 'w'. The default is 'wb' (binary write).
+
+ """
+ if type(file) is ListType:
+ file = apply(os.path.join, tuple(file))
+ if not os.path.isabs(file):
+ file = os.path.join(self.workdir, file)
+ if mode[0] != 'w':
+ raise ValueError, "mode must begin with 'w'"
+ open(file, mode).write(content)
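+
+    # Usage sketch (an added illustration, not part of the original module);
+    # both forms below create the file under the temporary working directory:
+    #
+    #   tester.write("jamroot.jam", "exe a : a.cpp ;")
+    #   tester.write(["sub", "jamfile.jam"], "exe b : b.cpp ;")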
diff --git a/tools/build/test/abs_workdir.py b/tools/build/test/abs_workdir.py
new file mode 100644
index 0000000000..fa6aadc587
--- /dev/null
+++ b/tools/build/test/abs_workdir.py
@@ -0,0 +1,26 @@
+# Niklaus Giger, 2005-03-15
+# Tests whether we can run a test in an absolute directory. There are no tests
+# for temporary directories as this is implicitly tested in a lot of other cases.
+
+# TODO: Move to a separate testing-system test group.
+# TODO: Make the test not display any output on success.
+# TODO: Make sure implemented path handling is correct under Windows, Cygwin &
+# Unix/Linux.
+
+import BoostBuild
+import os
+import tempfile
+
+t = BoostBuild.Tester(["-ffile.jam"], workdir=os.getcwd(), pass_d0=False,
+ pass_toolset=False)
+
+t.write("file.jam", "EXIT [ PWD ] : 0 ;")
+
+t.run_build_system()
+t.expect_output_lines("*%s*" % tempfile.gettempdir(), False)
+t.expect_output_lines("*build/v2/test*")
+
+t.run_build_system(status=1, subdir="/must/fail/with/absolute/path",
+ stderr=None)
+
+t.cleanup()
diff --git a/tools/build/test/absolute_sources.py b/tools/build/test/absolute_sources.py
new file mode 100644
index 0000000000..64cd770e1f
--- /dev/null
+++ b/tools/build/test/absolute_sources.py
@@ -0,0 +1,73 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that sources with absolute names are handled OK.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "path-constant TOP : . ;")
+t.write("jamfile.jam", """\
+local pwd = [ PWD ] ;
+ECHO $(pwd) XXXXX ;
+exe hello : $(pwd)/hello.cpp $(TOP)/empty.cpp ;
+""")
+t.write("hello.cpp", "int main() {}\n")
+t.write("empty.cpp", "\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/hello.exe")
+t.rm(".")
+
+# Test a contrived case in which an absolute name is used in a standalone
+# project (not a Jamfile). Moreover, the target with an absolute name is
+# returned via an 'alias' and used from another project.
+t.write("a.cpp", "int main() {}\n")
+t.write("jamfile.jam", "exe a : /standalone//a ;")
+t.write("jamroot.jam", "import standalone ;")
+t.write("standalone.jam", """\
+import project ;
+project.initialize $(__name__) ;
+project standalone ;
+local pwd = [ PWD ] ;
+alias a : $(pwd)/a.cpp ;
+""")
+
+t.write("standalone.py", """
+from b2.manager import get_manager
+
+# FIXME: this is ugly as death
+get_manager().projects().initialize(__name__)
+
+import os
+
+# This use of a list as a parameter is also ugly.
+project(['standalone'])
+
+pwd = os.getcwd()
+alias('a', [os.path.join(pwd, 'a.cpp')])
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.exe")
+
+# Test absolute path in target ids.
+t.rm(".")
+
+t.write("d1/jamroot.jam", "")
+t.write("d1/jamfile.jam", "exe a : a.cpp ;")
+t.write("d1/a.cpp", "int main() {}\n")
+t.write("d2/jamroot.jam", "")
+t.write("d2/jamfile.jam", """\
+local pwd = [ PWD ] ;
+alias x : $(pwd)/../d1//a ;
+""")
+
+t.run_build_system(subdir="d2")
+t.expect_addition("d1/bin/$toolset/debug/a.exe")
+
+t.cleanup()
diff --git a/tools/build/test/alias.py b/tools/build/test/alias.py
new file mode 100644
index 0000000000..4ff4d74d50
--- /dev/null
+++ b/tools/build/test/alias.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_alias_rule()
+# -----------------
+#
+###############################################################################
+
+def test_alias_rule(t):
+ """Basic alias rule test."""
+
+ t.write("jamroot.jam", """\
+exe a : a.cpp ;
+exe b : b.cpp ;
+exe c : c.cpp ;
+
+alias bin1 : a ;
+alias bin2 : a b ;
+
+alias src : s.cpp ;
+exe hello : hello.cpp src ;
+""")
+
+ t.write("a.cpp", "int main() {}\n")
+ t.copy("a.cpp", "b.cpp")
+ t.copy("a.cpp", "c.cpp")
+ t.copy("a.cpp", "hello.cpp")
+ t.write("s.cpp", "")
+
+ # Check that targets to which "bin1" refers are updated, and only those.
+ t.run_build_system(["bin1"])
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * "a.exe a.obj")
+ t.expect_nothing_more()
+
+ # Try again with "bin2"
+ t.run_build_system(["bin2"])
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * "b.exe b.obj")
+ t.expect_nothing_more()
+
+ # Try building everything, making sure 'hello' target is created.
+ t.run_build_system()
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * \
+ "hello.exe hello.obj")
+ t.expect_addition("bin/$toolset/debug/s.obj")
+ t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * "c.exe c.obj")
+ t.expect_nothing_more()
+
+
+###############################################################################
+#
+# test_alias_source_usage_requirements()
+# --------------------------------------
+#
+###############################################################################
+
+def test_alias_source_usage_requirements(t):
+ """
+ Check whether usage requirements are propagated via "alias". In case they
+ are not, linking will fail as there will be no main() function defined
+ anywhere in the source.
+
+ """
+ t.write("jamroot.jam", """\
+lib l : l.cpp : : : <define>WANT_MAIN ;
+alias la : l ;
+exe main : main.cpp la ;
+""")
+
+ t.write("l.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+ t.write("main.cpp", """\
+#ifdef WANT_MAIN
+int main() {}
+#endif
+""")
+
+ t.run_build_system()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+# We do not pass the '-d0' option to Boost Build here, so that we get more
+# detailed information in case of failure.
+t = BoostBuild.Tester(pass_d0=False, use_test_config=False)
+
+test_alias_rule(t)
+test_alias_source_usage_requirements(t)
+
+t.cleanup()
diff --git a/tools/build/test/alternatives.py b/tools/build/test/alternatives.py
new file mode 100644
index 0000000000..645927304f
--- /dev/null
+++ b/tools/build/test/alternatives.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test main target alternatives.
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Test that basic alternatives selection works.
+t.write("jamroot.jam", "")
+
+t.write("jamfile.jam", """
+exe a : a_empty.cpp ;
+exe a : a.cpp : <variant>release ;
+""")
+
+t.write("a_empty.cpp", "")
+
+t.write("a.cpp", "int main() {}\n")
+
+t.run_build_system(["release"])
+
+t.expect_addition("bin/$toolset/release/a.exe")
+
+# Test that alternative selection works for ordinary properties, in particular
+# user-defined ones.
+t.write("jamroot.jam", "")
+
+t.write("jamfile.jam", """
+import feature ;
+feature.feature X : off on : propagated ;
+exe a : b.cpp ;
+exe a : a.cpp : <X>on ;
+""")
+t.write("b.cpp", "int main() {}\n")
+
+t.rm("bin")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/b.obj")
+
+t.run_build_system(["X=on"])
+t.expect_addition("bin/$toolset/debug/X-on/a.obj")
+
+t.rm("bin")
+
+# Test that everything works ok even with the default build.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp : <variant>release ;
+exe a : a.cpp : <variant>debug ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.exe")
+
+# Test that only properties present in the build request matter for alternative
+# selection. In other words, an alternative with <variant>release is better
+# than one with <variant>debug when building the release variant.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp : <variant>debug ;
+exe a : a.cpp : <variant>release ;
+""")
+
+t.run_build_system(["release"])
+t.expect_addition("bin/$toolset/release/a.exe")
+
+# Test that free properties do not matter. We really do not want the <cxxflags>
+# property in the build request to affect alternative selection.
+t.write("jamfile.jam", """
+exe a : a_empty.cpp : <variant>debug <define>FOO <include>BAR ;
+exe a : a.cpp : <variant>release ;
+""")
+
+t.rm("bin/$toolset/release/a.exe")
+t.run_build_system(["release", "define=FOO"])
+t.expect_addition("bin/$toolset/release/a.exe")
+
+# Test that ambiguity is reported correctly.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp ;
+exe a : a.cpp ;
+""")
+t.run_build_system(["--no-error-backtrace"], status=None)
+t.fail_test(string.find(t.stdout(), "No best alternative") == -1)
+
+# Another ambiguity test: two matching properties in one alternative are
+# neither better nor worse than a single one in another alternative.
+t.write("jamfile.jam", """\
+exe a : a_empty.cpp : <optimization>off <profiling>off ;
+exe a : a.cpp : <debug-symbols>on ;
+""")
+
+t.run_build_system(["--no-error-backtrace"], status=None)
+t.fail_test(string.find(t.stdout(), "No best alternative") == -1)
+
+# Test that we can have alternative without sources.
+t.write("jamfile.jam", """\
+alias specific-sources ;
+import feature ;
+feature.extend os : MAGIC ;
+alias specific-sources : b.cpp : <os>MAGIC ;
+exe a : a.cpp specific-sources ;
+""")
+t.rm("bin")
+t.run_build_system()
+
+t.cleanup()
diff --git a/tools/build/v2/test/bad_dirname.py b/tools/build/test/bad_dirname.py
index 961a45a48a..961a45a48a 100644
--- a/tools/build/v2/test/bad_dirname.py
+++ b/tools/build/test/bad_dirname.py
diff --git a/tools/build/test/boost-build.jam b/tools/build/test/boost-build.jam
new file mode 100644
index 0000000000..668452daf4
--- /dev/null
+++ b/tools/build/test/boost-build.jam
@@ -0,0 +1,14 @@
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Assume BOOST_BUILD_PATH points to the 'test' directory.
+# We need to leave 'test' there, so that 'test-config.jam'
+# can be found, but also add the parent directory, to find
+# all the other modules.
+
+BOOST_BUILD_PATH = $(BOOST_BUILD_PATH)/.. $(BOOST_BUILD_PATH) ;
+
+# Find the Boost Build system in the ../src/kernel directory.
+boost-build ../src/kernel ;
diff --git a/tools/build/v2/test/boostbook.py b/tools/build/test/boostbook.py
index 78eab87d0d..78eab87d0d 100644
--- a/tools/build/v2/test/boostbook.py
+++ b/tools/build/test/boostbook.py
diff --git a/tools/build/v2/test/boostbook/a.hpp b/tools/build/test/boostbook/a.hpp
index 5fab129a9c..5fab129a9c 100644
--- a/tools/build/v2/test/boostbook/a.hpp
+++ b/tools/build/test/boostbook/a.hpp
diff --git a/tools/build/v2/test/boostbook/docs.xml b/tools/build/test/boostbook/docs.xml
index c2d9b1f888..c2d9b1f888 100644
--- a/tools/build/v2/test/boostbook/docs.xml
+++ b/tools/build/test/boostbook/docs.xml
diff --git a/tools/build/v2/test/boostbook/jamroot.jam b/tools/build/test/boostbook/jamroot.jam
index 94564ca29c..94564ca29c 100644
--- a/tools/build/v2/test/boostbook/jamroot.jam
+++ b/tools/build/test/boostbook/jamroot.jam
diff --git a/tools/build/test/build_dir.py b/tools/build/test/build_dir.py
new file mode 100644
index 0000000000..50c2a906cd
--- /dev/null
+++ b/tools/build/test/build_dir.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can change build directory using the 'build-dir' project
+# attribute.
+
+import BoostBuild
+import string
+import os
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# Test that top-level project can affect build dir.
+t.write("jamroot.jam", "import gcc ;")
+t.write("jamfile.jam", """\
+project : build-dir build ;
+exe a : a.cpp ;
+build-project src ;
+""")
+
+t.write("a.cpp", "int main() {}\n")
+
+t.write("src/jamfile.jam", "exe b : b.cpp ; ")
+
+t.write("src/b.cpp", "int main() {}\n")
+
+t.run_build_system()
+
+t.expect_addition(["build/$toolset/debug/a.exe",
+ "build/src/$toolset/debug/b.exe"])
+
+# Test that building from child projects works.
+t.run_build_system(subdir='src')
+t.ignore("build/config.log")
+t.ignore("build/project-cache.jam")
+t.expect_nothing_more()
+
+# Test that project can override build dir.
+t.write("jamfile.jam", """\
+exe a : a.cpp ;
+build-project src ;
+""")
+
+t.write("src/jamfile.jam", """\
+project : build-dir build ;
+exe b : b.cpp ;
+""")
+
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug/a.exe",
+ "src/build/$toolset/debug/b.exe"])
+
+# Now test the '--build-dir' option.
+t.rm(".")
+t.write("jamroot.jam", "")
+
+# Test that we get a warning when no project id is specified.
+t.run_build_system(["--build-dir=foo"])
+t.fail_test(string.find(t.stdout(),
+ "warning: the --build-dir option will be ignored") == -1)
+
+t.write("jamroot.jam", """\
+project foo ;
+exe a : a.cpp ;
+build-project sub ;
+""")
+t.write("a.cpp", "int main() {}\n")
+t.write("sub/jamfile.jam", "exe b : b.cpp ;\n")
+t.write("sub/b.cpp", "int main() {}\n")
+
+t.run_build_system(["--build-dir=build"])
+t.expect_addition(["build/foo/$toolset/debug/a.exe",
+ "build/foo/sub/$toolset/debug/b.exe"])
+
+t.write("jamroot.jam", """\
+project foo : build-dir bin.v2 ;
+exe a : a.cpp ;
+build-project sub ;
+""")
+
+t.run_build_system(["--build-dir=build"])
+t.expect_addition(["build/foo/bin.v2/$toolset/debug/a.exe",
+ "build/foo/bin.v2/sub/$toolset/debug/b.exe"])
+
+# Try building in a subdirectory. We expect the entire build tree to be placed
+# in 'sub/build'. Today, I am not sure whether this is what the user expects,
+# but let it be.
+t.rm('build')
+t.run_build_system(["--build-dir=build"], subdir="sub")
+t.expect_addition(["sub/build/foo/bin.v2/sub/$toolset/debug/b.exe"])
+
+t.write("jamroot.jam", """\
+project foo : build-dir %s ;
+exe a : a.cpp ;
+build-project sub ;
+""" % string.replace(os.getcwd(), '\\', '\\\\'))
+
+t.run_build_system(["--build-dir=build"], status=1)
+t.fail_test(string.find(t.stdout(),
+ "Absolute directory specified via 'build-dir' project attribute") == -1)
+
+t.cleanup()
diff --git a/tools/build/test/build_file.py b/tools/build/test/build_file.py
new file mode 100644
index 0000000000..d670af4284
--- /dev/null
+++ b/tools/build/test/build_file.py
@@ -0,0 +1,170 @@
+#!/usr/bin/python
+
+# Copyright (C) 2006. Vladimir Prus
+# Copyright (C) 2008. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that we can explicitly request a file (not a target) to be built by
+# specifying its name on the command line.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_building_file_from_specific_project()
+# ------------------------------------------
+#
+###############################################################################
+
+def test_building_file_from_specific_project():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+build-project sub ;
+""")
+ t.write("hello.cpp", "int main() {}\n")
+ t.write("sub/jamfile.jam", """
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+exe sub : hello.cpp ;
+""")
+ t.write("sub/hello.cpp", "int main() {}\n")
+
+ t.run_build_system(["sub", t.adjust_suffix("hello.obj")])
+ t.expect_output_lines("*depends on itself*", False)
+ t.expect_addition("sub/bin/$toolset/debug/hello.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_file_from_specific_target()
+# -----------------------------------------
+#
+###############################################################################
+
+def test_building_file_from_specific_target():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello1 : hello1.cpp ;
+exe hello2 : hello2.cpp ;
+exe hello3 : hello3.cpp ;
+""")
+ t.write("hello1.cpp", "int main() {}\n")
+ t.write("hello2.cpp", "int main() {}\n")
+ t.write("hello3.cpp", "int main() {}\n")
+
+ t.run_build_system(["hello1", t.adjust_suffix("hello1.obj")])
+ t.expect_addition("bin/$toolset/debug/hello1.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_missing_file_from_specific_target()
+# -------------------------------------------------
+#
+###############################################################################
+
+def test_building_missing_file_from_specific_target():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello1 : hello1.cpp ;
+exe hello2 : hello2.cpp ;
+exe hello3 : hello3.cpp ;
+""")
+ t.write("hello1.cpp", "int main() {}\n")
+ t.write("hello2.cpp", "int main() {}\n")
+ t.write("hello3.cpp", "int main() {}\n")
+
+ obj = t.adjust_suffix("hello2.obj")
+ t.run_build_system(["hello1", obj], status=1)
+ t.expect_output_lines("don't know how to make*" + obj)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_multiple_files_with_different_names()
+# ---------------------------------------------------
+#
+###############################################################################
+
+def test_building_multiple_files_with_different_names():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello1 : hello1.cpp ;
+exe hello2 : hello2.cpp ;
+exe hello3 : hello3.cpp ;
+""")
+ t.write("hello1.cpp", "int main() {}\n")
+ t.write("hello2.cpp", "int main() {}\n")
+ t.write("hello3.cpp", "int main() {}\n")
+
+ t.run_build_system([t.adjust_suffix("hello1.obj"), t.adjust_suffix(
+ "hello2.obj")])
+ t.expect_addition("bin/$toolset/debug/hello1.obj")
+ t.expect_addition("bin/$toolset/debug/hello2.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_building_multiple_files_with_the_same_name()
+# -------------------------------------------------
+#
+###############################################################################
+
+def test_building_multiple_files_with_the_same_name():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+build-project sub ;
+""")
+ t.write("hello.cpp", "int main() {}\n")
+ t.write("sub/jamfile.jam", """
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+exe sub : hello.cpp ;
+""")
+ t.write("sub/hello.cpp", "int main() {}\n")
+
+ t.run_build_system([t.adjust_suffix("hello.obj")])
+ t.expect_output_lines("*depends on itself*", False)
+ t.expect_addition("bin/$toolset/debug/hello.obj")
+ t.expect_addition("sub/bin/$toolset/debug/hello.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_building_file_from_specific_project()
+test_building_file_from_specific_target()
+test_building_missing_file_from_specific_target()
+test_building_multiple_files_with_different_names()
+test_building_multiple_files_with_the_same_name()
diff --git a/tools/build/test/build_no.py b/tools/build/test/build_no.py
new file mode 100644
index 0000000000..07b45804e8
--- /dev/null
+++ b/tools/build/test/build_no.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that the <build>no property prevents a target from being built.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "exe hello : hello.cpp : <variant>debug:<build>no ;")
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_nothing_more()
+
+t.run_build_system(["release"])
+t.expect_addition("bin/$toolset/release/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/builtin_echo.py b/tools/build/test/builtin_echo.py
new file mode 100755
index 0000000000..3092361734
--- /dev/null
+++ b/tools/build/test/builtin_echo.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the ECHO rule.
+
+import BoostBuild
+
+def test_echo(name):
+ t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+ t.write("file.jam", """\
+%s ;
+UPDATE ;
+""" % name)
+ t.run_build_system(stdout="\n")
+
+ t.write("file.jam", """\
+%s a message ;
+UPDATE ;
+""" % name)
+ t.run_build_system(stdout="a message\n")
+
+ t.cleanup()
+
+test_echo("ECHO")
+test_echo("Echo")
+test_echo("echo")
diff --git a/tools/build/test/builtin_exit.py b/tools/build/test/builtin_exit.py
new file mode 100755
index 0000000000..1db8693667
--- /dev/null
+++ b/tools/build/test/builtin_exit.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the EXIT rule.
+
+import BoostBuild
+
+def test_exit(name):
+ t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+ t.write("file.jam", "%s ;" % name)
+ t.run_build_system(status=1, stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s : 0 ;" % name)
+ t.run_build_system(stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s : 1 ;" % name)
+ t.run_build_system(status=1, stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s : 2 ;" % name)
+ t.run_build_system(status=2, stdout="\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s a message ;" % name)
+ t.run_build_system(status=1, stdout="a message\n")
+ t.rm(".")
+
+ t.write("file.jam", "%s a message : 0 ;" % name)
+ t.run_build_system(stdout="a message\n")
+ t.rm(".")
+
+ t.cleanup()
+
+test_exit("EXIT")
+test_exit("Exit")
+test_exit("exit")
diff --git a/tools/build/test/builtin_glob.py b/tools/build/test/builtin_glob.py
new file mode 100755
index 0000000000..6f55a4d9c1
--- /dev/null
+++ b/tools/build/test/builtin_glob.py
@@ -0,0 +1,87 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the GLOB rule.
+
+import os
+import BoostBuild
+
+def test_glob(files, glob, expected, setup=""):
+ t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+ t.write("file.jam", setup + """
+ for local p in [ SORT %s ]
+ {
+ ECHO $(p) ;
+ }
+ UPDATE ;
+ """ % glob)
+ for f in files:
+ t.write(f, "")
+ # convert / into \ on windows
+ expected = [os.path.join(*p.split("/")) for p in expected]
+ expected.sort()
+ t.run_build_system(stdout="\n".join(expected + [""]))
+ t.cleanup()
+
+# one or both arguments empty
+test_glob([], "[ GLOB : ]", [])
+test_glob([], "[ GLOB . : ]", [])
+test_glob([], "[ GLOB : * ]", [])
+
+# a single result
+test_glob([], "[ GLOB . : * ]", ["./file.jam"])
+
+# * can match any number of characters
+test_glob([], "[ GLOB . : file*.jam ]", ["./file.jam"])
+test_glob([], "[ GLOB . : f*am ]", ["./file.jam"])
+# ? should match a single character, but not more than one
+test_glob([], "[ GLOB . : fi?e.?am ]", ["./file.jam"])
+test_glob([], "[ GLOB . : fi?.jam ]", [])
+# [abc-fh-j] matches a set of characters
+test_glob([], "[ GLOB . : [f][i][l][e].jam ]", ["./file.jam"])
+test_glob([], "[ GLOB . : [fghau][^usdrwe][k-o][^f-s].jam ]", ["./file.jam"])
+# \x matches x
+test_glob([], "[ GLOB . : \\f\\i\\l\\e.jam ]", ["./file.jam"])
+
+# multiple results
+test_glob(["test.txt"], "[ GLOB . : * ]", ["./file.jam", "./test.txt"])
+
+# directories
+test_glob(["dir1/dir2/test.txt"], "[ GLOB dir1 : * ]", ["dir1/dir2"])
+
+# non-existent directory
+test_glob([], "[ GLOB dir1 : * ] ", [])
+
+# multiple directories and patterns
+test_glob(["dir1/file1.txt", "dir2/file1.txt",
+ "dir2/file2.txt"],
+ "[ GLOB dir1 dir2 : file1* file2* ]",
+ ["dir1/file1.txt", "dir2/file1.txt",
+ "dir2/file2.txt"])
+
+# The directory can contain . and ..
+test_glob(["dir/test.txt"], "[ GLOB dir/. : test.txt ]", ["dir/./test.txt"])
+test_glob(["dir/test.txt"], "[ GLOB dir/.. : file.jam ]", ["dir/../file.jam"])
+
+# On case-insensitive filesystems, the result should
+# be normalized. It should NOT be downcased.
+test_glob(["TEST.TXT"], "[ GLOB . : TEST.TXT ]", ["./TEST.TXT"])
+
+case_insensitive = (os.path.normcase("FILE") == "file")
+
+if case_insensitive:
+ test_glob(["TEST.TXT"], "[ GLOB . : test.txt ]", ["./TEST.TXT"])
+ # This used to fail because the caching routines incorrectly
+ # reported that . and .. do not exist.
+ test_glob(["D1/D2/TEST.TXT"], "[ GLOB D1/./D2 : test.txt ]",
+ ["D1/./D2/TEST.TXT"])
+ test_glob(["D1/TEST.TXT", "TEST.TXT"], "[ GLOB D1/../D1 : test.txt ]",
+ ["D1/../D1/TEST.TXT"])
+ # This also failed because directories that were first found
+ # by GLOB were recorded as non-existent.
+ test_glob(["D1/D2/TEST.TXT"], "[ GLOB d1/d2 : test.txt ]",
+ ["D1/D2/TEST.TXT"],
+ "GLOB . : * ;")
diff --git a/tools/build/test/builtin_readlink.py b/tools/build/test/builtin_readlink.py
new file mode 100755
index 0000000000..e57d7286a5
--- /dev/null
+++ b/tools/build/test/builtin_readlink.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("link-target", "")
+os.symlink("link-target", "link")
+
+t.write("file.jam", """
+ECHO [ READLINK link ] ;
+EXIT [ READLINK link-target ] : 0 ;
+""")
+
+t.run_build_system(["-ffile.jam"], stdout="""link-target
+
+""")
+
+t.cleanup()
diff --git a/tools/build/test/builtin_split_by_characters.py b/tools/build/test/builtin_split_by_characters.py
new file mode 100755
index 0000000000..4a0a0e0612
--- /dev/null
+++ b/tools/build/test/builtin_split_by_characters.py
@@ -0,0 +1,57 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the SPLIT_BY_CHARACTERS rule.
+
+import BoostBuild
+
+def test_invalid(params, expected_error_line):
+ t = BoostBuild.Tester(pass_toolset=0)
+ t.write("file.jam", "SPLIT_BY_CHARACTERS %s ;" % params)
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("[*] %s" % expected_error_line)
+ t.cleanup()
+
+def test_valid():
+ t = BoostBuild.Tester(pass_toolset=0)
+ t.write("jamroot.jam", """\
+import assert ;
+
+assert.result FooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : "" ;
+assert.result FooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : x ;
+assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : r ;
+assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : rr ;
+assert.result FooBa Baz : SPLIT_BY_CHARACTERS FooBarBaz : rrr ;
+assert.result FooB rB z : SPLIT_BY_CHARACTERS FooBarBaz : a ;
+assert.result FooB B z : SPLIT_BY_CHARACTERS FooBarBaz : ar ;
+assert.result ooBarBaz : SPLIT_BY_CHARACTERS FooBarBaz : F ;
+assert.result FooBarBa : SPLIT_BY_CHARACTERS FooBarBaz : z ;
+assert.result ooBarBa : SPLIT_BY_CHARACTERS FooBarBaz : Fz ;
+assert.result F B rB z : SPLIT_BY_CHARACTERS FooBarBaz : oa ;
+assert.result Alib b : SPLIT_BY_CHARACTERS Alibaba : oa ;
+assert.result libaba : SPLIT_BY_CHARACTERS Alibaba : oA ;
+assert.result : SPLIT_BY_CHARACTERS FooBarBaz : FooBarBaz ;
+assert.result : SPLIT_BY_CHARACTERS FooBarBaz : FoBarz ;
+
+# Questionable results - should they return an empty string or an empty list?
+assert.result : SPLIT_BY_CHARACTERS "" : "" ;
+assert.result : SPLIT_BY_CHARACTERS "" : x ;
+assert.result : SPLIT_BY_CHARACTERS "" : r ;
+assert.result : SPLIT_BY_CHARACTERS "" : rr ;
+assert.result : SPLIT_BY_CHARACTERS "" : rrr ;
+assert.result : SPLIT_BY_CHARACTERS "" : oa ;
+""")
+ t.run_build_system()
+ t.cleanup()
+
+test_invalid("", "missing argument string")
+test_invalid("Foo", "missing argument delimiters")
+test_invalid(": Bar", "missing argument string")
+test_invalid("a : b : c", "extra argument c")
+test_invalid("a b : c", "extra argument b")
+test_invalid("a : b c", "extra argument c")
+test_valid()
diff --git a/tools/build/test/c_file.py b/tools/build/test/c_file.py
new file mode 100644
index 0000000000..672fd48023
--- /dev/null
+++ b/tools/build/test/c_file.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that C files are compiled by a C compiler.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+project ;
+exe hello : hello.cpp a.c ;
+""")
+
+t.write("hello.cpp", """
+extern "C" int foo();
+int main() { return foo(); }
+""")
+
+t.write("a.c", """
+// This will not compile unless in C mode.
+int foo()
+{
+ int new = 0;
+ new = (new+1)*7;
+ return new;
+}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/chain.py b/tools/build/test/chain.py
new file mode 100644
index 0000000000..4a39f520ed
--- /dev/null
+++ b/tools/build/test/chain.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests that:
+# 1) the 'make' rule correctly assigns types to the targets it produces, and
+# 2) if 'make' creates targets of type CPP, they are correctly used.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# In order to link this application correctly, 'b.cpp', created by a 'make'
+# rule, must be compiled.
+
+t.write("jamroot.jam", "import gcc ;")
+
+t.write("jamfile.jam", r'''
+import os ;
+if [ os.name ] = NT
+{
+ actions create
+ {
+ echo int main() {} > $(<)
+ }
+}
+else
+{
+ actions create
+ {
+ echo "int main() {}" > $(<)
+ }
+}
+
+IMPORT $(__name__) : create : : create ;
+
+exe a : l dummy.cpp ;
+
+# Needs to be a static lib for Windows - main() cannot appear in a DLL.
+static-lib l : a.cpp b.cpp ;
+
+make b.cpp : : create ;
+''')
+
+t.write("a.cpp", "")
+
+t.write("dummy.cpp", "// msvc needs at least one object file\n")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/a.exe")
+
+t.cleanup()
diff --git a/tools/build/test/clean.py b/tools/build/test/clean.py
new file mode 100644
index 0000000000..b10644aac8
--- /dev/null
+++ b/tools/build/test/clean.py
@@ -0,0 +1,104 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+t.write("jamroot.jam", "exe a : a.cpp sub1//sub1 sub2//sub2 sub3//sub3 ;")
+t.write("sub1/jamfile.jam", """\
+lib sub1 : sub1.cpp sub1_2 ../sub2//sub2 ;
+lib sub1_2 : sub1_2.cpp ;
+""")
+
+t.write("sub1/sub1.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub1() {}
+""")
+
+t.write("sub1/sub1_2.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub1() {}
+""")
+
+t.write("sub2/jamfile.jam", "lib sub2 : sub2.cpp ;")
+t.write("sub2/sub2.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub2() {}
+""")
+
+t.write("sub3/jamroot.jam", "lib sub3 : sub3.cpp ;")
+t.write("sub3/sub3.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void sub3() {}
+""")
+
+# 'clean' should not remove files under separate jamroot.jam.
+t.run_build_system()
+t.run_build_system(["--clean"])
+t.expect_removal("bin/$toolset/debug/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
+t.expect_removal("sub2/bin/$toolset/debug/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
+
+# 'clean-all' removes everything it can reach.
+t.run_build_system()
+t.run_build_system(["--clean-all"])
+t.expect_removal("bin/$toolset/debug/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
+t.expect_removal("sub2/bin/$toolset/debug/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
+
+# 'clean' together with a project target removes files only under that project.
+t.run_build_system()
+t.run_build_system(["sub1", "--clean"])
+t.expect_nothing("bin/$toolset/debug/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
+t.expect_nothing("sub2/bin/$toolset/debug/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
+
+# 'clean-all' removes everything.
+t.run_build_system()
+t.run_build_system(["sub1", "--clean-all"])
+t.expect_nothing("bin/$toolset/debug/a.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
+t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
+t.expect_removal("sub2/bin/$toolset/debug/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
+
+# If main target is explicitly named, we should not remove files from other
+# targets.
+t.run_build_system()
+t.run_build_system(["sub1//sub1", "--clean"])
+t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
+t.expect_nothing("sub1/bin/$toolset/debug/sub1_2.obj")
+t.expect_nothing("sub2/bin/$toolset/debug/sub2.obj")
+t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
+
+# Regression test: sources of the 'cast' rule were mistakenly deleted.
+t.rm(".")
+t.write("jamroot.jam", """\
+import cast ;
+cast a cpp : a.h ;
+""")
+t.write("a.h", "")
+t.run_build_system(["--clean"])
+t.expect_nothing("a.h")
+
+t.cleanup()
diff --git a/tools/build/test/collect_debug_info.py b/tools/build/test/collect_debug_info.py
new file mode 100755
index 0000000000..b25c15cc3a
--- /dev/null
+++ b/tools/build/test/collect_debug_info.py
@@ -0,0 +1,341 @@
+#!/usr/bin/python
+
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Temporarily enabled dummy test that always fails and is used to collect
+# extra debugging information from Boost Build test runner sites.
+
+import BoostBuild
+
+import os
+import re
+import sys
+
+
+###############################################################################
+#
+# Public interface.
+#
+###############################################################################
+
+def collectDebugInfo():
+ t = _init()
+
+ global tag
+
+ tag = "Python version"
+ try:
+ _info(sys.version)
+ except:
+ _info_exc()
+
+ tag = "Python platform"
+ try:
+ _info(sys.platform)
+ except:
+ _info_exc()
+
+ tag = "Boost Jam/Build version"
+ try:
+ _infoX(_getJamVersionInfo(t))
+ except:
+ _info_exc()
+
+ #_collectDebugInfo_environ()
+
+ # Report prepared annotations.
+ t.fail_test(1, dump_difference=False, dump_stdio=False, dump_stack=False)
+
+
+###############################################################################
+#
+# Private interface.
+#
+###############################################################################
+
+varSeparator = "###$^%~~~"
+
+
+def _collect(results, prefix, name, t):
+ results.append("%s - %s - os.getenv(): %r" % (prefix, name, os.getenv(
+ name)))
+ results.append("%s - %s - os.environ.get(): %r" % (prefix, name,
+ os.environ.get(name)))
+ external_values = _getExternalValues(t, name)
+ results.append("%s - %s - external: %r" % (prefix, name,
+ external_values[name]))
+
+
+def _collectDebugInfo_environ(t):
+ dummyVars = ["WOOF_WOOFIE_%d" % x for x in xrange(4)]
+ global tag
+
+ tag = "XXX in os.environ"
+ try:
+ def f(name):
+ return "%s: %s" % (name, name in os.environ)
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ tag = "os.environ[XXX]"
+ try:
+ def f(name):
+ try:
+ result = os.environ[name]
+ except:
+ result = _str_exc()
+ return "%s: %r" % (name, result)
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ tag = "os.environ.get(XXX)"
+ try:
+ def f(name):
+ return "%s: %r" % (name, os.environ.get(name))
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ tag = "os.getenv(XXX)"
+ try:
+ def f(name):
+ return "%s: %r" % (name, os.getenv(name))
+ _infoX(f(x) for x in dummyVars)
+ except:
+ _info_exc()
+
+ name = dummyVars[0]
+ value = "foo"
+ tag = "os.putenv(%s) to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.putenv(name, value)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[1]
+ value = "bar"
+ tag = "os.environ[%s] to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.environ[name] = value
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[1]
+ value = "baz"
+ tag = "os.putenv(%s) to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.putenv(name, value)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[1]
+ value = ""
+ tag = "os.putenv(%s) to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.putenv(name, value)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value = "foo"
+ tag = "os.unsetenv(%s) from %r" % (name, value)
+ try:
+ results = []
+ os.environ[name] = value
+ _collect(results, "before", name, t)
+ os.unsetenv(name)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value = "foo"
+ tag = "del os.environ[%s] from %r" % (name, value)
+ try:
+ results = []
+ os.environ[name] = value
+ _collect(results, "before", name, t)
+ del os.environ[name]
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value = "foo"
+ tag = "os.environ.pop(%s) from %r" % (name, value)
+ try:
+ results = []
+ os.environ[name] = value
+ _collect(results, "before", name, t)
+ os.environ.pop(name)
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[2]
+ value1 = "foo"
+ value2 = ""
+ tag = "os.environ[%s] to %r from %r" % (name, value2, value1)
+ try:
+ results = []
+ os.environ[name] = value1
+ _collect(results, "before", name, t)
+ os.environ[name] = value2
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+ name = dummyVars[3]
+ value = '""'
+ tag = "os.environ[%s] to %r" % (name, value)
+ try:
+ results = []
+ _collect(results, "before", name, t)
+ os.environ[name] = value
+ _collect(results, "after", name, t)
+ _infoX(results)
+ except:
+ _info_exc()
+
+
+def _getExternalValues(t, *args):
+ t.run_build_system(["---var-name=%s" % x for x in args])
+ result = dict()
+ for x in args:
+ m = re.search(r"^\*\*\*ENV\*\*\* %s: '(.*)' \*\*\*$" % x, t.stdout(),
+ re.MULTILINE)
+ if m:
+ result[x] = m.group(1)
+ else:
+ result[x] = None
+ return result
+
+
+def _getJamVersionInfo(t):
+ result = []
+
+ # JAM version variables.
+ t.run_build_system(["---version"])
+ for m in re.finditer(r"^\*\*\*VAR\*\*\* ([^:]*): (.*)\*\*\*$", t.stdout(),
+ re.MULTILINE):
+ name = m.group(1)
+ value = m.group(2)
+ if not value:
+ value = []
+ elif value[-1] == ' ':
+ value = value[:-1].split(varSeparator)
+ else:
+ value = "!!!INVALID!!! - '%s'" % value
+ result.append("%s = %s" % (name, value))
+ result.append("")
+
+ # bjam -v output.
+ t.run_build_system(["-v"])
+ result.append("--- output for 'bjam -v' ---")
+ result.append(t.stdout())
+
+ # bjam --version output.
+ t.run_build_system(["--version"], status=1)
+ result.append("--- output for 'bjam --version' ---")
+ result.append(t.stdout())
+
+ return result
+
+
+def _init():
+ toolsetName = "__myDummyToolset__"
+
+ t = BoostBuild.Tester(["toolset=%s" % toolsetName], pass_toolset=False,
+ use_test_config=False)
+
+ # Prepare a dummy toolset so we do not get errors in case the default one
+ # is not found.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+feature.extend toolset : %s ;
+rule init ( ) { }
+""" % toolsetName )
+
+ # Python version of the same dummy toolset.
+ t.write(toolsetName + ".py", """\
+from b2.build import feature
+feature.extend('toolset', ['%s'])
+def init(): pass
+""" % toolsetName )
+
+ t.write("jamroot.jam", """\
+import os ;
+.argv = [ modules.peek : ARGV ] ;
+local names = [ MATCH ^---var-name=(.*) : $(.argv) ] ;
+for x in $(names)
+{
+ value = [ os.environ $(x) ] ;
+ ECHO ***ENV*** $(x): '$(value)' *** ;
+}
+if ---version in $(.argv)
+{
+ for x in JAMVERSION JAM_VERSION JAMUNAME JAM_TIMESTAMP_RESOLUTION OS
+ {
+ v = [ modules.peek : $(x) ] ;
+ ECHO ***VAR*** $(x): "$(v:J=%s)" *** ;
+ }
+}
+""" % varSeparator)
+
+ return t
+
+
+def _info(*values):
+ values = list(values) + [""]
+ BoostBuild.annotation(tag, "\n".join(str(x) for x in values))
+
+
+def _infoX(values):
+ _info(*values)
+
+
+def _info_exc():
+ _info(_str_exc())
+
+
+def _str_exc():
+ exc_type, exc_value = sys.exc_info()[0:2]
+ if exc_type is None:
+ exc_type_name = "None"
+ else:
+ exc_type_name = exc_type.__name__
+ return "*** EXCEPTION *** %s - %s ***" % (exc_type_name, exc_value)
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+collectDebugInfo()
diff --git a/tools/build/test/composite.py b/tools/build/test/composite.py
new file mode 100644
index 0000000000..064c4087dd
--- /dev/null
+++ b/tools/build/test/composite.py
@@ -0,0 +1,25 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that composite properties are handled correctly.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+exe hello : hello.cpp : <variant>release ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+""")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/release/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/conditionals.py b/tools/build/test/conditionals.py
new file mode 100644
index 0000000000..7a9848b366
--- /dev/null
+++ b/tools/build/test/conditionals.py
@@ -0,0 +1,48 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test conditional properties.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Arrange a project which will build only if 'a.cpp' is compiled with the
+# "STATIC" define.
+t.write("a.cpp", """\
+#ifdef STATIC
+int main() {}
+#endif
+""")
+
+# Test conditionals in target requirements.
+t.write("jamroot.jam", "exe a : a.cpp : <link>static:<define>STATIC ;")
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static/a.exe")
+t.rm("bin")
+
+# Test conditionals in project requirements.
+t.write("jamroot.jam", """
+project : requirements <link>static:<define>STATIC ;
+exe a : a.cpp ;
+""")
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static/a.exe")
+t.rm("bin")
+
+# Regression test for a bug found by Ali Azarbayejani. Conditionals inside
+# usage requirement were not being evaluated.
+t.write("jamroot.jam", """
+lib l : l.cpp : : : <link>static:<define>STATIC ;
+exe a : a.cpp l ;
+""")
+t.write("l.cpp", "int i;")
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static/a.exe")
+
+t.cleanup()
diff --git a/tools/build/v2/test/conditionals2.py b/tools/build/test/conditionals2.py
index 585e5ca774..585e5ca774 100644
--- a/tools/build/v2/test/conditionals2.py
+++ b/tools/build/test/conditionals2.py
diff --git a/tools/build/test/conditionals3.py b/tools/build/test/conditionals3.py
new file mode 100644
index 0000000000..028fad6214
--- /dev/null
+++ b/tools/build/test/conditionals3.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that conditional properties work, even if the property is free and its
+# value includes a colon.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+exe hello : hello.cpp : <variant>debug:<define>CLASS=Foo::Bar ;
+""")
+
+t.write("hello.cpp", """
+namespace Foo { class Bar { } ; }
+int main()
+{
+ CLASS c;
+ c; // Disables the unused variable warning.
+}
+""")
+
+t.run_build_system(stdout=None, stderr=None)
+t.expect_addition("bin/$toolset/debug/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/conditionals_multiple.py b/tools/build/test/conditionals_multiple.py
new file mode 100755
index 0000000000..91b8f30d76
--- /dev/null
+++ b/tools/build/test/conditionals_multiple.py
@@ -0,0 +1,312 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that properties conditioned on more than one other property work as
+# expected.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_multiple_conditions()
+# --------------------------
+#
+###############################################################################
+
+def test_multiple_conditions():
+ """Basic tests for properties conditioned on multiple other properties."""
+
+ t = BoostBuild.Tester(["--user-config=", "--ignore-site-config",
+ "toolset=testToolset"], pass_toolset=False, use_test_config=False)
+
+ t.write("testToolset.jam", """\
+import feature ;
+feature.extend toolset : testToolset ;
+rule init ( ) { }
+""")
+
+ t.write("testToolset.py", """\
+from b2.build import feature
+feature.extend('toolset', ["testToolset"])
+def init ( ): pass
+""")
+
+ t.write("jamroot.jam", """\
+import feature ;
+import notfile ;
+import toolset ;
+
+feature.feature description : : free incidental ;
+feature.feature aaa : 1 0 : incidental ;
+feature.feature bbb : 1 0 : incidental ;
+feature.feature ccc : 1 0 : incidental ;
+
+rule buildRule ( name : targets ? : properties * )
+{
+ for local description in [ feature.get-values description : $(properties) ]
+ {
+ ECHO "description:" /$(description)/ ;
+ }
+}
+
+notfile testTarget1 : @buildRule : :
+ <description>d
+ <aaa>0:<description>a0
+ <aaa>1:<description>a1
+ <aaa>0,<bbb>0:<description>a0-b0
+ <aaa>0,<bbb>1:<description>a0-b1
+ <aaa>1,<bbb>0:<description>a1-b0
+ <aaa>1,<bbb>1:<description>a1-b1
+ <aaa>0,<bbb>0,<ccc>0:<description>a0-b0-c0
+ <aaa>0,<bbb>0,<ccc>1:<description>a0-b0-c1
+ <aaa>0,<bbb>1,<ccc>1:<description>a0-b1-c1
+ <aaa>1,<bbb>0,<ccc>1:<description>a1-b0-c1
+ <aaa>1,<bbb>1,<ccc>0:<description>a1-b1-c0
+ <aaa>1,<bbb>1,<ccc>1:<description>a1-b1-c1 ;
+""")
+
+ t.run_build_system(["aaa=1", "bbb=1", "ccc=1"])
+ t.expect_output_lines("description: /d/" )
+ t.expect_output_lines("description: /a0/" , False)
+ t.expect_output_lines("description: /a1/" )
+ t.expect_output_lines("description: /a0-b0/" , False)
+ t.expect_output_lines("description: /a0-b1/" , False)
+ t.expect_output_lines("description: /a1-b0/" , False)
+ t.expect_output_lines("description: /a1-b1/" )
+ t.expect_output_lines("description: /a0-b0-c0/", False)
+ t.expect_output_lines("description: /a0-b0-c1/", False)
+ t.expect_output_lines("description: /a0-b1-c1/", False)
+ t.expect_output_lines("description: /a1-b0-c1/", False)
+ t.expect_output_lines("description: /a1-b1-c0/", False)
+ t.expect_output_lines("description: /a1-b1-c1/" )
+
+ t.run_build_system(["aaa=0", "bbb=0", "ccc=1"])
+ t.expect_output_lines("description: /d/" )
+ t.expect_output_lines("description: /a0/" )
+ t.expect_output_lines("description: /a1/" , False)
+ t.expect_output_lines("description: /a0-b0/" )
+ t.expect_output_lines("description: /a0-b1/" , False)
+ t.expect_output_lines("description: /a1-b0/" , False)
+ t.expect_output_lines("description: /a1-b1/" , False)
+ t.expect_output_lines("description: /a0-b0-c0/", False)
+ t.expect_output_lines("description: /a0-b0-c1/" )
+ t.expect_output_lines("description: /a0-b1-c1/", False)
+ t.expect_output_lines("description: /a1-b0-c1/", False)
+ t.expect_output_lines("description: /a1-b1-c0/", False)
+ t.expect_output_lines("description: /a1-b1-c1/", False)
+
+ t.run_build_system(["aaa=0", "bbb=0", "ccc=0"])
+ t.expect_output_lines("description: /d/" )
+ t.expect_output_lines("description: /a0/" )
+ t.expect_output_lines("description: /a1/" , False)
+ t.expect_output_lines("description: /a0-b0/" )
+ t.expect_output_lines("description: /a0-b1/" , False)
+ t.expect_output_lines("description: /a1-b0/" , False)
+ t.expect_output_lines("description: /a1-b1/" , False)
+ t.expect_output_lines("description: /a0-b0-c0/" )
+ t.expect_output_lines("description: /a0-b0-c1/", False)
+ t.expect_output_lines("description: /a0-b1-c1/", False)
+ t.expect_output_lines("description: /a1-b0-c1/", False)
+ t.expect_output_lines("description: /a1-b1-c0/", False)
+ t.expect_output_lines("description: /a1-b1-c1/", False)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_multiple_conditions_with_toolset_version()
+# -----------------------------------------------
+#
+###############################################################################
+
+def test_multiple_conditions_with_toolset_version():
+ """
+ Regression tests for properties conditioned on the toolset version
+ subfeature and some additional properties.
+
+ """
+    toolset = "testToolset"
+
+ t = BoostBuild.Tester(["--user-config=", "--ignore-site-config"],
+ pass_toolset=False, use_test_config=False)
+
+ t.write(toolset + ".jam", """\
+import feature ;
+feature.extend toolset : %(toolset)s ;
+feature.subfeature toolset %(toolset)s : version : 0 1 ;
+rule init ( version ? ) { }
+""" % {"toolset": toolset})
+
+ t.write("testToolset.py", """\
+from b2.build import feature
+feature.extend('toolset', ["%(toolset)s"])
+feature.subfeature('toolset', "%(toolset)s", "version", ['0','1'])
+def init ( version ): pass
+""" % {"toolset": toolset})
+
+ t.write("jamroot.jam", """\
+import feature ;
+import notfile ;
+import toolset ;
+
+toolset.using testToolset ;
+
+feature.feature description : : free incidental ;
+feature.feature aaa : 0 1 : incidental ;
+feature.feature bbb : 0 1 : incidental ;
+feature.feature ccc : 0 1 : incidental ;
+
+rule buildRule ( name : targets ? : properties * )
+{
+ local ttt = [ feature.get-values toolset : $(properties) ] ;
+ local vvv = [ feature.get-values toolset-testToolset:version : $(properties) ] ;
+ local aaa = [ feature.get-values aaa : $(properties) ] ;
+ local bbb = [ feature.get-values bbb : $(properties) ] ;
+ local ccc = [ feature.get-values ccc : $(properties) ] ;
+ ECHO "toolset:" /$(ttt)/ "version:" /$(vvv)/ "aaa/bbb/ccc:" /$(aaa)/$(bbb)/$(ccc)/ ;
+ for local description in [ feature.get-values description : $(properties) ]
+ {
+ ECHO "description:" /$(description)/ ;
+ }
+}
+
+notfile testTarget1 : @buildRule : :
+ <toolset>testToolset,<aaa>0:<description>t-a0
+ <toolset>testToolset,<aaa>1:<description>t-a1
+
+ <toolset>testToolset-0,<aaa>0:<description>t0-a0
+ <toolset>testToolset-0,<aaa>1:<description>t0-a1
+ <toolset>testToolset-1,<aaa>0:<description>t1-a0
+ <toolset>testToolset-1,<aaa>1:<description>t1-a1
+
+ <toolset>testToolset,<aaa>0,<bbb>0:<description>t-a0-b0
+ <toolset>testToolset,<aaa>0,<bbb>1:<description>t-a0-b1
+ <toolset>testToolset,<aaa>1,<bbb>0:<description>t-a1-b0
+ <toolset>testToolset,<aaa>1,<bbb>1:<description>t-a1-b1
+
+ <aaa>0,<toolset>testToolset,<bbb>0:<description>a0-t-b0
+ <aaa>0,<toolset>testToolset,<bbb>1:<description>a0-t-b1
+ <aaa>1,<toolset>testToolset,<bbb>0:<description>a1-t-b0
+ <aaa>1,<toolset>testToolset,<bbb>1:<description>a1-t-b1
+
+ <aaa>0,<bbb>0,<toolset>testToolset:<description>a0-b0-t
+ <aaa>0,<bbb>1,<toolset>testToolset:<description>a0-b1-t
+ <aaa>1,<bbb>0,<toolset>testToolset:<description>a1-b0-t
+ <aaa>1,<bbb>1,<toolset>testToolset:<description>a1-b1-t
+
+ <toolset>testToolset-0,<aaa>0,<bbb>0:<description>t0-a0-b0
+ <toolset>testToolset-0,<aaa>0,<bbb>1:<description>t0-a0-b1
+ <toolset>testToolset-0,<aaa>1,<bbb>0:<description>t0-a1-b0
+ <toolset>testToolset-0,<aaa>1,<bbb>1:<description>t0-a1-b1
+ <toolset>testToolset-1,<aaa>0,<bbb>0:<description>t1-a0-b0
+ <toolset>testToolset-1,<aaa>0,<bbb>1:<description>t1-a0-b1
+ <toolset>testToolset-1,<aaa>1,<bbb>0:<description>t1-a1-b0
+ <toolset>testToolset-1,<aaa>1,<bbb>1:<description>t1-a1-b1
+
+ <aaa>0,<toolset>testToolset-1,<bbb>0:<description>a0-t1-b0
+ <aaa>0,<toolset>testToolset-1,<bbb>1:<description>a0-t1-b1
+ <aaa>1,<toolset>testToolset-0,<bbb>0:<description>a1-t0-b0
+ <aaa>1,<toolset>testToolset-0,<bbb>1:<description>a1-t0-b1
+
+ <bbb>0,<aaa>1,<toolset>testToolset-0:<description>b0-a1-t0
+ <bbb>0,<aaa>0,<toolset>testToolset-1:<description>b0-a0-t1
+ <bbb>0,<aaa>1,<toolset>testToolset-1:<description>b0-a1-t1
+ <bbb>1,<aaa>0,<toolset>testToolset-1:<description>b1-a0-t1
+ <bbb>1,<aaa>1,<toolset>testToolset-0:<description>b1-a1-t0
+ <bbb>1,<aaa>1,<toolset>testToolset-1:<description>b1-a1-t1 ;
+""")
+
+ t.run_build_system(["aaa=1", "bbb=1", "ccc=1", "toolset=%s-0" % toolset])
+ t.expect_output_lines("description: /t-a0/" , False)
+ t.expect_output_lines("description: /t-a1/" )
+ t.expect_output_lines("description: /t0-a0/" , False)
+ t.expect_output_lines("description: /t0-a1/" )
+ t.expect_output_lines("description: /t1-a0/" , False)
+ t.expect_output_lines("description: /t1-a1/" , False)
+ t.expect_output_lines("description: /t-a0-b0/" , False)
+ t.expect_output_lines("description: /t-a0-b1/" , False)
+ t.expect_output_lines("description: /t-a1-b0/" , False)
+ t.expect_output_lines("description: /t-a1-b1/" )
+ t.expect_output_lines("description: /a0-t-b0/" , False)
+ t.expect_output_lines("description: /a0-t-b1/" , False)
+ t.expect_output_lines("description: /a1-t-b0/" , False)
+ t.expect_output_lines("description: /a1-t-b1/" )
+ t.expect_output_lines("description: /a0-b0-t/" , False)
+ t.expect_output_lines("description: /a0-b1-t/" , False)
+ t.expect_output_lines("description: /a1-b0-t/" , False)
+ t.expect_output_lines("description: /a1-b1-t/" )
+ t.expect_output_lines("description: /t0-a0-b0/", False)
+ t.expect_output_lines("description: /t0-a0-b1/", False)
+ t.expect_output_lines("description: /t0-a1-b0/", False)
+ t.expect_output_lines("description: /t0-a1-b1/" )
+ t.expect_output_lines("description: /t1-a0-b0/", False)
+ t.expect_output_lines("description: /t1-a0-b1/", False)
+ t.expect_output_lines("description: /t1-a1-b0/", False)
+ t.expect_output_lines("description: /t1-a1-b1/", False)
+ t.expect_output_lines("description: /a0-t1-b0/", False)
+ t.expect_output_lines("description: /a0-t1-b1/", False)
+ t.expect_output_lines("description: /a1-t0-b0/", False)
+ t.expect_output_lines("description: /a1-t0-b1/" )
+ t.expect_output_lines("description: /b0-a1-t0/", False)
+ t.expect_output_lines("description: /b0-a0-t1/", False)
+ t.expect_output_lines("description: /b0-a1-t1/", False)
+ t.expect_output_lines("description: /b1-a0-t1/", False)
+ t.expect_output_lines("description: /b1-a1-t0/" )
+ t.expect_output_lines("description: /b1-a1-t1/", False)
+
+ t.run_build_system(["aaa=1", "bbb=1", "ccc=1", "toolset=%s-1" % toolset])
+ t.expect_output_lines("description: /t-a0/" , False)
+ t.expect_output_lines("description: /t-a1/" )
+ t.expect_output_lines("description: /t0-a0/" , False)
+ t.expect_output_lines("description: /t0-a1/" , False)
+ t.expect_output_lines("description: /t1-a0/" , False)
+ t.expect_output_lines("description: /t1-a1/" )
+ t.expect_output_lines("description: /t-a0-b0/" , False)
+ t.expect_output_lines("description: /t-a0-b1/" , False)
+ t.expect_output_lines("description: /t-a1-b0/" , False)
+ t.expect_output_lines("description: /t-a1-b1/" )
+ t.expect_output_lines("description: /a0-t-b0/" , False)
+ t.expect_output_lines("description: /a0-t-b1/" , False)
+ t.expect_output_lines("description: /a1-t-b0/" , False)
+ t.expect_output_lines("description: /a1-t-b1/" )
+ t.expect_output_lines("description: /a0-b0-t/" , False)
+ t.expect_output_lines("description: /a0-b1-t/" , False)
+ t.expect_output_lines("description: /a1-b0-t/" , False)
+ t.expect_output_lines("description: /a1-b1-t/" )
+ t.expect_output_lines("description: /t0-a0-b0/", False)
+ t.expect_output_lines("description: /t0-a0-b1/", False)
+ t.expect_output_lines("description: /t0-a1-b0/", False)
+ t.expect_output_lines("description: /t0-a1-b1/", False)
+ t.expect_output_lines("description: /t1-a0-b0/", False)
+ t.expect_output_lines("description: /t1-a0-b1/", False)
+ t.expect_output_lines("description: /t1-a1-b0/", False)
+ t.expect_output_lines("description: /t1-a1-b1/" )
+ t.expect_output_lines("description: /a0-t1-b0/", False)
+ t.expect_output_lines("description: /a0-t1-b1/", False)
+ t.expect_output_lines("description: /a1-t0-b0/", False)
+ t.expect_output_lines("description: /a1-t0-b1/", False)
+ t.expect_output_lines("description: /b0-a1-t0/", False)
+ t.expect_output_lines("description: /b0-a0-t1/", False)
+ t.expect_output_lines("description: /b0-a1-t1/", False)
+ t.expect_output_lines("description: /b1-a0-t1/", False)
+ t.expect_output_lines("description: /b1-a1-t0/", False)
+ t.expect_output_lines("description: /b1-a1-t1/" )
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_multiple_conditions()
+test_multiple_conditions_with_toolset_version()
diff --git a/tools/build/test/configuration.py b/tools/build/test/configuration.py
new file mode 100755
index 0000000000..724ecd7c93
--- /dev/null
+++ b/tools/build/test/configuration.py
@@ -0,0 +1,328 @@
+#!/usr/bin/python
+
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test Boost Build configuration file handling.
+
+import BoostBuild
+
+import os
+import os.path
+import re
+
+
+###############################################################################
+#
+# test_user_configuration()
+# -------------------------
+#
+###############################################################################
+
+def test_user_configuration():
+ """
+    Test Boost Build user configuration file handling. Both relative and
+    absolute configuration file paths are tested.
+
+ """
+
+ implicitConfigLoadMessage = \
+ "notice: Loading user-config configuration file: *"
+ explicitConfigLoadMessage = \
+ "notice: Loading explicitly specified user configuration file:"
+ disabledConfigLoadMessage = \
+ "notice: User configuration file loading explicitly disabled."
+ testMessage = "_!_!_!_!_!_!_!_!_ %s _!_!_!_!_!_!_!_!_"
+ toolsetName = "__myDummyToolset__"
+ subdirName = "ASubDirectory"
+ configFileNames = ["ups_lala_1.jam", "ups_lala_2.jam",
+ os.path.join(subdirName, "ups_lala_3.jam")]
+
+ t = BoostBuild.Tester(["toolset=%s" % toolsetName,
+ "--debug-configuration"], pass_toolset=False, use_test_config=False)
+
+ for configFileName in configFileNames:
+ message = "ECHO \"%s\" ;" % testMessage % configFileName
+ # We need to double any backslashes in the message or Jam will
+ # interpret them as escape characters.
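+        # E.g. the "ASubDirectory\ups_lala_3.jam" path produced by
+        # os.path.join() on Windows has to appear as
+        # "ASubDirectory\\ups_lala_3.jam" inside the generated Jam code.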
+ t.write(configFileName, message.replace("\\", "\\\\"))
+
+ # Prepare a dummy toolset so we do not get errors in case the default one
+ # is not found.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+feature.extend toolset : %s ;
+rule init ( ) { }
+""" % toolsetName)
+
+ # Python version of the same dummy toolset.
+ t.write(toolsetName + ".py", """\
+from b2.build import feature
+feature.extend('toolset', ['%s'])
+def init(): pass
+""" % toolsetName)
+
+ t.write("jamroot.jam", """\
+local test-index = [ MATCH ---test-id---=(.*) : [ modules.peek : ARGV ] ] ;
+ECHO test-index: $(test-index:E=(unknown)) ;
+""")
+
+ class LocalTester:
+ def __init__(self, tester):
+ self.__tester = tester
+ self.__test_ids = []
+
+ def __assertionFailure(self, message):
+ BoostBuild.annotation("failure", "Internal test assertion failure "
+ "- %s" % message)
+ self.__tester.fail_test(1)
+
+ def __call__(self, test_id, env, extra_args=None, *args, **kwargs):
+ if env == "" and not canSetEmptyEnvironmentVariable:
+ self.__assertionFailure("Can not set empty environment "
+ "variables on this platform.")
+ self.__registerTestId(str(test_id))
+ if extra_args is None:
+ extra_args = []
+ extra_args.append("---test-id---=%s" % test_id)
+ env_name = "BOOST_BUILD_USER_CONFIG"
+ previous_env = os.environ.get(env_name)
+ _env_set(env_name, env)
+ try:
+ self.__tester.run_build_system(extra_args, *args, **kwargs)
+ finally:
+ _env_set(env_name, previous_env)
+
+ def __registerTestId(self, test_id):
+ if test_id in self.__test_ids:
+ self.__assertionFailure("Multiple test cases encountered "
+ "using the same test id '%s'." % test_id)
+ self.__test_ids.append(test_id)
+
+ test = LocalTester(t)
+
+ test(1, None)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(2, None, ["--user-config="])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(3, None, ['--user-config=""'])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(4, None, ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(5, None, ['--user-config="%s"' % configFileNames[2]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2])
+
+ test(6, None, ['--user-config="%s"' % os.path.abspath(configFileNames[1])])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1])
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(7, None, ['--user-config="%s"' % os.path.abspath(configFileNames[2])])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2])
+
+ if canSetEmptyEnvironmentVariable:
+ test(8, "")
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage, True)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(9, '""')
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(10, configFileNames[1])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1])
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(11, configFileNames[1], ['--user-config=""'])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage, False)
+ t.expect_output_lines(disabledConfigLoadMessage)
+ t.expect_output_lines(testMessage % configFileNames[0], False)
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(12, configFileNames[1], ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ if canSetEmptyEnvironmentVariable:
+ test(13, "", ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(14, '""', ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ test(15, "invalid", ['--user-config="%s"' % configFileNames[0]])
+ t.expect_output_lines(implicitConfigLoadMessage, False)
+ t.expect_output_lines(explicitConfigLoadMessage)
+ t.expect_output_lines(disabledConfigLoadMessage, False)
+ t.expect_output_lines(testMessage % configFileNames[0])
+ t.expect_output_lines(testMessage % configFileNames[1], False)
+ t.expect_output_lines(testMessage % configFileNames[2], False)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# Private interface.
+#
+###############################################################################
+
+def _canSetEmptyEnvironmentVariable():
+ """
+ Unfortunately different OSs (and possibly Python implementations as well)
+    have different interpretations of what it means to set an environment
+ variable to an empty string. Some (e.g. Windows) interpret it as unsetting
+ the variable and some (e.g. AIX or Darwin) actually set it to an empty
+ string.
+
+ """
+ dummyName = "UGNABUNGA_FOO_BAR_BAZ_FEE_FAE_FOU_FAM"
+ original = os.environ.get(dummyName)
+ _env_set(dummyName, "")
+ result = _getExternalEnv(dummyName) == ""
+ _env_set(dummyName, original)
+ return result
+
+
+def _env_del(name):
+ """
+ Unsets the given environment variable if it is currently set.
+
+    Note that we cannot use os.environ.pop() or os.environ.clear() here
+ since prior to Python 2.6 these functions did not remove the actual
+ environment variable by calling os.unsetenv().
+
+ """
+ try:
+ del os.environ[name]
+ except KeyError:
+ pass
+
+
+def _env_set(name, value):
+ """
+ Sets the given environment variable value or unsets it, if the value is
+ None.
+
+ """
+ if value is None:
+ _env_del(name)
+ else:
+ os.environ[name] = value
+
+
+def _getExternalEnv(name):
+ toolsetName = "__myDummyToolset__"
+
+ t = BoostBuild.Tester(["toolset=%s" % toolsetName], pass_toolset=False,
+ use_test_config=False)
+ try:
+ # Prepare a dummy toolset so we do not get errors in case the default
+ # one is not found.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+feature.extend toolset : %s ;
+rule init ( ) { }
+""" % toolsetName)
+
+ # Python version of the same dummy toolset.
+ t.write(toolsetName + ".py", """\
+from b2.build import feature
+feature.extend('toolset', ['%s'])
+def init(): pass
+""" % toolsetName)
+
+ t.write("jamroot.jam", """\
+import os ;
+local names = [ MATCH ^---var-name---=(.*) : [ modules.peek : ARGV ] ] ;
+for x in $(names)
+{
+ value = [ os.environ $(x) ] ;
+ ECHO "###" $(x): '$(value)' "###" ;
+}
+""")
+
+ t.run_build_system(["---var-name---=%s" % name])
+ m = re.search("^### %s: '(.*)' ###$" % name, t.stdout(), re.MULTILINE)
+ if m:
+ return m.group(1)
+ finally:
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+canSetEmptyEnvironmentVariable = _canSetEmptyEnvironmentVariable()
+
+test_user_configuration()
diff --git a/tools/build/test/copy_time.py b/tools/build/test/copy_time.py
new file mode 100755
index 0000000000..4bdaa88f7e
--- /dev/null
+++ b/tools/build/test/copy_time.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the common.copy rule sets the modification date of the new file to
+# the current time.
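+#
+# (The time_waster template in the sources below serves only to make each
+# compilation take a noticeable amount of time, so that the file modification
+# times involved can actually differ.)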
+
+import BoostBuild
+
+tester = BoostBuild.Tester(use_test_config=False)
+
+tester.write("test1.cpp", """\
+template<bool, int M, class Next>
+struct time_waster {
+ typedef typename time_waster<true, M-1, time_waster>::type type1;
+ typedef typename time_waster<false, M-1, time_waster>::type type2;
+ typedef void type;
+};
+template<bool B, class Next>
+struct time_waster<B, 0, Next> {
+ typedef void type;
+};
+typedef time_waster<true, 10, void>::type type;
+int f() { return 0; }
+""")
+
+tester.write("test2.cpp", """\
+template<bool, int M, class Next>
+struct time_waster {
+ typedef typename time_waster<true, M-1, time_waster>::type type1;
+ typedef typename time_waster<false, M-1, time_waster>::type type2;
+ typedef void type;
+};
+template<bool B, class Next>
+struct time_waster<B, 0, Next> {
+ typedef void type;
+};
+typedef time_waster<true, 10, void>::type type;
+int g() { return 0; }
+""")
+
+tester.write("jamroot.jam", """\
+obj test2 : test2.cpp ;
+obj test1 : test1.cpp : <dependency>test2 ;
+install test2i : test2 : <dependency>test1 ;
+""")
+
+tester.run_build_system()
+tester.expect_addition("bin/$toolset/debug/test2.obj")
+tester.expect_addition("bin/$toolset/debug/test1.obj")
+tester.expect_addition("test2i/test2.obj")
+tester.expect_nothing_more()
+
+test2src = tester.read("test2i/test2.obj")
+test2dest = tester.read("bin/$toolset/debug/test2.obj")
+if test2src != test2dest:
+ BoostBuild.annotation("failure", "The object file was not copied "
+ "correctly")
+ tester.fail_test(1)
+
+tester.run_build_system(["-d1"])
+tester.expect_output_lines("common.copy*", False)
+tester.expect_nothing_more()
+
+tester.cleanup()
diff --git a/tools/build/test/core-language/test.jam b/tools/build/test/core-language/test.jam
new file mode 100644
index 0000000000..4198dd7204
--- /dev/null
+++ b/tools/build/test/core-language/test.jam
@@ -0,0 +1,1400 @@
+# Copyright 2011 Steven Watanabe.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tools
+
+passed = 0 ;
+failed = 0 ;
+
+rule show-result ( id : test-result )
+{
+ if ! ( --quiet in $(ARGV) )
+ {
+ ECHO $(test-result): $(id) ;
+ }
+ $(test-result) = [ CALC $($(test-result)) + 1 ] ;
+}
+
+rule check-equal ( id : values * : expected * )
+{
+ local test-result ;
+ if x$(values) = x$(expected)
+ {
+ test-result = passed ;
+ }
+ else
+ {
+ ECHO error: "[" $(values) "] != [" $(expected) "]" ;
+ test-result = failed ;
+ }
+ show-result $(id) : $(test-result) ;
+}
+
+rule mark-order ( id : result * )
+{
+ order += $(id) ;
+ return $(result) ;
+}
+
+rule check-order ( id : expected * )
+{
+ check-equal $(id) : $(order) : $(expected) ;
+ order = ;
+}
+
+# Check variable expansion
+
+{
+
+local v1 = 1 2 3 ;
+local v2 = 4 5 6 ;
+local v3 = 0 1 2 3 4 5 6 7 8 9 10 ;
+local g = g1 g2 ;
+local v4 = String/With/Mixed/Case ;
+local v5 = path\\with\\backslashes ;
+local v6 = <grist>generic/path.txt(member.txt) ;
+local v7 = <Grist1>Dir1/File1.cpp(M1.c) <Grist2>Dir2/File2.hpp(M2.c) ;
+local v8 = <Grist3>Dir3/File3.c(M3.c) <Grist4>Dir4/File4.h(M4.c) ;
+local select1 = GU BL DBST ;
+local case1 = L U ;
+local vars = 7 8 ;
+local sub = 2 1 ;
+local p0 = name ;
+local p1 = dir/name ;
+local p2 = dir/sub/name ;
+local j1 = , - ;
+
+check-equal var-product : $(v1)$(v2) : 14 15 16 24 25 26 34 35 36 ;
+
+check-equal var-set-grist : $(v1:G=grist) : <grist>1 <grist>2 <grist>3 ;
+check-equal var-set-grist-multi : $(v1:G=$(g)) : <g1>1 <g1>2 <g1>3 <g2>1 <g2>2 <g2>3 ;
+
+check-equal var-lower : $(v4:L) : string/with/mixed/case ;
+check-equal var-upper : $(v4:U) : STRING/WITH/MIXED/CASE ;
+check-equal var-LU : $(v4:LU) : STRING/WITH/MIXED/CASE ;
+check-equal var-slashes : $(v5:T) : path/with/backslashes ;
+check-equal var-grist : $(v6:G) : <grist> ;
+check-equal var-grist-none : $(v1:G) : "" "" "" ;
+check-equal var-base : $(v6:B) : path ;
+check-equal var-suffix : $(v6:S) : .txt ;
+check-equal var-dir : $(v6:D) : generic ;
+check-equal var-member : $(v6:M) : (member.txt) ;
+check-equal var-multi : $(v6:$(select1)) : <GRIST> path generic/path.txt ;
+
+check-equal var-join-0 : $(:J=,) : ;
+check-equal var-join-1 : $(p0:J=,) : name ;
+check-equal var-join-3 : $(v1:J=,) : 1,2,3 ;
+check-equal var-set-grist-join : $(v1:G=grist:J=,) : <grist>1,<grist>2,<grist>3 ;
+# behavior change. In the past, a J= modifier would
+# cause only the last element of the other modifiers
+# to take effect.
+check-equal var-set-grist-multi-join : $(v1:G=$(g):J=,) : <g1>1,<g1>2,<g1>3 <g2>1,<g2>2,<g2>3 ;
+check-equal var-set-grist-multi-join-multi : $(v1:G=$(g):J=$(j1)) : <g1>1,<g1>2,<g1>3 <g1>1-<g1>2-<g1>3 <g2>1,<g2>2,<g2>3 <g2>1-<g2>2-<g2>3 ;
+
+check-equal var-D=-0 : name : $(p0:D=) ;
+check-equal var-D=-1 : name : $(p1:D=) ;
+check-equal var-D=-2 : name : $(p2:D=) ;
+check-equal var-D-0 : "" : $(p0:D) ;
+check-equal var-D-1 : dir : $(p1:D) ;
+check-equal var-D-2 : dir/sub : $(p2:D) ;
+check-equal var-S-1 : "" : $(p0:S) ;
+check-equal var-no-at-file-0 : ($(p0)) : [ MATCH ^@(.*) : "@($(p0))" ] ;
+check-equal var-no-at-file-1 : ($(p0)) : [ MATCH @(.*) : "--@($(p0))" ] ;
+
+if $(OS) = CYGWIN
+{
+ local cyg-root = $(:WE=/) ;
+ local cyg1 = /cygdrive/c/path1.txt ;
+ check-equal cygwin-to-cygdrive : $(cyg1:W) : C:\\path1.txt ;
+ local cyg2 = /bin/bash ;
+ check-equal cygwin-to-windows : $(cyg2:W) : $(cyg-root)\\bin\\bash ;
+ check-equal cygwin-combine-WT : $(cyg2:WT) : $(cyg-root)\\bin\\bash ;
+
+ local cyg3 = /home/boost/devel/trunk/bin.v2/ ; # exactly 31 characters
+ local win3 = $(cyg-root)\\home\\boost\\devel\\trunk\\bin.v2\\ ;
+    # This is the easiest way to demonstrate a bug
+    # that used to cause undefined behavior. Longer paths
+    # resulted in a use-after-free error that happened
+    # to produce the correct result most of the time.
+ check-equal cygwin-long-WU : $(cyg3:WU) : $(win3:U) ;
+
+ local cyg-grist = <grist>$(cyg1) ;
+ check-equal cygwin-grist : $(cyg-grist:W) : <grist>\\cygdrive\\c\\path1.txt ;
+
+ check-equal cygwin-WU : $(cyg2:WU) : $(cyg-root:U)\\BIN\\BASH ;
+ # behavior change: L now consistently applied after W.
+ # used to affect all except the drive letter.
+ check-equal cygwin-WL : $(cyg2:WL) : $(cyg-root:L)\\bin\\bash ;
+}
+
+# behavior change
+check-equal var-test1 : $(v7[2]:G:L) : <grist2> ;
+
+check-equal var-multi-product-smm : $(v$(vars)[$(sub)]:G=$(g):$(case1)) :
+ <g1>dir2/file2.hpp(m2.c) <G1>DIR2/FILE2.HPP(M2.C)
+ <g2>dir2/file2.hpp(m2.c) <G2>DIR2/FILE2.HPP(M2.C)
+ <g1>dir1/file1.cpp(m1.c) <G1>DIR1/FILE1.CPP(M1.C)
+ <g2>dir1/file1.cpp(m1.c) <G2>DIR1/FILE1.CPP(M1.C)
+ <g1>dir4/file4.h(m4.c) <G1>DIR4/FILE4.H(M4.C)
+ <g2>dir4/file4.h(m4.c) <G2>DIR4/FILE4.H(M4.C)
+ <g1>dir3/file3.c(m3.c) <G1>DIR3/FILE3.C(M3.C)
+ <g2>dir3/file3.c(m3.c) <G2>DIR3/FILE3.C(M3.C)
+;
+check-equal var-nopathmods : $(:E=//) : // ;
+
+# showcases all the idiosyncrasies of indexing
+# key: h = high, l = low, p = positive, m = minus, e = end, 0 = zero.
+
+check-equal var-subscript-one-p : $(v3[3]) : 2 ;
+check-equal var-subscript-one-m : $(v3[-3]) : 8 ;
+check-equal var-subscript-one-0 : $(v3[0]) : 0 ;
+check-equal var-subscript-one-h : $(v3[20]) : ;
+check-equal var-subscript-one-l : $(v3[-20]) : 0 ;
+check-equal var-subscript-range-pp : $(v3[2-4]) : 1 2 3 ;
+check-equal var-subscript-range-pm : $(v3[2--3]) : 1 2 3 4 5 6 7 8 ;
+check-equal var-subscript-range-pe : $(v3[2-]) : 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-ph : $(v3[2-20]) : 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-pl : $(v3[2--20]) : ;
+check-equal var-subscript-range-mp : $(v3[-3-10]) : 8 9 ;
+check-equal var-subscript-range-mm : $(v3[-4--2]) : 7 8 9 ;
+check-equal var-subscript-range-me : $(v3[-4-]) : 7 8 9 10 ;
+check-equal var-subscript-range-mh : $(v3[-4-20]) : 7 8 9 10 ;
+check-equal var-subscript-range-mh : $(v3[-4--20]) : ;
+check-equal var-subscript-range-0p : $(v3[0-2]) : 0 1 2 ;
+check-equal var-subscript-range-0m : $(v3[0--4]) : 0 1 2 3 4 5 6 7 8 ;
+check-equal var-subscript-range-0e : $(v3[0-]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-0h : $(v3[0-20]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-0l : $(v3[0--20]) : ;
+check-equal var-subscript-range-hp : $(v3[20-4]) : ;
+check-equal var-subscript-range-hm : $(v3[20--4]) : ;
+check-equal var-subscript-range-he : $(v3[20-]) : ;
+check-equal var-subscript-range-hh : $(v3[20-20]) : ;
+check-equal var-subscript-range-hl : $(v3[20--20]) : ;
+check-equal var-subscript-range-lp : $(v3[-13-4]) : 0 1 2 3 4 5 ;
+check-equal var-subscript-range-lm : $(v3[-13--4]) : 0 1 2 3 4 5 6 7 8 9 ;
+check-equal var-subscript-range-le : $(v3[-13-]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-lh : $(v3[-13-20]) : 0 1 2 3 4 5 6 7 8 9 10 ;
+check-equal var-subscript-range-ll : $(v3[-13--13]) : 0 ;
+check-equal var-subscript-range-empty : $(v3[4-3]) : ;
+
+}
+
+# Check rules
+
+{
+
+rule test-rule
+{
+ return $(<) - $(>) - $(1) - $(2) - $(3) - $(4) - $(5) - $(6) - $(7) - $(8) - $(9) - $(10) - $(11) - $(12) - $(13) - $(14) - $(15) - $(16) - $(17) - $(18) - $(19) ;
+}
+
+check-equal rule-arguments-numbered :
+ [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
+ a1 - a2 - a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
+
+rule test-rule
+{
+ return $(<:L) - $(>:L) - $(1:L) - $(2:L) - $(3:L) - $(4:L) - $(5:L) - $(6:L) - $(7:L) - $(8:L) - $(9:L) - $(10:L) - $(11:L) - $(12:L) - $(13:L) - $(14:L) - $(15:L) - $(16:L) - $(17:L) - $(18:L) - $(19:L) ;
+}
+
+# behavior change
+check-equal rule-arguments-numbered-lower :
+ [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
+ a1 - a2 - a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
+
+
+rule test-rule ( p1 : p2 : p3 : p4 : p5 : p6 : p7 : p8 : p9 :
+ p10 : p11 : p12 : p13 : p14 : p15 : p16 : p17 : p18 : p19 )
+
+
+{
+ return $(p1) - $(p2) - $(p3) - $(p4) - $(p5) - $(p6) - $(p7) - $(p8) - $(p9) - $(p10) - $(p11) - $(p12) - $(p13) - $(p14) - $(p15) - $(p16) - $(p17) - $(p18) - $(p19) ;
+}
+
+check-equal rule-arguments-named :
+ [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
+ a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
+
+#
+# test rule indirection
+#
+rule select ( n list * )
+{
+ return $(list[$(n)]) ;
+}
+
+rule indirect1 ( rule + : args * )
+{
+ return [ $(rule) $(args) ] ;
+}
+
+check-equal rule-indirect-1 : [ indirect1 select 1 : a b c d e ] : a ;
+check-equal rule-indirect-2 : [ indirect1 select 2 : a b c d e ] : b ;
+
+x = reset ;
+rule reset-x ( new-value )
+{
+ x = $(new-value) ;
+}
+$(x)-x bar ; # invokes reset-x...
+check-equal rule-reset : $(x) : bar ; # which changes x
+
+rule bar-x ( new-value )
+{
+ mark-order r3 ;
+}
+
+# The arguments are evaluated in forward order
+# before the rule name
+$(x)-x [ mark-order r1 : [ reset-x reset ] ] : [ mark-order r2 ] ;
+check-order rule-order : r1 r2 ;
+
+# Cases that look like member calls
+rule looks.like-a-member ( args * )
+{
+ return $(args) ;
+}
+
+rule call-non-member ( rule + )
+{
+ return [ $(rule).like-a-member ] ;
+}
+
+rule call-non-member-with-args ( rule + )
+{
+ return [ $(rule).like-a-member a2 ] ;
+}
+
+check-equal rule-non-member : [ call-non-member looks ] : ;
+#check-equal rule-non-member-a1 : [ call-non-member looks a1 ] : looks.a1 ;
+check-equal rule-non-member-args : [ call-non-member-with-args looks ] : a2 ;
+#check-equal rule-non-member-args-a1 : [ call-non-member-with-args looks a1 ] : looks.a1 a2 ;
+
+}
+
+# Check append
+
+{
+
+local value = [ mark-order r1 : v1 v2 ] [ mark-order r2 : v3 v4 ] ;
+check-equal append : $(value) : v1 v2 v3 v4 ;
+check-order append-order : r1 r2 ;
+
+}
+
+# Check foreach
+
+{
+
+local v1 = 1 2 3 ;
+local x = old ;
+local result ;
+
+for local x in $(v1)
+{
+ result += $(x) + ;
+}
+
+check-equal foreach-local-item : $(result) : 1 + 2 + 3 + ;
+check-equal foreach-local : $(x) : old ;
+
+result = ;
+
+for x in $(v1)
+{
+ result += $(x) + ;
+}
+
+check-equal foreach-nonlocal-item : $(result) : 1 + 2 + 3 + ;
+check-equal foreach-nonlocal : $(x) : 3 ;
+
+rule call-foreach ( values * )
+{
+ for local x in $(values)
+ {
+ return $(x) ;
+ }
+}
+
+check-equal foreach-result : [ call-foreach 1 2 3 ] : ;
+
+result = ;
+local varname = x ;
+x = old ;
+
+for local $(varname) in $(v1)
+{
+ result += $(x) + ;
+}
+
+check-equal foreach-no-expand : $(result) : old + old + old + ;
+
+result = ;
+
+for local v1 in $(v1)
+{
+ result += $(v1) + ;
+}
+
+check-equal foreach-order : $(result) : 1 + 2 + 3 + ;
+
+}
+
+# Check if
+
+{
+
+if true
+{
+ mark-order r1 ;
+}
+
+check-order if-true : r1 ;
+
+if $(false)
+{
+ mark-order r1 ;
+}
+
+check-order if-false : ;
+
+if true
+{
+ mark-order r1 ;
+}
+else
+{
+ mark-order r2 ;
+}
+
+check-order if-else-true : r1 ;
+
+if $(false)
+{
+ mark-order r1 ;
+}
+else
+{
+ mark-order r2 ;
+}
+
+check-order if-else-false : r2 ;
+
+rule test-rule
+{
+ if true
+ {
+ return result ;
+ }
+}
+
+check-equal if-true-result : [ test-rule ] : result ;
+
+rule test-rule
+{
+ local idx = 1 2 ;
+ local values = true ;
+ while $(idx)
+ {
+ local v = $(values[$(idx[1])]) ;
+ idx = $(idx[2-]) ;
+ if $(v)
+ {
+ return result ;
+ }
+ }
+}
+
+check-equal if-false-result : [ test-rule ] : ;
+
+rule test-rule
+{
+ if true
+ {
+ return r1 ;
+ }
+ else
+ {
+ return r2 ;
+ }
+}
+
+check-equal if-else-true-result : [ test-rule ] : r1 ;
+
+rule test-rule
+{
+ if $(false)
+ {
+ return r1 ;
+ }
+ else
+ {
+ return r2 ;
+ }
+}
+
+check-equal if-else-false-result : [ test-rule ] : r2 ;
+
+}
+
+# Check the evaluation of conditions
+
+{
+
+local test-result ;
+local v1 = "" "" "" ;
+local v2 = ;
+local v3 = a b c ;
+local v4 = a b c d ;
+local v5 = a b d ;
+local v6 = "" "" "" d ;
+
+rule test-comparison ( id : equal less greater )
+{
+ check-equal $(id)-empty-1 : [ eval-$(id) $(v1) : $(v2) ] : $(equal) ;
+ check-equal $(id)-empty-2 : [ eval-$(id) $(v1) : $(v2) ] : $(equal) ;
+ check-equal $(id)-equal : [ eval-$(id) $(v3) : $(v3) ] : $(equal) ;
+ check-equal $(id)-less-1 : [ eval-$(id) $(v3) : $(v4) ] : $(less) ;
+ check-equal $(id)-less-2 : [ eval-$(id) $(v3) : $(v5) ] : $(less) ;
+ check-equal $(id)-less-3 : [ eval-$(id) $(v4) : $(v5) ] : $(less) ;
+ check-equal $(id)-greater-1 : [ eval-$(id) $(v4) : $(v3) ] : $(greater) ;
+ check-equal $(id)-greater-2 : [ eval-$(id) $(v5) : $(v3) ] : $(greater) ;
+ check-equal $(id)-greater-3 : [ eval-$(id) $(v5) : $(v4) ] : $(greater) ;
+}
+
+rule eval-lt ( lhs * : rhs * )
+{
+ if $(lhs) < $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison lt : false true false ;
+
+rule eval-gt ( lhs * : rhs * )
+{
+ if $(lhs) > $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison gt : false false true ;
+
+rule eval-le ( lhs * : rhs * )
+{
+ if $(lhs) <= $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison le : true true false ;
+
+rule eval-ge ( lhs * : rhs * )
+{
+ if $(lhs) >= $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison ge : true false true ;
+
+rule eval-eq ( lhs * : rhs * )
+{
+ if $(lhs) = $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison eq : true false false ;
+
+rule eval-ne ( lhs * : rhs * )
+{
+ if $(lhs) != $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-comparison ne : false true true ;
+
+rule eval-not-lt ( lhs * : rhs * )
+{
+ if ! ( $(lhs) < $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-lt : true false true ;
+
+rule eval-not-gt ( lhs * : rhs * )
+{
+ if ! ( $(lhs) > $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-gt : true true false ;
+
+rule eval-not-le ( lhs * : rhs * )
+{
+ if ! ( $(lhs) <= $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-le : false false true ;
+
+rule eval-not-ge ( lhs * : rhs * )
+{
+ if ! ( $(lhs) >= $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-ge : false true false ;
+
+rule eval-not-eq ( lhs * : rhs * )
+{
+ if ! ( $(lhs) = $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-eq : false true true ;
+
+rule eval-not-ne ( lhs * : rhs * )
+{
+ if ! ( $(lhs) != $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-comparison not-ne : true false false ;
+
+local v7 = a a a a a a ;
+local v8 = c b ;
+local v9 = c d b ;
+local v10 = c a b c c b a a a ;
+
+rule test-in ( id : subset not-subset )
+{
+ check-equal $(id)-0-0 : [ eval-$(id) $(v2) : $(v2) ] : $(subset) ;
+ check-equal $(id)-0-empty : [ eval-$(id) $(v2) : $(v1) ] : $(subset) ;
+ check-equal $(id)-empty-0 : [ eval-$(id) $(v1) : $(v2) ] : $(not-subset) ;
+ check-equal $(id)-equal : [ eval-$(id) $(v3) : $(v3) ] : $(subset) ;
+ check-equal $(id)-simple : [ eval-$(id) $(v3) : $(v4) ] : $(subset) ;
+ check-equal $(id)-extra : [ eval-$(id) $(v4) : $(v3) ] : $(not-subset) ;
+ check-equal $(id)-multiple : [ eval-$(id) $(v7) : $(v3) ] : $(subset) ;
+ check-equal $(id)-unordered : [ eval-$(id) $(v8) : $(v3) ] : $(subset) ;
+ check-equal $(id)-unordered-extra : [ eval-$(id) $(v9) : $(v3) ] : $(not-subset) ;
+ check-equal $(id)-unordered-multiple : [ eval-$(id) $(v10) : $(v3) ] : $(subset) ;
+}
+
+rule eval-in ( lhs * : rhs * )
+{
+ if $(lhs) in $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-in "in" : true false ;
+
+rule eval-not-in ( lhs * : rhs * )
+{
+ if ! ( $(lhs) in $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-in not-in : false true ;
+
+rule test-truth-table ( id : tt tf ft ff )
+{
+ check-equal $(id)-tt : [ eval-$(id) 1 : 1 ] : $(tt) ;
+ check-equal $(id)-tf : [ eval-$(id) 1 : ] : $(tf) ;
+ check-equal $(id)-ft : [ eval-$(id) : 1 ] : $(ft) ;
+ check-equal $(id)-ff : [ eval-$(id) : ] : $(ff) ;
+}
+
+rule eval-and ( lhs ? : rhs ? )
+{
+ if $(lhs) && $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table and : true false false false ;
+
+rule eval-or ( lhs ? : rhs ? )
+{
+ if $(lhs) || $(rhs) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table or : true true true false ;
+
+rule eval-not-and ( lhs ? : rhs ? )
+{
+ if ! ( $(lhs) && $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table not-and : false true true true ;
+
+rule eval-not-or ( lhs ? : rhs ? )
+{
+ if ! ( $(lhs) || $(rhs) ) { return true ; }
+ else { return false ; }
+}
+
+test-truth-table not-or : false false false true ;
+
+if [ mark-order r1 : test1 ] < [ mark-order r2 : test2 ] { }
+check-order lt-order : r1 r2 ;
+if [ mark-order r1 : test1 ] > [ mark-order r2 : test2 ] { }
+check-order gt-order : r1 r2 ;
+if [ mark-order r1 : test1 ] <= [ mark-order r2 : test2 ] { }
+check-order le-order : r1 r2 ;
+if [ mark-order r1 : test1 ] >= [ mark-order r2 : test2 ] { }
+check-order ge-order : r1 r2 ;
+if [ mark-order r1 : test1 ] = [ mark-order r2 : test2 ] { }
+check-order eq-order : r1 r2 ;
+if [ mark-order r1 : test1 ] != [ mark-order r2 : test2 ] { }
+check-order ne-order : r1 r2 ;
+if [ mark-order r1 : test1 ] in [ mark-order r2 : test2 ] { }
+check-order in-order : r1 r2 ;
+
+if [ mark-order r1 : test1 ] && [ mark-order r2 : test2 ] { }
+check-order and-order : r1 r2 ;
+if [ mark-order r1 ] && [ mark-order r2 : test2 ] { }
+check-order and-order-short-circuit : r1 ;
+
+if [ mark-order r1 ] || [ mark-order r2 : test2 ] { }
+check-order or-order : r1 r2 ;
+if [ mark-order r1 : test1 ] || [ mark-order r2 : test2 ] { }
+check-order or-order-short-circuit : r1 ;
+
+}
+
+# Check include
+
+{
+#FIXME:
+# plain include
+# include in module
+# include returns an empty list
+# rule arguments are available inside include
+}
+
+# Check local
+
+{
+
+local v1 = a b c ;
+local v2 = f g h ;
+
+{
+ local v1 ;
+ check-equal local-no-init : $(v1) : ;
+}
+
+check-equal local-restore : $(v1) : a b c ;
+
+{
+ local v1 = d e f ;
+ check-equal local-init : $(v1) : d e f ;
+}
+
+check-equal local-restore-init : $(v1) : a b c ;
+
+{
+ local v1 v2 ;
+ check-equal local-multiple-no-init : $(v1) - $(v2) : - ;
+}
+
+check-equal local-multiple-restore : $(v1) - $(v2) : a b c - f g h ;
+
+{
+ local v1 v2 = d e f ;
+ check-equal local-multiple-init : $(v1) - $(v2) : d e f - d e f ;
+}
+
+{
+ local v1 v1 = d e f ;
+ check-equal local-duplicate : $(v1) - $(v1) : d e f - d e f ;
+}
+
+check-equal local-duplicate-restore : $(v1) : a b c ;
+
+{
+ local [ mark-order r1 : v1 ] = [ mark-order r2 : d e f ] ;
+ check-order local-order : r1 r2 ;
+}
+
+}
+
+# Check module
+
+{
+ local var1 = root-module-var ;
+ module my_module
+ {
+ var1 = module-var ;
+ rule get ( )
+ {
+ return $(var1) ;
+ }
+ local rule not_really ( ) { return nothing ; }
+ }
+
+ check-equal module-var-not-root : $(var1) : root-module-var ;
+
+ check-equal module-rulenames : [ RULENAMES my_module ] : get ;
+
+ IMPORT_MODULE my_module ;
+ check-equal module-rule-import-module : [ my_module.get ] : module-var ;
+
+ IMPORT my_module : get : : module-get ;
+    check-equal module-rule-import : [ module-get ] : module-var ;
+
+ IMPORT my_module : get : : module-get : LOCALIZE ;
+    check-equal module-rule-import-localize : [ module-get ] : root-module-var ;
+
+}
+
+# Check class
+{
+#FIXME:
+# ...
+}
+
+# Check on
+
+{
+
+local target1 = test-on-target1 ;
+local target2 = test-on-target2 ;
+local targets = $(target1) $(target2) ;
+local v1 v2 v3 ;
+
+VAR on $(target1) = value1 ;
+V2 on $(target2) = value2 ;
+
+check-equal on-return : [ on $(target1) return $(VAR) ] : value1 ;
+
+rule test-rule
+{
+ return $(VAR) ;
+}
+
+check-equal on-rule : [ on $(target1) test-rule ] : value1 ;
+
+check-equal on-multiple : [ on $(targets) return $(V2) ] : ;
+
+rule test-rule
+{
+ on $(target1)
+ {
+ return $(VAR) ;
+ }
+}
+
+check-equal on-block : [ test-rule ] : value1 ;
+
+# FIXME: crazy implementation artifacts:
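+#
+# As the checks below demonstrate, inside an "on <target>" block the plain
+# "var = value" assignments end up as the target-specific values, while the
+# explicit "var on <target> = value" assignments end up as the global values,
+# i.e. the two assignment forms are effectively swapped. A variable that is
+# never mentioned with "on <target>" (v3 below) is not affected by the swap,
+# so a plain assignment to it simply stays global.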
+
+v1 on test-on-target3 = x1 ;
+on test-on-target3
+{
+ v1 on test-on-target3 += x1 ;
+ v1 = y1 ;
+ v2 on test-on-target3 += x2 ;
+ v2 = y2 ;
+ v3 = y3 ;
+}
+
+check-equal on-swap-old1 : $(v1) : x1 ;
+check-equal on-swap-old2 : [ on test-on-target3 return $(v1) ] : y1 ;
+check-equal on-swap-new1 : $(v2) : x2 ;
+check-equal on-swap-new2 : [ on test-on-target3 return $(v2) ] : y2 ;
+check-equal on-no-swap : $(v3) : y3 ;
+
+}
+
+# Check rule
+
+{
+#FIXME:
+# argument order
+# expand rule name
+}
+
+# Check rules
+
+{
+#FIXME:
+}
+
+# Check set
+
+{
+local v1 ;
+local v2 ;
+local v3 ;
+local vars = v1 v2 v3 ;
+
+v1 = x1 ;
+check-equal set-set-empty : $(v1) : x1 ;
+v2 += x2 ;
+check-equal set-append-empty : $(v2) : x2 ;
+v3 ?= x3 ;
+check-equal set-default-empty : $(v3) : x3 ;
+
+v1 = y1 ;
+check-equal set-set-non-empty : $(v1) : y1 ;
+v2 += y2 ;
+check-equal set-append-non-empty : $(v2) : x2 y2 ;
+v3 ?= y3 ;
+check-equal set-default-non-empty : $(v3) : x3 ;
+
+v1 = ;
+v2 = ;
+v3 = ;
+$(vars) = z ;
+check-equal set-set-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = ;
+v2 = ;
+v3 = ;
+$(vars) += z ;
+check-equal set-append-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = ;
+v2 = ;
+v3 = ;
+$(vars) ?= z ;
+check-equal set-default-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = x1 ;
+v2 = x2 ;
+v3 = x3 ;
+$(vars) = z ;
+check-equal set-set-non-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = x1 ;
+v2 = x2 ;
+v3 = x3 ;
+$(vars) += z ;
+check-equal set-append-non-empty-group : $(v1) - $(v2) - $(v3) : x1 z - x2 z - x3 z ;
+
+v1 = x1 ;
+v2 = x2 ;
+v3 = x3 ;
+$(vars) ?= z ;
+check-equal set-default-non-empty-group : $(v1) - $(v2) - $(v3) : x1 - x2 - x3 ;
+
+v1 = x1 ;
+v2 = ;
+v3 = x3 ;
+$(vars) = z ;
+check-equal set-set-mixed-group : $(v1) - $(v2) - $(v3) : z - z - z ;
+
+v1 = x1 ;
+v2 = ;
+v3 = x3 ;
+$(vars) += z ;
+check-equal set-append-mixed-group : $(v1) - $(v2) - $(v3) : x1 z - z - x3 z ;
+
+v1 = x1 ;
+v2 = ;
+v3 = x3 ;
+$(vars) ?= z ;
+check-equal set-default-mixed-group : $(v1) - $(v2) - $(v3) : x1 - z - x3 ;
+
+vars = v1 v1 ;
+
+v1 = ;
+$(vars) = z ;
+check-equal set-set-duplicate-empty : $(v1) : z ;
+v1 = ;
+$(vars) += z ;
+check-equal set-append-duplicate-empty : $(v1) : z z ;
+v1 = ;
+$(vars) ?= z ;
+check-equal set-default-duplicate-empty : $(v1) : z ;
+
+v1 = x1 ;
+$(vars) = z ;
+check-equal set-set-duplicate-non-empty : $(v1) : z ;
+v1 = x1 ;
+$(vars) += z ;
+check-equal set-append-duplicate-non-empty : $(v1) : x1 z z ;
+v1 = x1 ;
+$(vars) ?= z ;
+check-equal set-default-duplicate-non-empty : $(v1) : x1 ;
+
+rule test-rule { v1 = x1 ; }
+check-equal set-set-result : [ test-rule ] : x1 ;
+rule test-rule { v1 += x1 ; }
+check-equal set-append-result : [ test-rule ] : x1 ;
+rule test-rule { v1 ?= x1 ; }
+check-equal set-default-result : [ test-rule ] : x1 ;
+
+[ mark-order r1 ] = [ mark-order r2 ] ;
+check-order set-set-order : r1 r2 ;
+[ mark-order r1 ] += [ mark-order r2 ] ;
+check-order set-append-order : r1 r2 ;
+[ mark-order r1 ] ?= [ mark-order r2 ] ;
+check-order set-default-order : r1 r2 ;
+
+}
+
+# Check setcomp
+
+{
+#FIXME
+# Expand arguments
+# Don't expand name
+}
+
+# Check setexec
+
+{
+#FIXME:
+# Don't expand name
+# Evaluate bindlist
+}
+
+# Check settings ;
+
+{
+
+local target1 = test-settings-target1 ;
+local target2 = test-settings-target2 ;
+local target3 = test-settings-target3 ;
+local targets = $(target2) $(target3) ;
+
+local vars = v1 v2 v3 ;
+
+v1 on $(target1) = x1 ;
+check-equal settings-set-empty : [ on $(target1) return $(v1) ] : x1 ;
+v2 on $(target1) += x2 ;
+check-equal settings-append-empty : [ on $(target1) return $(v2) ] : x2 ;
+v3 on $(target1) ?= x3 ;
+check-equal settings-default-empty : [ on $(target1) return $(v3) ] : x3 ;
+
+v1 on $(target1) = y1 ;
+check-equal settings-set-non-empty : [ on $(target1) return $(v1) ] : y1 ;
+v2 on $(target1) += y2 ;
+check-equal settings-append-non-empty : [ on $(target1) return $(v2) ] : x2 y2 ;
+v3 on $(target1) ?= y3 ;
+check-equal settings-default-non-empty : [ on $(target1) return $(v3) ] : x3 ;
+
+$(vars) on setting-target2 = z ;
+check-equal settings-set-empty-group : [ on setting-target2 return $(v1) ] - [ on setting-target2 return $(v2) ] - [ on setting-target2 return $(v3) ] : z - z - z ;
+
+$(vars) on setting-target3 += z ;
+check-equal settings-append-empty-group : [ on setting-target3 return $(v1) ] - [ on setting-target3 return $(v2) ] - [ on setting-target3 return $(v3) ] : z - z - z ;
+
+$(vars) on setting-target4 ?= z ;
+check-equal settings-default-empty-group : [ on setting-target4 return $(v1) ] - [ on setting-target4 return $(v2) ] - [ on setting-target4 return $(v3) ] : z - z - z ;
+
+v1 on $(target1) = x1 ;
+v2 on $(target1) = x2 ;
+v3 on $(target1) = x3 ;
+$(vars) on $(target1) = z ;
+check-equal settings-set-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : z - z - z ;
+
+v1 on $(target1) = x1 ;
+v2 on $(target1) = x2 ;
+v3 on $(target1) = x3 ;
+$(vars) on $(target1) += z ;
+check-equal settings-append-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : x1 z - x2 z - x3 z ;
+
+v1 on $(target1) = x1 ;
+v2 on $(target1) = x2 ;
+v3 on $(target1) = x3 ;
+$(vars) on $(target1) ?= z ;
+check-equal settings-default-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : x1 - x2 - x3 ;
+
+v1 on setting-target5 = x1 ;
+v3 on setting-target5 = x3 ;
+$(vars) on setting-target5 = z ;
+check-equal settings-set-mixed-group : [ on setting-target5 return $(v1) ] - [ on setting-target5 return $(v2) ] - [ on setting-target5 return $(v3) ] : z - z - z ;
+
+v1 on setting-target6 = x1 ;
+v3 on setting-target6 = x3 ;
+$(vars) on setting-target6 += z ;
+check-equal settings-append-mixed-group : [ on setting-target6 return $(v1) ] - [ on setting-target6 return $(v2) ] - [ on setting-target6 return $(v3) ] : x1 z - z - x3 z ;
+
+v1 on setting-target7 = x1 ;
+v3 on setting-target7 = x3 ;
+$(vars) on setting-target7 ?= z ;
+check-equal settings-default-mixed-group : [ on setting-target7 return $(v1) ] - [ on setting-target7 return $(v2) ] - [ on setting-target7 return $(v3) ] : x1 - z - x3 ;
+
+vars = v1 v1 ;
+
+$(vars) on setting-target8 = z ;
+check-equal settings-set-duplicate-empty : [ on setting-target8 return $(v1) ] : z ;
+$(vars) on setting-target9 += z ;
+check-equal settings-append-duplicate-empty : [ on setting-target9 return $(v1) ] : z z ;
+$(vars) on setting-target10 ?= z ;
+check-equal settings-default-duplicate-empty : [ on setting-target10 return $(v1) ] : z ;
+
+v1 on $(target1) = x1 ;
+$(vars) on $(target1) = z ;
+check-equal settings-set-duplicate-non-empty : [ on $(target1) return $(v1) ] : z ;
+v1 on $(target1) = x1 ;
+$(vars) on $(target1) += z ;
+check-equal settings-append-duplicate-non-empty : [ on $(target1) return $(v1) ] : x1 z z ;
+v1 on $(target1) = x1 ;
+$(vars) on $(target1) ?= z ;
+check-equal settings-default-duplicate-non-empty : [ on $(target1) return $(v1) ] : x1 ;
+
+v1 on $(target1) = ;
+v1 on $(target1) ?= z ;
+check-equal settings-default-set-but-empty : [ on $(target1) return $(v1) ] : ;
+
+v1 on $(targets) = multi ;
+check-equal settings-set-multi-empty : [ on $(target2) return $(v1) ] - [ on $(target3) return $(v1) ] : multi - multi ;
+v2 on $(targets) += multi ;
+check-equal settings-append-multi-empty : [ on $(target2) return $(v2) ] - [ on $(target3) return $(v2) ] : multi - multi ;
+v3 on $(targets) ?= multi ;
+check-equal settings-default-multi-empty : [ on $(target2) return $(v3) ] - [ on $(target3) return $(v3) ] : multi - multi ;
+
+v1 on $(targets) = multi2 ;
+check-equal settings-set-multi-empty : [ on $(target2) return $(v1) ] - [ on $(target3) return $(v1) ] : multi2 - multi2 ;
+v2 on $(targets) += multi2 ;
+check-equal settings-append-multi-empty : [ on $(target2) return $(v2) ] - [ on $(target3) return $(v2) ] : multi multi2 - multi multi2 ;
+v3 on $(targets) ?= multi2 ;
+check-equal settings-default-multi-empty : [ on $(target2) return $(v3) ] - [ on $(target3) return $(v3) ] : multi - multi ;
+
+rule test-rule { v1 on $(target1) = x1 ; }
+check-equal settings-set-result : [ test-rule ] : x1 ;
+rule test-rule { v1 on $(target1) += x1 ; }
+check-equal settings-append-result : [ test-rule ] : x1 ;
+rule test-rule { v1 on $(target1) ?= x1 ; }
+check-equal settings-default-result : [ test-rule ] : x1 ;
+
+[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] = [ mark-order r2 : value ] ;
+check-order settings-set-order : r1 r2 r3 ;
+[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] += [ mark-order r2 : value ] ;
+check-order settings-append-order : r1 r2 r3 ;
+[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] ?= [ mark-order r2 : value ] ;
+check-order settings-default-order : r1 r2 r3 ;
+
+}
+
+# Check switch
+
+{
+
+local pattern = * ;
+
+switch value
+{
+ case * : mark-order r1 ;
+}
+
+check-order switch-match-any : r1 ;
+
+switch value
+{
+ case v2 : mark-order r1 ;
+}
+
+check-order switch-no-match : ;
+
+switch value
+{
+ case $(pattern) : mark-order r1 ;
+}
+
+check-order switch-no-expand : ;
+
+switch value
+{
+ case value : mark-order r1 ;
+ case * : mark-order r2 ;
+}
+
+check-order switch-match-several : r1 ;
+
+rule test-rule ( value )
+{
+ switch $(value)
+ {
+ case value : return 1 ;
+ }
+}
+
+check-equal switch-result-match : [ test-rule value ] : 1 ;
+check-equal switch-result-match : [ test-rule v1 ] : ;
+
+switch $()
+{
+ case "" : mark-order r1 ;
+ case * : mark-order r2 ;
+}
+
+check-order switch-empty : r1 ;
+
+local values = v1 v2 v3 ;
+switch $(values)
+{
+ case v1 : mark-order r1 ;
+ case v2 : mark-order r2 ;
+ case v3 : mark-order r3 ;
+}
+
+check-order switch-multiple : r1 ;
+
+# Test glob matching
+
+switch value { case * : mark-order r1 ; }
+check-order switch-glob-star : r1 ;
+
+switch value { case va*e : mark-order r1 ; }
+check-order switch-glob-star-1 : r1 ;
+
+switch value { case *a* : mark-order r1 ; }
+check-order switch-glob-star-2 : r1 ;
+
+switch value { case *a*ue* : mark-order r1 ; }
+check-order switch-glob-star-3 : r1 ;
+
+switch value { case *[eaiou]*ue : mark-order r1 ; }
+check-order switch-glob-group : r1 ;
+
+switch value { case *[eaiou]ue : mark-order r1 ; }
+check-order switch-glob-group-fail : ;
+
+switch value { case ?a?ue : mark-order r1 ; }
+check-order switch-glob-any : r1 ;
+
+switch value { case ?lue : mark-order r1 ; }
+check-order switch-glob-any-fail : ;
+
+}
+
+# Test while
+
+{
+
+local value = 1 2 3 ;
+
+while $(value)
+{
+ mark-order r$(value[1]) ;
+ value = $(value[2-]) ;
+}
+
+check-order while-exec : r1 r2 r3 ;
+
+rule test-rule
+{
+ local value = 1 2 3 ;
+ while $(value)
+ {
+ value = $(value[2-]) ;
+ return x ;
+ }
+}
+
+check-equal while-result : [ test-rule ] : x ;
+
+rule test-rule
+{
+ local value = 1 2 ;
+ while $(value)
+ {
+ value = $(value[2-]) ;
+ local inner = $(value) ;
+ while $(inner)
+ {
+ inner = $(inner[2-]) ;
+ return x ;
+ }
+ }
+}
+
+check-equal while-result-2 : [ test-rule ] : ;
+
+}
+
+#
+# test CALLER_MODULE and backtrace
+#
+
+{
+ local base = [ BACKTRACE ] ;
+ base = $(base[2]) ;
+ rule backtrace ( )
+ {
+ local bt = [ BACKTRACE ] ;
+ check-equal backtrace-1-file : $(bt) :
+ test.jam [ CALC $(base) + 4 ] "" backtrace
+ test.jam [ CALC $(base) + 28 ] module2. module2.f
+ test.jam [ CALC $(base) + 19 ] module1. module1.f
+ test.jam [ CALC $(base) + 32 ] "" "module scope"
+ ;
+ }
+ module module1
+ {
+ IMPORT_MODULE module2 : module1 ;
+ rule f ( )
+ {
+ local m = [ CALLER_MODULE ] ;
+ check-equal caller-module-root : $(m) ;
+ module2.f ;
+ }
+ }
+ module module2
+ {
+ rule f ( )
+ {
+ local m = [ CALLER_MODULE ] ;
+ check-equal caller-module : module1 : $(m) ;
+ backtrace ;
+ }
+ }
+ IMPORT_MODULE module1 ;
+ module1.f ;
+}
+
+
+# Test NORMALIZE_PATH
+
+{
+check-equal normalize-path : "." : [ NORMALIZE_PATH ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "." ] ;
+check-equal normalize-path : ".." : [ NORMALIZE_PATH ".." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "//" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "//\\\\//\\\\" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/./" ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\///.///\\\\\\" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "./././././." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/./././././." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////" ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////././." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////./././" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo////.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "///foo/\\\\/.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\\\foo\\//\\.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/./.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/././././.." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/./././bar/./././.././././baz/./././.." ] ;
+check-equal normalize-path : "/foo" : [ NORMALIZE_PATH "/foo/./././bar/./././.././././baz/./././.." ] ;
+check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/./././bar/./././////.././././baz/./././.." ] ;
+check-equal normalize-path : "/foo" : [ NORMALIZE_PATH "/foo/./././bar/./././////.././././baz/./././.." ] ;
+check-equal normalize-path : ".." : [ NORMALIZE_PATH "./.." ] ;
+check-equal normalize-path : ".." : [ NORMALIZE_PATH "././././.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "../.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "./../.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "././././../.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "./.././././.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "././././.././././.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "..//\\\\\\//.." ] ;
+check-equal normalize-path : "../.." : [ NORMALIZE_PATH "../..\\\\/\\\\" ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/../bar/../baz/.." ] ;
+check-equal normalize-path : "." : [ NORMALIZE_PATH "foo////..////bar////.//////.////../baz/.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/foo/../bar/../baz/.." ] ;
+check-equal normalize-path : "/" : [ NORMALIZE_PATH "/foo////..////bar////.//////.////../baz/.." ] ;
+
+# Invalid rooted paths with leading dotdots.
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../" ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "//\\\\//\\\\/.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "\\\\//\\\\//\\.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../../.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/foo/bar/../baz/../../.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../for/././../././bar/././../././.." ] ;
+check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../foo/bar" ] ;
+
+}
+
+# Test W32_GETREGNAMES
+
+{
+
+if $(NT)
+{
+ local sound = "Beep" "ExtendedSounds" ;
+ local r1 = [ W32_GETREGNAMES "HKEY_CURRENT_USER\\Control Panel\\Sound" :
+ values ] ;
+ check-equal w32_getregnames : $(sound:L) : $(r1:L) ;
+ local r2 = [ W32_GETREGNAMES "HKCU\\Control Panel\\Sound" : values ] ;
+ check-equal w32_getregnames : $(sound:L) : $(r2:L) ;
+
+ # Some Windows platforms may have additional keys under
+    # 'CurrentControlSet' which we remove here so they are not reported as
+    # errors by our test.
+ local rule remove-policies ( param * )
+ {
+ local found ;
+ local r ;
+ for local x in $(param:L)
+ {
+ if ! x in $(found) &&
+ $(x) in "addservices" "policies" "deleted device ids"
+ {
+ found += $(x) ;
+ }
+ else
+ {
+ r += $(x) ;
+ }
+ }
+ return $(r) ;
+ }
+ local CurrentControlSet = "Control" "Enum" "Hardware Profiles" "Services" ;
+ local r3 = [ W32_GETREGNAMES "HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet"
+ : subkeys ] ;
+ check-equal w32_getregnames : $(CurrentControlSet:L) : [ remove-policies
+ $(r3:L) ] ;
+ local r4 = [ W32_GETREGNAMES "HKLM\\SYSTEM\\CurrentControlSet" : subkeys ] ;
+ check-equal w32_getregnames : $(CurrentControlSet:L) : [ remove-policies
+ $(r4:L) ] ;
+}
+
+}
+
+# Test SHELL
+
+{
+
+local c = "echo value" ;
+
+check-equal shell : "value\n" : [ SHELL $(c) ] ;
+check-equal shell : "" : [ SHELL $(c) : no-output ] ;
+check-equal shell : "value\n" 0 : [ SHELL $(c) : exit-status ] ;
+check-equal shell : "" 0 : [ SHELL $(c) : no-output : exit-status ] ;
+check-equal command : "value\n" : [ COMMAND $(c) ] ;
+check-equal command : "" : [ COMMAND $(c) : no-output ] ;
+check-equal command : "value\n" 0 : [ COMMAND $(c) : exit-status ] ;
+check-equal command : "" 0 : [ COMMAND $(c) : no-output : exit-status ] ;
+
+}
+
+# Test SUBST
+
+{
+
+# Check that unmatched subst returns an empty list
+check-equal subst-nomatch : [ SUBST "abc" "d+" x ] : ;
+
+# Check that a matched subst works
+check-equal subst-match : [ SUBST "ddd" "d+" x ] : x ;
+
+# Check that we can get multiple substitutions from a single invocation
+check-equal subst-multiple : [ SUBST "x/y/z" "([^/]*)/([^/]*).*" "\\1" "\\2" "\\1-\\2" ] : x y x-y ;
+
+}
+
+# Test summary
+
+if $(failed) = 0
+{
+ status = 0 ;
+}
+else
+{
+ status = 1 ;
+}
+
+EXIT $(passed) passed $(failed) failed : $(status) ;
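The SHELL and COMMAND checks above boil down to capturing a command's output and its exit status. A minimal stand-alone Python sketch of the same expectation (subprocess here is purely illustrative; it is not how bjam implements SHELL):

import subprocess

# Equivalent of [ SHELL "echo value" ] and [ SHELL "echo value" : exit-status ].
proc = subprocess.Popen("echo value", shell=True,
                        stdout=subprocess.PIPE, universal_newlines=True)
output, _ = proc.communicate()
assert output == "value\n"      # the captured command output
assert proc.returncode == 0     # the command's exit status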
diff --git a/tools/build/test/core_action_output.py b/tools/build/test/core_action_output.py
new file mode 100755
index 0000000000..b26f0e0bd8
--- /dev/null
+++ b/tools/build/test/core_action_output.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test correct "-p" option handling.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d1"], pass_d0=False, pass_toolset=False)
+
+t.write("file.jam", """\
+prefix = "echo \\"" ;
+suffix = "\\"" ;
+if $(NT)
+{
+ prefix = "(echo " ;
+ suffix = ")" ;
+}
+actions go
+{
+ $(prefix)stdout$(suffix)
+ $(prefix)stderr$(suffix) 1>&2
+}
+ECHO {{{ $(XXX) }}} ;
+ALWAYS all ;
+go all ;
+""")
+
+t.run_build_system(["-ffile.jam", "-sXXX=1"], stderr="")
+t.expect_output_lines("{{{ 1 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr")
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=2", "-p0"], stderr="")
+t.expect_output_lines("{{{ 2 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr")
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=3", "-p1"], stderr="")
+t.expect_output_lines("{{{ 3 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr*", False)
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=4", "-p2"], stderr="stderr\n")
+t.expect_output_lines("{{{ 4 }}}")
+t.expect_output_lines("stdout*", False)
+t.expect_output_lines("stderr*", False)
+t.expect_nothing_more()
+
+t.run_build_system(["-ffile.jam", "-sXXX=5", "-p3"], stderr="stderr\n")
+t.expect_output_lines("{{{ 5 }}}")
+t.expect_output_lines("stdout")
+t.expect_output_lines("stderr*", False)
+t.expect_nothing_more()
+
+t.cleanup()
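Read together, the expectations above imply the following routing of action output for the tested "-p" levels (an inference from this test only, not a statement of documented b2 behaviour), summarised as plain Python data:

# Where an action's stdout/stderr end up for each "-p" level exercised above.
P_OPTION_ROUTING = {
    0: ("b2 stdout", "b2 stdout"),   # default and -p0: both streams on b2's stdout
    1: ("b2 stdout", "discarded"),   # -p1: action stderr suppressed
    2: ("discarded", "b2 stderr"),   # -p2: action stdout suppressed
    3: ("b2 stdout", "b2 stderr"),   # -p3: streams kept separate
}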
diff --git a/tools/build/test/core_action_status.py b/tools/build/test/core_action_status.py
new file mode 100755
index 0000000000..7ebd438698
--- /dev/null
+++ b/tools/build/test/core_action_status.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+actions quietly .a. { $(ACTION) }
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+DEPENDS all : subtest_a ;
+""")
+
+t.run_build_system(["-ffile.jam", "-sACTION=invalid"], status=1)
+
+t.cleanup()
diff --git a/tools/build/test/core_actions_quietly.py b/tools/build/test/core_actions_quietly.py
new file mode 100755
index 0000000000..c020846d59
--- /dev/null
+++ b/tools/build/test/core_actions_quietly.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+actions quietly .a.
+{
+echo [$(<:B)] 0
+echo [$(<:B)] 1
+echo [$(<:B)] 2
+}
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+.a. subtest_b : subtest ;
+DEPENDS all : subtest_a subtest_b ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d2"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+.a. subtest_a
+
+echo [subtest_a] 0
+echo [subtest_a] 1
+echo [subtest_a] 2
+
+[subtest_a] 0
+[subtest_a] 1
+[subtest_a] 2
+.a. subtest_b
+
+echo [subtest_b] 0
+echo [subtest_b] 1
+echo [subtest_b] 2
+
+[subtest_b] 0
+[subtest_b] 1
+[subtest_b] 2
+...updated 2 targets...
+""")
+
+t.run_build_system(["-ffile.jam", "-d1"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+...updated 2 targets...
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_arguments.py b/tools/build/test/core_arguments.py
new file mode 100755
index 0000000000..a6e886ff63
--- /dev/null
+++ b/tools/build/test/core_arguments.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+# Copyright 2001 Dave Abrahams
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+def simple_args(start, finish):
+ return " : ".join("%d" % x for x in xrange(start, finish + 1))
+
+
+def test(t, type, input, output, status=0):
+ code = ["include echo_args.jam ; echo_%s" % type]
+ if input: code.append(input)
+ code.append(";")
+ t.write("file.jam", " ".join(code))
+ t.run_build_system(["-ffile.jam"], status=status)
+ t.expect_output_lines(output);
+
+
+def test_args(t, *args, **kwargs):
+ test(t, "args", *args, **kwargs)
+
+
+def test_varargs(t, *args, **kwargs):
+ test(t, "varargs", *args, **kwargs)
+
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("echo_args.jam", """\
+NOCARE all ;
+
+rule echo_args ( a b ? c ? : d + : e * )
+{
+ ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ;
+}
+
+rule echo_varargs ( a b ? c ? : d + : e * : * )
+{
+ ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e)
+ ": rest= "$(4[1]) $(4[2-])
+ ": "$(5[1]) $(5[2-]) ": "$(6[1]) $(6[2-]) ": "$(7[1]) $(7[2-])
+ ": "$(8[1]) $(8[2-]) ": "$(9[1]) $(9[2-]) ": "$(10[1]) $(10[2-])
+ ": "$(11[1]) $(11[2-]) ": "$(12[1]) $(12[2-]) ": "$(13[1]) $(13[2-])
+ ": "$(14[1]) $(14[2-]) ": "$(15[1]) $(15[2-]) ": "$(16[1]) $(16[2-])
+ ": "$(17[1]) $(17[2-]) ": "$(18[1]) $(18[2-]) ": "$(19[1]) $(19[2-])
+ ": "$(20[1]) $(20[2-]) ": "$(21[1]) $(21[2-]) ": "$(22[1]) $(22[2-])
+ ": "$(23[1]) $(23[2-]) ": "$(24[1]) $(24[2-]) ": "$(25[1]) $(25[2-]) ;
+}
+""")
+
+test_args(t, "", "* missing argument a", status=1)
+test_args(t, "1 2 : 3 : 4 : 5", "* extra argument 5", status=1)
+test_args(t, "a b c1 c2 : d", "* extra argument c2", status=1)
+
+# Check modifier '?'
+test_args(t, "1 2 3 : 4", "a= 1 b= 2 c= 3 : d= 4 : e=")
+test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=")
+test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=")
+test_args(t, "1 : 2", "a= 1 b= c= : d= 2 : e=")
+
+# Check modifier '+'
+test_args(t, "1", "* missing argument d", status=1)
+test_args(t, "1 : 2 3", "a= 1 b= c= : d= 2 3 : e=")
+test_args(t, "1 : 2 3 4", "a= 1 b= c= : d= 2 3 4 : e=")
+
+# Check modifier '*'
+test_args(t, "1 : 2 : 3", "a= 1 b= c= : d= 2 : e= 3")
+test_args(t, "1 : 2 : 3 4", "a= 1 b= c= : d= 2 : e= 3 4")
+test_args(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5")
+
+# Check varargs
+test_varargs(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5")
+test_varargs(t, "1 : 2 : 3 4 5 : 6", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7",
+ "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8",
+ "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9",
+ "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : 9")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : "
+ "16 : 17 : 18 : 19a 19b", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : "
+ "9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b")
+test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : "
+ "16 : 17 : 18 : 19a 19b 19c : 20", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= "
+ "6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b 19c : "
+ "20")
+
+# Check varargs upper limit
+expected = "a= 1 b= c= : d= 2 : e= 3 : rest= " + simple_args(4, 19)
+test_varargs(t, simple_args(1, 19), expected)
+test_varargs(t, simple_args(1, 19) + " 19b 19c 19d", expected + " 19b 19c 19d")
+test_varargs(t, simple_args(1, 19) + " 19b 19c 19d : 20", expected + " 19b "
+ "19c 19d")
+test_varargs(t, simple_args(1, 20), expected)
+test_varargs(t, simple_args(1, 50), expected)
+
+t.cleanup()
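The signature exercised above, rule echo_args ( a b ? c ? : d + : e * ), combines the three argument modifiers: '?' marks an optional value, '+' requires at least one value and '*' accepts any number. A rough stand-alone sketch of that matching (illustrative only, not bjam's actual argument checker):

def match_echo_args(first, second, third):
    # first  <-> "a b ? c ?" : one required value plus up to two optional ones
    # second <-> "d +"       : one or more values
    # third  <-> "e *"       : zero or more values
    if not first:
        raise ValueError("missing argument a")
    if len(first) > 3:
        raise ValueError("extra argument %s" % first[3])
    if not second:
        raise ValueError("missing argument d")
    a, b, c = (list(first) + [None, None])[:3]
    return a, b, c, list(second), list(third)

assert match_echo_args(["1"], ["2"], ["3", "4"]) == \
    ("1", None, None, ["2"], ["3", "4"])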
diff --git a/tools/build/test/core_at_file.py b/tools/build/test/core_at_file.py
new file mode 100755
index 0000000000..1bcdcab559
--- /dev/null
+++ b/tools/build/test/core_at_file.py
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+EXIT file: "@(o$(name) .txt:E= test -D$(contents))" : 0 ;
+""")
+
+t.run_build_system()
+t.expect_output_lines("file: on1 on2 .txt");
+t.expect_addition("on1 on2 .txt")
+t.expect_content("on1 on2 .txt", " test -DM1 -DM2", True)
+
+t.rm(".")
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+actions run { echo file: "@(o$(name) .txt:E= test -D$(contents))" }
+run all ;
+""")
+
+t.run_build_system(["-d2"])
+t.expect_output_lines(' echo file: "on1 on2 .txt" ');
+t.expect_addition("on1 on2 .txt")
+t.expect_content("on1 on2 .txt", " test -DM1 -DM2", True)
+
+t.rm(".")
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+file = "@($(STDOUT):E= test -D$(contents)\n)" ;
+actions run { $(file) }
+run all ;
+""")
+
+t.run_build_system(["-d1"])
+t.expect_output_lines(" test -DM1 -DM2")
+
+t.rm(".")
+
+t.write("file.jam", """\
+name = n1 n2 ;
+contents = M1 M2 ;
+actions run { @($(STDOUT):E= test -D$(contents)\n) }
+run all ;
+""")
+
+t.run_build_system(["-d1"])
+t.expect_output_lines(" test -DM1 -DM2")
+
+t.cleanup()
diff --git a/tools/build/test/core_bindrule.py b/tools/build/test/core_bindrule.py
new file mode 100755
index 0000000000..3a6916afa7
--- /dev/null
+++ b/tools/build/test/core_bindrule.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+
+# Copyright 2001 Dave Abrahams
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("subdir1/file-to-bind", "# This file intentionally left blank")
+
+t.write("file.jam", """\
+rule do-nothing ( target : source )
+{
+ DEPENDS $(target) : $(source) ;
+}
+actions quietly do-nothing { }
+
+# Make a non-file target which depends on a file that exists
+NOTFILE fake-target ;
+SEARCH on file-to-bind = subdir1 ;
+
+do-nothing fake-target : file-to-bind ;
+
+# Set jam up to call our bind-rule
+BINDRULE = bind-rule ;
+
+rule bind-rule ( target : path )
+{
+ ECHO found: $(target) at $(path) ;
+}
+
+DEPENDS all : fake-target ;
+""")
+
+t.run_build_system(["-ffile.jam"], stdout="""\
+found: all at all
+found: file-to-bind at subdir1%sfile-to-bind
+...found 3 targets...
+""" % os.sep)
+
+t.cleanup()
diff --git a/tools/build/test/core_d12.py b/tools/build/test/core_d12.py
new file mode 100644
index 0000000000..5488973d29
--- /dev/null
+++ b/tools/build/test/core_d12.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests correct handling of "-d1" and "-d2" options.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-ffile.jam"], pass_d0=False, pass_toolset=0)
+
+t.write("file.jam", """\
+actions a { }
+actions quietly b { }
+ALWAYS all ;
+a all ;
+b all ;
+""")
+
+t.run_build_system(["-d0"], stdout="")
+
+t.run_build_system(["-d1"])
+t.expect_output_lines("a all")
+t.expect_output_lines("b all", False)
+
+t.run_build_system(["-d2"])
+t.expect_output_lines("a all")
+t.expect_output_lines("b all")
+
+t.cleanup()
diff --git a/tools/build/test/core_delete_module.py b/tools/build/test/core_delete_module.py
new file mode 100644
index 0000000000..d56ffe6e7a
--- /dev/null
+++ b/tools/build/test/core_delete_module.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the facilities for deleting modules.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """
+module foo
+{
+ rule bar { }
+ var = x y ;
+}
+DELETE_MODULE foo ;
+if [ RULENAMES foo ]
+{
+ EXIT DELETE_MODULE failed to kill foo's rules: [ RULENAMES foo ] ;
+}
+
+module foo
+{
+ if $(var)
+ {
+ EXIT DELETE_MODULE failed to kill foo's variables ;
+ }
+
+ rule bar { }
+ var = x y ;
+
+ DELETE_MODULE foo ;
+
+ if $(var)
+ {
+ EXIT internal DELETE_MODULE failed to kill foo's variables ;
+ }
+ if [ RULENAMES foo ]
+ {
+ EXIT internal DELETE_MODULE failed to kill foo's rules: [ RULENAMES foo ] ;
+ }
+}
+DEPENDS all : xx ;
+NOTFILE xx ;
+""")
+
+t.run_build_system(["-ffile.jam"], status=0)
+t.cleanup()
diff --git a/tools/build/v2/test/core_dependencies.py b/tools/build/test/core_dependencies.py
index 2b2ef368d6..2b2ef368d6 100644
--- a/tools/build/v2/test/core_dependencies.py
+++ b/tools/build/test/core_dependencies.py
diff --git a/tools/build/test/core_import_module.py b/tools/build/test/core_import_module.py
new file mode 100644
index 0000000000..c5bbd3e638
--- /dev/null
+++ b/tools/build/test/core_import_module.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("code", """\
+module a
+{
+ rule r1 ( )
+ {
+ ECHO R1 ;
+ }
+
+ local rule l1 ( )
+ {
+ ECHO A.L1 ;
+ }
+}
+module a2
+{
+ rule r2 ( )
+ {
+ ECHO R2 ;
+ }
+}
+IMPORT a2 : r2 : : a2.r2 ;
+
+rule a.l1 ( )
+{
+ ECHO L1 ;
+}
+
+module b
+{
+ IMPORT_MODULE a : b ;
+ rule test
+ {
+ # Call rule visible via IMPORT_MODULE
+ a.r1 ;
+ # Call rule in global scope
+ a2.r2 ;
+ # Call rule in global scope. Doesn't find local rule
+ a.l1 ;
+ # Make l1 visible
+ EXPORT a : l1 ;
+ a.l1 ;
+ }
+}
+
+IMPORT b : test : : test ;
+test ;
+
+module c
+{
+ rule test
+ {
+ ECHO CTEST ;
+ }
+}
+
+IMPORT_MODULE c : ;
+c.test ;
+
+actions do-nothing { }
+do-nothing all ;
+""")
+
+t.run_build_system(["-fcode"], stdout="""\
+R1
+R2
+L1
+A.L1
+CTEST
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_jamshell.py b/tools/build/test/core_jamshell.py
new file mode 100644
index 0000000000..0344a5792d
--- /dev/null
+++ b/tools/build/test/core_jamshell.py
@@ -0,0 +1,54 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import sys
+
+t = BoostBuild.Tester(pass_toolset=False, pass_d0=False)
+
+t.write("file.jam", """
+actions run {
+ $(ACTION)
+}
+
+# Raw commands only work on Windows
+if $(OS) = NT
+{
+ JAMSHELL on test-raw = % ;
+ JAMSHELL on test-raw-fail = % ;
+}
+ACTION on test-raw = "\"$(PYTHON)\" -V" ;
+run test-raw ;
+
+ACTION on test-raw-fail = missing-executable ;
+run test-raw-fail ;
+
+# On Windows, the command is stored in a temporary
+# file. On other systems it is passed directly.
+if $(OS) = NT
+{
+ JAMSHELL on test-py = $(PYTHON) ;
+}
+else
+{
+ JAMSHELL on test-py = $(PYTHON) -c ;
+}
+ACTION on test-py = "
+print \\\",\\\".join([str(x) for x in range(3)])
+" ;
+run test-py ;
+
+DEPENDS all : test-raw test-raw-fail test-py ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d1", "-sPYTHON=" + sys.executable], status=1)
+t.expect_output_lines([
+ "...failed run test-raw-fail...",
+ "0,1,2",
+ "...failed updating 1 target...",
+ "...updated 2 targets..."])
+
+t.cleanup()
diff --git a/tools/build/test/core_language.py b/tools/build/test/core_language.py
new file mode 100755
index 0000000000..717e91adaa
--- /dev/null
+++ b/tools/build/test/core_language.py
@@ -0,0 +1,12 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+t.set_tree("core-language")
+t.run_build_system(["-ftest.jam"])
+t.cleanup()
diff --git a/tools/build/v2/test/core_modifiers.py b/tools/build/test/core_modifiers.py
index fb95cbfbd7..fb95cbfbd7 100644
--- a/tools/build/v2/test/core_modifiers.py
+++ b/tools/build/test/core_modifiers.py
diff --git a/tools/build/test/core_multifile_actions.py b/tools/build/test/core_multifile_actions.py
new file mode 100755
index 0000000000..50bfe83394
--- /dev/null
+++ b/tools/build/test/core_multifile_actions.py
@@ -0,0 +1,202 @@
+#!/usr/bin/python
+
+# Copyright 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that actions that produce multiple targets are handled
+# correctly. The rules are as follows:
+#
+# - If any action that updates a target is run, then the target
+# is considered to be out-of-date and all of its updating actions
+# are run in order.
+# - A target is considered updated when all of its updating actions
+# have completed successfully.
+# - If any updating action for a target fails, then the remaining
+# actions are skipped and the target is marked as failed.
+#
+# Note that this is a more thorough test case for the same
+# problem that core_parallel_multifile_actions_N.py checks for.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("file.jam", """
+actions update
+{
+ echo updating $(<)
+}
+
+update x1 x2 ;
+update x2 x3 ;
+""")
+
+# Updating x1 should force x2 to update as well.
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 3 targets...
+...updating 3 targets...
+update x1
+updating x1 x2
+update x2
+updating x2 x3
+...updated 3 targets...
+""")
+
+# If x1 is up-to-date, we don't need to update x2,
+# even though x2 is missing.
+t.write("x1", "")
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 1 target...
+""")
+
+# Building x3 should update x1 and x2, even though
+# x1 would be considered up-to-date, taken alone.
+t.run_build_system(["-ffile.jam", "x3"], stdout="""\
+...found 3 targets...
+...updating 2 targets...
+update x1
+updating x1 x2
+update x2
+updating x2 x3
+...updated 3 targets...
+""")
+
+# Updating x2 should succeed, but x3 should be skipped
+t.rm("x1")
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+actions fail
+{
+ echo failed $(<)
+ exit 1
+}
+
+update x1 x2 ;
+fail x1 ;
+update x1 x3 ;
+update x2 ;
+update x3 ;
+""")
+
+t.run_build_system(["-ffile.jam", "x3"], status=1, stdout="""\
+...found 3 targets...
+...updating 3 targets...
+update x1
+updating x1 x2
+fail x1
+failed x1
+
+ echo failed x1
+ exit 1
+
+...failed fail x1...
+update x2
+updating x2
+...failed updating 2 targets...
+...updated 1 target...
+""")
+
+# Make sure that dependencies of targets that are
+# updated as a result of a multifile action are
+# processed correctly.
+t.rm("x1")
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+
+update x1 ;
+update x2 ;
+DEPENDS x2 : x1 ;
+update x2 x3 ;
+""")
+t.run_build_system(["-ffile.jam", "x3"], stdout="""\
+...found 3 targets...
+...updating 3 targets...
+update x1
+updating x1
+update x2
+updating x2
+update x2
+updating x2 x3
+...updated 3 targets...
+""")
+
+# JAM_SEMAPHORE rules:
+#
+# - if two updating actions have targets that share a semaphore,
+# these actions cannot be run in parallel.
+#
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+
+targets = x1 x2 ;
+JAM_SEMAPHORE on $(targets) = <s>update_sem ;
+update x1 x2 ;
+""")
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 2 targets...
+...updating 2 targets...
+update x1
+updating x1 x2
+...updated 2 targets...
+""")
+
+# A target can appear multiple times in an action
+t.write("file.jam", """\
+actions update
+{
+ echo updating $(<)
+}
+
+update x1 x1 ;
+""")
+t.run_build_system(["-ffile.jam", "x1"], stdout="""\
+...found 1 target...
+...updating 1 target...
+update x1
+updating x1 x1
+...updated 1 target...
+""")
+
+# Together actions should check that all the targets are the same
+# before combining.
+t.write("file.jam", """\
+actions together update
+{
+ echo updating $(<) : $(>)
+}
+
+update x1 x2 : s1 ;
+update x1 x2 : s2 ;
+
+update x3 : s3 ;
+update x3 x4 : s4 ;
+update x4 x3 : s5 ;
+DEPENDS all : x1 x2 x3 x4 ;
+""")
+t.run_build_system(["-ffile.jam"], stdout="""\
+...found 5 targets...
+...updating 4 targets...
+update x1
+updating x1 x2 : s1 s2
+update x3
+updating x3 : s3
+update x3
+updating x3 x4 : s4
+update x4
+updating x4 x3 : s5
+...updated 4 targets...
+""")
+
+
+
+t.cleanup()
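The three rules listed in the header comment of this test condense into a small update loop. A stand-alone sketch of that reading (illustrative only, not b2's actual scheduler):

class Action(object):
    def __init__(self, ok=True):
        self.ok = ok
    def run(self):
        return self.ok

def update(actions, any_action_ran):
    # If any updating action runs, the target is out of date and all of its
    # updating actions run in order; the first failure skips the rest.
    if not any_action_ran:
        return "up-to-date"
    for action in actions:
        if not action.run():
            return "failed"        # remaining actions are skipped
    return "updated"               # updated only once every action succeeded

assert update([Action(), Action()], any_action_ran=True) == "updated"
assert update([Action(), Action(ok=False), Action()], True) == "failed"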
diff --git a/tools/build/test/core_nt_cmd_line.py b/tools/build/test/core_nt_cmd_line.py
new file mode 100755
index 0000000000..579242d24b
--- /dev/null
+++ b/tools/build/test/core_nt_cmd_line.py
@@ -0,0 +1,266 @@
+#!/usr/bin/python
+
+# Copyright 2001 Dave Abrahams
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests Windows command line construction.
+#
+# Note that the regular 'echo' is an internal shell command on Windows and
+# therefore can not be called directly as a standalone Windows process.
+
+import BoostBuild
+import os
+import re
+import sys
+
+
+executable = sys.executable.replace("\\", "/")
+if " " in executable:
+ executable = '"%s"' % executable
+
+
+def string_of_length(n):
+ if n <= 0:
+ return ""
+ n -= 1
+ y = ['', '$(1x10-1)', '$(10x10-1)', '$(100x10-1)', '$(1000x10-1)']
+ result = []
+ for i in reversed(xrange(5)):
+ x, n = divmod(n, 10 ** i)
+ result += [y[i]] * x
+ result.append('x')
+ return " ".join(result)
+
+
+# Boost Jam currently does not allow preparing actions with completely empty
+# content and always requires at least a single whitespace after the opening
+# brace in order to satisfy its language grammar rules.
+def test_raw_empty():
+ whitespace_in = " \n\n\r\r\v\v\t\t \t \r\r \n\n"
+
+ # We tell the testing system to read its child process output as raw
+ # binary data but the bjam process we run will read its input file and
+ # write out its output as text, i.e. convert all of our "\r\n" sequences to
+ # "\n" on input and all of its "\n" characters back to "\r\n" on output.
+ # This means that any lone "\n" input characters not preceded by "\r" will
+    # get an extra "\r" added in front of them on output.
+ whitespace_out = whitespace_in.replace("\r\n", "\n").replace("\n", "\r\n")
+
+ t = BoostBuild.Tester(["-d2", "-d+4"], pass_d0=False, pass_toolset=0,
+ use_test_config=False)
+ t.write("file.jam", """\
+actions do_empty {%s}
+JAMSHELL = %% ;
+do_empty all ;
+""" % (whitespace_in))
+ t.run_build_system(["-ffile.jam"], universal_newlines=False)
+ t.expect_output_lines("do_empty all")
+ t.expect_output_lines("Executing raw command directly", False)
+ if "\r\n%s\r\n" % whitespace_out not in t.stdout():
+ BoostBuild.annotation("failure", "Whitespace action content not found "
+ "on stdout.")
+ t.fail_test(1, dump_difference=False)
+ t.cleanup()
+
+
+def test_raw_nt(n=None, error=False):
+ t = BoostBuild.Tester(["-d1", "-d+4"], pass_d0=False, pass_toolset=0,
+ use_test_config=False)
+
+ cmd_prefix = "%s -c \"print('XXX: " % executable
+ cmd_suffix = "')\""
+ cmd_extra_length = len(cmd_prefix) + len(cmd_suffix)
+
+ if n == None:
+ n = cmd_extra_length
+
+ data_length = n - cmd_extra_length
+ if data_length < 0:
+ BoostBuild.annotation("failure", """\
+Can not construct Windows command of desired length. Requested command length
+too short for the current test configuration.
+ Requested command length: %d
+ Minimal supported command length: %d
+""" % (n, cmd_extra_length))
+ t.fail_test(1, dump_difference=False)
+
+ # Each $(Xx10-1) variable contains X words of 9 characters each, which,
+ # including spaces between words, brings the total number of characters in
+ # its string representation to X * 10 - 1 (X * 9 characters + (X - 1)
+ # spaces).
+ t.write("file.jam", """\
+ten = 0 1 2 3 4 5 6 7 8 9 ;
+
+1x10-1 = 123456789 ;
+10x10-1 = $(ten)12345678 ;
+100x10-1 = $(ten)$(ten)1234567 ;
+1000x10-1 = $(ten)$(ten)$(ten)123456 ;
+
+actions do_echo
+{
+ %s%s%s
+}
+JAMSHELL = %% ;
+do_echo all ;
+""" % (cmd_prefix, string_of_length(data_length), cmd_suffix))
+ if error:
+ expected_status = 1
+ else:
+ expected_status = 0
+ t.run_build_system(["-ffile.jam"], status=expected_status)
+ if error:
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("do_echo action is too long (%d, max 32766):" % n
+ )
+ t.expect_output_lines("XXX: *", False)
+ else:
+ t.expect_output_lines("Executing raw command directly")
+ t.expect_output_lines("do_echo action is too long*", False)
+
+ m = re.search("^XXX: (.*)$", t.stdout(), re.MULTILINE)
+ if not m:
+ BoostBuild.annotation("failure", "Expected output line starting "
+ "with 'XXX: ' not found.")
+ t.fail_test(1, dump_difference=False)
+ if len(m.group(1)) != data_length:
+ BoostBuild.annotation("failure", """Unexpected output data length.
+ Expected: %d
+ Received: %d""" % (n, len(m.group(1))))
+ t.fail_test(1, dump_difference=False)
+
+ t.cleanup()
+
+
+def test_raw_to_shell_fallback_nt():
+ t = BoostBuild.Tester(["-d1", "-d+4"], pass_d0=False, pass_toolset=0,
+ use_test_config=False)
+
+ cmd_prefix = '%s -c print(' % executable
+ cmd_suffix = ')'
+
+ t.write("file_multiline.jam", """\
+actions do_multiline
+{
+ echo one
+
+
+ echo two
+}
+JAMSHELL = % ;
+do_multiline all ;
+""")
+ t.run_build_system(["-ffile_multiline.jam"])
+ t.expect_output_lines("do_multiline all")
+ t.expect_output_lines("one")
+ t.expect_output_lines("two")
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+
+ t.write("file_redirect.jam", """\
+actions do_redirect { echo one > two.txt }
+JAMSHELL = % ;
+do_redirect all ;
+""")
+ t.run_build_system(["-ffile_redirect.jam"])
+ t.expect_output_lines("do_redirect all")
+ t.expect_output_lines("one", False)
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+ t.expect_addition("two.txt")
+
+ t.write("file_pipe.jam", """\
+actions do_pipe
+{
+ echo one | echo two
+}
+JAMSHELL = % ;
+do_pipe all ;
+""")
+ t.run_build_system(["-ffile_pipe.jam"])
+ t.expect_output_lines("do_pipe all")
+ t.expect_output_lines("one*", False)
+ t.expect_output_lines("two")
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+
+ t.write("file_single_quoted.jam", """\
+actions do_single_quoted { %s'5>10'%s }
+JAMSHELL = %% ;
+do_single_quoted all ;
+""" % (cmd_prefix, cmd_suffix))
+ t.run_build_system(["-ffile_single_quoted.jam"])
+ t.expect_output_lines("do_single_quoted all")
+ t.expect_output_lines("5>10")
+ t.expect_output_lines("Executing raw command directly")
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C", False)
+ t.expect_nothing_more()
+
+ t.write("file_double_quoted.jam", """\
+actions do_double_quoted { %s"5>10"%s }
+JAMSHELL = %% ;
+do_double_quoted all ;
+""" % (cmd_prefix, cmd_suffix))
+ t.run_build_system(["-ffile_double_quoted.jam"])
+ t.expect_output_lines("do_double_quoted all")
+ # The difference between this example and the similar previous one using
+    # single instead of double quotes stems from how the Python executable
+    # being used parses the command-line string it receives from Windows.
+ t.expect_output_lines("False")
+ t.expect_output_lines("Executing raw command directly")
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C", False)
+ t.expect_nothing_more()
+
+ t.write("file_escaped_quote.jam", """\
+actions do_escaped_quote { %s\\"5>10\\"%s }
+JAMSHELL = %% ;
+do_escaped_quote all ;
+""" % (cmd_prefix, cmd_suffix))
+ t.run_build_system(["-ffile_escaped_quote.jam"])
+ t.expect_output_lines("do_escaped_quote all")
+ t.expect_output_lines("5>10")
+ t.expect_output_lines("Executing raw command directly", False)
+ t.expect_output_lines("Executing using a command file and the shell: "
+ "cmd.exe /Q/C")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+if os.name == 'nt':
+ test_raw_empty()
+
+    # Can not test much shorter lengths as the shortest possible command-line
+    # length constructed in this test depends on the runtime environment, e.g.
+    # the path to the Python executable running this test.
+ test_raw_nt()
+ test_raw_nt(255)
+ test_raw_nt(1000)
+ test_raw_nt(8000)
+ test_raw_nt(8191)
+ test_raw_nt(8192)
+ test_raw_nt(10000)
+ test_raw_nt(30000)
+ test_raw_nt(32766)
+ # CreateProcessA() Windows API places a limit of 32768 on the allowed
+ # command-line length, including a trailing Unicode (2-byte) nul-terminator
+ # character.
+ test_raw_nt(32767, error=True)
+ test_raw_nt(40000, error=True)
+ test_raw_nt(100001, error=True)
+
+    test_raw_to_shell_fallback_nt()
\ No newline at end of file
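Two observations follow from the code and comments in this test: string_of_length(n) is built so that its jam expression expands to exactly n characters inside the action, and the 32766/32767 boundary is simply the 32768-character CreateProcessA() limit minus the 2-byte trailing nul terminator. The latter arithmetic, spelled out as a small Python check:

# Boundary behind test_raw_nt(32766) and test_raw_nt(32767, error=True).
CREATEPROCESS_CMD_LIMIT = 32768           # includes the trailing nul terminator
MAX_RAW_COMMAND_LENGTH = CREATEPROCESS_CMD_LIMIT - 2
assert MAX_RAW_COMMAND_LENGTH == 32766       # longest accepted raw command
assert MAX_RAW_COMMAND_LENGTH + 1 == 32767   # first length expected to fail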
diff --git a/tools/build/test/core_option_d2.py b/tools/build/test/core_option_d2.py
new file mode 100755
index 0000000000..bf809aa85c
--- /dev/null
+++ b/tools/build/test/core_option_d2.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("file.jam", """\
+actions .a.
+{
+echo [$(<:B)] 0
+echo [$(<:B)] 1
+echo [$(<:B)] 2
+}
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+.a. subtest_b : subtest ;
+DEPENDS all : subtest_a subtest_b ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d2"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+.a. subtest_a
+
+echo [subtest_a] 0
+echo [subtest_a] 1
+echo [subtest_a] 2
+
+[subtest_a] 0
+[subtest_a] 1
+[subtest_a] 2
+.a. subtest_b
+
+echo [subtest_b] 0
+echo [subtest_b] 1
+echo [subtest_b] 2
+
+[subtest_b] 0
+[subtest_b] 1
+[subtest_b] 2
+...updated 2 targets...
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_option_l.py b/tools/build/test/core_option_l.py
new file mode 100755
index 0000000000..e237dcf63a
--- /dev/null
+++ b/tools/build/test/core_option_l.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ SLEEP = @call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+actions .a. {
+echo 001
+$(SLEEP) 4
+echo 002
+}
+
+.a. sleeper ;
+
+DEPENDS all : sleeper ;
+""")
+
+t.run_build_system(["-ffile.jam", "-d1", "-l2"], status=1)
+t.expect_output_lines("2 second time limit exceeded")
+
+t.cleanup()
diff --git a/tools/build/test/core_option_n.py b/tools/build/test/core_option_n.py
new file mode 100755
index 0000000000..4dab3bf99e
--- /dev/null
+++ b/tools/build/test/core_option_n.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("file.jam", """\
+actions .a.
+{
+echo [$(<:B)] 0
+echo [$(<:B)] 1
+echo [$(<:B)] 2
+}
+
+rule .a.
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE subtest ;
+.a. subtest_a : subtest ;
+.a. subtest_b : subtest ;
+FAIL_EXPECTED subtest_b ;
+DEPENDS all : subtest_a subtest_b ;
+""")
+
+t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 4 targets...
+...updating 2 targets...
+.a. subtest_a
+
+echo [subtest_a] 0
+echo [subtest_a] 1
+echo [subtest_a] 2
+
+.a. subtest_b
+
+echo [subtest_b] 0
+echo [subtest_b] 1
+echo [subtest_b] 2
+
+...updated 2 targets...
+""")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/test/core_parallel_actions.py b/tools/build/test/core_parallel_actions.py
new file mode 100755
index 0000000000..0d44149b74
--- /dev/null
+++ b/tools/build/test/core_parallel_actions.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+
+# Copyright 2006 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ actions sleeper
+ {
+ echo [$(<:S)] 0
+ call sleep.bat 1
+ echo [$(<:S)] 1
+ call sleep.bat 1
+ echo [$(<:S)] 2
+ call sleep.bat $(<:B)
+ }
+}
+else
+{
+ actions sleeper
+ {
+ echo "[$(<:S)] 0"
+ sleep 1
+ echo "[$(<:S)] 1"
+ sleep 1
+ echo "[$(<:S)] 2"
+ sleep $(<:B)
+ }
+}
+
+rule sleeper
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+NOTFILE front ;
+sleeper 1.a : front ;
+sleeper 2.a : front ;
+sleeper 3.a : front ;
+sleeper 4.a : front ;
+NOTFILE choke ;
+DEPENDS choke : 1.a 2.a 3.a 4.a ;
+sleeper 1.b : choke ;
+sleeper 2.b : choke ;
+sleeper 3.b : choke ;
+sleeper 4.b : choke ;
+DEPENDS bottom : 1.b 2.b 3.b 4.b ;
+DEPENDS all : bottom ;
+""")
+
+t.run_build_system(["-ffile.jam", "-j4"], stdout="""\
+...found 12 targets...
+...updating 8 targets...
+sleeper 1.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 2.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 3.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 4.a
+[.a] 0
+[.a] 1
+[.a] 2
+sleeper 1.b
+[.b] 0
+[.b] 1
+[.b] 2
+sleeper 2.b
+[.b] 0
+[.b] 1
+[.b] 2
+sleeper 3.b
+[.b] 0
+[.b] 1
+[.b] 2
+sleeper 4.b
+[.b] 0
+[.b] 1
+[.b] 2
+...updated 8 targets...
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_parallel_multifile_actions_1.py b/tools/build/test/core_parallel_multifile_actions_1.py
new file mode 100755
index 0000000000..8d9448e0c8
--- /dev/null
+++ b/tools/build/test/core_parallel_multifile_actions_1.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+
+# Copyright 2007 Rene Rivera.
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Added to guard against a bug causing targets to be used before they
+# themselves have finished building. This used to happen for targets built by a
+# multi-file action that got triggered by another target.
+#
+# Example:
+# When target A and target B were declared as created by a single action and
+# target A triggered running that action then, while the action was still
+# running, target B was already being reported as built, causing other targets
+# depending on target B to be built prematurely.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ SLEEP = @call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+actions .gen.
+{
+ echo 001
+ $(SLEEP) 4
+ echo 002
+}
+rule .use.1 { DEPENDS $(<) : $(>) ; }
+actions .use.1
+{
+ echo 003
+}
+
+rule .use.2 { DEPENDS $(<) : $(>) ; }
+actions .use.2
+{
+ $(SLEEP) 1
+ echo 004
+}
+
+.gen. g1.generated g2.generated ;
+.use.1 u1.user : g1.generated ;
+.use.2 u2.user : g2.generated ;
+
+DEPENDS all : u1.user u2.user ;
+""")
+
+t.run_build_system(["-ffile.jam", "-j2"], stdout="""\
+...found 5 targets...
+...updating 4 targets...
+.gen. g1.generated
+001
+002
+.use.1 u1.user
+003
+.use.2 u2.user
+004
+...updated 4 targets...
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_parallel_multifile_actions_2.py b/tools/build/test/core_parallel_multifile_actions_2.py
new file mode 100755
index 0000000000..ea4034af07
--- /dev/null
+++ b/tools/build/test/core_parallel_multifile_actions_2.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic, Vladimir Prus
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Added to guard against a bug causing targets to be used before they
+# themselves have finished building. This used to happen for targets built by a
+# multi-file action that got triggered by another target, except when the
+# target triggering the action was the first one in the list of targets
+# produced by that action.
+#
+# Example:
+# When target A and target B were declared as created by a single action with
+# A being the first one listed, and target B triggered running that action
+# then, while the action was still running, target A was already being
+# reported as built, causing other targets depending on target A to be built
+# prematurely.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("file.jam", """\
+if $(NT)
+{
+ SLEEP = @call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+actions link
+{
+ $(SLEEP) 1
+ echo 001 - linked
+}
+
+link dll lib ;
+
+actions install
+{
+ echo 002 - installed
+}
+
+install installed_dll : dll ;
+DEPENDS installed_dll : dll ;
+
+DEPENDS all : lib installed_dll ;
+""")
+
+t.run_build_system(["-ffile.jam", "-j2"], stdout="""\
+...found 4 targets...
+...updating 3 targets...
+link dll
+001 - linked
+install installed_dll
+002 - installed
+...updated 3 targets...
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_source_line_tracking.py b/tools/build/test/core_source_line_tracking.py
new file mode 100755
index 0000000000..61526a2c51
--- /dev/null
+++ b/tools/build/test/core_source_line_tracking.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test Boost Jam parser's source line tracking & reporting.
+
+import BoostBuild
+
+
+def test_eof_in_string():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", '\n\n\naaa = "\n\n\n\n\n\n')
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines('file.jam:4: unmatched " in string at keyword =')
+ t.expect_output_lines("file.jam:4: syntax error at EOF")
+ t.cleanup()
+
+
+def test_error_missing_argument(eof):
+ """
+ This use case used to cause a missing argument error to be reported in
+ module '(builtin)' in line -1 when the input file did not contain a
+ trailing newline.
+
+ """
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", """\
+rule f ( param ) { }
+f ;%s""" % __trailing_newline(eof))
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("file.jam:2: in module scope")
+ t.expect_output_lines("file.jam:1:see definition of rule 'f' being called")
+ t.cleanup()
+
+
+def test_error_syntax(eof):
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", "ECHO%s" % __trailing_newline(eof))
+ t.run_build_system(["-ffile.jam"], status=1)
+ t.expect_output_lines("file.jam:1: syntax error at EOF")
+ t.cleanup()
+
+
+def test_traceback():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("file.jam", """\
+NOTFILE all ;
+ECHO [ BACKTRACE ] ;""")
+ t.run_build_system(["-ffile.jam"])
+ t.expect_output_lines("file.jam 2 module scope")
+ t.cleanup()
+
+
+def __trailing_newline(eof):
+ """
+    Helper function returning an empty string or a newline character to
+ append to the current output line depending on whether we want that line to
+ be the last line in the file (eof == True) or not (eof == False).
+
+ """
+ if eof:
+ return ""
+ return "\n"
+
+
+test_error_missing_argument(eof=False)
+test_error_missing_argument(eof=True)
+test_error_syntax(eof=False)
+test_error_syntax(eof=True)
+test_traceback()
+test_eof_in_string()
diff --git a/tools/build/test/core_typecheck.py b/tools/build/test/core_typecheck.py
new file mode 100644
index 0000000000..225ea7a4bf
--- /dev/null
+++ b/tools/build/test/core_typecheck.py
@@ -0,0 +1,47 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the typechecking facilities.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
+
+t.write("file.jam", """
+module .typecheck
+{
+ rule [path] ( x )
+ {
+ if ! [ MATCH "^(::)" : $(x) ]
+ {
+ ECHO "Error: $(x) is not a path" ;
+ return true ;
+ }
+ }
+}
+
+rule do ( [path] a )
+{
+}
+
+do $(ARGUMENT) ;
+
+actions dummy { }
+dummy all ;
+""")
+
+t.run_build_system(["-sARGUMENT=::a/b/c"])
+t.run_build_system(["-sARGUMENT=a/b/c"], status=1, stdout="""\
+Error: a/b/c is not a path
+file.jam:18: in module scope
+*** argument error
+* rule do ( [path] a )
+* called with: ( a/b/c )
+* true a
+file.jam:16:see definition of rule 'do' being called
+""")
+
+t.cleanup()
diff --git a/tools/build/test/core_update_now.py b/tools/build/test/core_update_now.py
new file mode 100755
index 0000000000..819309a73a
--- /dev/null
+++ b/tools/build/test/core_update_now.py
@@ -0,0 +1,377 @@
+#!/usr/bin/python
+
+# Copyright 2011 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+
+def basic():
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+do-print target1 ;
+
+UPDATE_NOW target1 ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam"], stdout="""\
+...found 1 target...
+...updating 1 target...
+do-print target1
+updating target1
+...updated 1 target...
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def ignore_minus_n():
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+do-print target1 ;
+
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 1 target...
+...updating 1 target...
+do-print target1
+
+ echo updating target1
+
+updating target1
+...updated 1 target...
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def failed_target():
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("file.jam", """\
+actions fail
+{
+ exit 1
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+fail target1 ;
+
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target2 ;
+do-print target2 ;
+DEPENDS target2 : target1 ;
+
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 target2 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 1 target...
+...updating 1 target...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+...failed updating 1 target...
+...found 2 targets...
+...updating 1 target...
+do-print target2
+
+ echo updating target2
+
+...updated 1 target...
+""")
+
+ t.cleanup()
+
+
+def missing_target():
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target2 ;
+do-print target2 ;
+DEPENDS target2 : target1 ;
+
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 target2 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], status=1, stdout="""\
+don't know how to make target1
+...found 1 target...
+...can't find 1 target...
+...found 2 targets...
+...can't make 1 target...
+""")
+
+ t.cleanup()
+
+
+def build_once():
+ """
+ Make sure that if we call UPDATE_NOW with ignore-minus-n, the target gets
+ updated exactly once regardless of previous calls to UPDATE_NOW with -n in
+ effect.
+
+ """
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("file.jam", """\
+actions do-print
+{
+ echo updating $(<)
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+do-print target1 ;
+
+UPDATE_NOW target1 ;
+UPDATE_NOW target1 : : ignore-minus-n ;
+UPDATE_NOW target1 : : ignore-minus-n ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam", "-n"], stdout="""\
+...found 1 target...
+...updating 1 target...
+do-print target1
+
+ echo updating target1
+
+...updated 1 target...
+do-print target1
+
+ echo updating target1
+
+updating target1
+...updated 1 target...
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def return_status():
+ """
+ Make sure that UPDATE_NOW returns a failure status if
+ the target failed in a previous call to UPDATE_NOW
+ """
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("file.jam", """\
+actions fail
+{
+ exit 1
+}
+
+NOTFILE target1 ;
+ALWAYS target1 ;
+fail target1 ;
+
+ECHO update1: [ UPDATE_NOW target1 ] ;
+ECHO update2: [ UPDATE_NOW target1 ] ;
+
+DEPENDS all : target1 ;
+""")
+
+ t.run_build_system(["-ffile.jam"], status=1, stdout="""\
+...found 1 target...
+...updating 1 target...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+...failed updating 1 target...
+update1:
+update2:
+...found 1 target...
+""")
+
+ t.cleanup()
+
+
+def save_restore():
+ """Tests that ignore-minus-n and ignore-minus-q are
+ local to the call to UPDATE_NOW"""
+ t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
+
+ t.write("actions.jam", """\
+rule fail
+{
+ NOTFILE $(<) ;
+ ALWAYS $(<) ;
+}
+actions fail
+{
+ exit 1
+}
+
+rule pass
+{
+ NOTFILE $(<) ;
+ ALWAYS $(<) ;
+}
+actions pass
+{
+ echo updating $(<)
+}
+""")
+ t.write("file.jam", """
+include actions.jam ;
+fail target1 ;
+fail target2 ;
+UPDATE_NOW target1 target2 : : $(IGNORE_MINUS_N) : $(IGNORE_MINUS_Q) ;
+fail target3 ;
+fail target4 ;
+UPDATE_NOW target3 target4 ;
+UPDATE ;
+""")
+ t.run_build_system(['-n', '-sIGNORE_MINUS_N=1', '-ffile.jam'],
+ stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+fail target2
+
+ exit 1
+
+...failed fail target2...
+...failed updating 2 targets...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+fail target4
+
+ exit 1
+
+...updated 2 targets...
+''')
+
+ t.run_build_system(['-q', '-sIGNORE_MINUS_N=1', '-ffile.jam'],
+ status=1, stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+...failed updating 1 target...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+...failed fail target3...
+...failed updating 1 target...
+''')
+
+ t.run_build_system(['-n', '-sIGNORE_MINUS_Q=1', '-ffile.jam'],
+ stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+fail target2
+
+ exit 1
+
+...updated 2 targets...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+fail target4
+
+ exit 1
+
+...updated 2 targets...
+''')
+
+ t.run_build_system(['-q', '-sIGNORE_MINUS_Q=1', '-ffile.jam'],
+ status=1, stdout='''...found 2 targets...
+...updating 2 targets...
+fail target1
+
+ exit 1
+
+...failed fail target1...
+fail target2
+
+ exit 1
+
+...failed fail target2...
+...failed updating 2 targets...
+...found 2 targets...
+...updating 2 targets...
+fail target3
+
+ exit 1
+
+...failed fail target3...
+...failed updating 1 target...
+''')
+
+ t.cleanup()
+
+
+basic()
+ignore_minus_n()
+failed_target()
+missing_target()
+build_once()
+return_status()
+save_restore()
diff --git a/tools/build/test/core_variables_in_actions.py b/tools/build/test/core_variables_in_actions.py
new file mode 100755
index 0000000000..e3a7177e20
--- /dev/null
+++ b/tools/build/test/core_variables_in_actions.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+
+# Copyright 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that variables in actions get expanded but double quote characters
+# get treated as regular characters and not string literal delimiters when
+# determining string tokens concatenated to the variable being expanded.
+#
+# We also take care to make this test work correctly when run using both
+# Windows and Unix echo command variant. That is why we add the extra single
+# quotes around the text being echoed - they will make the double quotes be
+# displayed as regular characters in both cases but will be displayed
+# themselves only when using the Windows cmd shell's echo command.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+t.write("file.jam", """\
+rule dummy ( i )
+{
+ local a = 1 2 3 ;
+ ECHO From rule: $(a)" seconds" ;
+ a on $(i) = $(a) ;
+}
+
+actions dummy
+{
+ echo 'From action: $(a)" seconds"'
+}
+
+dummy all ;
+""")
+t.run_build_system(["-ffile.jam", "-d1"])
+t.expect_output_lines("From rule: 1 seconds 2 seconds 3 seconds")
+t.expect_output_lines('*From action: 1" 2" 3" seconds"*')
+t.cleanup()
diff --git a/tools/build/test/core_varnames.py b/tools/build/test/core_varnames.py
new file mode 100644
index 0000000000..6b61ffcd3e
--- /dev/null
+++ b/tools/build/test/core_varnames.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# This tests the core rule for enumerating the variable names in a module.
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("file.jam", """\
+module foo
+{
+ rule bar { }
+ var1 = x y ;
+ var2 = fubar ;
+}
+
+expected = var1 var2 ;
+names = [ VARNAMES foo ] ;
+if $(names) in $(expected) && $(expected) in $(names)
+{
+ # everything OK
+}
+else
+{
+ EXIT expected to find variables $(expected:J=", ") in module foo,
+ but found $(names:J=", ") instead. ;
+}
+DEPENDS all : xx ;
+NOTFILE xx ;
+""")
+
+t.run_build_system(["-ffile.jam"], status=0)
+
+t.cleanup()
diff --git a/tools/build/v2/test/custom_generator.py b/tools/build/test/custom_generator.py
index 9a1188a03d..9a1188a03d 100644
--- a/tools/build/v2/test/custom_generator.py
+++ b/tools/build/test/custom_generator.py
diff --git a/tools/build/test/default_build.py b/tools/build/test/default_build.py
new file mode 100644
index 0000000000..6ad696ef07
--- /dev/null
+++ b/tools/build/test/default_build.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that default build clause actually has any effect.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "exe a : a.cpp : : debug release ;")
+t.write("a.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.exe")
+t.expect_addition("bin/$toolset/release/a.exe")
+
+# Check that an explicitly-specified build variant suppresses default-build.
+t.rm("bin")
+t.run_build_system(["release"])
+t.expect_addition(BoostBuild.List("bin/$toolset/release/") * "a.exe a.obj")
+t.expect_nothing_more()
+
+# Now check that we can specify an explicit build request and that the
+# default-build will be combined with it.
+t.run_build_system(["optimization=space"])
+t.expect_addition("bin/$toolset/debug/optimization-space/a.exe")
+t.expect_addition("bin/$toolset/release/optimization-space/a.exe")
+
+# Test that default-build must be identical in all alternatives. Error case.
+t.write("jamfile.jam", """\
+exe a : a.cpp : : debug ;
+exe a : b.cpp : : ;
+""")
+t.run_build_system(["-n", "--no-error-backtrace"], status=1)
+t.fail_test(t.stdout().find("default build must be identical in all alternatives") == -1)
+
+# Test that default-build must be identical in all alternatives. No Error case,
+# empty default build.
+t.write("jamfile.jam", """\
+exe a : a.cpp : <variant>debug ;
+exe a : b.cpp : <variant>release ;
+""")
+t.run_build_system(["-n", "--no-error-backtrace"], status=0)
+
+# Now try a harder example: default build which contains <define> should cause
+# <define> to be present when "b" is compiled. This happens only if
+# "build-project b" is placed first.
+t.write("jamfile.jam", """\
+project : default-build <define>FOO ;
+build-project a ;
+build-project b ;
+""")
+
+t.write("a/jamfile.jam", "exe a : a.cpp ../b//b ;")
+t.write("a/a.cpp", """\
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void foo();
+int main() { foo(); }
+""")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ;")
+t.write("b/b.cpp", """\
+#ifdef FOO
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+#endif
+""")
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/tools/build/test/default_features.py b/tools/build/test/default_features.py
new file mode 100644
index 0000000000..0d285a3f9d
--- /dev/null
+++ b/tools/build/test/default_features.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that features with default values are always present in build properties
+# of any target.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Declare *non-propagated* feature foo.
+t.write("jamroot.jam", """
+import feature : feature ;
+feature foo : on off ;
+""")
+
+# Note that '<foo>on' will not be propagated to 'd/l'.
+t.write("jamfile.jam", """
+exe hello : hello.cpp d//l ;
+""")
+
+t.write("hello.cpp", """
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void foo();
+int main() { foo(); }
+""")
+
+t.write("d/jamfile.jam", """
+lib l : l.cpp : <foo>on:<define>FOO ;
+""")
+
+t.write("d/l.cpp", """
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+#ifdef FOO
+void foo() {}
+#endif
+""")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/default_toolset.py b/tools/build/test/default_toolset.py
new file mode 100755
index 0000000000..682e7fcc1e
--- /dev/null
+++ b/tools/build/test/default_toolset.py
@@ -0,0 +1,215 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the expected default toolset is used when no toolset is explicitly
+# specified on the command line or used from code via the using rule. Test that
+# the default toolset is correctly used just like any other explicitly used
+# toolset (e.g. toolset prerequisites, properties conditioned on toolset
+# related features, etc.).
+#
+# Note that we need to ignore regular site/user/test configuration files to
+# avoid them marking any toolsets not under our control as used.
+
+import BoostBuild
+
+
+# Line displayed by Boost Build when using the default toolset.
+configuring_default_toolset_message = \
+ 'warning: Configuring default toolset "%s".'
+
+
+###############################################################################
+#
+# test_conditions_on_default_toolset()
+# ------------------------------------
+#
+###############################################################################
+
+def test_conditions_on_default_toolset():
+ """Test that toolset and toolset subfeature conditioned properties get
+ applied correctly when the toolset is selected by default. Implicitly tests
+ that we can use the set-default-toolset rule to set the default toolset to
+ be used by Boost Build.
+ """
+
+ t = BoostBuild.Tester("--user-config= --ignore-site-config",
+ pass_toolset=False, use_test_config=False)
+
+ toolset_name = "myCustomTestToolset"
+ toolset_version = "v"
+ toolset_version_unused = "v_unused"
+ message_loaded = "Toolset '%s' loaded." % toolset_name
+    message_initialized = "Toolset '%s' initialized." % toolset_name
+
+ # Custom toolset.
+ t.write(toolset_name + ".jam", """
+import feature ;
+ECHO "%(message_loaded)s" ;
+feature.extend toolset : %(toolset_name)s ;
+feature.subfeature toolset %(toolset_name)s : version : %(toolset_version)s %(toolset_version_unused)s ;
+rule init ( version ) { ECHO "%(message_initialized)s" ; }
+""" % {'message_loaded' : message_loaded ,
+ 'message_initialized' : message_initialized,
+ 'toolset_name' : toolset_name ,
+ 'toolset_version' : toolset_version ,
+ 'toolset_version_unused': toolset_version_unused})
+
+ # Main Boost Build project script.
+ t.write("jamroot.jam", """
+import build-system ;
+import errors ;
+import feature ;
+import notfile ;
+
+build-system.set-default-toolset %(toolset_name)s : %(toolset_version)s ;
+
+feature.feature description : : free incidental ;
+
+# We use a rule instead of an action to avoid problems with action output not
+# getting piped to stdout by the testing system.
+rule buildRule ( names : targets ? : properties * )
+{
+ local descriptions = [ feature.get-values description : $(properties) ] ;
+ ECHO "descriptions:" /$(descriptions)/ ;
+ local toolset = [ feature.get-values toolset : $(properties) ] ;
+ ECHO "toolset:" /$(toolset)/ ;
+ local toolset-version = [ feature.get-values "toolset-$(toolset):version" : $(properties) ] ;
+ ECHO "toolset-version:" /$(toolset-version)/ ;
+}
+
+notfile testTarget
+ : @buildRule
+ :
+ :
+ <description>stand-alone
+ <toolset>%(toolset_name)s:<description>toolset
+ <toolset>%(toolset_name)s-%(toolset_version)s:<description>toolset-version
+ <toolset>%(toolset_name)s-%(toolset_version_unused)s:<description>toolset-version-unused ;
+""" % {'toolset_name' : toolset_name ,
+ 'toolset_version' : toolset_version,
+ 'toolset_version_unused': toolset_version_unused})
+
+ t.run_build_system()
+ t.expect_output_lines(configuring_default_toolset_message % toolset_name)
+ t.expect_output_lines(message_loaded)
+ t.expect_output_lines(message_initialized)
+ t.expect_output_lines("descriptions: /stand-alone/ /toolset/ "
+ "/toolset-version/")
+ t.expect_output_lines("toolset: /%s/" % toolset_name)
+ t.expect_output_lines("toolset-version: /%s/" % toolset_version)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_default_toolset_on_os()
+# ----------------------------
+#
+###############################################################################
+
+def test_default_toolset_on_os( os, expected_toolset ):
+ """Test that the given toolset is used as the default toolset on the given
+ os. Uses hardcoded knowledge of how Boost Build decides on which host OS it
+ is currently running. Note that we must not do much after tricking Boost
+ Build into believing it has a specific host OS as this might mess up other
+ important internal Boost Build state.
+ """
+
+ t = BoostBuild.Tester("--user-config= --ignore-site-config",
+ pass_toolset=False, use_test_config=False)
+
+ t.write("jamroot.jam", "modules.poke os : .name : %s ;" % os)
+
+ # We need to tell the test system to ignore stderr output as attempting to
+ # load missing toolsets might cause random failures with which we are not
+ # concerned in this test.
+ t.run_build_system(stderr=None)
+ t.expect_output_lines(configuring_default_toolset_message %
+ expected_toolset)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_default_toolset_requirements()
+# -----------------------------------
+#
+###############################################################################
+
+def test_default_toolset_requirements():
+ """Test that default toolset's requirements get applied correctly.
+ """
+
+ t = BoostBuild.Tester("--user-config= --ignore-site-config",
+ pass_toolset=False, use_test_config=False,
+ ignore_toolset_requirements=False)
+
+ toolset_name = "customTestToolsetWithRequirements"
+
+ # Custom toolset.
+ t.write(toolset_name + ".jam", """
+import feature ;
+import toolset ;
+feature.extend toolset : %(toolset_name)s ;
+toolset.add-requirements <description>toolset-requirement ;
+rule init ( ) { }
+""" % {'toolset_name': toolset_name})
+
+ # Main Boost Build project script.
+ t.write("jamroot.jam", """
+import build-system ;
+import errors ;
+import feature ;
+import notfile ;
+
+build-system.set-default-toolset %(toolset_name)s ;
+
+feature.feature description : : free incidental ;
+
+# We use a rule instead of an action to avoid problems with action output not
+# getting piped to stdout by the testing system.
+rule buildRule ( names : targets ? : properties * )
+{
+ local descriptions = [ feature.get-values description : $(properties) ] ;
+ ECHO "descriptions:" /$(descriptions)/ ;
+ local toolset = [ feature.get-values toolset : $(properties) ] ;
+ ECHO "toolset:" /$(toolset)/ ;
+}
+
+notfile testTarget
+ : @buildRule
+ :
+ :
+ <description>target-requirement
+ <description>toolset-requirement:<description>conditioned-requirement
+ <description>unrelated-condition:<description>unrelated-description ;
+""" % {'toolset_name': toolset_name})
+
+ t.run_build_system()
+ t.expect_output_lines(configuring_default_toolset_message % toolset_name)
+ t.expect_output_lines("descriptions: /conditioned-requirement/ "
+ "/target-requirement/ /toolset-requirement/")
+ t.expect_output_lines("toolset: /%s/" % toolset_name)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
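+# The expected defaults below encode the hardcoded Boost Build choices: msvc on
+# Windows (NT) and gcc on Linux, Cygwin and any unrecognized host OS.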
+test_default_toolset_on_os("NT" , "msvc")
+test_default_toolset_on_os("LINUX" , "gcc" )
+test_default_toolset_on_os("CYGWIN" , "gcc" )
+test_default_toolset_on_os("SomeOtherOS", "gcc" )
+test_default_toolset_requirements()
+test_conditions_on_default_toolset()
diff --git a/tools/build/test/dependency_property.py b/tools/build/test/dependency_property.py
new file mode 100644
index 0000000000..cdd8055b0e
--- /dev/null
+++ b/tools/build/test/dependency_property.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test: virtual targets with different dependency properties were
+# considered different by 'virtual-target.register', but the code which
+# determined the actual target paths ignored dependency properties so both
+# targets ended up being in the same location.
+
+import BoostBuild
+import string
+
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """\
+lib foo : foo.cpp ;
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp : <library>foo ;
+""")
+
+t.write("hello.cpp", "int main() {}\n")
+
+t.write("foo.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""")
+
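+# 'hello' and 'hello2' differ only in the <library> dependency property, so
+# both end up trying to produce hello.obj in the same location. This should now
+# be reported as an error instead of the target silently being built twice.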
+t.run_build_system(["--no-error-backtrace"], status=1)
+t.fail_test(string.find(t.stdout(), "Tried to build the target twice") == -1)
+
+t.cleanup()
diff --git a/tools/build/test/dependency_test.py b/tools/build/test/dependency_test.py
new file mode 100644
index 0000000000..d53ce69a73
--- /dev/null
+++ b/tools/build/test/dependency_test.py
@@ -0,0 +1,239 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+def test_basic():
+ t = BoostBuild.Tester(["-d3", "-d+12"], pass_d0=False, use_test_config=False)
+
+ t.write("a.cpp", """
+#include <a.h>
+# include "a.h"
+#include <x.h>
+int main() {}
+""")
+ t.write("a.h", "\n")
+ t.write("a_c.c", """\
+#include <a.h>
+# include "a.h"
+#include <x.h>
+""")
+ t.write("b.cpp", """\
+#include "a.h"
+int main() {}
+""")
+ t.write("b.h", "\n")
+ t.write("c.cpp", """\
+#include "x.h"
+int main() {}
+""")
+ t.write("e.cpp", """\
+#include "x.h"
+int main() {}
+""")
+ t.write("x.foo", "")
+ t.write("y.foo", "")
+
+ t.write("src1/a.h", '#include "b.h"\n')
+ t.write("src1/b.h", '#include "c.h"\n')
+ t.write("src1/c.h", "\n")
+ t.write("src1/z.h", """\
+extern int dummy_variable_suppressing_empty_file_warning_on_hp_cxx_compiler;
+""")
+
+ t.write("src2/b.h", "\n")
+
+ t.write("jamroot.jam", """\
+import foo ;
+import types/cpp ;
+import types/exe ;
+
+project test : requirements <include>src1 ;
+
+exe a : x.foo a.cpp a_c.c ;
+exe b : b.cpp ;
+
+# Because of <define>FOO, c.cpp will be compiled to a different directory than
+# everything for main target "a". Therefore, without <implicit-dependency>, C
+# preprocessor processing that module will not find "x.h", which is part of
+# "a"'s dependency graph.
+#
+# --------------------------
+# More detailed explanation:
+# --------------------------
+# c.cpp includes x.h which does not exist on the current include path so Boost
+# Jam will try to match it to existing Jam targets to cover cases such as this
+# one where the file gets generated by the same build.
+#
+# However, as x.h is not part of "c" metatarget's dependency graph, Boost
+# Build will not actualize its target by default, i.e. create its Jam target.
+#
+# To get the Jam target created in time, we use the <implicit-dependency>
+# feature. This tells Boost Build that it needs to actualize the dependency
+# graph for metatarget "a", even though that metatarget has not been directly
+# mentioned and is not a dependency for any of the metatargets mentioned in the
+# current build request.
+#
+# Note that Boost Build does not automatically add a dependency between the
+# Jam targets in question. So, unless Boost Jam itself adds a dependency on a
+# target from that other dependency graph (x.h in our case), i.e. unless c.cpp
+# actually includes x.h, actualizing it will have no effect in the end as
+# Boost Jam will have no reason to actually build those targets in spite of
+# knowing about them.
+exe c : c.cpp : <define>FOO <implicit-dependency>a ;
+""")
+
+ t.write("foo.jam", """\
+import generators ;
+import modules ;
+import os ;
+import print ;
+import type ;
+import types/cpp ;
+
+type.register FOO : foo ;
+
+generators.register-standard foo.foo : FOO : CPP H ;
+
+nl = "
+" ;
+
+rule foo ( targets * : sources * : properties * )
+{
+ # On NT, you need an exported symbol in order to have an import library
+ # generated. We will not really use the symbol defined here, just force the
+ # import library creation.
+ if ( [ os.name ] = NT || [ modules.peek : OS ] in CYGWIN ) &&
+ <main-target-type>LIB in $(properties)
+ {
+ .decl = "void __declspec(dllexport) foo() {}" ;
+ }
+ print.output $(<[1]) ;
+ print.text $(.decl:E="//")$(nl) ;
+ print.output $(<[2]) ;
+ print.text "#include <z.h>"$(nl) ;
+}
+""")
+
+ t.write("foo.py",
+r"""import bjam
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+type.register("FOO", ["foo"])
+generators.register_standard("foo.foo", ["FOO"], ["CPP", "H"])
+
+def prepare_foo(targets, sources, properties):
+ if properties.get('os') in ['windows', 'cygwin']:
+ bjam.call('set-target-variable', targets, "DECL",
+ "void __declspec(dllexport) foo() {}")
+
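+# The registered action writes two files: the first generated target (the
+# .cpp) receives the optional export declaration stored in DECL, while the
+# second (the .h) receives an include of the generated z.h header, mirroring
+# the Jam implementation in foo.jam.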
+get_manager().engine().register_action("foo.foo",
+ "echo -e $(DECL:E=//)\\n > $(<[1])\n"
+    "echo -e \"#include <z.h>\\n\" > $(<[2])\n", function=prepare_foo)
+""")
+
+ # Check that main target 'c' was able to find 'x.h' from 'a's dependency
+ # graph.
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/c.exe")
+
+ # Check handling of first level includes.
+
+ # Both 'a' and 'b' include "a.h" and should be updated.
+ t.touch("a.h")
+ t.run_build_system()
+
+ t.expect_touch("bin/$toolset/debug/a.exe")
+ t.expect_touch("bin/$toolset/debug/a.obj")
+ t.expect_touch("bin/$toolset/debug/a_c.obj")
+ t.expect_touch("bin/$toolset/debug/b.exe")
+ t.expect_touch("bin/$toolset/debug/b.obj")
+ t.expect_nothing_more()
+
+    # Only source files that include <a.h> should be recompiled.
+ t.touch("src1/a.h")
+ t.run_build_system()
+
+ t.expect_touch("bin/$toolset/debug/a.exe")
+ t.expect_touch("bin/$toolset/debug/a.obj")
+ t.expect_touch("bin/$toolset/debug/a_c.obj")
+ t.expect_nothing_more()
+
+    # "src1/a.h" includes "b.h" (in the same directory).
+ t.touch("src1/b.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/a.exe")
+ t.expect_touch("bin/$toolset/debug/a.obj")
+ t.expect_touch("bin/$toolset/debug/a_c.obj")
+ t.expect_nothing_more()
+
+    # Included by "src1/b.h". We had a bug: a file included using double quotes
+ # (e.g. "b.h") was not scanned at all in this case.
+ t.touch("src1/c.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/a.exe")
+
+ t.touch("b.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ # Test dependency on a generated header.
+ #
+ # TODO: we have also to check that generated header is found correctly if
+ # it is different for different subvariants. Lacking any toolset support,
+ # this check will be implemented later.
+ t.touch("x.foo")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/a.obj")
+ t.expect_touch("bin/$toolset/debug/a_c.obj")
+
+ # Check that generated headers are scanned for dependencies as well.
+ t.touch("src1/z.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/a.obj")
+ t.expect_touch("bin/$toolset/debug/a_c.obj")
+
+ t.cleanup()
+
+
+def test_scanned_includes_with_absolute_paths():
+ """
+ Regression test: on Windows, <includes> with absolute paths were not
+ considered when scanning dependencies.
+
+ """
+ t = BoostBuild.Tester(["-d3", "-d+12"], pass_d0=False)
+
+ t.write("jamroot.jam", """\
+path-constant TOP : . ;
+exe app : main.cpp : <include>$(TOP)/include ;
+""")
+
+ t.write("main.cpp", """\
+#include <dir/header.h>
+int main() {}
+""")
+
+ t.write("include/dir/header.h", "\n")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/main.obj")
+
+ t.touch("include/dir/header.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/main.obj")
+
+ t.cleanup()
+
+
+test_basic()
+test_scanned_includes_with_absolute_paths()
diff --git a/tools/build/test/direct_request_test.py b/tools/build/test/direct_request_test.py
new file mode 100644
index 0000000000..49a083202a
--- /dev/null
+++ b/tools/build/test/direct_request_test.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+
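+# Test that build request properties given directly on the command line (e.g.
+# a free feature such as define=MACROS or a variant name such as release) get
+# applied to the targets being built.
+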
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# First check the basic case: a 'define' passed directly on the command line.
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """\
+exe a : a.cpp b ;
+lib b : b.cpp ;
+""")
+
+t.write("a.cpp", """\
+void
+# ifdef _WIN32
+__declspec(dllimport)
+# endif
+foo();
+int main() { foo(); }
+""")
+
+t.write("b.cpp", """\
+#ifdef MACROS
+void
+# ifdef _WIN32
+__declspec(dllexport)
+# endif
+foo() {}
+#endif
+
+# ifdef _WIN32
+int __declspec(dllexport) force_implib_creation;
+# endif
+""")
+
+t.run_build_system(["define=MACROS"])
+t.expect_addition("bin/$toolset/debug/"
+ * (BoostBuild.List("a.obj b.obj b.dll a.exe")))
+
+
+# When building a debug version, the 'define' still applies.
+t.rm("bin")
+t.run_build_system(["debug", "define=MACROS"])
+t.expect_addition("bin/$toolset/debug/"
+ * (BoostBuild.List("a.obj b.obj b.dll a.exe")))
+
+
+# When building a release version, the 'define' still applies.
+t.write("jamfile.jam", """\
+exe a : a.cpp b : <variant>debug ;
+lib b : b.cpp ;
+""")
+t.rm("bin")
+t.run_build_system(["release", "define=MACROS"])
+
+
+# Regression test: direct build request was not working when there was more
+# than one level of 'build-project'.
+t.rm(".")
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "build-project a ;")
+t.write("a/jamfile.jam", "build-project b ;")
+t.write("a/b/jamfile.jam", "")
+t.run_build_system(["release"])
+
+t.cleanup()
diff --git a/tools/build/test/disambiguation.py b/tools/build/test/disambiguation.py
new file mode 100644
index 0000000000..72867e0a18
--- /dev/null
+++ b/tools/build/test/disambiguation.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that it is possible to add a suffix to a main target name to disambiguate
+# that main target from another, and that this does not affect the names of the
+# generated targets.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+exe hello.exe : hello.obj ;
+obj hello.obj : hello.cpp : <variant>debug ;
+obj hello.obj2 : hello.cpp : <variant>release ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+""")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/hello.exe")
+t.expect_addition("bin/$toolset/debug/hello.obj")
+t.expect_addition("bin/$toolset/release/hello.obj")
+
+t.cleanup()
diff --git a/tools/build/test/dll_path.py b/tools/build/test/dll_path.py
new file mode 100644
index 0000000000..60acf6a493
--- /dev/null
+++ b/tools/build/test/dll_path.py
@@ -0,0 +1,146 @@
+#!/usr/bin/python
+
+# Copyright (C) 2003. Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the <dll-path> property is correctly set when using
+# <hardcode-dll-paths>true.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# The point of this test is to have exe "main" which uses library "b", which
+# uses library "a". When "main" is built with <hardcode-dll-paths>true, paths
+# to both libraries should be present as values of <dll-path> feature. We
+# create a special target type which reports <dll-path> values on its sources
+# and compare the list of found values with our expectations.
+
+t.write("jamroot.jam", "using dll_paths ;")
+t.write("jamfile.jam", """\
+exe main : main.cpp b//b ;
+explicit main ;
+path-list mp : main ;
+""")
+
+t.write("main.cpp", "int main() {}\n")
+t.write("dll_paths.jam", """\
+import "class" : new ;
+import feature ;
+import generators ;
+import print ;
+import sequence ;
+import type ;
+
+rule init ( )
+{
+ type.register PATH_LIST : pathlist ;
+
+ class dll-paths-list-generator : generator
+ {
+ rule __init__ ( )
+ {
+ generator.__init__ dll_paths.list : EXE : PATH_LIST ;
+ }
+
+ rule generated-targets ( sources + : property-set : project name ? )
+ {
+ local dll-paths ;
+ for local s in $(sources)
+ {
+ local a = [ $(s).action ] ;
+ if $(a)
+ {
+ local p = [ $(a).properties ] ;
+ dll-paths += [ $(p).get <dll-path> ] ;
+ }
+ }
+ return [ generator.generated-targets $(sources) :
+ [ $(property-set).add-raw $(dll-paths:G=<dll-path>) ] :
+ $(project) $(name) ] ;
+ }
+ }
+ generators.register [ new dll-paths-list-generator ] ;
+}
+
+rule list ( target : sources * : properties * )
+{
+ local paths = [ feature.get-values <dll-path> : $(properties) ] ;
+ paths = [ sequence.insertion-sort $(paths) ] ;
+ print.output $(target) ;
+ print.text $(paths) ;
+}
+""")
+
+t.write("dll_paths.py", """\
+import bjam
+
+import b2.build.type as type
+import b2.build.generators as generators
+
+from b2.manager import get_manager
+
+def init():
+ type.register("PATH_LIST", ["pathlist"])
+
+ class DllPathsListGenerator(generators.Generator):
+
+ def __init__(self):
+ generators.Generator.__init__(self, "dll_paths.list", False,
+ ["EXE"], ["PATH_LIST"])
+
+ def generated_targets(self, sources, ps, project, name):
+ dll_paths = []
+ for s in sources:
+ a = s.action()
+ if a:
+ p = a.properties()
+ dll_paths += p.get('dll-path')
+ dll_paths.sort()
+ return generators.Generator.generated_targets(self, sources,
+ ps.add_raw(["<dll-path>" + p for p in dll_paths]), project,
+ name)
+
+ generators.register(DllPathsListGenerator())
+
+command = \"\"\"
+echo $(PATHS) > $(<[1])
+\"\"\"
+def function(target, sources, ps):
+ bjam.call('set-target-variable', target, "PATHS", ps.get('dll-path'))
+
+get_manager().engine().register_action("dll_paths.list", command,
+ function=function)
+""")
+
+t.write("a/jamfile.jam", "lib a : a.cpp ;")
+t.write("a/a.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../a//a ;")
+t.write("b/b.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+bar() {}
+""")
+
+t.run_build_system(["hardcode-dll-paths=true"])
+
+t.expect_addition("bin/$toolset/debug/mp.pathlist")
+
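+# The generated path list should mention the directories containing the just
+# built 'a' and 'b' libraries (with names adjusted for the current toolset).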
+es1 = t.adjust_names("a/bin/$toolset/debug")[0]
+es2 = t.adjust_names("b/bin/$toolset/debug")[0]
+
+t.expect_content_lines("bin/$toolset/debug/mp.pathlist", "*" + es1)
+t.expect_content_lines("bin/$toolset/debug/mp.pathlist", "*" + es2)
+
+t.cleanup()
diff --git a/tools/build/test/double_loading.py b/tools/build/test/double_loading.py
new file mode 100644
index 0000000000..c708b00f7a
--- /dev/null
+++ b/tools/build/test/double_loading.py
@@ -0,0 +1,31 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+# Regression test for double loading of the same Jamfile.
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "build-project subdir ;")
+t.write("subdir/jamfile.jam", 'ECHO "Loaded subdir" ;')
+
+t.run_build_system(subdir="subdir")
+t.expect_output_lines("Loaded subdir")
+
+
+# Regression test for a more contrived case: the top-level Jamfile refers to
+# subdir via use-project while subdir's Jamfile is itself being loaded. The
+# motivation for why having use-project refer to subprojects is useful can be
+# found at: http://article.gmane.org/gmane.comp.lib.boost.build/3906
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", "use-project /subdir : subdir ;")
+t.write("subdir/jamfile.jam", "project subdir ;")
+
+t.run_build_system(subdir="subdir")
+
+t.cleanup()
diff --git a/tools/build/v2/test/duplicate.py b/tools/build/test/duplicate.py
index 1d0d5f7f22..1d0d5f7f22 100644
--- a/tools/build/v2/test/duplicate.py
+++ b/tools/build/test/duplicate.py
diff --git a/tools/build/v2/test/example_customization.py b/tools/build/test/example_customization.py
index f8fe15cc28..f8fe15cc28 100644
--- a/tools/build/v2/test/example_customization.py
+++ b/tools/build/test/example_customization.py
diff --git a/tools/build/v2/test/example_gettext.py b/tools/build/test/example_gettext.py
index e7cfa8eb7b..e7cfa8eb7b 100644
--- a/tools/build/v2/test/example_gettext.py
+++ b/tools/build/test/example_gettext.py
diff --git a/tools/build/test/example_libraries.py b/tools/build/test/example_libraries.py
new file mode 100644
index 0000000000..3097bd952d
--- /dev/null
+++ b/tools/build/test/example_libraries.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'libraries' example.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.set_tree("../example/libraries")
+
+t.run_build_system()
+
+t.expect_addition(["app/bin/$toolset/debug/app.exe",
+ "util/foo/bin/$toolset/debug/bar.dll"])
+
+t.cleanup()
diff --git a/tools/build/test/example_make.py b/tools/build/test/example_make.py
new file mode 100644
index 0000000000..2705418299
--- /dev/null
+++ b/tools/build/test/example_make.py
@@ -0,0 +1,17 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'make' example.
+
+import BoostBuild
+import sys
+
+t = BoostBuild.Tester(['example.python.interpreter=%s' % sys.executable])
+t.set_tree("../example/make")
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug/main.cpp"])
+t.cleanup()
diff --git a/tools/build/v2/test/example_qt4.py b/tools/build/test/example_qt4.py
index 1b0dc27bda..1b0dc27bda 100644
--- a/tools/build/v2/test/example_qt4.py
+++ b/tools/build/test/example_qt4.py
diff --git a/tools/build/v2/test/exit_status.py b/tools/build/test/exit_status.py
index 11c4abf76f..11c4abf76f 100755
--- a/tools/build/v2/test/exit_status.py
+++ b/tools/build/test/exit_status.py
diff --git a/tools/build/test/expansion.py b/tools/build/test/expansion.py
new file mode 100644
index 0000000000..f49cbd833e
--- /dev/null
+++ b/tools/build/test/expansion.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", """
+#ifdef CF_IS_OFF
+int main() {}
+#endif
+""")
+
+t.write("b.cpp", """
+#ifdef CF_1
+int main() {}
+#endif
+""")
+
+t.write("c.cpp", """
+#ifdef FOO
+int main() {}
+#endif
+""")
+
+t.write("jamfile.jam", """
+# See if default value of composite feature 'cf' will be expanded to
+# <define>CF_IS_OFF.
+exe a : a.cpp ;
+
+# See if a subfeature in the requirements is expanded.
+exe b : b.cpp : <cf>on-1 ;
+
+# See if conditional requirements are recursively expanded.
+exe c : c.cpp : <toolset>$toolset:<variant>release <variant>release:<define>FOO
+ ;
+""")
+
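+# 'cf' defaults to 'off' (its first value), which composes to <define>CF_IS_OFF;
+# the optional 'version' subfeature of '<cf>on' composes <define>CF_1 when set
+# to 1.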
+t.write("jamroot.jam", """
+import feature ;
+feature.feature cf : off on : composite incidental ;
+feature.compose <cf>off : <define>CF_IS_OFF ;
+feature.subfeature cf on : version : 1 2 : composite optional incidental ;
+feature.compose <cf-on:version>1 : <define>CF_1 ;
+""")
+
+t.expand_toolset("jamfile.jam")
+
+t.run_build_system()
+t.expect_addition(["bin/$toolset/debug/a.exe",
+ "bin/$toolset/debug/b.exe",
+ "bin/$toolset/release/c.exe"])
+
+t.rm("bin")
+
+
+# Test for issue BB60.
+
+t.write("test.cpp", """
+#include "header.h"
+int main() {}
+""")
+
+t.write("jamfile.jam", """
+project : requirements <toolset>$toolset:<include>foo ;
+exe test : test.cpp : <toolset>$toolset ;
+""")
+
+t.expand_toolset("jamfile.jam")
+t.write("foo/header.h", "\n")
+t.write("jamroot.jam", "")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/test.exe")
+
+t.cleanup()
diff --git a/tools/build/test/explicit.py b/tools/build/test/explicit.py
new file mode 100644
index 0000000000..387f3646c7
--- /dev/null
+++ b/tools/build/test/explicit.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+exe hello2 : hello.cpp ;
+explicit hello2 ;
+""")
+
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.ignore("*.tds")
+t.expect_addition(BoostBuild.List("bin/$toolset/debug/hello") * \
+ [".exe", ".obj"])
+t.expect_nothing_more()
+
+t.run_build_system(["hello2"])
+t.expect_addition("bin/$toolset/debug/hello2.exe")
+
+t.rm(".")
+
+
+# Test that 'explicit' used in a helper rule applies to the current project, and
+# not to the Jamfile where the helper rule is defined.
+t.write("jamroot.jam", """\
+rule myinstall ( name : target )
+{
+ install $(name)-bin : $(target) ;
+ explicit $(name)-bin ;
+ alias $(name) : $(name)-bin ;
+}
+""")
+
+t.write("sub/a.cpp", "\n")
+t.write("sub/jamfile.jam", "myinstall dist : a.cpp ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("sub/dist-bin/a.cpp")
+
+t.rm("sub/dist-bin")
+
+t.write("sub/jamfile.jam", """\
+myinstall dist : a.cpp ;
+explicit dist ;
+""")
+
+t.run_build_system(subdir="sub")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/test/feature_cxxflags.py b/tools/build/test/feature_cxxflags.py
new file mode 100755
index 0000000000..76e8b20d74
--- /dev/null
+++ b/tools/build/test/feature_cxxflags.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the cxxflags feature
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# cxxflags should be applied to C++ compilation,
+# but not to C.
+t.write("Jamroot.jam", """
+obj test-cpp : test.cpp : <cxxflags>-DOKAY ;
+obj test-c : test.c : <cxxflags>-DBAD ;
+""")
+
+t.write("test.cpp", """
+#ifndef OKAY
+#error Cannot compile without OKAY
+#endif
+""")
+
+t.write("test.c", """
+#ifdef BAD
+#error Cannot compile with BAD
+#endif
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/test-cpp.obj")
+t.expect_addition("bin/$toolset/debug/test-c.obj")
+
+t.cleanup()
diff --git a/tools/build/test/free_features_request.py b/tools/build/test/free_features_request.py
new file mode 100644
index 0000000000..e7949d1453
--- /dev/null
+++ b/tools/build/test/free_features_request.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2007.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that a free feature specified on the command line applies to all
+# targets ever built.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+exe hello : hello.cpp foo ;
+lib foo : foo.cpp ;
+""")
+
+t.write("hello.cpp", """\
+extern void foo();
+#ifdef FOO
+int main() { foo(); }
+#endif
+""")
+
+t.write("foo.cpp", """\
+#ifdef FOO
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+#endif
+""")
+
+# If FOO is not defined when compiling the 'foo' target, we will get a link
+# error at this point.
+t.run_build_system(["hello", "define=FOO"])
+
+t.expect_addition("bin/$toolset/debug/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/gcc_runtime.py b/tools/build/test/gcc_runtime.py
new file mode 100644
index 0000000000..bc56eae9fd
--- /dev/null
+++ b/tools/build/test/gcc_runtime.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that on gcc, we correctly report a problem when static runtime is
+# requested for building a shared library.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+t.write("jamroot.jam", "lib hello : hello.cpp ;")
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system(["runtime-link=static"])
+t.expect_output_lines("warning: On gcc, DLLs can not be built with "
+ "'<runtime-link>static'.")
+t.expect_nothing_more()
+
+t.run_build_system(["link=static", "runtime-link=static"])
+binFolder = "bin/$toolset/debug/link-static/runtime-link-static"
+t.expect_addition("%s/hello.obj" % binFolder)
+t.expect_addition("%s/hello.lib" % binFolder)
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/test/generator_selection.py b/tools/build/test/generator_selection.py
new file mode 100755
index 0000000000..87f0df33a9
--- /dev/null
+++ b/tools/build/test/generator_selection.py
@@ -0,0 +1,157 @@
+#!/usr/bin/python
+
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that generators get selected correctly.
+#
+# We do not use the internal C++-compiler CPP --> OBJ generator to avoid
+# problems with specific compilers or their configurations, e.g. IBM's AIX test
+# runner 'AIX Version 5.3 TL7 SP5 (5300-07-05-0831)' using the 'IBM XL C/C++
+# for AIX, V12.1 (Version: 12.01.0000.0000)' reporting errors when run with a
+# source file whose suffix is not '.cpp'.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_generator_added_after_already_building_a_target_of_its_target_type()
+# -------------------------------------------------------------------------
+#
+###############################################################################
+
+def test_generator_added_after_already_building_a_target_of_its_target_type():
+ """
+ Regression test for a Boost Build bug causing it to not use a generator
+ if it got added after already building a target of its target type.
+
+ """
+ t = BoostBuild.Tester()
+
+ t.write("dummy.cpp", "void f() {}\n")
+
+ t.write("jamroot.jam", """\
+import common ;
+import generators ;
+import type ;
+type.register MY_OBJ : my_obj ;
+generators.register-standard common.copy : CPP : MY_OBJ ;
+
+# Building this dummy target must not prevent a later defined CPP target type
+# generator from being recognized as viable.
+my-obj dummy : dummy.cpp ;
+alias the-other-obj : Other//other-obj ;
+""")
+
+ t.write("Other/source.extension", "A dummy source file.")
+
+ t.write("Other/mygen.jam", """\
+import common ;
+import generators ;
+import type ;
+type.register MY_TYPE : extension ;
+generators.register-standard $(__name__).generate-a-cpp-file : MY_TYPE : CPP ;
+rule generate-a-cpp-file { ECHO Generating a CPP file... ; }
+CREATE-FILE = [ common.file-creation-command ] ;
+actions generate-a-cpp-file { $(CREATE-FILE) "$(<)" }
+""")
+
+ t.write("Other/mygen.py", """\
+import b2.build.generators as generators
+import b2.build.type as type
+
+from b2.manager import get_manager
+
+import os
+
+type.register('MY_TYPE', ['extension'])
+generators.register_standard('mygen.generate-a-cpp-file', ['MY_TYPE'], ['CPP'])
+if os.name == 'nt':
+ action = 'echo void g() {} > "$(<)"'
+else:
+ action = 'echo "void g() {}" > "$(<)"'
+def f(*args):
+ print "Generating a CPP file..."
+
+get_manager().engine().register_action("mygen.generate-a-cpp-file", action,
+ function=f)
+""")
+
+ t.write("Other/jamfile.jam", """\
+import mygen ;
+my-obj other-obj : source.extension ;
+""")
+
+ t.run_build_system()
+ t.expect_output_lines("Generating a CPP file...")
+ t.expect_addition("bin/$toolset/debug/dummy.my_obj")
+ t.expect_addition("Other/bin/$toolset/debug/other-obj.cpp")
+ t.expect_addition("Other/bin/$toolset/debug/other-obj.my_obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# test_using_a_derived_source_type_created_after_generator_already_used()
+# -----------------------------------------------------------------------
+#
+###############################################################################
+
+def test_using_a_derived_source_type_created_after_generator_already_used():
+ """
+    Regression test for a Boost Build bug causing it to not use a generator
+    with a source type derived from one of the generator's source types but
+    registered only after the generator had already been used.
+
+ """
+ t = BoostBuild.Tester()
+
+ t.write("dummy.xxx", "Hello. My name is Peter Pan.\n")
+
+ t.write("jamroot.jam", """\
+import common ;
+import generators ;
+import type ;
+type.register XXX : xxx ;
+type.register YYY : yyy ;
+generators.register-standard common.copy : XXX : YYY ;
+
+# Building this dummy target must not prevent a later defined XXX2 target type
+# from being recognized as a viable source type for building YYY targets.
+yyy dummy : dummy.xxx ;
+alias the-test-output : Other//other ;
+""")
+
+ t.write("Other/source.xxx2", "Hello. My name is Tinkerbell.\n")
+
+ t.write("Other/jamfile.jam", """\
+import type ;
+type.register XXX2 : xxx2 : XXX ;
+# We are careful not to do anything between defining our new XXX2 target type
+# and using the XXX --> YYY generator that could potentially cover the Boost
+# Build bug by clearing its internal viable source target type state.
+yyy other : source.xxx2 ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/dummy.yyy")
+ t.expect_addition("Other/bin/$toolset/debug/other.yyy")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+test_generator_added_after_already_building_a_target_of_its_target_type()
+test_using_a_derived_source_type_created_after_generator_already_used()
diff --git a/tools/build/test/generators_test.py b/tools/build/test/generators_test.py
new file mode 100644
index 0000000000..755a391c38
--- /dev/null
+++ b/tools/build/test/generators_test.py
@@ -0,0 +1,433 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import re
+
+
+def test_basic():
+ t = BoostBuild.Tester(pass_d0=False)
+ __write_appender(t, "appender.jam")
+ t.write("a.cpp", "")
+ t.write("b.cxx", "")
+ t.write("c.tui", "")
+ t.write("d.wd", "")
+ t.write("e.cpp", "")
+ t.write("x.l", "")
+ t.write("y.x_pro", "")
+ t.write("z.cpp", "")
+ t.write("lib/c.cpp", "int bar() { return 0; }\n")
+ t.write("lib/jamfile.jam", "my-lib auxilliary : c.cpp ;")
+ t.write("jamroot.jam",
+r"""import appender ;
+
+import "class" : new ;
+import generators ;
+import type ;
+
+
+################################################################################
+#
+# We use our own custom EXE, LIB & OBJ target generators as using the regular
+# ones would force us to have to deal with different compiler/linker specific
+# 'features' that really have nothing to do with this test. For example, IBM XL
+# C/C++ for AIX, V12.1 (Version: 12.01.0000.0000) compiler exits with a non-zero
+# exit code and thus fails our build when run with a source file using an
+# unknown suffix like '.marked_cpp'.
+#
+################################################################################
+
+type.register MY_EXE : my_exe ;
+type.register MY_LIB : my_lib ;
+type.register MY_OBJ : my_obj ;
+
+appender.register compile-c : C : MY_OBJ ;
+appender.register compile-cpp : CPP : MY_OBJ ;
+appender.register link-lib composing : MY_OBJ : MY_LIB ;
+appender.register link-exe composing : MY_OBJ MY_LIB : MY_EXE ;
+
+
+################################################################################
+#
+# LEX --> C
+#
+################################################################################
+
+type.register LEX : l ;
+
+appender.register lex-to-c : LEX : C ;
+
+
+################################################################################
+#
+# /--> tUI_H --\
+# tUI --< >--> CPP
+# \------------/
+#
+################################################################################
+
+type.register tUI : tui ;
+type.register tUI_H : tui_h ;
+
+appender.register ui-to-cpp : tUI tUI_H : CPP ;
+appender.register ui-to-h : tUI : tUI_H ;
+
+
+################################################################################
+#
+# /--> X1 --\
+# X_PRO --< >--> CPP
+# \--> X2 --/
+#
+################################################################################
+
+type.register X1 : x1 ;
+type.register X2 : x2 ;
+type.register X_PRO : x_pro ;
+
+appender.register x1-x2-to-cpp : X1 X2 : CPP ;
+appender.register x-pro-to-x1-x2 : X_PRO : X1 X2 ;
+
+
+################################################################################
+#
+# When the main target type is NM_EXE, build OBJ from CPP-MARKED and not from
+# anything else, e.g. directly from CPP.
+#
+################################################################################
+
+type.register CPP_MARKED : marked_cpp : CPP ;
+type.register POSITIONS : positions ;
+type.register NM.TARGET.CPP : target_cpp : CPP ;
+type.register NM_EXE : : MY_EXE ;
+
+appender.register marked-to-target-cpp : CPP_MARKED : NM.TARGET.CPP ;
+appender.register cpp-to-marked-positions : CPP : CPP_MARKED POSITIONS ;
+
+class nm::target::cpp-obj-generator : generator
+{
+ rule __init__ ( id )
+ {
+ generator.__init__ $(id) : NM.TARGET.CPP : MY_OBJ ;
+ generator.set-rule-name appender.appender ;
+ }
+
+ rule requirements ( )
+ {
+ return <main-target-type>NM_EXE ;
+ }
+
+ rule run ( project name ? : properties * : source : multiple ? )
+ {
+ if [ $(source).type ] = CPP
+ {
+ local converted = [ generators.construct $(project) : NM.TARGET.CPP
+ : $(properties) : $(source) ] ;
+ if $(converted)
+ {
+ return [ generators.construct $(project) : MY_OBJ :
+ $(properties) : $(converted[2]) ] ;
+ }
+ }
+ }
+}
+generators.register [ new nm::target::cpp-obj-generator target-obj ] ;
+generators.override target-obj : all ;
+
+
+################################################################################
+#
+# A more complex test case scenario with the following generators:
+# 1. WHL --> CPP, WHL_LR0, H, H(%_symbols)
+# 2. DLP --> CPP
+# 3. WD --> WHL(%_parser) DLP(%_lexer)
+# 4. A custom generator of higher priority than generators 1. & 2. that helps
+# disambiguate between them when generating CPP files from WHL and DLP
+# sources.
+#
+################################################################################
+
+type.register WHL : whl ;
+type.register DLP : dlp ;
+type.register WHL_LR0 : lr0 ;
+type.register WD : wd ;
+
+local whale-generator-id = [ appender.register whale : WHL : CPP WHL_LR0 H
+ H(%_symbols) ] ;
+local dolphin-generator-id = [ appender.register dolphin : DLP : CPP ] ;
+appender.register wd : WD : WHL(%_parser) DLP(%_lexer) ;
+
+class wd-to-cpp : generator
+{
+ rule __init__ ( id : sources * : targets * )
+ {
+ generator.__init__ $(id) : $(sources) : $(targets) ;
+ }
+
+ rule run ( project name ? : property-set : source )
+ {
+ local new-sources = $(source) ;
+ if ! [ $(source).type ] in WHL DLP
+ {
+ local r1 = [ generators.construct $(project) $(name) : WHL :
+ $(property-set) : $(source) ] ;
+ local r2 = [ generators.construct $(project) $(name) : DLP :
+ $(property-set) : $(source) ] ;
+ new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
+ }
+
+ local result ;
+ for local i in $(new-sources)
+ {
+ local t = [ generators.construct $(project) $(name) : CPP :
+ $(property-set) : $(i) ] ;
+ result += $(t[2-]) ;
+ }
+ return $(result) ;
+ }
+}
+generators.override $(__name__).wd-to-cpp : $(whale-generator-id) ;
+generators.override $(__name__).wd-to-cpp : $(dolphin-generator-id) ;
+generators.register [ new wd-to-cpp $(__name__).wd-to-cpp : : CPP ] ;
+
+
+################################################################################
+#
+# Declare build targets.
+#
+################################################################################
+
+# This should not cause two CPP --> MY_OBJ constructions for a.cpp or b.cpp.
+my-exe a : a.cpp b.cxx obj_1 obj_2 c.tui d.wd x.l y.x_pro lib//auxilliary ;
+my-exe f : a.cpp b.cxx obj_1 obj_2 lib//auxilliary ;
+
+# This should cause two CPP --> MY_OBJ constructions for z.cpp.
+my-obj obj_1 : z.cpp ;
+my-obj obj_2 : z.cpp ;
+
+nm-exe e : e.cpp ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/" * BoostBuild.List("a.my_exe "
+ "a.my_obj b.my_obj c.tui_h c.cpp c.my_obj d_parser.whl d_lexer.dlp "
+ "d_parser.cpp d_lexer.cpp d_lexer.my_obj d_parser.lr0 d_parser.h "
+ "d_parser.my_obj d_parser_symbols.h x.c x.my_obj y.x1 y.x2 y.cpp "
+ "y.my_obj e.marked_cpp e.positions e.target_cpp e.my_obj e.my_exe "
+ "f.my_exe obj_1.my_obj obj_2.my_obj"))
+ t.expect_addition("lib/bin/$toolset/debug/" * BoostBuild.List("c.my_obj "
+ "auxilliary.my_lib"))
+ t.expect_nothing_more()
+
+ folder = "bin/$toolset/debug"
+ t.expect_content_lines("%s/obj_1.my_obj" % folder, " Sources: 'z.cpp'")
+ t.expect_content_lines("%s/obj_2.my_obj" % folder, " Sources: 'z.cpp'")
+ t.expect_content_lines("%s/a.my_obj" % folder, " Sources: 'a.cpp'")
+
+ lines = t.stdout().splitlines()
+ source_lines = [x for x in lines if re.match("^ Sources: '", x)]
+ if not __match_count_is(source_lines, "'z.cpp'", 2):
+ BoostBuild.annotation("failure", "z.cpp must be compiled exactly "
+ "twice.")
+ t.fail_test(1)
+ if not __match_count_is(source_lines, "'a.cpp'", 1):
+ BoostBuild.annotation("failure", "a.cpp must be compiled exactly "
+ "once.")
+ t.fail_test(1)
+ t.cleanup()
+
+
+def test_generated_target_names():
+ """
+ Test generator generated target names. Unless given explicitly, target
+ names should be determined based on their specified source names. All
+ sources for generating a target need to have matching names in order for
+ Boost Build to be able to implicitly determine the target's name.
+
+ We use the following target generation structure with differently named
+ BBX targets:
+ /---> BB1 ---\
+ AAA --<----> BB2 ---->--> CCC --(composing)--> DDD
+ \---> BB3 ---/
+
+ The extra generator at the end is needed because generating a top-level
+    CCC target directly would require us to explicitly specify a name for it.
+ The extra generator needs to be composing in order not to explicitly
+ request a specific name for its CCC source target based on its own target
+ name.
+
+ We also check for a regression where only the first two sources were
+ checked to see if their names match. Note that we need to try out all file
+ renaming combinations as we do not know what ordering Boost Build is going
+ to use when passing in those files as generator sources.
+
+ """
+ jamfile_template = """\
+import type ;
+type.register AAA : _a ;
+type.register BB1 : _b1 ;
+type.register BB2 : _b2 ;
+type.register BB3 : _b3 ;
+type.register CCC : _c ;
+type.register DDD : _d ;
+
+import appender ;
+appender.register aaa-to-bbX : AAA : BB1%s BB2%s BB3%s ;
+appender.register bbX-to-ccc : BB1 BB2 BB3 : CCC ;
+appender.register ccc-to-ddd composing : CCC : DDD ;
+
+ddd _xxx : _xxx._a ;
+"""
+
+ t = BoostBuild.Tester(pass_d0=False)
+ __write_appender(t, "appender.jam")
+ t.write("_xxx._a", "")
+
+ def test_one(t, rename1, rename2, rename3, status):
+ def f(rename):
+ if rename: return "(%_x)"
+ return ""
+
+ jamfile = jamfile_template % (f(rename1), f(rename2), f(rename3))
+ t.write("jamroot.jam", jamfile, wait=False)
+
+ # Remove any preexisting targets left over from a previous test run
+ # so we do not have to be careful about tracking which files have been
+ # newly added and which preexisting ones have only been modified.
+ t.rm("bin")
+
+ t.run_build_system(status=status)
+
+ if status:
+ t.expect_output_lines("*.bbX-to-ccc: source targets have "
+ "different names: cannot determine target name")
+ else:
+ def suffix(rename):
+ if rename: return "_x"
+ return ""
+ name = "bin/$toolset/debug/_xxx"
+ e = t.expect_addition
+ e("%s%s._b1" % (name, suffix(rename1)))
+ e("%s%s._b2" % (name, suffix(rename2)))
+ e("%s%s._b3" % (name, suffix(rename3)))
+ e("%s%s._c" % (name, suffix(rename1 and rename2 and rename3)))
+ e("%s._d" % name)
+ t.expect_nothing_more()
+
+ test_one(t, False, False, False, status=0)
+ test_one(t, True , False, False, status=1)
+ test_one(t, False, True , False, status=1)
+ test_one(t, False, False, True , status=1)
+ test_one(t, True , True , False, status=1)
+ test_one(t, True , False, True , status=1)
+ test_one(t, False, True , True , status=1)
+ test_one(t, True , True , True , status=0)
+ t.cleanup()
+
+
+def __match_count_is(lines, pattern, expected):
+ count = 0
+ for x in lines:
+ if re.search(pattern, x):
+ count += 1
+ if count > expected:
+ return False
+ return count == expected
+
+
+def __write_appender(t, name):
+ t.write(name,
+r"""# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for registering test generators that construct their targets by
+# simply appending their given input data, e.g. list of sources & targets.
+
+import "class" : new ;
+import generators ;
+import modules ;
+import sequence ;
+
+rule register ( id composing ? : source-types + : target-types + )
+{
+ local caller-module = [ CALLER_MODULE ] ;
+ id = $(caller-module).$(id) ;
+ local g = [ new generator $(id) $(composing) : $(source-types) :
+ $(target-types) ] ;
+ $(g).set-rule-name $(__name__).appender ;
+ generators.register $(g) ;
+ return $(id) ;
+}
+
+if [ modules.peek : NT ]
+{
+ X = ")" ;
+ ECHO_CMD = (echo. ;
+}
+else
+{
+ X = \" ;
+ ECHO_CMD = "echo $(X)" ;
+}
+
+local appender-runs ;
+
+# We set up separate actions for building each target in order to avoid having
+# to iterate over them in action (i.e. shell) code. We have to be extra careful
+# though to achieve the exact same effect as if doing all the work in just one
+# action. Otherwise Boost Jam might, under some circumstances, run only some of
+# our actions. To achieve this we register a series of actions for all the
+# targets (since they all have the same target list - either all or none of them
+# get run independent of which target actually needs to get built), each
+# building only a single target. Since all our actions use the same targets, we
+# can not use 'on-target' parameters to pass data to a specific action so we
+# pass them using the second 'sources' parameter which our actions then know how
+# to interpret correctly. This works well since Boost Jam does not automatically
+# add dependency relations between specified action targets & sources and so the
+# second argument, even though most often used to pass in a list of sources, can
+# actually be used for passing in any type of information.
+rule appender ( targets + : sources + : properties * )
+{
+ appender-runs = [ CALC $(appender-runs:E=0) + 1 ] ;
+ local target-index = 0 ;
+ local target-count = [ sequence.length $(targets) ] ;
+ local original-targets ;
+ for t in $(targets)
+ {
+ target-index = [ CALC $(target-index) + 1 ] ;
+ local appender-run = $(appender-runs) ;
+ if $(targets[2])-defined
+ {
+ appender-run += [$(target-index)/$(target-count)] ;
+ }
+ append $(targets) : $(appender-run:J=" ") $(t) $(sources) ;
+ }
+}
+
+actions append
+{
+ $(ECHO_CMD)-------------------------------------------------$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)>> "$(>[2])"
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)>> "$(>[2])"
+ $(ECHO_CMD) Target: '$(>[2])'$(X)
+ $(ECHO_CMD) Target: '$(>[2])'$(X)>> "$(>[2])"
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)>> "$(>[2])"
+ $(ECHO_CMD)=================================================$(X)
+ $(ECHO_CMD)-------------------------------------------------$(X)>> "$(>[2])"
+}
+""")
+
+
+test_basic()
+test_generated_target_names()
diff --git a/tools/build/test/implicit_dependency.py b/tools/build/test/implicit_dependency.py
new file mode 100644
index 0000000000..d6392c93aa
--- /dev/null
+++ b/tools/build/test/implicit_dependency.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that <implicit-dependency> is respected even if the target referred to
+# is not built itself but is only referenced via <implicit-dependency>.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+make a.h : : gen-header ;
+explicit a.h ;
+
+exe hello : hello.cpp : <implicit-dependency>a.h ;
+
+import os ;
+if [ os.name ] = NT
+{
+ actions gen-header
+ {
+ echo int i; > $(<)
+ }
+}
+else
+{
+ actions gen-header
+ {
+ echo "int i;" > $(<)
+ }
+}
+""")
+
+t.write("hello.cpp", """
+#include "a.h"
+int main() { return i; }
+""")
+
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/hello.exe")
+
+t.rm("bin")
+
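+# Repeat the test with the generated header placed in a subdirectory to make
+# sure implicit dependencies on targets with directory components in their
+# names are handled as well.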
+t.write("jamroot.jam", """
+make dir/a.h : : gen-header ;
+explicit dir/a.h ;
+
+exe hello : hello.cpp : <implicit-dependency>dir/a.h ;
+
+import os ;
+if [ os.name ] = NT
+{
+ actions gen-header
+ {
+ echo int i; > $(<)
+ }
+}
+else
+{
+ actions gen-header
+ {
+ echo "int i;" > $(<)
+ }
+}
+""")
+
+t.write("hello.cpp", """
+#include "dir/a.h"
+int main() { return i; }
+""")
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/test/indirect_conditional.py b/tools/build/test/indirect_conditional.py
new file mode 100644
index 0000000000..b466910b3f
--- /dev/null
+++ b/tools/build/test/indirect_conditional.py
@@ -0,0 +1,105 @@
+#!/usr/bin/python
+
+# Copyright (C) 2006. Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_basic():
+ t = BoostBuild.Tester(use_test_config=False)
+
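+    # Each <conditional>@rule receives the target's expanded build properties
+    # and returns extra properties to add. The 'a3' case additionally checks
+    # that properties returned by one conditional rule (a3-rule-2) are visible
+    # to another (a3-rule-1).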
+ t.write("jamroot.jam", """\
+exe a1 : a1.cpp : <conditional>@a1-rule ;
+rule a1-rule ( properties * )
+{
+ if <variant>debug in $(properties)
+ {
+ return <define>OK ;
+ }
+}
+
+exe a2 : a2.cpp : <conditional>@$(__name__).a2-rule
+ <variant>debug:<optimization>speed ;
+rule a2-rule ( properties * )
+{
+ if <optimization>speed in $(properties)
+ {
+ return <define>OK ;
+ }
+}
+
+exe a3 : a3.cpp :
+ <conditional>@$(__name__).a3-rule-1
+ <conditional>@$(__name__).a3-rule-2 ;
+rule a3-rule-1 ( properties * )
+{
+ if <optimization>speed in $(properties)
+ {
+ return <define>OK ;
+ }
+}
+rule a3-rule-2 ( properties * )
+{
+ if <variant>debug in $(properties)
+ {
+ return <optimization>speed ;
+ }
+}
+""")
+
+ t.write("a1.cpp", "#ifdef OK\nint main() {}\n#endif\n")
+ t.write("a2.cpp", "#ifdef OK\nint main() {}\n#endif\n")
+ t.write("a3.cpp", "#ifdef OK\nint main() {}\n#endif\n")
+
+ t.run_build_system()
+
+ t.expect_addition("bin/$toolset/debug/a1.exe")
+ t.expect_addition("bin/$toolset/debug/optimization-speed/a2.exe")
+ t.expect_addition("bin/$toolset/debug/optimization-speed/a3.exe")
+
+ t.cleanup()
+
+
+def test_glob_in_indirect_conditional():
+ """
+ Regression test: project-rules.glob rule run from inside an indirect
+ conditional should report an error as it depends on the 'currently loaded
+ project' concept and indirect conditional rules get called only after all
+ the project modules have already finished loading.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", """\
+use-project /library-example/foo : util/foo ;
+build-project app ;
+""")
+    t.write("app/app.cpp", "int main() {}\n")
+ t.write("app/jamfile.jam", "exe app : app.cpp /library-example/foo//bar ;")
+ t.write("util/foo/bar.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""")
+ t.write("util/foo/jamfile.jam", """\
+rule print-my-sources ( properties * )
+{
+ ECHO My sources: ;
+ ECHO [ glob *.cpp ] ;
+}
+lib bar : bar.cpp : <conditional>@print-my-sources ;
+""")
+
+ t.run_build_system(status=1)
+ t.expect_output_lines(["My sources:", "bar.cpp"], False)
+ t.expect_output_lines("error: Reference to the project currently being "
+ "loaded requested when there was no project module being loaded.")
+
+ t.cleanup()
+
+
+test_basic()
+test_glob_in_indirect_conditional()
diff --git a/tools/build/test/inherit_toolset.py b/tools/build/test/inherit_toolset.py
new file mode 100644
index 0000000000..af18780039
--- /dev/null
+++ b/tools/build/test/inherit_toolset.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester(pass_toolset=0)
+
+t.write("a.cpp", "\n")
+
+t.write("yfc1.jam", """\
+import feature ;
+import generators ;
+
+feature.extend toolset : yfc1 ;
+rule init ( ) { }
+
+generators.register-standard yfc1.compile : CPP : OBJ : <toolset>yfc1 ;
+generators.register-standard yfc1.link : OBJ : EXE : <toolset>yfc1 ;
+
+actions compile { yfc1-compile }
+actions link { yfc1-link }
+""")
+
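+# yfc2 inherits yfc1's generators and compile action via toolset.inherit and
+# overrides only the link action, so linking with yfc2 should run 'yfc2-link'
+# while reusing the rest of the yfc1 definitions.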
+t.write("yfc2.jam", """\
+import feature ;
+import toolset ;
+
+feature.extend toolset : yfc2 ;
+toolset.inherit yfc2 : yfc1 ;
+rule init ( ) { }
+
+actions link { yfc2-link }
+""")
+
+t.write("jamfile.jam", "exe a : a.cpp ;")
+t.write("jamroot.jam", "using yfc1 ;")
+
+t.run_build_system(["-n", "-d2", "yfc1"])
+t.fail_test(string.find(t.stdout(), "yfc1-link") == -1)
+
+# Make sure we do not have to explicitly 'use' yfc1.
+t.write("jamroot.jam", "using yfc2 ;")
+
+t.run_build_system(["-n", "-d2", "yfc2"])
+t.fail_test(string.find(t.stdout(), "yfc2-link") == -1)
+
+t.cleanup()
diff --git a/tools/build/test/inherited_dependency.py b/tools/build/test/inherited_dependency.py
new file mode 100755
index 0000000000..69eefeb8dc
--- /dev/null
+++ b/tools/build/test/inherited_dependency.py
@@ -0,0 +1,237 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+tester = BoostBuild.Tester(use_test_config=False)
+
+
+################################################################################
+#
+# Test without giving the project an explicit id.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.write("test.cpp", """
+#ifdef _WIN32
+ __declspec(dllexport)
+#endif
+void foo() {}
+""")
+
+tester.write("a/test1.cpp", """
+int main() {}
+""")
+
+tester.write("a/jamfile.jam", """
+exe test1 : test1.cpp ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug/test.obj")
+tester.expect_addition("a/bin/$toolset/debug/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Run the same test from the "a" directory.
+#
+################################################################################
+
+tester.run_build_system(subdir="a")
+
+tester.expect_addition("bin/$toolset/debug/test.obj")
+tester.expect_addition("a/bin/$toolset/debug/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# This time, do give the project an id.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project test_project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug/test.obj")
+tester.expect_addition("a/bin/$toolset/debug/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Now, give the project an id in its attributes.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project : id test_project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug/test.obj")
+tester.expect_addition("a/bin/$toolset/debug/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Give the project an id in both ways at once.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+lib test : test.cpp ;
+project test_project1 : id test_project : requirements <library>test ;
+build-project a ;
+""")
+
+tester.run_build_system()
+
+tester.expect_addition("bin/$toolset/debug/test.obj")
+tester.expect_addition("a/bin/$toolset/debug/test1.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Test an absolute path in native format.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+import path ;
+path-constant here : . ;
+current-location = [ path.native [ path.root [ path.make $(here) ] [ path.pwd ]
+ ] ] ;
+project test : requirements <source>$(current-location)/a/test1.cpp ;
+exe test : test.cpp ;
+""")
+
+tester.run_build_system()
+tester.expect_addition("bin/$toolset/debug/test.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Test an absolute path in canonical format.
+#
+################################################################################
+
+tester.write("jamroot.jam", """
+import path ;
+path-constant here : . ;
+current-location = [ path.root [ path.make $(here) ] [ path.pwd ] ] ;
+project test : requirements <source>$(current-location)/a/test1.cpp ;
+exe test : test.cpp ;
+""")
+
+tester.run_build_system()
+tester.expect_addition("bin/$toolset/debug/test.exe")
+
+tester.rm("bin")
+tester.rm("a/bin")
+
+
+################################################################################
+#
+# Test dependency properties (e.g. <source>) whose targets are specified using a
+# relative path.
+#
+################################################################################
+
+# Use jamroot.jam rather than jamfile.jam to avoid inheriting the <source> from
+# the parent, as that would make test3 a source of itself.
+tester.write("b/jamroot.jam", """
+obj test3 : test3.cpp ;
+""")
+
+tester.write("b/test3.cpp", """
+void bar() {}
+""")
+
+tester.write("jamroot.jam", """
+project test : requirements <source>b//test3 ;
+build-project a ;
+""")
+
+tester.write("a/jamfile.jam", """
+exe test : test1.cpp ;
+""")
+
+tester.write("a/test1.cpp", """
+void bar();
+int main() { bar(); }
+""")
+
+tester.run_build_system()
+tester.expect_addition("b/bin/$toolset/debug/test3.obj")
+tester.expect_addition("a/bin/$toolset/debug/test.exe")
+
+tester.rm("bin")
+tester.rm("a")
+tester.rm("jamroot.jam")
+tester.rm("test.cpp")
+
+
+################################################################################
+#
+# Test that source-location is respected.
+#
+################################################################################
+
+tester.write("build/jamroot.jam", """
+project : requirements <source>test.cpp : source-location ../src ;
+""")
+
+tester.write("src/test.cpp", """
+int main() {}
+""")
+
+tester.write("build/a/jamfile.jam", """
+project : source-location ../../a_src ;
+exe test : test1.cpp ;
+""")
+
+tester.write("a_src/test1.cpp", """
+""")
+
+tester.run_build_system(subdir="build/a")
+tester.expect_addition("build/a/bin/$toolset/debug/test.exe")
+
+tester.cleanup()
diff --git a/tools/build/test/inline.py b/tools/build/test/inline.py
new file mode 100644
index 0000000000..f076fde6f3
--- /dev/null
+++ b/tools/build/test/inline.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+project : requirements <link>static ;
+exe a : a.cpp [ lib helper : helper.cpp ] ;
+""")
+
+t.write("a.cpp", """\
+extern void helper();
+int main() {}
+""")
+
+t.write("helper.cpp", "void helper() {}\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/link-static/a__helper.lib")
+t.rm("bin/$toolset/debug/link-static/a__helper.lib")
+
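+# Inline targets can also be requested directly on the command line via their
+# mangled name, <main-target>__<inline-target>.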
+t.run_build_system(["a__helper"])
+t.expect_addition("bin/$toolset/debug/link-static/a__helper.lib")
+
+t.rm("bin")
+
+
+# Now check that inline targets with the same name but present in different
+# places are not confused between each other, and with top-level targets.
+t.write("jamroot.jam", """\
+project : requirements <link>static ;
+exe a : a.cpp [ lib helper : helper.cpp ] ;
+exe a2 : a.cpp [ lib helper : helper.cpp ] ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/link-static/a.exe")
+t.expect_addition("bin/$toolset/debug/link-static/a__helper.lib")
+t.expect_addition("bin/$toolset/debug/link-static/a2__helper.lib")
+
+
+# Check that the 'alias' target does not change the name of inline targets, and
+# that inline targets are explicit.
+t.write("jamroot.jam", """\
+project : requirements <link>static ;
+alias a : [ lib helper : helper.cpp ] ;
+explicit a ;
+""")
+t.rm("bin")
+
+t.run_build_system()
+t.expect_nothing_more()
+
+t.run_build_system(["a"])
+t.expect_addition("bin/$toolset/debug/link-static/helper.lib")
+
+t.cleanup()
diff --git a/tools/build/test/lib_source_property.py b/tools/build/test/lib_source_property.py
new file mode 100644
index 0000000000..8267293980
--- /dev/null
+++ b/tools/build/test/lib_source_property.py
@@ -0,0 +1,45 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test: if a library had no explicit sources, but only <source>
+# properties, it was built as if it were a searched library, and the specified
+# sources were not compiled.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+lib a : : <source>a.cpp ;
+""")
+
+t.write("a.cpp", """
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.obj")
+
+t.rm("bin")
+
+
+# Now try with <conditional>.
+t.write("jamroot.jam", """
+rule test ( properties * )
+{
+ return <source>a.cpp ;
+}
+lib a : : <conditional>@test ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.obj")
+
+t.cleanup()
diff --git a/tools/build/test/libjpeg.py b/tools/build/test/libjpeg.py
new file mode 100755
index 0000000000..dcf81c9b0a
--- /dev/null
+++ b/tools/build/test/libjpeg.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("libjpeg/jpeg.h", 'libjpeg')
+t.write("libjpeg/jpeg.c", 'jpeg')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg : : <source>$(here)/libjpeg ;
+alias libjpeg : /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('jpeg.c', 'jpeg')
+action('-c -x c -I./libjpeg -o $jpeg.o $jpeg.c')
+action('--dll $jpeg.o -o $jpeg.so')
+action('--archive $jpeg.o -o $jpeg.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/libjpeg/mock/debug/jpeg.dll')
+t.expect_addition('bin/standalone/libjpeg/mock/debug/link-static/jpeg.lib')
+
+t.rm('libjpeg')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('jpeg.h.cpp', '#include <jpeg.h>')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=jpeg -o $config.exe')
+action('-c -x c++ $jpeg.h.cpp -o $jpeg.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=jpeg -o $test')
+''')
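+# The $config.exe link is the configure-time check that the jpeg library is
+# usable; the remaining actions compile and link the actual test program.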
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=jpeg -o $config.exe')
+action('-c -x c++ $jpeg.h.cpp -o $jpeg.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=jpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg : : <name>mylibjpeg <include>$(here)/libjpeg <search>$(here)/libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+t.write('libjpeg/jpeg.h', 'libjpeg')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libjpeg --static-lib=mylibjpeg -o $config.exe')
+action('-c -x c++ $test.cpp -I./libjpeg -o $test.o')
+action('$test.o -L./libjpeg --static-lib=mylibjpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libjpeg : : <name>mylibjpeg <include>$(here)/libjpeg <search>$(here)/libjpeg ;
+exe test : test.cpp /libjpeg//libjpeg : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libjpeg --shared-lib=mylibjpeg -o $config.exe')
+action('-c -x c++ $test.cpp -I./libjpeg -o $test.o')
+action('$test.o -L./libjpeg --shared-lib=mylibjpeg -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/tools/build/test/libpng.py b/tools/build/test/libpng.py
new file mode 100755
index 0000000000..3e7e5cd2df
--- /dev/null
+++ b/tools/build/test/libpng.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("libpng/png.h", 'libpng')
+t.write("libpng/png.c", 'png')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng : : <source>$(here)/libpng ;
+alias libpng : /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('png.c', 'png')
+action('-c -x c -I./libpng -o $png.o $png.c')
+action('--dll $png.o -o $png.so')
+action('--archive $png.o -o $png.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/libpng/mock/debug/png.dll')
+t.expect_addition('bin/standalone/libpng/mock/debug/link-static/png.lib')
+
+t.rm('libpng')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('png.h.cpp', '#include <png.h>')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=png -o $config.exe')
+action('-c -x c++ $png.h.cpp -o $png.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=png -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=png -o $config.exe')
+action('-c -x c++ $png.h.cpp -o $png.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=png -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng : : <name>mylibpng <include>$(here)/libpng <search>$(here)/libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+t.write('libpng/png.h', 'libpng')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libpng --static-lib=mylibpng -o $config.exe')
+action('-c -x c++ $test.cpp -I./libpng -o $test.o')
+action('$test.o -L./libpng --static-lib=mylibpng -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libpng : : <name>mylibpng <include>$(here)/libpng <search>$(here)/libpng ;
+exe test : test.cpp /libpng//libpng : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libpng --shared-lib=mylibpng -o $config.exe')
+action('-c -x c++ $test.cpp -I./libpng -o $test.o')
+action('$test.o -L./libpng --shared-lib=mylibpng -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/tools/build/test/library_chain.py b/tools/build/test/library_chain.py
new file mode 100644
index 0000000000..e7c3dcd92f
--- /dev/null
+++ b/tools/build/test/library_chain.py
@@ -0,0 +1,152 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a chain of libraries works correctly, whether we use static or
+# shared linking.
+
+import BoostBuild
+import os
+import string
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Stage the binary, so that it will be relinked without hardcode-dll-paths.
+# That will check that we pass correct -rpath-link, even if not passing -rpath.
+t.write("jamfile.jam", """\
+stage dist : main ;
+exe main : main.cpp b ;
+""")
+
+t.write("main.cpp", """\
+void foo();
+int main() { foo(); }
+""")
+
+t.write("jamroot.jam", "")
+
+t.write("a/a.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+gee() {}
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+geek() {}
+""")
+
+t.write("a/jamfile.jam", "lib a : a.cpp ;")
+
+t.write("b/b.cpp", """\
+void geek();
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() { geek(); }
+""")
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../a//a ;")
+
+t.run_build_system(["-d2"], stderr=None)
+t.expect_addition("bin/$toolset/debug/main.exe")
+t.rm(["bin", "a/bin", "b/bin"])
+
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static/main.exe")
+t.rm(["bin", "a/bin", "b/bin"])
+
+
+# Check that <library> works for static linking.
+t.write("b/jamfile.jam", "lib b : b.cpp : <library>../a//a ;")
+
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static/main.exe")
+
+t.rm(["bin", "a/bin", "b/bin"])
+
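+# Build 'b' statically while explicitly requesting the shared variant of 'a'.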
+t.write("b/jamfile.jam", "lib b : b.cpp ../a//a/<link>shared : <link>static ;")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/main.exe")
+
+t.rm(["bin", "a/bin", "b/bin"])
+
+
+# Test that putting a library in sources of a searched library works.
+t.write("jamfile.jam", """\
+exe main : main.cpp png ;
+lib png : z : <name>png ;
+lib z : : <name>zzz ;
+""")
+
+t.run_build_system(["-a", "-d+2"], status=None, stderr=None)
+# Try to find the "zzz" string either in response file (for Windows compilers),
+# or in the standard output.
+rsp = t.adjust_names("bin/$toolset/debug/main.exe.rsp")[0]
+if os.path.exists(rsp) and ( string.find(open(rsp).read(), "zzz") != -1 ):
+ pass
+elif string.find(t.stdout(), "zzz") != -1:
+ pass
+else:
+ t.fail_test(1)
+
+# Test main -> libb -> liba chain in the case where liba is a file and not a
+# Boost.Build target.
+t.rm(".")
+
+t.write("jamroot.jam", "")
+t.write("a/jamfile.jam", """\
+lib a : a.cpp ;
+install dist : a ;
+""")
+
+t.write("a/a.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+void a() {}
+""")
+
+t.run_build_system(subdir="a")
+t.expect_addition("a/dist/a.dll")
+
+if ( os.name == 'nt' or os.uname()[0].lower().startswith('cygwin') ) and \
+ BoostBuild.get_toolset() != 'gcc':
+ # This is a Windows import library -- we know the exact name.
+ file = "a/dist/a.lib"
+else:
+ file = t.adjust_names("a/dist/a.dll")[0]
+
+t.write("b/jamfile.jam", "lib b : b.cpp ../%s ;" % file)
+
+t.write("b/b.cpp", """\
+#if defined(_WIN32)
+__declspec(dllimport)
+#endif
+void a();
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+void b() { a(); }
+""")
+
+t.write("jamroot.jam", "exe main : main.cpp b//b ;")
+
+t.write("main.cpp", """\
+#if defined(_WIN32)
+__declspec(dllimport)
+#endif
+void b();
+int main() { b(); }
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/main.exe")
+
+t.cleanup()
diff --git a/tools/build/test/library_order.py b/tools/build/test/library_order.py
new file mode 100644
index 0000000000..188f533ab3
--- /dev/null
+++ b/tools/build/test/library_order.py
@@ -0,0 +1,94 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that, on compilers sensitive to library ordering on the linker command
+# line, we generate the correct order.
+
+import BoostBuild
+
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("main.cpp", """\
+void a();
+int main() { a(); }
+""")
+
+t.write("a.cpp", """\
+void b();
+void a() { b(); }
+""")
+
+t.write("b.cpp", """\
+void c();
+void b() { c(); }
+""")
+
+t.write("c.cpp", """\
+void d();
+void c() { d(); }
+""")
+
+t.write("d.cpp", """\
+void d() {}
+""")
+
+# The order of libraries in 'main' is crafted so that we get an error unless we
+# do something about the order ourselves.
+t.write("jamroot.jam", """\
+exe main : main.cpp libd libc libb liba ;
+lib libd : d.cpp ;
+lib libc : c.cpp : <link>static <use>libd ;
+lib libb : b.cpp : <use>libc ;
+lib liba : a.cpp : <use>libb ;
+""")
+
+t.run_build_system(["-d2"])
+t.expect_addition("bin/$toolset/debug/main.exe")
+
+
+# Test the order between searched libraries.
+t.write("jamroot.jam", """\
+exe main : main.cpp png z ;
+lib png : z : <name>png ;
+lib z : : <name>zzz ;
+""")
+
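+# Since png depends on z (named zzz), "png" must appear before "zzz" on the
+# generated linker command line.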
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("png") > t.stdout().find("zzz"))
+
+t.write("jamroot.jam", """\
+exe main : main.cpp png z ;
+lib png : : <name>png ;
+lib z : png : <name>zzz ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("png") < t.stdout().find("zzz"))
+
+
+# Test the order between prebuilt libraries.
+t.write("first.a", "")
+t.write("second.a", "")
+t.write("jamroot.jam", """\
+exe main : main.cpp first second ;
+lib first : second : <file>first.a ;
+lib second : : <file>second.a ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("first") > t.stdout().find("second"))
+
+t.write("jamroot.jam", """
+exe main : main.cpp first second ;
+lib first : : <file>first.a ;
+lib second : first : <file>second.a ;
+""")
+
+t.run_build_system(["-a", "-n", "-d+2"])
+t.fail_test(t.stdout().find("first") < t.stdout().find("second"))
+
+t.cleanup()
diff --git a/tools/build/test/library_property.py b/tools/build/test/library_property.py
new file mode 100644
index 0000000000..b7c24c83b0
--- /dev/null
+++ b/tools/build/test/library_property.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the <library> property has no effect on "obj" targets. Previously,
+# it affected all targets, so
+#
+# project : requirements <library>foo ;
+# exe a : a.cpp helper ;
+# obj helper : helper.cpp : <optimization>off ;
+#
+# caused 'foo' to be built with and without optimization.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+project : requirements <library>lib//x ;
+exe a : a.cpp foo ;
+obj foo : foo.cpp : <variant>release ;
+""")
+
+t.write("a.cpp", """
+void aux();
+int main() { aux(); }
+""")
+
+t.write("foo.cpp", """
+void gee();
+void aux() { gee(); }
+""")
+
+t.write("lib/x.cpp", """
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+gee() {}
+""")
+
+t.write("lib/jamfile.jam", """
+lib x : x.cpp ;
+""")
+
+t.write("lib/jamroot.jam", """
+""")
+
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.exe")
+t.expect_nothing("lib/bin/$toolset/release/x.obj")
+
+t.cleanup()
diff --git a/tools/build/test/libtiff.py b/tools/build/test/libtiff.py
new file mode 100755
index 0000000000..cb0d07b0ff
--- /dev/null
+++ b/tools/build/test/libtiff.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("libtiff/tiff.h", 'libtiff')
+t.write("libtiff/tiff.c", 'tiff')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff : : <source>$(here)/libtiff ;
+alias libtiff : /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('tiff.c', 'tiff')
+action('-c -x c -I./libtiff -o $tiff.o $tiff.c')
+action('--dll $tiff.o -o $tiff.so')
+action('--archive $tiff.o -o $tiff.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/libtiff/mock/debug/tiff.dll')
+t.expect_addition('bin/standalone/libtiff/mock/debug/link-static/tiff.lib')
+
+t.rm('libtiff')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('tiff.h.cpp', '#include <tiff.h>')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=tiff -o $config.exe')
+action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=tiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=tiff -o $config.exe')
+action('-c -x c++ $tiff.h.cpp -o $tiff.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=tiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+t.write('libtiff/tiff.h', 'libtiff')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libtiff --static-lib=mylibtiff -o $config.exe')
+action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
+action('$test.o -L./libtiff --static-lib=mylibtiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using libtiff : : <name>mylibtiff <include>$(here)/libtiff <search>$(here)/libtiff ;
+exe test : test.cpp /libtiff//libtiff : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./libtiff --shared-lib=mylibtiff -o $config.exe')
+action('-c -x c++ $test.cpp -I./libtiff -o $test.o')
+action('$test.o -L./libtiff --shared-lib=mylibtiff -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/tools/build/test/link.py b/tools/build/test/link.py
new file mode 100755
index 0000000000..0d63615b78
--- /dev/null
+++ b/tools/build/test/link.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the link-directory rule used to create the
+# common boost/ directory in the new git layout.
+
+import BoostBuild
+
+def ignore_config(t):
+    """These files are created by the configuration logic in link.jam.
+    They may or may not exist, depending on the system."""
+ t.ignore("bin/symlink/test-hardlink")
+ t.ignore("bin/test-hardlink-source")
+ t.ignore("bin/test-symlink")
+ t.ignore("bin/test-symlink-source")
+
+def test_basic():
+ """Test creation of a single link"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+
+ t.run_build_system()
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ ignore_config(t)
+ t.expect_nothing_more()
+ t.cleanup()
+
+def test_merge_two():
+ """Test merging two directories"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+
+ t.run_build_system()
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ t.expect_addition("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ ignore_config(t)
+ t.expect_nothing_more()
+ t.cleanup()
+
+def test_merge_existing():
+ """Test adding a link when a different symlink already exists"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+
+ t.run_build_system(["dir1-link"])
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.run_build_system(["dir2-link"])
+
+ t.expect_addition("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ # If include is a symlink to src/dir1/include, then
+ # we have to delete it and add a directory.
+ t.ignore_removal("include/file1.h")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_merge_recursive():
+ "Test merging several directories including common prefixes"
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ link-directory dir3-link : src/dir3/include : <location>. ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "file1")
+ t.write("src/dir2/include/file2.h", "file2")
+ t.write("src/dir2/include/nested/file3.h", "file3")
+ t.write("src/dir3/include/nested/file4.h", "file4")
+
+ t.run_build_system()
+
+ t.expect_addition("include/file1.h")
+ t.expect_content("include/file1.h", "file1")
+ t.expect_addition("include/file2.h")
+ t.expect_content("include/file2.h", "file2")
+ t.expect_addition("include/nested/file3.h")
+ t.expect_content("include/nested/file3.h", "file3")
+ t.expect_addition("include/nested/file4.h")
+ t.expect_content("include/nested/file4.h", "file4")
+ ignore_config(t)
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_include_scan():
+ """Make sure that the #include scanner finds the headers"""
+ t = BoostBuild.Tester()
+ t.write("jamroot.jam", """\
+ import link ;
+ link-directory dir1-link : src/dir1/include : <location>. ;
+ link-directory dir2-link : src/dir2/include : <location>. ;
+ obj test : test.cpp :
+ <include>include
+ <implicit-dependency>dir1-link
+ <implicit-dependency>dir2-link ;
+ """)
+
+ t.write("src/dir1/include/file1.h", "#include <file2.h>\n")
+ t.write("src/dir2/include/file2.h", "int f();\n")
+ t.write("test.cpp", """\
+ #include <file1.h>
+ int main() { f(); }
+    """)
+
+ t.run_build_system(["test"])
+
+ t.expect_addition("bin/$toolset/debug/test.obj")
+
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+test_basic()
+test_merge_two()
+test_merge_existing()
+test_merge_recursive()
+test_include_scan()
diff --git a/tools/build/v2/test/load_dir.py b/tools/build/test/load_dir.py
index 09a59f9519..09a59f9519 100644
--- a/tools/build/v2/test/load_dir.py
+++ b/tools/build/test/load_dir.py
diff --git a/tools/build/test/load_order.py b/tools/build/test/load_order.py
new file mode 100644
index 0000000000..c35ca8da38
--- /dev/null
+++ b/tools/build/test/load_order.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we load parent projects before loading children.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+use-project /child : child ;
+ECHO "Setting parent requirements" ;
+project : requirements <define>PASS_THE_TEST ;
+alias x : child//main ;
+""")
+
+t.write("child/jamfile.jam", """\
+ECHO "Setting child requirements" ;
+project /child ;
+exe main : main.cpp ;
+""")
+
+t.write("child/main.cpp", """\
+#if defined(PASS_THE_TEST)
+int main() {}
+#endif
+""")
+
+t.run_build_system()
+
+t.expect_addition("child/bin/$toolset/debug/main.exe")
+t.fail_test(t.stdout().find("Setting child requirements") < t.stdout().find(
+ "Setting parent requirements"))
+
+
+# Regression test: parent requirements were ignored in some cases.
+t.rm(".")
+t.write("jamroot.jam", "build-project src ;")
+t.write("src/jamfile.jam", "project : requirements <define>EVERYTHING_OK ;")
+t.write("src/app/jamfile.jam", "exe test : test.cpp ;")
+t.write("src/app/test.cpp", """\
+#ifdef EVERYTHING_OK
+int main() {}
+#endif
+""")
+
+t.run_build_system(subdir="src/app")
+t.expect_addition("src/app/bin/$toolset/debug/test.exe")
+
+
+# child/child2 used to be loaded before child
+t.rm(".")
+t.write("jamroot.jam", """\
+use-project /child/child2 : child/child2 ;
+rule parent-rule ( )
+{
+ ECHO "Running parent-rule" ;
+}
+""")
+t.write("child/jamfile.jam", "")
+t.write("child/child1/jamfile.jam", "")
+t.write("child/child2/jamfile.jam", "parent-rule ;")
+
+t.run_build_system(subdir="child/child1")
+t.expect_output_lines("Running parent-rule")
+
+t.cleanup()
diff --git a/tools/build/test/loop.py b/tools/build/test/loop.py
new file mode 100644
index 0000000000..30b0690016
--- /dev/null
+++ b/tools/build/test/loop.py
@@ -0,0 +1,24 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """\
+lib main : main.cpp l ;
+lib l : l.cpp main ;
+""")
+
+t.write("main.cpp", "")
+t.write("l.cpp", "")
+
+t.run_build_system(["--no-error-backtrace"], status=1)
+t.fail_test(string.find(t.stdout(),
+ "error: Recursion in main target references") == -1)
+
+t.cleanup()
diff --git a/tools/build/test/make_rule.py b/tools/build/test/make_rule.py
new file mode 100644
index 0000000000..f13bdb4cdb
--- /dev/null
+++ b/tools/build/test/make_rule.py
@@ -0,0 +1,54 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'make' rule.
+
+import BoostBuild
+import string
+
+t = BoostBuild.Tester(pass_toolset=1)
+
+t.write("jamroot.jam", """\
+import feature ;
+feature.feature test_feature : : free ;
+
+import toolset ;
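+# Make the value of <test_feature> available to the 'creator' action as
+# $(STRING).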
+toolset.flags creator STRING : <test_feature> ;
+
+actions creator
+{
+ echo $(STRING) > $(<)
+}
+
+make foo.bar : : creator : <test_feature>12345678 ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/foo.bar")
+t.fail_test(string.find(t.read("bin/$toolset/debug/foo.bar"), "12345678") == -1)
+
+
+# Regression test. Make sure that if a main target is requested twice, with
+# build requests differing only in incidental properties, the main target is
+# created only once. The bug was discovered by Kirill Lapshin.
+t.write("jamroot.jam", """\
+exe a : dir//hello1.cpp ;
+exe b : dir//hello1.cpp/<hardcode-dll-paths>true ;
+""")
+
+t.write("dir/jamfile.jam", """\
+import common ;
+make hello1.cpp : hello.cpp : common.copy ;
+""")
+
+t.write("dir/hello.cpp", "int main() {}\n")
+
+# Show only action names.
+t.run_build_system(["-d1", "-n"])
+t.fail_test(t.stdout().count("copy") != 1)
+
+t.cleanup()
diff --git a/tools/build/test/message.py b/tools/build/test/message.py
new file mode 100755
index 0000000000..f0f0d3152a
--- /dev/null
+++ b/tools/build/test/message.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2003.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Basic test for the 'message' rule.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("Jamroot.jam", """
+project
+ :
+ : usage-requirements <define>TEST=1
+ : default-build <link>static
+;
+message hello : "Hello World!" ;
+alias hello : : <link>shared ;
+obj test : test.cpp hello : <link>static ;
+""")
+
+t.write("test.cpp", """
+#ifndef TEST
+#error TEST not defined
+#endif
+""")
+
+t.run_build_system(["test"], stdout="""Hello World!
+""")
+
+t.expect_addition("bin/$toolset/link-static/test.obj")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/test/module_actions.py b/tools/build/test/module_actions.py
new file mode 100644
index 0000000000..33c563526e
--- /dev/null
+++ b/tools/build/test/module_actions.py
@@ -0,0 +1,105 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Demonstration that module variables have the correct effect in actions.
+
+import BoostBuild
+import os
+import re
+
+t = BoostBuild.Tester(["-d+1"], pass_toolset=0)
+
+t.write("boost-build.jam", "boost-build . ;")
+t.write("bootstrap.jam", """\
+# Top-level rule causing a target to be built by invoking the specified action.
+rule make ( target : sources * : act )
+{
+ DEPENDS all : $(target) ;
+ DEPENDS $(target) : $(sources) ;
+ $(act) $(target) : $(sources) ;
+}
+
+X1 = X1-global ;
+X2 = X2-global ;
+X3 = X3-global ;
+
+module A
+{
+ X1 = X1-A ;
+
+ rule act ( target )
+ {
+ NOTFILE $(target) ;
+ ALWAYS $(target) ;
+ }
+
+ actions act { echo A.act $(<): $(X1) $(X2) $(X3) }
+
+ make t1 : : A.act ;
+ make t2 : : A.act ;
+ make t3 : : A.act ;
+}
+
+module B
+{
+ X2 = X2-B ;
+
+ actions act { echo B.act $(<): $(X1) $(X2) $(X3) }
+
+ make t1 : : B.act ;
+ make t2 : : B.act ;
+ make t3 : : B.act ;
+}
+
+actions act { echo act $(<): $(X1) $(X2) $(X3) }
+
+make t1 : : act ;
+make t2 : : act ;
+make t3 : : act ;
+
+X1 on t1 = X1-t1 ;
+X2 on t2 = X2-t2 ;
+X3 on t3 = X3-t3 ;
+
+DEPENDS all : t1 t2 t3 ;
+""")
+
+expected_lines = [
+ "...found 4 targets...",
+ "...updating 3 targets...",
+ "A.act t1",
+ "A.act t1: X1-t1 ",
+ "B.act t1",
+ "B.act t1: X1-t1 X2-B ",
+ "act t1",
+ "act t1: X1-t1 X2-global X3-global ",
+ "A.act t2",
+ "A.act t2: X1-A X2-t2 ",
+ "B.act t2",
+ "B.act t2: X2-t2 ",
+ "act t2",
+ "act t2: X1-global X2-t2 X3-global ",
+ "A.act t3",
+ "A.act t3: X1-A X3-t3 ",
+ "B.act t3",
+ "B.act t3: X2-B X3-t3 ",
+ "act t3",
+ "act t3: X1-global X2-global X3-t3 ",
+ "...updated 3 targets...",
+ ""]
+
+# Account for the fact that on Unix, a call to 'echo 1 2 3 ' produces '1 2 3'
+# (note the spacing).
+if os.name != 'nt':
+ expected_lines = [re.sub(" +", " ", x.rstrip()) for x in expected_lines]
+
+t.run_build_system()
+t.expect_output_lines(expected_lines)
+t.expect_nothing_more()
+t.cleanup()
diff --git a/tools/build/test/ndebug.py b/tools/build/test/ndebug.py
new file mode 100644
index 0000000000..87fbc6c6de
--- /dev/null
+++ b/tools/build/test/ndebug.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that building with optimization brings in the NDEBUG define and, more
+# importantly, that dependency targets are built with NDEBUG as well, even if
+# they are not directly requested.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "exe hello : hello.cpp lib//lib1 ;")
+t.write("hello.cpp", """\
+#ifdef NDEBUG
+void foo();
+int main() { foo(); }
+#endif
+""")
+t.write("lib/jamfile.jam", "lib lib1 : lib1.cpp ;")
+t.write("lib/lib1.cpp", """\
+#ifdef NDEBUG
+void foo() {}
+#endif
+""")
+
+# 'release' builds should get the NDEBUG define. We use static linking to avoid
+# messing with imports/exports on Windows.
+t.run_build_system(["link=static", "release"])
+
+t.cleanup()
diff --git a/tools/build/test/no_type.py b/tools/build/test/no_type.py
new file mode 100644
index 0000000000..0384ec6043
--- /dev/null
+++ b/tools/build/test/no_type.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+# Copyright 2002 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we cannot specify targets of unknown type as sources. This is based
+# on the fact that Unix 'ar' will happily consume just about anything.
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", "static-lib a : a.foo ;")
+t.write("a.foo", "")
+
+t.run_build_system(status=1)
+
+t.cleanup()
diff --git a/tools/build/test/notfile.py b/tools/build/test/notfile.py
new file mode 100644
index 0000000000..10205f6ad7
--- /dev/null
+++ b/tools/build/test/notfile.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Basic tests for the 'notfile' rule.
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester()
+
+t.write("jamroot.jam", """\
+import notfile ;
+notfile say : "echo hi" ;
+exe hello : hello.cpp ;
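+# The @valgrind action receives the built 'hello' executable as its source.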
+notfile hello_valgrind : @valgrind : hello ;
+actions valgrind { valgrind $(>[1]) }
+""")
+
+t.write("hello.cpp", """\
+#include <iostream>
+int main() { std::cout << "Hello!\\n"; }
+""")
+
+t.run_build_system(["-n", "-d+2"])
+
+t.fail_test(t.stdout().find("echo hi") == -1)
+
+name = t.adjust_names("bin/$toolset/debug/hello.exe")[0]
+name = os.path.join(*name.split("/"))
+t.expect_output_lines(" valgrind *%s " % name)
+
+t.cleanup()
diff --git a/tools/build/test/ordered_include.py b/tools/build/test/ordered_include.py
new file mode 100644
index 0000000000..2ce955c0da
--- /dev/null
+++ b/tools/build/test/ordered_include.py
@@ -0,0 +1,173 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2008 Steven Watanabe
+#
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_basic():
+ tester = BoostBuild.Tester(use_test_config=False)
+ tester.write("jamroot.jam", """
+ obj test : test.cpp : <include>a&&b ;
+ """)
+
+ tester.write("test.cpp", """
+ #include <test1.hpp>
+ #include <test2.hpp>
+ int main() {}
+ """)
+
+ tester.write("a/test1.hpp", """
+ """)
+
+ tester.write("b/test2.hpp", """
+ """)
+
+ tester.run_build_system()
+
+ tester.expect_addition("bin/$toolset/debug/test.obj")
+
+ # Check that the dependencies are correct
+ tester.touch("a/test1.hpp")
+ tester.run_build_system()
+ tester.expect_touch("bin/$toolset/debug/test.obj")
+
+ tester.touch("b/test2.hpp")
+ tester.run_build_system()
+ tester.expect_touch("bin/$toolset/debug/test.obj")
+
+ tester.cleanup()
+
+def test_order1():
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("jamroot.jam", """
+ obj test : test.cpp : <include>a&&b ;
+ """)
+ t.write("test.cpp", """
+ #include <test.h>
+ int main() {}
+ """)
+ t.write("a/test.h", """
+ """)
+ t.write("b/test.h", """
+ #error should find a/test.h
+ """)
+ t.run_build_system()
+
+ t.touch("a/test.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("b/test.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_order2():
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("jamroot.jam", """
+ obj test : test.cpp : <include>b&&a ;
+ """)
+ t.write("test.cpp", """
+ #include <test.h>
+ int main() {}
+ """)
+ t.write("a/test.h", """
+ #error should find b/test.h
+ """)
+ t.write("b/test.h", """
+ """)
+ t.run_build_system()
+
+ t.touch("a/test.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("b/test.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/test.obj")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+def test_order_graph():
+ t = BoostBuild.Tester(use_test_config=False)
+ t.write("jamroot.jam", """
+ obj test : test.cpp :
+ <include>b&&a
+ <include>c&&b
+ <include>a
+ <include>c
+ <include>b
+ <include>e&&b&&d
+ ;
+ """)
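+    # The ordering constraints above are: b before a, c before b, e before b,
+    # and b before d. Each header below verifies one of them.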
+ t.write("test.cpp", """
+ #include <test1.h>
+ #include <test2.h>
+ #include <test3.h>
+ #include <test4.h>
+ int main() {}
+ """)
+ t.write("b/test1.h", "")
+ t.write("a/test1.h", "#error should find b/test1.h\n")
+
+ t.write("c/test2.h", "")
+ t.write("b/test2.h", "#error should find c/test2.h\n")
+
+ t.write("e/test3.h", "")
+ t.write("b/test3.h", "#error should find e/test3.h\n")
+
+ t.write("b/test4.h", "")
+ t.write("d/test4.h", "#error should find b/test4.h\n")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/test.obj")
+
+ t.touch("b/test1.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("a/test1.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("c/test2.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("b/test2.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("e/test3.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("b/test3.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.touch("b/test4.h")
+ t.run_build_system()
+ t.expect_touch("bin/$toolset/debug/test.obj")
+ t.expect_nothing_more()
+
+ t.touch("d/test4.h")
+ t.run_build_system()
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+test_basic()
+test_order1()
+test_order2()
+test_order_graph()
diff --git a/tools/build/v2/test/ordered_properties.py b/tools/build/test/ordered_properties.py
index 58ea5a9f1f..58ea5a9f1f 100644
--- a/tools/build/v2/test/ordered_properties.py
+++ b/tools/build/test/ordered_properties.py
diff --git a/tools/build/test/out_of_tree.py b/tools/build/test/out_of_tree.py
new file mode 100644
index 0000000000..b655291515
--- /dev/null
+++ b/tools/build/test/out_of_tree.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that we can build a project when the current directory is outside of
+# that project's tree, i.e. that 'bjam some_dir' works.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+# Create the needed files.
+t.write("p1/jamroot.jam", "exe hello : hello.cpp ;")
+t.write("p1/hello.cpp", "int main() {}\n")
+t.write("p2/jamroot.jam", """\
+exe hello2 : hello.cpp ;
+exe hello3 : hello.cpp ;
+""")
+t.write("p2/hello.cpp", "int main() {}\n")
+
+t.run_build_system(["p1", "p2//hello3"])
+t.expect_addition("p1/bin/$toolset/debug/hello.exe")
+t.expect_addition("p2/bin/$toolset/debug/hello3.exe")
+
+t.cleanup()
diff --git a/tools/build/test/path_features.py b/tools/build/test/path_features.py
new file mode 100644
index 0000000000..774c161969
--- /dev/null
+++ b/tools/build/test/path_features.py
@@ -0,0 +1,152 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+def test_basic():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "lib a : a.cpp : <include>. ;")
+ t.write("a.cpp", """\
+#include <a.h>
+void
+# ifdef _WIN32
+__declspec(dllexport)
+# endif
+foo() {}
+""")
+ t.write("a.h", "//empty file\n")
+ t.write("d/jamfile.jam", "exe b : b.cpp ..//a ;")
+ t.write("d/b.cpp", """\
+void foo();
+int main() { foo(); }
+""")
+ t.run_build_system(subdir="d")
+
+ # Path features with condition.
+ t.write("jamroot.jam", "lib a : a.cpp : <variant>debug:<include>. ;")
+ t.rm("bin")
+ t.run_build_system(subdir="d")
+
+
+ # Path features with condition in usage requirements.
+ t.write("jamroot.jam", """\
+lib a : a.cpp : <include>. : : <variant>debug:<include>. ;
+""")
+ t.write("d/b.cpp", """\
+#include <a.h>
+void foo();
+int main() { foo(); }
+""")
+ t.rm("d/bin")
+ t.run_build_system(subdir="d")
+
+ t.cleanup()
+
+
+def test_absolute_paths():
+ """
+    Test that absolute paths inside requirements are handled correctly. The
+    problems appeared only when building targets in subprojects.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "build-project x ;")
+ t.write("x/jamfile.jam", """\
+local pwd = [ PWD ] ;
+project : requirements <include>$(pwd)/x/include ;
+exe m : m.cpp : <include>$(pwd)/x/include2 ;
+""")
+ t.write("x/m.cpp", """\
+#include <h1.hpp>
+#include <h2.hpp>
+int main() {}
+""")
+ t.write("x/include/h1.hpp", "\n")
+ t.write("x/include2/h2.hpp", "\n")
+
+ t.run_build_system()
+ t.expect_addition("x/bin/$toolset/debug/m.exe")
+
+ t.cleanup()
+
+
+def test_ordered_paths():
+ """Test that "&&" in path features is handled correctly."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "build-project sub ;")
+ t.write("sub/jamfile.jam", "exe a : a.cpp : <include>../h1&&../h2 ;")
+ t.write("sub/a.cpp", """\
+#include <header.h>
+int main() { return OK; }
+""")
+ t.write("h2/header.h", "int const OK = 0;\n")
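+    # Only ../h2 provides header.h, so the build succeeds only if the second
+    # directory in the ordered <include> path is actually used.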
+ t.run_build_system()
+ t.expect_addition("sub/bin/$toolset/debug/a.exe")
+
+ t.cleanup()
+
+
+def test_paths_set_by_indirect_conditionals():
+ t = BoostBuild.Tester(pass_d0=False, use_test_config=False)
+
+ header = "child_dir/folder_to_include/some_header.h"
+
+ t.write("jamroot.jam", "build-project child_dir ;")
+ t.write("child_dir/jamfile.jam", """\
+import remote/remote ;
+
+# Setting the <include>folder_to_include property directly would work; here it
+# is added through local and remote indirect conditional rules instead.
+obj x1 : x.cpp : <conditional>@attach-include-local ;
+obj x2 : x.cpp : <conditional>@remote/remote.attach-include-remote ;
+
+rule attach-include-local ( properties * )
+{
+ return <include>folder_to_include ;
+}
+""")
+ t.write("child_dir/remote/remote.jam", """\
+rule attach-include-remote ( properties * )
+{
+ return <include>folder_to_include ;
+}
+""")
+ t.write("child_dir/x.cpp", """\
+#include <some_header.h>
+int main() {}
+""")
+ t.write(header, "int some_func();\n")
+ t.write("child_dir/folder_to_include/jamfile.jam", "")
+
+ expected_x1 = "child_dir/bin/$toolset/debug/x1.obj"
+ expected_x2 = "child_dir/bin/$toolset/debug/x2.obj"
+
+ t.run_build_system()
+ t.expect_addition(expected_x1)
+ t.expect_addition(expected_x2)
+
+ t.touch(header)
+ t.run_build_system(subdir="child_dir")
+ t.expect_touch(expected_x1)
+ t.expect_touch(expected_x2)
+
+ t.touch(header)
+ t.run_build_system(["..", "-d2"], subdir="child_dir/folder_to_include")
+ t.expect_touch(expected_x1)
+ t.expect_touch(expected_x2)
+
+ t.cleanup()
+
+
+test_basic()
+test_absolute_paths()
+test_ordered_paths()
+test_paths_set_by_indirect_conditionals() \ No newline at end of file
diff --git a/tools/build/v2/test/pch.py b/tools/build/test/pch.py
index d36260a55f..d36260a55f 100644
--- a/tools/build/v2/test/pch.py
+++ b/tools/build/test/pch.py
diff --git a/tools/build/test/prebuilt.py b/tools/build/test/prebuilt.py
new file mode 100644
index 0000000000..5fe46f4657
--- /dev/null
+++ b/tools/build/test/prebuilt.py
@@ -0,0 +1,43 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can use already built sources
+
+import BoostBuild
+
+t = BoostBuild.Tester(["debug", "release"], use_test_config=False)
+
+t.set_tree('prebuilt')
+
+t.expand_toolset("ext/jamroot.jam")
+t.expand_toolset("jamroot.jam")
+
+# First, build the external project.
+t.run_build_system(subdir="ext")
+
+# Then pretend that we do not have the sources for the external project, and
+# can only use compiled binaries.
+t.copy("ext/jamfile2.jam", "ext/jamfile.jam")
+t.expand_toolset("ext/jamfile.jam")
+
+# Now check that we can build the main project and that the correct prebuilt
+# file is picked depending on the variant. This also checks that the correct
+# includes for prebuilt libraries are used.
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/hello.exe")
+t.expect_addition("bin/$toolset/release/hello.exe")
+
+t.rm("bin")
+
+
+# Now test that a prebuilt file specified by an absolute name works too.
+t.copy("ext/jamfile3.jam", "ext/jamfile.jam")
+t.expand_toolset("ext/jamfile.jam")
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/hello.exe")
+t.expect_addition("bin/$toolset/release/hello.exe")
+
+t.cleanup()
diff --git a/tools/build/v2/test/prebuilt/ext/a.cpp b/tools/build/test/prebuilt/ext/a.cpp
index c49a041533..c49a041533 100644
--- a/tools/build/v2/test/prebuilt/ext/a.cpp
+++ b/tools/build/test/prebuilt/ext/a.cpp
diff --git a/tools/build/v2/test/prebuilt/ext/debug/a.h b/tools/build/test/prebuilt/ext/debug/a.h
index 31b3182265..31b3182265 100644
--- a/tools/build/v2/test/prebuilt/ext/debug/a.h
+++ b/tools/build/test/prebuilt/ext/debug/a.h
diff --git a/tools/build/v2/test/prebuilt/ext/jamfile.jam b/tools/build/test/prebuilt/ext/jamfile.jam
index e563f0d741..e563f0d741 100644
--- a/tools/build/v2/test/prebuilt/ext/jamfile.jam
+++ b/tools/build/test/prebuilt/ext/jamfile.jam
diff --git a/tools/build/test/prebuilt/ext/jamfile2.jam b/tools/build/test/prebuilt/ext/jamfile2.jam
new file mode 100644
index 0000000000..a38bfa7654
--- /dev/null
+++ b/tools/build/test/prebuilt/ext/jamfile2.jam
@@ -0,0 +1,39 @@
+
+import os ;
+
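+# Pick the platform-appropriate prefix and extension for the prebuilt shared
+# library binaries.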
+local dll-suffix = so ;
+local prefix = "" ;
+if [ os.name ] in CYGWIN NT
+{
+ if [ MATCH ^(gcc) : $toolset ]
+ {
+ dll-suffix = dll ;
+ }
+ else
+ {
+ dll-suffix = lib ;
+ }
+}
+else
+{
+ prefix = "lib" ;
+}
+if [ MATCH ^(clang-)?(darwin) : $toolset ]
+{
+ dll-suffix = dylib ;
+}
+
+project ext ;
+
+lib a :
+ : <file>debug/$(prefix)a.$(dll-suffix) <variant>debug
+ :
+ : <include>debug
+ ;
+
+lib a :
+ : <file>release/$(prefix)a.$(dll-suffix) <variant>release
+ :
+ : <include>release
+ ;
+
diff --git a/tools/build/test/prebuilt/ext/jamfile3.jam b/tools/build/test/prebuilt/ext/jamfile3.jam
new file mode 100644
index 0000000000..1731f1743b
--- /dev/null
+++ b/tools/build/test/prebuilt/ext/jamfile3.jam
@@ -0,0 +1,46 @@
+
+# This Jamfile is the same as Jamfile2, except that
+# it tries to access prebuilt targets using absolute
+# paths. It used to be broken on Windows.
+
+import os ;
+
+local dll-suffix = so ;
+local prefix = "" ;
+if [ os.name ] in CYGWIN NT
+{
+ if [ MATCH ^(gcc) : $toolset ]
+ {
+ dll-suffix = dll ;
+ }
+ else
+ {
+ dll-suffix = lib ;
+ }
+}
+else
+{
+ prefix = "lib" ;
+}
+if [ MATCH ^(clang-)?(darwin) : $toolset ]
+{
+ dll-suffix = dylib ;
+}
+
+project ext ;
+
+# Assumes bjam was invoked from the project root.
+local pwd = [ PWD ] ;
+
+lib a :
+ : <file>$(pwd)/ext/debug/$(prefix)a.$(dll-suffix) <variant>debug
+ :
+ : <include>debug
+ ;
+
+lib a :
+ : <file>$(pwd)/ext/release/$(prefix)a.$(dll-suffix) <variant>release
+ :
+ : <include>release
+ ;
+
diff --git a/tools/build/v2/test/prebuilt/ext/jamroot.jam b/tools/build/test/prebuilt/ext/jamroot.jam
index c7617d5d33..c7617d5d33 100644
--- a/tools/build/v2/test/prebuilt/ext/jamroot.jam
+++ b/tools/build/test/prebuilt/ext/jamroot.jam
diff --git a/tools/build/v2/test/prebuilt/ext/release/a.h b/tools/build/test/prebuilt/ext/release/a.h
index 9ab71d88ce..9ab71d88ce 100644
--- a/tools/build/v2/test/prebuilt/ext/release/a.h
+++ b/tools/build/test/prebuilt/ext/release/a.h
diff --git a/tools/build/v2/test/prebuilt/hello.cpp b/tools/build/test/prebuilt/hello.cpp
index 4c1ab7036f..4c1ab7036f 100644
--- a/tools/build/v2/test/prebuilt/hello.cpp
+++ b/tools/build/test/prebuilt/hello.cpp
diff --git a/tools/build/v2/test/prebuilt/jamfile.jam b/tools/build/test/prebuilt/jamfile.jam
index 18b731ae17..18b731ae17 100644
--- a/tools/build/v2/test/prebuilt/jamfile.jam
+++ b/tools/build/test/prebuilt/jamfile.jam
diff --git a/tools/build/v2/test/prebuilt/jamroot.jam b/tools/build/test/prebuilt/jamroot.jam
index f022c0d64c..f022c0d64c 100644
--- a/tools/build/v2/test/prebuilt/jamroot.jam
+++ b/tools/build/test/prebuilt/jamroot.jam
diff --git a/tools/build/v2/test/preprocessor.py b/tools/build/test/preprocessor.py
index 9faa711b76..9faa711b76 100755
--- a/tools/build/v2/test/preprocessor.py
+++ b/tools/build/test/preprocessor.py
diff --git a/tools/build/v2/test/print.py b/tools/build/test/print.py
index 65caf95e19..65caf95e19 100644
--- a/tools/build/v2/test/print.py
+++ b/tools/build/test/print.py
diff --git a/tools/build/v2/test/project-test3/a.cpp b/tools/build/test/project-test3/a.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test3/a.cpp
+++ b/tools/build/test/project-test3/a.cpp
diff --git a/tools/build/v2/test/project-test3/jamfile.jam b/tools/build/test/project-test3/jamfile.jam
index f079607707..f079607707 100644
--- a/tools/build/v2/test/project-test3/jamfile.jam
+++ b/tools/build/test/project-test3/jamfile.jam
diff --git a/tools/build/v2/test/project-test3/jamroot.jam b/tools/build/test/project-test3/jamroot.jam
index 8de43be51d..8de43be51d 100644
--- a/tools/build/v2/test/project-test3/jamroot.jam
+++ b/tools/build/test/project-test3/jamroot.jam
diff --git a/tools/build/v2/test/project-test3/lib/b.cpp b/tools/build/test/project-test3/lib/b.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test3/lib/b.cpp
+++ b/tools/build/test/project-test3/lib/b.cpp
diff --git a/tools/build/v2/test/project-test3/lib/jamfile.jam b/tools/build/test/project-test3/lib/jamfile.jam
index 76b0829a9c..76b0829a9c 100644
--- a/tools/build/v2/test/project-test3/lib/jamfile.jam
+++ b/tools/build/test/project-test3/lib/jamfile.jam
diff --git a/tools/build/v2/test/project-test3/lib2/c.cpp b/tools/build/test/project-test3/lib2/c.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test3/lib2/c.cpp
+++ b/tools/build/test/project-test3/lib2/c.cpp
diff --git a/tools/build/v2/test/project-test3/lib2/d.cpp b/tools/build/test/project-test3/lib2/d.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test3/lib2/d.cpp
+++ b/tools/build/test/project-test3/lib2/d.cpp
diff --git a/tools/build/v2/test/generators-test/e.cpp b/tools/build/test/project-test3/lib2/helper/e.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/generators-test/e.cpp
+++ b/tools/build/test/project-test3/lib2/helper/e.cpp
diff --git a/tools/build/v2/test/project-test3/lib2/helper/jamfile.jam b/tools/build/test/project-test3/lib2/helper/jamfile.jam
index 0c82f92481..0c82f92481 100644
--- a/tools/build/v2/test/project-test3/lib2/helper/jamfile.jam
+++ b/tools/build/test/project-test3/lib2/helper/jamfile.jam
diff --git a/tools/build/v2/test/project-test3/lib2/jamfile.jam b/tools/build/test/project-test3/lib2/jamfile.jam
index b6b0abc44b..b6b0abc44b 100644
--- a/tools/build/v2/test/project-test3/lib2/jamfile.jam
+++ b/tools/build/test/project-test3/lib2/jamfile.jam
diff --git a/tools/build/v2/test/project-test3/lib3/f.cpp b/tools/build/test/project-test3/lib3/f.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test3/lib3/f.cpp
+++ b/tools/build/test/project-test3/lib3/f.cpp
diff --git a/tools/build/v2/test/project-test3/lib3/jamfile.jam b/tools/build/test/project-test3/lib3/jamfile.jam
index 0d457817e7..0d457817e7 100644
--- a/tools/build/v2/test/project-test3/lib3/jamfile.jam
+++ b/tools/build/test/project-test3/lib3/jamfile.jam
diff --git a/tools/build/v2/test/project-test3/lib3/jamroot.jam b/tools/build/test/project-test3/lib3/jamroot.jam
index 971f03096e..971f03096e 100644
--- a/tools/build/v2/test/project-test3/lib3/jamroot.jam
+++ b/tools/build/test/project-test3/lib3/jamroot.jam
diff --git a/tools/build/v2/test/project-test3/readme.txt b/tools/build/test/project-test3/readme.txt
index da27e54b2e..da27e54b2e 100644
--- a/tools/build/v2/test/project-test3/readme.txt
+++ b/tools/build/test/project-test3/readme.txt
diff --git a/tools/build/v2/test/project-test4/a.cpp b/tools/build/test/project-test4/a.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test4/a.cpp
+++ b/tools/build/test/project-test4/a.cpp
diff --git a/tools/build/v2/test/project-test4/a_gcc.cpp b/tools/build/test/project-test4/a_gcc.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test4/a_gcc.cpp
+++ b/tools/build/test/project-test4/a_gcc.cpp
diff --git a/tools/build/v2/test/project-test4/jamfile.jam b/tools/build/test/project-test4/jamfile.jam
index a34d5f2db7..a34d5f2db7 100644
--- a/tools/build/v2/test/project-test4/jamfile.jam
+++ b/tools/build/test/project-test4/jamfile.jam
diff --git a/tools/build/v2/test/project-test4/jamfile3.jam b/tools/build/test/project-test4/jamfile3.jam
index 9770362d7f..9770362d7f 100644
--- a/tools/build/v2/test/project-test4/jamfile3.jam
+++ b/tools/build/test/project-test4/jamfile3.jam
diff --git a/tools/build/v2/test/project-test4/jamfile4.jam b/tools/build/test/project-test4/jamfile4.jam
index e3257801a7..e3257801a7 100644
--- a/tools/build/v2/test/project-test4/jamfile4.jam
+++ b/tools/build/test/project-test4/jamfile4.jam
diff --git a/tools/build/v2/test/project-test4/jamfile5.jam b/tools/build/test/project-test4/jamfile5.jam
index 1010be5e46..1010be5e46 100644
--- a/tools/build/v2/test/project-test4/jamfile5.jam
+++ b/tools/build/test/project-test4/jamfile5.jam
diff --git a/tools/build/v2/test/project-test4/jamroot.jam b/tools/build/test/project-test4/jamroot.jam
index 801f0afb27..801f0afb27 100644
--- a/tools/build/v2/test/project-test4/jamroot.jam
+++ b/tools/build/test/project-test4/jamroot.jam
diff --git a/tools/build/v2/test/project-test4/lib/b.cpp b/tools/build/test/project-test4/lib/b.cpp
index ccecbb414f..ccecbb414f 100644
--- a/tools/build/v2/test/project-test4/lib/b.cpp
+++ b/tools/build/test/project-test4/lib/b.cpp
diff --git a/tools/build/v2/test/project-test4/lib/jamfile.jam b/tools/build/test/project-test4/lib/jamfile.jam
index 1bdb7c122c..1bdb7c122c 100644
--- a/tools/build/v2/test/project-test4/lib/jamfile.jam
+++ b/tools/build/test/project-test4/lib/jamfile.jam
diff --git a/tools/build/v2/test/project-test4/lib/jamfile1.jam b/tools/build/test/project-test4/lib/jamfile1.jam
index be2c3649a2..be2c3649a2 100644
--- a/tools/build/v2/test/project-test4/lib/jamfile1.jam
+++ b/tools/build/test/project-test4/lib/jamfile1.jam
diff --git a/tools/build/v2/test/project-test4/lib/jamfile2.jam b/tools/build/test/project-test4/lib/jamfile2.jam
index d47274bdfe..d47274bdfe 100644
--- a/tools/build/v2/test/project-test4/lib/jamfile2.jam
+++ b/tools/build/test/project-test4/lib/jamfile2.jam
diff --git a/tools/build/v2/test/project-test4/lib/jamfile3.jam b/tools/build/test/project-test4/lib/jamfile3.jam
index 73a78324b8..73a78324b8 100644
--- a/tools/build/v2/test/project-test4/lib/jamfile3.jam
+++ b/tools/build/test/project-test4/lib/jamfile3.jam
diff --git a/tools/build/v2/test/project-test4/lib2/jamfile.jam b/tools/build/test/project-test4/lib2/jamfile.jam
index 389492bf0e..389492bf0e 100644
--- a/tools/build/v2/test/project-test4/lib2/jamfile.jam
+++ b/tools/build/test/project-test4/lib2/jamfile.jam
diff --git a/tools/build/v2/test/project-test4/lib2/jamfile2.jam b/tools/build/test/project-test4/lib2/jamfile2.jam
index 94b144d068..94b144d068 100644
--- a/tools/build/v2/test/project-test4/lib2/jamfile2.jam
+++ b/tools/build/test/project-test4/lib2/jamfile2.jam
diff --git a/tools/build/v2/test/project-test4/readme.txt b/tools/build/test/project-test4/readme.txt
index 0c0ba2ca4b..0c0ba2ca4b 100644
--- a/tools/build/v2/test/project-test4/readme.txt
+++ b/tools/build/test/project-test4/readme.txt
diff --git a/tools/build/test/project_dependencies.py b/tools/build/test/project_dependencies.py
new file mode 100644
index 0000000000..2e439d2914
--- /dev/null
+++ b/tools/build/test/project_dependencies.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can specify a dependency property in project requirements, and
+# that it will not cause every main target in the project to be generated in its
+# own subdirectory.
+
+# The whole test is somewhat moot now.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "build-project src ;")
+
+t.write("lib/jamfile.jam", "lib lib1 : lib1.cpp ;")
+
+t.write("lib/lib1.cpp", """
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}\n
+""")
+
+t.write("src/jamfile.jam", """
+project : requirements <library>../lib//lib1 ;
+exe a : a.cpp ;
+exe b : b.cpp ;
+""")
+
+t.write("src/a.cpp", """
+#ifdef _WIN32
+__declspec(dllimport)
+#endif
+void foo();
+int main() { foo(); }
+""")
+
+t.copy("src/a.cpp", "src/b.cpp")
+
+t.run_build_system()
+
+# Test that there is no "main-target-a" part.
+# t.expect_addition("src/bin/$toolset/debug/a.exe")
+# t.expect_addition("src/bin/$toolset/debug/b.exe")
+
+t.cleanup()
diff --git a/tools/build/test/project_glob.py b/tools/build/test/project_glob.py
new file mode 100644
index 0000000000..95e3f30cfe
--- /dev/null
+++ b/tools/build/test/project_glob.py
@@ -0,0 +1,212 @@
+#!/usr/bin/python
+
+# Copyright (C) 2003. Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'glob' rule in Jamfile context.
+
+import BoostBuild
+
+
+def test_basic():
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/a.cpp", "int main() {}\n")
+ t.write("d1/jamfile.jam", "exe a : [ glob *.cpp ] ../d2/d//l ;")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+ t.write("d3/d/jamfile.jam", "exe a : [ glob ../*.cpp ] ;")
+ t.write("d3/a.cpp", "int main() {}\n")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug/a.exe")
+
+ t.run_build_system(subdir="d3/d")
+ t.expect_addition("d3/d/bin/$toolset/debug/a.exe")
+
+ t.rm("d2/d/bin")
+ t.run_build_system(subdir="d2/d")
+ t.expect_addition("d2/d/bin/$toolset/debug/l.dll")
+
+ t.cleanup()
+
+
+def test_source_location():
+ """
+    Test that when 'source-location' is explicitly specified, glob works
+    relative to the source location.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/a.cpp", "very bad non-compilable file\n")
+ t.write("d1/src/a.cpp", "int main() {}\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob *.cpp ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug/a.exe")
+
+ t.cleanup()
+
+
+def test_wildcards_and_exclusion_patterns():
+ """
+ Test that wildcards can include directories. Also test exclusion
+ patterns.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ t.write("d1/src/bar/bad.cpp", "very bad non-compilable file\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob foo/*.cpp bar/*.cpp : bar/bad* ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug/a.exe")
+
+ t.cleanup()
+
+
+def test_glob_tree():
+ """Test that 'glob-tree' works."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ t.write("d1/src/bar/bad.cpp", "very bad non-compilable file\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob-tree *.cpp : bad* ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug/a.exe")
+
+ t.cleanup()
+
+
+def test_directory_names_in_glob_tree():
+ """Test that directory names in patterns for 'glob-tree' are rejected."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/a.cpp", "very bad non-compilable file\n")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ t.write("d1/src/bar/bad.cpp", "very bad non-compilable file\n")
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+exe a : [ glob-tree foo/*.cpp bar/*.cpp : bad* ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1", status=1)
+ t.expect_output_lines("error: The patterns * may not include directory")
+
+ t.cleanup()
+
+
+def test_glob_with_absolute_names():
+ """Test that 'glob' works with absolute names."""
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "")
+ t.write("d1/src/a.cpp", "very bad non-compilable file\n")
+ t.write("d1/src/foo/a.cpp", "void bar(); int main() { bar(); }\n")
+ t.write("d1/src/bar/b.cpp", "void bar() {}\n")
+ # Note that to get the current dir, we use bjam's PWD, not Python's
+ # os.getcwd(), because the former will always return a long path while the
+ # latter might return a short path, which would confuse path.glob.
+ t.write("d1/jamfile.jam", """\
+project : source-location src ;
+local pwd = [ PWD ] ; # Always absolute.
+exe a : [ glob $(pwd)/src/foo/*.cpp $(pwd)/src/bar/*.cpp ] ../d2/d//l ;
+""")
+ t.write("d2/d/l.cpp", """\
+#if defined(_WIN32)
+__declspec(dllexport)
+void force_import_lib_creation() {}
+#endif
+""")
+ t.write("d2/d/jamfile.jam", "lib l : [ glob *.cpp ] ;")
+
+ t.run_build_system(subdir="d1")
+ t.expect_addition("d1/bin/$toolset/debug/a.exe")
+
+ t.cleanup()
+
+
+def test_glob_excludes_in_subdirectory():
+ """
+ Regression test: glob excludes used to be broken when building from a
+ subdirectory.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("jamroot.jam", "build-project p ;")
+ t.write("p/p.c", "int main() {}\n")
+ t.write("p/p_x.c", "very bad non-compilable file\n")
+ t.write("p/jamfile.jam", "exe p : [ glob *.c : p_x.c ] ;")
+
+ t.run_build_system(subdir="p")
+ t.expect_addition("p/bin/$toolset/debug/p.exe")
+
+ t.cleanup()
+
+
+test_basic()
+test_source_location()
+test_wildcards_and_exclusion_patterns()
+test_glob_tree()
+test_directory_names_in_glob_tree()
+test_glob_with_absolute_names()
+test_glob_excludes_in_subdirectory()
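
Taken together, the glob tests above exercise plain glob with exclusion patterns, recursive glob-tree (whose patterns may not contain directory components), and globbing relative to an explicitly specified source-location. The condensed sketch below, which is not part of the patch, combines source-location and an exclusion pattern in one place (the individual tests above exercise them separately); it reuses the harness calls shown above, and the directory layout and "app" target name are illustrative only.

#!/usr/bin/python
# Condensed sketch of the glob behaviour exercised by the tests above;
# the layout and target name are illustrative, not part of the patch.

import BoostBuild

t = BoostBuild.Tester(use_test_config=False)

t.write("jamroot.jam", "")
t.write("d/src/good.cpp", "int main() {}\n")
t.write("d/src/bad.cpp", "deliberately non-compilable\n")
t.write("d/jamfile.jam", """\
project : source-location src ;    # glob patterns resolve relative to src/
exe app : [ glob *.cpp : bad* ] ;  # the exclusion pattern drops src/bad.cpp
""")

t.run_build_system(subdir="d")
t.expect_addition("d/bin/$toolset/debug/app.exe")

t.cleanup()
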
diff --git a/tools/build/test/project_id.py b/tools/build/test/project_id.py
new file mode 100755
index 0000000000..f6846af5fd
--- /dev/null
+++ b/tools/build/test/project_id.py
@@ -0,0 +1,414 @@
+#!/usr/bin/python
+
+# Copyright (C) 2012. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests Boost Build's project-id handling.
+
+import BoostBuild
+import sys
+
+
+def test_assigning_project_ids():
+ t = BoostBuild.Tester(pass_toolset=False)
+ t.write("jamroot.jam", """\
+import assert ;
+import modules ;
+import notfile ;
+import project ;
+
+rule assert-project-id ( id ? : module-name ? )
+{
+ module-name ?= [ CALLER_MODULE ] ;
+ assert.result $(id) : project.attribute $(module-name) id ;
+}
+
+# Project rule modifies the main project id.
+assert-project-id ; # Initial project id is empty
+project foo ; assert-project-id /foo ;
+project ; assert-project-id /foo ;
+project foo ; assert-project-id /foo ;
+project bar ; assert-project-id /bar ;
+project /foo ; assert-project-id /foo ;
+project "" ; assert-project-id /foo ;
+
+# Calling the use-project rule does not modify the project's main id.
+use-project id1 : a ;
+# We need to load the 'a' Jamfile module manually as the use-project rule will
+# only schedule the load to be done after the current module load finishes.
+a-module = [ project.load a ] ;
+assert-project-id : $(a-module) ;
+use-project id2 : a ;
+assert-project-id : $(a-module) ;
+modules.call-in $(a-module) : project baz ;
+assert-project-id /baz : $(a-module) ;
+use-project id3 : a ;
+assert-project-id /baz : $(a-module) ;
+
+# Make sure the project id still holds after all the scheduled use-project loads
+# complete. We do this by scheduling the assert for the Jam action scheduling
+# phase.
+notfile x : @assert-a-rule ;
+rule assert-a-rule ( target : : properties * )
+{
+ assert-project-id /baz : $(a-module) ;
+}
+""")
+ t.write("a/jamfile.jam", """\
+# Initial project id for this module is empty.
+assert-project-id ;
+""")
+ t.run_build_system()
+ t.cleanup()
+
+
+def test_using_project_ids_in_target_references():
+ t = BoostBuild.Tester()
+ __write_appender(t, "appender.jam")
+ t.write("jamroot.jam", """\
+import type ;
+type.register AAA : _a ;
+type.register BBB : _b ;
+
+import appender ;
+appender.register aaa-to-bbb : AAA : BBB ;
+
+use-project id1 : a ;
+use-project /id2 : a ;
+
+bbb b1 : /id1//target ;
+bbb b2 : /id2//target ;
+bbb b3 : /id3//target ;
+bbb b4 : a//target ;
+bbb b5 : /project-a1//target ;
+bbb b6 : /project-a2//target ;
+bbb b7 : /project-a3//target ;
+
+use-project id3 : a ;
+""")
+ t.write("a/source._a", "")
+ t.write("a/jamfile.jam", """\
+project project-a1 ;
+project /project-a2 ;
+import alias ;
+alias target : source._a ;
+project /project-a3 ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/b%d._b" % x for x in range(1, 8))
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+def test_repeated_ids_for_different_projects():
+ t = BoostBuild.Tester()
+
+ t.write("a/jamfile.jam", "")
+ t.write("jamroot.jam", "project foo ; use-project foo : a ;")
+ t.run_build_system(status=1)
+ t.expect_output_lines("""\
+error: Attempt to redeclare already registered project id '/foo'.
+error: Original project:
+error: Name: Jamfile<*>
+error: Module: Jamfile<*>
+error: Main id: /foo
+error: File: jamroot.jam
+error: Location: .
+error: New project:
+error: Module: Jamfile<*>
+error: File: a*jamfile.jam
+error: Location: a""")
+
+ t.write("jamroot.jam", "use-project foo : a ; project foo ;")
+ t.run_build_system(status=1)
+ t.expect_output_lines("""\
+error: Attempt to redeclare already registered project id '/foo'.
+error: Original project:
+error: Name: Jamfile<*>
+error: Module: Jamfile<*>
+error: Main id: /foo
+error: File: jamroot.jam
+error: Location: .
+error: New project:
+error: Module: Jamfile<*>
+error: File: a*jamfile.jam
+error: Location: a""")
+
+ t.write("jamroot.jam", """\
+import modules ;
+import project ;
+modules.call-in [ project.load a ] : project foo ;
+project foo ;
+""")
+ t.run_build_system(status=1)
+ t.expect_output_lines("""\
+error: at jamroot.jam:4
+error: Attempt to redeclare already registered project id '/foo'.
+error: Original project:
+error: Name: Jamfile<*>
+error: Module: Jamfile<*>
+error: Main id: /foo
+error: File: a*jamfile.jam
+error: Location: a
+error: New project:
+error: Module: Jamfile<*>
+error: File: jamroot.jam
+error: Location: .""")
+
+ t.cleanup()
+
+
+def test_repeated_ids_for_same_project():
+ t = BoostBuild.Tester()
+
+ t.write("jamroot.jam", "project foo ; project foo ;")
+ t.run_build_system()
+
+ t.write("jamroot.jam", "project foo ; use-project foo : . ;")
+ t.run_build_system()
+
+ t.write("jamroot.jam", "project foo ; use-project foo : ./. ;")
+ t.run_build_system()
+
+ t.write("jamroot.jam", """\
+project foo ;
+use-project foo : . ;
+use-project foo : ./aaa/.. ;
+use-project foo : ./. ;
+""")
+ t.run_build_system()
+
+ # On Windows we have a case-insensitive file system and we can use
+ # backslashes as path separators.
+ # FIXME: Make a similar test pass on Cygwin.
+ if sys.platform in ['win32']:
+ t.write("a/fOo bAr/b/jamfile.jam", "")
+ t.write("jamroot.jam", r"""
+use-project bar : "a/foo bar/b" ;
+use-project bar : "a/foO Bar/b" ;
+use-project bar : "a/foo BAR/b/" ;
+use-project bar : "a\\.\\FOO bar\\b\\" ;
+""")
+ t.run_build_system()
+ t.rm("a")
+
+ t.write("bar/jamfile.jam", "")
+ t.write("jamroot.jam", """\
+use-project bar : bar ;
+use-project bar : bar/ ;
+use-project bar : bar// ;
+use-project bar : bar/// ;
+use-project bar : bar//// ;
+use-project bar : bar/. ;
+use-project bar : bar/./ ;
+use-project bar : bar/////./ ;
+use-project bar : bar/../bar/xxx/.. ;
+use-project bar : bar/..///bar/xxx///////.. ;
+use-project bar : bar/./../bar/xxx/.. ;
+use-project bar : bar/.////../bar/xxx/.. ;
+use-project bar : bar/././../bar/xxx/.. ;
+use-project bar : bar/././//////////../bar/xxx/.. ;
+use-project bar : bar/.///.////../bar/xxx/.. ;
+use-project bar : bar/./././xxx/.. ;
+use-project bar : bar/xxx////.. ;
+use-project bar : bar/xxx/.. ;
+use-project bar : bar///////xxx/.. ;
+""")
+ t.run_build_system()
+ t.rm("bar")
+
+ # On Windows we have a case-insensitive file system and we can use
+ # backslashes as path separators.
+ # FIXME: Make a similar test pass on Cygwin.
+ if sys.platform in ['win32']:
+ t.write("baR/jamfile.jam", "")
+ t.write("jamroot.jam", r"""
+use-project bar : bar ;
+use-project bar : BAR ;
+use-project bar : bAr ;
+use-project bar : bAr/ ;
+use-project bar : bAr\\ ;
+use-project bar : bAr\\\\ ;
+use-project bar : bAr\\\\///// ;
+use-project bar : bAr/. ;
+use-project bar : bAr/./././ ;
+use-project bar : bAr\\.\\.\\.\\ ;
+use-project bar : bAr\\./\\/.\\.\\ ;
+use-project bar : bAr/.\\././ ;
+use-project bar : Bar ;
+use-project bar : BaR ;
+use-project bar : BaR/./../bAr/xxx/.. ;
+use-project bar : BaR/./..\\bAr\\xxx/.. ;
+use-project bar : BaR/xxx/.. ;
+use-project bar : BaR///\\\\\\//xxx/.. ;
+use-project bar : Bar\\xxx/.. ;
+use-project bar : BAR/xXx/.. ;
+use-project bar : BAR/xXx\\\\/\\/\\//\\.. ;
+""")
+ t.run_build_system()
+ t.rm("baR")
+
+ t.cleanup()
+
+
+def test_unresolved_project_references():
+ t = BoostBuild.Tester()
+
+ __write_appender(t, "appender.jam")
+ t.write("a/source._a", "")
+ t.write("a/jamfile.jam", "import alias ; alias target : source._a ;")
+ t.write("jamroot.jam", """\
+import type ;
+type.register AAA : _a ;
+type.register BBB : _b ;
+
+import appender ;
+appender.register aaa-to-bbb : AAA : BBB ;
+
+use-project foo : a ;
+
+bbb b1 : a//target ;
+bbb b2 : /foo//target ;
+bbb b-invalid : invalid//target ;
+bbb b-root-invalid : /invalid//target ;
+bbb b-missing-root : foo//target ;
+bbb b-invalid-target : /foo//invalid ;
+""")
+
+ t.run_build_system(["b1", "b2"])
+ t.expect_addition("bin/$toolset/debug/b%d._b" % x for x in range(1, 3))
+ t.expect_nothing_more()
+
+ t.run_build_system(["b-invalid"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: 'invalid//target'
+error: referred to from project at
+error: '.'
+error: could not resolve project reference 'invalid'""")
+
+ t.run_build_system(["b-root-invalid"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: '/invalid//target'
+error: referred to from project at
+error: '.'
+error: could not resolve project reference '/invalid'""")
+
+ t.run_build_system(["b-missing-root"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: 'foo//target'
+error: referred to from project at
+error: '.'
+error: could not resolve project reference 'foo' - possibly missing a """
+ "leading slash ('/') character.")
+
+ t.run_build_system(["b-invalid-target"], status=1)
+ t.expect_output_lines("""\
+error: Unable to find file or target named
+error: '/foo//invalid'
+error: referred to from project at
+error: '.'""")
+ t.expect_output_lines("*could not resolve project reference*", False)
+
+ t.cleanup()
+
+
+def __write_appender(t, name):
+ t.write(name,
+r"""# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Support for registering test generators that construct their targets by
+# simply appending their given input data, e.g. list of sources & targets.
+
+import "class" : new ;
+import generators ;
+import modules ;
+import sequence ;
+
+rule register ( id composing ? : source-types + : target-types + )
+{
+ local caller-module = [ CALLER_MODULE ] ;
+ id = $(caller-module).$(id) ;
+ local g = [ new generator $(id) $(composing) : $(source-types) :
+ $(target-types) ] ;
+ $(g).set-rule-name $(__name__).appender ;
+ generators.register $(g) ;
+ return $(id) ;
+}
+
+if [ modules.peek : NT ]
+{
+ X = ")" ;
+ ECHO_CMD = (echo. ;
+}
+else
+{
+ X = \" ;
+ ECHO_CMD = "echo $(X)" ;
+}
+
+local appender-runs ;
+
+# We set up separate actions for building each target in order to avoid having
+# to iterate over them in action (i.e. shell) code. We have to be extra careful
+# though to achieve the exact same effect as if doing all the work in just one
+# action. Otherwise Boost Jam might, under some circumstances, run only some of
+# our actions. To achieve this we register a series of actions for all the
+# targets (since they all have the same target list - either all or none of them
+# get run independently of which target actually needs to get built), each
+# building only a single target. Since all our actions use the same targets, we
+# cannot use 'on-target' parameters to pass data to a specific action, so we
+# pass them using the second 'sources' parameter which our actions then know how
+# to interpret correctly. This works well since Boost Jam does not automatically
+# add dependency relations between specified action targets & sources and so the
+# second argument, even though most often used to pass in a list of sources, can
+# actually be used for passing in any type of information.
+rule appender ( targets + : sources + : properties * )
+{
+ appender-runs = [ CALC $(appender-runs:E=0) + 1 ] ;
+ local target-index = 0 ;
+ local target-count = [ sequence.length $(targets) ] ;
+ local original-targets ;
+ for t in $(targets)
+ {
+ target-index = [ CALC $(target-index) + 1 ] ;
+ local appender-run = $(appender-runs) ;
+ if $(targets[2])-defined
+ {
+ appender-run += [$(target-index)/$(target-count)] ;
+ }
+ append $(targets) : $(appender-run:J=" ") $(t) $(sources) ;
+ }
+}
+
+actions append
+{
+ $(ECHO_CMD)-------------------------------------------------$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)
+ $(ECHO_CMD)Appender run: $(>[1])$(X)>> "$(>[2])"
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)
+ $(ECHO_CMD)Target group: $(<:J=' ')$(X)>> "$(>[2])"
+ $(ECHO_CMD) Target: '$(>[2])'$(X)
+ $(ECHO_CMD) Target: '$(>[2])'$(X)>> "$(>[2])"
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)
+ $(ECHO_CMD) Sources: '$(>[3-]:J=' ')'$(X)>> "$(>[2])"
+ $(ECHO_CMD)=================================================$(X)
+ $(ECHO_CMD)-------------------------------------------------$(X)>> "$(>[2])"
+}
+""")
+
+
+test_assigning_project_ids()
+test_using_project_ids_in_target_references()
+test_repeated_ids_for_same_project()
+test_repeated_ids_for_different_projects()
+test_unresolved_project_references()
diff --git a/tools/build/test/project_root_constants.py b/tools/build/test/project_root_constants.py
new file mode 100644
index 0000000000..b3bdcbb256
--- /dev/null
+++ b/tools/build/test/project_root_constants.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester()
+
+# Create the needed files.
+t.write("jamroot.jam", """\
+constant FOO : foobar gee ;
+ECHO $(FOO) ;
+""")
+
+t.run_build_system()
+t.expect_output_lines("foobar gee")
+
+# Regression test: when absolute paths were passed to path-constant rule,
+# Boost.Build failed to recognize path as absolute and prepended the current
+# dir.
+t.write("jamroot.jam", """\
+import path ;
+local here = [ path.native [ path.pwd ] ] ;
+path-constant HERE : $(here) ;
+if $(HERE) != $(here)
+{
+ ECHO "PWD =" $(here) ;
+ ECHO "path constant =" $(HERE) ;
+ EXIT ;
+}
+""")
+t.write("jamfile.jam", "")
+
+t.run_build_system()
+
+t.write("jamfile.jam", """\
+# This tests that rule 'hello' will be imported to children unlocalized, and
+# will still access variables in this Jamfile.
+x = 10 ;
+constant FOO : foo ;
+rule hello ( ) { ECHO "Hello $(x)" ; }
+""")
+
+t.write("d/jamfile.jam", """\
+ECHO "d: $(FOO)" ;
+constant BAR : bar ;
+""")
+
+t.write("d/d2/jamfile.jam", """\
+ECHO "d2: $(FOO)" ;
+ECHO "d2: $(BAR)" ;
+hello ;
+""")
+
+t.run_build_system(subdir="d/d2")
+t.expect_output_lines("d: foo\nd2: foo\nd2: bar\nHello 10")
+
+t.cleanup()
diff --git a/tools/build/test/project_root_rule.py b/tools/build/test/project_root_rule.py
new file mode 100644
index 0000000000..9f5ca47f89
--- /dev/null
+++ b/tools/build/test/project_root_rule.py
@@ -0,0 +1,34 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that we can declare a rule in Jamroot that can then be called in a
+# child Jamfile to declare a target. Specifically, test the use of 'glob' in
+# that rule.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+t.write("jamroot.jam", """
+project : requirements <link>static ;
+rule my-lib ( name ) { lib $(name) : [ glob *.cpp ] ; }
+""")
+
+t.write("sub/a.cpp", """
+""")
+
+t.write("sub/jamfile.jam", """
+my-lib foo ;
+""")
+
+
+t.run_build_system(subdir="sub")
+
+t.expect_addition("sub/bin/$toolset/debug/link-static/foo.lib")
+
+t.cleanup()
diff --git a/tools/build/test/project_test3.py b/tools/build/test/project_test3.py
new file mode 100644
index 0000000000..4f6cda2a76
--- /dev/null
+++ b/tools/build/test/project_test3.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+t = BoostBuild.Tester(translate_suffixes=0)
+
+# First, check error handling during startup.
+t.set_tree("project-test3")
+os.remove("jamroot.jam")
+t.run_build_system(status=1)
+
+t.expect_output_lines("error: Could not find parent for project at '.'\n"
+ "error: Did not find Jamfile.jam or Jamroot.jam in any parent directory.")
+
+t.set_tree("project-test3")
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/a.obj")
+t.expect_content("bin/$toolset/debug/a.obj", """\
+$toolset/debug
+a.cpp
+""")
+
+t.expect_addition("bin/$toolset/debug/a.exe")
+t.expect_content("bin/$toolset/debug/a.exe",
+"$toolset/debug\n" +
+"bin/$toolset/debug/a.obj lib/bin/$toolset/debug/b.obj " +
+"lib2/bin/$toolset/debug/c.obj lib2/bin/$toolset/debug/d.obj " +
+"lib2/helper/bin/$toolset/debug/e.obj " +
+"lib3/bin/$toolset/debug/f.obj\n"
+)
+
+t.expect_addition("lib/bin/$toolset/debug/b.obj")
+t.expect_content("lib/bin/$toolset/debug/b.obj", """\
+$toolset/debug
+lib/b.cpp
+""")
+
+t.expect_addition("lib/bin/$toolset/debug/m.exe")
+t.expect_content("lib/bin/$toolset/debug/m.exe", """\
+$toolset/debug
+lib/bin/$toolset/debug/b.obj lib2/bin/$toolset/debug/c.obj
+""")
+
+t.expect_addition("lib2/bin/$toolset/debug/c.obj")
+t.expect_content("lib2/bin/$toolset/debug/c.obj", """\
+$toolset/debug
+lib2/c.cpp
+""")
+
+t.expect_addition("lib2/bin/$toolset/debug/d.obj")
+t.expect_content("lib2/bin/$toolset/debug/d.obj", """\
+$toolset/debug
+lib2/d.cpp
+""")
+
+t.expect_addition("lib2/bin/$toolset/debug/l.exe")
+t.expect_content("lib2/bin/$toolset/debug/l.exe", """\
+$toolset/debug
+lib2/bin/$toolset/debug/c.obj bin/$toolset/debug/a.obj
+""")
+
+t.expect_addition("lib2/helper/bin/$toolset/debug/e.obj")
+t.expect_content("lib2/helper/bin/$toolset/debug/e.obj", """\
+$toolset/debug
+lib2/helper/e.cpp
+""")
+
+t.expect_addition("lib3/bin/$toolset/debug/f.obj")
+t.expect_content("lib3/bin/$toolset/debug/f.obj", """\
+$toolset/debug
+lib3/f.cpp lib2/helper/bin/$toolset/debug/e.obj
+""")
+
+t.touch("a.cpp")
+t.run_build_system()
+t.expect_touch(["bin/$toolset/debug/a.obj",
+ "bin/$toolset/debug/a.exe",
+ "lib2/bin/$toolset/debug/l.exe"])
+
+t.run_build_system(["release", "optimization=off,speed"])
+t.expect_addition(["bin/$toolset/release/a.exe",
+ "bin/$toolset/release/a.obj",
+ "bin/$toolset/release/optimization-off/a.exe",
+ "bin/$toolset/release/optimization-off/a.obj"])
+
+t.run_build_system(["--clean-all"])
+t.expect_removal(["bin/$toolset/debug/a.obj",
+ "bin/$toolset/debug/a.exe",
+ "lib/bin/$toolset/debug/b.obj",
+ "lib/bin/$toolset/debug/m.exe",
+ "lib2/bin/$toolset/debug/c.obj",
+ "lib2/bin/$toolset/debug/d.obj",
+ "lib2/bin/$toolset/debug/l.exe",
+ "lib3/bin/$toolset/debug/f.obj"])
+
+# Now test target ids on the command line.
+t.set_tree("project-test3")
+t.run_build_system(["lib//b.obj"])
+t.expect_addition("lib/bin/$toolset/debug/b.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["--clean", "lib//b.obj"])
+t.expect_removal("lib/bin/$toolset/debug/b.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["lib//b.obj"])
+t.expect_addition("lib/bin/$toolset/debug/b.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["release", "lib2/helper//e.obj", "/lib3//f.obj"])
+t.expect_addition("lib2/helper/bin/$toolset/release/e.obj")
+t.expect_addition("lib3/bin/$toolset/release/f.obj")
+t.expect_nothing_more()
+
+# Test that project ids on the command line work as well.
+t.set_tree("project-test3")
+t.run_build_system(["/lib2"])
+t.expect_addition("lib2/bin/$toolset/debug/" *
+ BoostBuild.List("c.obj d.obj l.exe"))
+t.expect_addition("bin/$toolset/debug/a.obj")
+t.expect_nothing_more()
+
+t.run_build_system(["lib"])
+t.expect_addition("lib/bin/$toolset/debug/" *
+ BoostBuild.List("b.obj m.exe"))
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/v2/test/project_test4.py b/tools/build/test/project_test4.py
index fc4115017b..fc4115017b 100644
--- a/tools/build/v2/test/project_test4.py
+++ b/tools/build/test/project_test4.py
diff --git a/tools/build/test/property_expansion.py b/tools/build/test/property_expansion.py
new file mode 100644
index 0000000000..53fc13616e
--- /dev/null
+++ b/tools/build/test/property_expansion.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that a free property defined inside a custom variant is correctly expanded.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """\
+variant debug-AA : debug : <define>AA ;
+alias all : hello ;
+exe hello : hello.cpp ;
+explicit hello ;
+""")
+
+t.write("hello.cpp", """\
+#ifdef AA
+int main() {}
+#endif
+""")
+
+t.run_build_system(["debug-AA"])
+
+t.cleanup()
diff --git a/tools/build/v2/test/qt4.py b/tools/build/test/qt4.py
index abb9594d58..abb9594d58 100755
--- a/tools/build/v2/test/qt4.py
+++ b/tools/build/test/qt4.py
diff --git a/tools/build/test/qt4/jamroot.jam b/tools/build/test/qt4/jamroot.jam
new file mode 100644
index 0000000000..3d8e7d73b5
--- /dev/null
+++ b/tools/build/test/qt4/jamroot.jam
@@ -0,0 +1,82 @@
+# (c) Copyright Juergen Hunold 2008
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import qt4 ;
+import testing ;
+import cast ;
+
+
+
+if [ qt4.initialized ]
+{
+ use-project /boost : ../../../.. ;
+
+ project qttest
+ : requirements
+ <library>/boost/test//boost_unit_test_framework
+ ;
+
+ alias qt-tests :
+ # Check for explicit libraries, <use>/qt should not link any lib
+ [ link-fail qtcorefail.cpp : <use>/qt ]
+
+ [ run qtcore.cpp /qt//QtCore ]
+ [ run qtsql.cpp /qt//QtSql ]
+ [ run qtxml.cpp /qt//QtXml ]
+ [ run qtnetwork.cpp /qt//QtNetwork ]
+ [ run qtscript.cpp /qt//QtScript ]
+ [ run qtscripttools.cpp /qt//QtScriptTools ]
+ [ run qtxmlpatterns.cpp /qt//QtXmlPatterns ]
+
+        # ToDo: runnable example code
+ [ link qtsvg.cpp /qt//QtSvg ]
+ [ link qtgui.cpp /qt//QtGui ]
+
+ # Multimedia toolkits.
+ [ link qtwebkit.cpp /qt//QtWebKit ]
+ [ link phonon.cpp /qt//phonon ]
+ [ link qtmultimedia.cpp /qt//QtMultimedia ]
+
+ # QML
+ [ link qtdeclarative.cpp /qt//QtDeclarative ]
+
+ # Help systems.
+ [ link qthelp.cpp /qt//QtHelp ]
+ [ link qtassistant.cpp /qt//QtAssistantClient : <conditional>@check_for_assistant ]
+
+ # Check working and disabled Qt3Support
+ [ link qt3support.cpp /qt//Qt3Support : <qt3support>on ]
+ [ compile-fail qt3support.cpp /qt//Qt3Support : <qt3support>off ]
+
+ # Testing using QtTest. Simple sample
+ # ToDo: better support for "automoc" aka '#include "qttest.moc"'
+ [ run qttest.cpp [ cast _ moccable-cpp : qttest.cpp ] /qt//QtTest : : : <define>TEST_MOCK ]
+
+ # Test moc rule
+ [ run mock.cpp mock.h /qt//QtCore : : : <define>TEST_MOCK ]
+
+ # Test resource compiler
+ [ run rcc.cpp rcc.qrc /qt//QtCore : : : <rccflags>"-compress 9 -threshold 10" ]
+
+ : # requirements
+ : # default-build
+ : # usage-requirements
+ ;
+}
+
+# QtAssistant was removed in Qt >= 4.6.
+rule check_for_assistant ( properties * )
+{
+    # Extract the Qt version number from the build properties.
+ local version = [ MATCH "<qt>([0-9.]+).*"
+ : $(properties) ] ;
+
+ if $(version) > "4.6.99"
+ {
+ result += <build>no ;
+ }
+}
+
+
diff --git a/tools/build/v2/test/qt4/mock.cpp b/tools/build/test/qt4/mock.cpp
index 8f7a35c2dc..8f7a35c2dc 100644
--- a/tools/build/v2/test/qt4/mock.cpp
+++ b/tools/build/test/qt4/mock.cpp
diff --git a/tools/build/v2/test/qt4/mock.h b/tools/build/test/qt4/mock.h
index 1cc95b057a..1cc95b057a 100644
--- a/tools/build/v2/test/qt4/mock.h
+++ b/tools/build/test/qt4/mock.h
diff --git a/tools/build/v2/test/qt4/phonon.cpp b/tools/build/test/qt4/phonon.cpp
index 3151f59115..3151f59115 100644
--- a/tools/build/v2/test/qt4/phonon.cpp
+++ b/tools/build/test/qt4/phonon.cpp
diff --git a/tools/build/v2/test/qt4/qt3support.cpp b/tools/build/test/qt4/qt3support.cpp
index 35d8c73b9d..35d8c73b9d 100644
--- a/tools/build/v2/test/qt4/qt3support.cpp
+++ b/tools/build/test/qt4/qt3support.cpp
diff --git a/tools/build/v2/test/qt4/qtassistant.cpp b/tools/build/test/qt4/qtassistant.cpp
index e2a6ed7bd2..e2a6ed7bd2 100644
--- a/tools/build/v2/test/qt4/qtassistant.cpp
+++ b/tools/build/test/qt4/qtassistant.cpp
diff --git a/tools/build/v2/test/qt4/qtcore.cpp b/tools/build/test/qt4/qtcore.cpp
index f3c09039be..f3c09039be 100644
--- a/tools/build/v2/test/qt4/qtcore.cpp
+++ b/tools/build/test/qt4/qtcore.cpp
diff --git a/tools/build/v2/test/qt4/qtcorefail.cpp b/tools/build/test/qt4/qtcorefail.cpp
index 15fd36aef8..15fd36aef8 100644
--- a/tools/build/v2/test/qt4/qtcorefail.cpp
+++ b/tools/build/test/qt4/qtcorefail.cpp
diff --git a/tools/build/v2/test/qt4/qtdeclarative.cpp b/tools/build/test/qt4/qtdeclarative.cpp
index 817855bad1..817855bad1 100644
--- a/tools/build/v2/test/qt4/qtdeclarative.cpp
+++ b/tools/build/test/qt4/qtdeclarative.cpp
diff --git a/tools/build/v2/test/qt4/qtgui.cpp b/tools/build/test/qt4/qtgui.cpp
index 478e07a2af..478e07a2af 100644
--- a/tools/build/v2/test/qt4/qtgui.cpp
+++ b/tools/build/test/qt4/qtgui.cpp
diff --git a/tools/build/v2/test/qt4/qthelp.cpp b/tools/build/test/qt4/qthelp.cpp
index 32327de58b..32327de58b 100644
--- a/tools/build/v2/test/qt4/qthelp.cpp
+++ b/tools/build/test/qt4/qthelp.cpp
diff --git a/tools/build/v2/test/qt4/qtmultimedia.cpp b/tools/build/test/qt4/qtmultimedia.cpp
index dc5914aff3..dc5914aff3 100644
--- a/tools/build/v2/test/qt4/qtmultimedia.cpp
+++ b/tools/build/test/qt4/qtmultimedia.cpp
diff --git a/tools/build/v2/test/qt4/qtnetwork.cpp b/tools/build/test/qt4/qtnetwork.cpp
index 3f628d880c..3f628d880c 100644
--- a/tools/build/v2/test/qt4/qtnetwork.cpp
+++ b/tools/build/test/qt4/qtnetwork.cpp
diff --git a/tools/build/v2/test/qt4/qtscript.cpp b/tools/build/test/qt4/qtscript.cpp
index 65353daeca..65353daeca 100644
--- a/tools/build/v2/test/qt4/qtscript.cpp
+++ b/tools/build/test/qt4/qtscript.cpp
diff --git a/tools/build/v2/test/qt4/qtscripttools.cpp b/tools/build/test/qt4/qtscripttools.cpp
index 4d0b7f2560..4d0b7f2560 100644
--- a/tools/build/v2/test/qt4/qtscripttools.cpp
+++ b/tools/build/test/qt4/qtscripttools.cpp
diff --git a/tools/build/v2/test/qt4/qtsql.cpp b/tools/build/test/qt4/qtsql.cpp
index aa506b1c7c..aa506b1c7c 100644
--- a/tools/build/v2/test/qt4/qtsql.cpp
+++ b/tools/build/test/qt4/qtsql.cpp
diff --git a/tools/build/v2/test/qt4/qtsvg.cpp b/tools/build/test/qt4/qtsvg.cpp
index 8a13943805..8a13943805 100644
--- a/tools/build/v2/test/qt4/qtsvg.cpp
+++ b/tools/build/test/qt4/qtsvg.cpp
diff --git a/tools/build/v2/test/qt4/qttest.cpp b/tools/build/test/qt4/qttest.cpp
index a2744cdc73..a2744cdc73 100644
--- a/tools/build/v2/test/qt4/qttest.cpp
+++ b/tools/build/test/qt4/qttest.cpp
diff --git a/tools/build/v2/test/qt4/qtwebkit.cpp b/tools/build/test/qt4/qtwebkit.cpp
index 7d85f14731..7d85f14731 100644
--- a/tools/build/v2/test/qt4/qtwebkit.cpp
+++ b/tools/build/test/qt4/qtwebkit.cpp
diff --git a/tools/build/v2/test/qt4/qtxml.cpp b/tools/build/test/qt4/qtxml.cpp
index 8002c26584..8002c26584 100644
--- a/tools/build/v2/test/qt4/qtxml.cpp
+++ b/tools/build/test/qt4/qtxml.cpp
diff --git a/tools/build/v2/test/qt4/qtxmlpatterns.cpp b/tools/build/test/qt4/qtxmlpatterns.cpp
index dcec92fd34..dcec92fd34 100644
--- a/tools/build/v2/test/qt4/qtxmlpatterns.cpp
+++ b/tools/build/test/qt4/qtxmlpatterns.cpp
diff --git a/tools/build/test/qt4/rcc.cpp b/tools/build/test/qt4/rcc.cpp
new file mode 100644
index 0000000000..cae553bb1c
--- /dev/null
+++ b/tools/build/test/qt4/rcc.cpp
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+std::ostream& operator<<(std::ostream& out, QString const& text)
+{
+ out << text.toUtf8().constData();
+ return out;
+}
+
+BOOST_AUTO_TEST_CASE (check_exists)
+{
+ BOOST_CHECK(QFile::exists(":/test/rcc.cpp"));
+}
diff --git a/tools/build/test/qt4/rcc.qrc b/tools/build/test/qt4/rcc.qrc
new file mode 100644
index 0000000000..13ca38a5dc
--- /dev/null
+++ b/tools/build/test/qt4/rcc.qrc
@@ -0,0 +1,5 @@
+<!DOCTYPE RCC><RCC version="1.0">
+ <qresource prefix="/test/">
+ <file>rcc.cpp</file>
+ </qresource>
+</RCC>
diff --git a/tools/build/test/qt5.py b/tools/build/test/qt5.py
new file mode 100755
index 0000000000..75c4e670f9
--- /dev/null
+++ b/tools/build/test/qt5.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+# (c) Copyright Juergen Hunold 2012
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os
+
+# Run the test in a real directory in order to find Boost.Test via the Boost
+# top-level Jamroot.
+qt5_dir = os.getcwd() + "/qt5"
+
+t = BoostBuild.Tester(workdir=qt5_dir)
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/tools/build/test/qt5/jamroot.jam b/tools/build/test/qt5/jamroot.jam
new file mode 100644
index 0000000000..90da392edb
--- /dev/null
+++ b/tools/build/test/qt5/jamroot.jam
@@ -0,0 +1,68 @@
+# (c) Copyright Juergen Hunold 2008
+# Use, modification, and distribution are subject to the
+# Boost Software License, Version 1.0. (See accompanying file
+# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import qt5 ;
+import testing ;
+import cast ;
+
+path-constant CWD : . ;
+
+
+if [ qt5.initialized ]
+{
+ use-project /boost : ../../../.. ;
+
+ project qttest
+ : requirements
+ <library>/boost/test//boost_unit_test_framework
+ ;
+
+ alias qt-tests :
+ # Check for explicit libraries, <use>/qt should not link any lib
+ [ link-fail qtcorefail.cpp : <use>/qt ]
+
+ [ run qtcore.cpp /qt5//QtCore ]
+ [ run qtsql.cpp /qt5//QtSql ]
+ [ run qtxml.cpp /qt5//QtXml ]
+ [ run qtnetwork.cpp /qt5//QtNetwork ]
+ [ run qtscript.cpp /qt5//QtScript ]
+ [ run qtscripttools.cpp /qt5//QtScriptTools ]
+ [ run qtxmlpatterns.cpp /qt5//QtXmlPatterns ]
+
+        # ToDo: runnable example code
+ [ link qtsvg.cpp /qt5//QtSvg ]
+ [ link qtwidgets.cpp /qt5//QtWidgets ]
+
+ # Multimedia toolkits.
+ [ link qtwebkit.cpp /qt5//QtWebKit ]
+ [ link qtwebkitwidgets.cpp /qt5//QtWebKitWidgets ]
+ [ link qtmultimedia.cpp /qt5//QtMultimedia ]
+
+ # QtQuick version1
+ [ link qtdeclarative.cpp /qt5//QtDeclarative ]
+
+ # QtQuick version2
+ [ run qtquick.cpp /qt5//QtQuick : -platform offscreen : $(CWD)/qtquick.qml ]
+
+ # Help systems.
+ [ link qthelp.cpp /qt5//QtHelp ]
+
+ # Testing using QtTest. Simple sample
+ # ToDo: better support for "automoc" aka '#include "qttest.moc"'
+ [ run qttest.cpp [ cast _ moccable5-cpp : qttest.cpp ] /qt5//QtTest : : : <define>TEST_MOCK ]
+
+ # Test moc rule
+ [ run mock.cpp mock.h /qt5//QtCore : : : <define>TEST_MOCK ]
+
+ # Test resource compiler
+ [ run rcc.cpp rcc.qrc /qt5//QtCore : : : <rccflags>"-compress 9 -threshold 10" ]
+
+ : # requirements
+ : # default-build
+ : # usage-requirements
+ ;
+}
+
+
diff --git a/tools/build/test/qt5/mock.cpp b/tools/build/test/qt5/mock.cpp
new file mode 100644
index 0000000000..82fc608dc0
--- /dev/null
+++ b/tools/build/test/qt5/mock.cpp
@@ -0,0 +1,26 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtMoc
+
+#include "mock.h"
+
+#include <boost/test/unit_test.hpp>
+
+Mock::Mock()
+{
+}
+
+/*!
+  Check that the compiler gets the correct #defines.
+  The logic to test the moc is in the header file "mock.h".
+ */
+BOOST_AUTO_TEST_CASE(construct_mock)
+{
+ delete new Mock();
+
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(TEST_MOCK), true);
+}
diff --git a/tools/build/test/qt5/mock.h b/tools/build/test/qt5/mock.h
new file mode 100644
index 0000000000..eac177d4da
--- /dev/null
+++ b/tools/build/test/qt5/mock.h
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtCore/QObject>
+
+class Mock : public QObject
+{
+ /*!
+      Test that the moc gets the necessary #defines.
+      Otherwise the moc will not see the Q_OBJECT macro, will issue a warning,
+      and linking will fail due to missing vtable symbols.
+ */
+#if defined(TEST_MOCK)
+ Q_OBJECT
+#endif
+ public:
+
+ Mock();
+};
diff --git a/tools/build/test/qt5/qtassistant.cpp b/tools/build/test/qt5/qtassistant.cpp
new file mode 100644
index 0000000000..c15ee4eca5
--- /dev/null
+++ b/tools/build/test/qt5/qtassistant.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtAssistant
+
+#include <QAssistantClient>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( empty_assistant)
+{
+ QAssistantClient client(QString());
+}
diff --git a/tools/build/test/qt5/qtcore.cpp b/tools/build/test/qt5/qtcore.cpp
new file mode 100644
index 0000000000..6a2c62c8d1
--- /dev/null
+++ b/tools/build/test/qt5/qtcore.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qstring_test)
+{
+ QString dummy;
+
+ BOOST_CHECK_EQUAL(dummy.isEmpty(), true);
+}
diff --git a/tools/build/test/qt5/qtcorefail.cpp b/tools/build/test/qt5/qtcorefail.cpp
new file mode 100644
index 0000000000..8032d47cd1
--- /dev/null
+++ b/tools/build/test/qt5/qtcorefail.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCoreFail
+
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qstring_test)
+{
+ QString dummy;
+
+ BOOST_CHECK_EQUAL(dummy.isEmpty(), true);
+}
diff --git a/tools/build/test/qt5/qtdeclarative.cpp b/tools/build/test/qt5/qtdeclarative.cpp
new file mode 100644
index 0000000000..df70f5e461
--- /dev/null
+++ b/tools/build/test/qt5/qtdeclarative.cpp
@@ -0,0 +1,26 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtDeclarative
+
+#include <QCoreApplication>
+#include <QDeclarativeView>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_DECLARATIVE_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( declarative )
+{
+ QCoreApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QDeclarativeView view;
+}
diff --git a/tools/build/test/qt5/qthelp.cpp b/tools/build/test/qt5/qthelp.cpp
new file mode 100644
index 0000000000..b0e877a6a7
--- /dev/null
+++ b/tools/build/test/qt5/qthelp.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtHelp
+
+#include <QtHelp>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( empty_engine)
+{
+ QHelpEngine engine(QString());
+}
diff --git a/tools/build/test/qt5/qtmultimedia.cpp b/tools/build/test/qt5/qtmultimedia.cpp
new file mode 100644
index 0000000000..dc5914aff3
--- /dev/null
+++ b/tools/build/test/qt5/qtmultimedia.cpp
@@ -0,0 +1,25 @@
+// (c) Copyright Juergen Hunold 2009
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtMultimedia
+
+#include <QAudioDeviceInfo>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_MULTIMEDIA_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( audiodevices)
+{
+ QList<QAudioDeviceInfo> devices = QAudioDeviceInfo::availableDevices(QAudio::AudioOutput);
+ for(int i = 0; i < devices.size(); ++i) {
+ BOOST_TEST_MESSAGE(QAudioDeviceInfo(devices.at(i)).deviceName().constData());
+ }
+}
diff --git a/tools/build/test/qt5/qtnetwork.cpp b/tools/build/test/qt5/qtnetwork.cpp
new file mode 100644
index 0000000000..d342466897
--- /dev/null
+++ b/tools/build/test/qt5/qtnetwork.cpp
@@ -0,0 +1,33 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtNetwork
+
+#include <QHostInfo>
+
+#include <QTextStream>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NETWORK_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( hostname )
+{
+ QHostInfo info(QHostInfo::fromName("www.boost.org")); //blocking lookup
+
+ QTextStream stream(stdout, QIODevice::WriteOnly);
+
+ Q_FOREACH(QHostAddress address, info.addresses())
+ {
+ BOOST_CHECK_EQUAL(address.isNull(), false);
+ stream << address.toString() << endl;
+ }
+}
diff --git a/tools/build/test/qt5/qtquick.cpp b/tools/build/test/qt5/qtquick.cpp
new file mode 100644
index 0000000000..bec0d809c4
--- /dev/null
+++ b/tools/build/test/qt5/qtquick.cpp
@@ -0,0 +1,43 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtQuick
+#include <QDir>
+#include <QTimer>
+#include <QGuiApplication>
+#include <QQmlEngine>
+#include <QQuickView>
+#include <QDebug>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_QML_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_QUICK_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE (simple_test)
+{
+ QGuiApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QQuickView view;
+
+ QString fileName(boost::unit_test::framework::master_test_suite().argv[1]);
+
+ view.connect(view.engine(), SIGNAL(quit()), &app, SLOT(quit()));
+ view.setSource(QUrl::fromLocalFile(fileName));
+
+ QTimer::singleShot(2000, &app, SLOT(quit())); // Auto-close window
+
+ if (QGuiApplication::platformName() == QLatin1String("qnx") ||
+ QGuiApplication::platformName() == QLatin1String("eglfs")) {
+ view.setResizeMode(QQuickView::SizeRootObjectToView);
+ view.showFullScreen();
+ } else {
+ view.show();
+ }
+ BOOST_CHECK_EQUAL(app.exec(), 0);
+}
diff --git a/tools/build/test/qt5/qtquick.qml b/tools/build/test/qt5/qtquick.qml
new file mode 100644
index 0000000000..8defc05030
--- /dev/null
+++ b/tools/build/test/qt5/qtquick.qml
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+import QtQuick 2.0
+
+Rectangle {
+ id: page
+ width: 400; height: 200
+ color: "#d6d6d6"
+ Text {
+ id: helloText
+ text: "Boost.Build built!"
+ color: "darkgray"
+ anchors.horizontalCenter: page.horizontalCenter
+ anchors.verticalCenter: page.verticalCenter
+ font.pointSize: 30; font.italic: true ; font.bold: true
+ }
+}
diff --git a/tools/build/test/qt5/qtscript.cpp b/tools/build/test/qt5/qtscript.cpp
new file mode 100644
index 0000000000..d48c073ebd
--- /dev/null
+++ b/tools/build/test/qt5/qtscript.cpp
@@ -0,0 +1,37 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScript
+
+#include <QScriptEngine>
+
+#include <QCoreApplication>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCRIPT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( script )
+{
+ QCoreApplication app(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv);
+ QScriptEngine myEngine;
+ QScriptValue three = myEngine.evaluate("1 + 2");
+
+ BOOST_CHECK_EQUAL(three.toNumber(), 3);
+ BOOST_CHECK_EQUAL(three.toString(), QLatin1String("3"));
+}
diff --git a/tools/build/test/qt5/qtscripttools.cpp b/tools/build/test/qt5/qtscripttools.cpp
new file mode 100644
index 0000000000..002056a0f1
--- /dev/null
+++ b/tools/build/test/qt5/qtscripttools.cpp
@@ -0,0 +1,47 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtScriptTools
+
+#include <QScriptEngine>
+
+#include <QScriptEngineDebugger>
+
+#include <QCoreApplication>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+namespace utf = boost::unit_test::framework;
+
+std::ostream&
+operator << (std::ostream& stream, QString const& string)
+{
+ stream << qPrintable(string);
+ return stream;
+}
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SCRIPTTOOLS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( script )
+{
+ QCoreApplication app(utf::master_test_suite().argc,
+ utf::master_test_suite().argv);
+
+ QScriptEngine myEngine;
+ QScriptValue three = myEngine.evaluate("1 + 2");
+
+ QScriptEngineDebugger debugger;
+ debugger.attachTo(&myEngine);
+
+ BOOST_CHECK_EQUAL(three.toNumber(), 3);
+ BOOST_CHECK_EQUAL(three.toString(), QLatin1String("3"));
+
+ debugger.detach();
+}
diff --git a/tools/build/test/qt5/qtsql.cpp b/tools/build/test/qt5/qtsql.cpp
new file mode 100644
index 0000000000..127c5a3f47
--- /dev/null
+++ b/tools/build/test/qt5/qtsql.cpp
@@ -0,0 +1,37 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSql
+
+#include <QSqlDatabase>
+
+#include <QTextStream>
+#include <QStringList>
+
+#include <boost/test/unit_test.hpp>
+
+#include <iostream>
+
+BOOST_AUTO_TEST_CASE (defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SQL_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( drivers )
+{
+ QTextStream stream(stdout, QIODevice::WriteOnly);
+
+ Q_FOREACH(QString it, QSqlDatabase::drivers())
+ {
+ stream << it << endl;
+ }
+}
+
+BOOST_AUTO_TEST_CASE( construct )
+{
+ QSqlDatabase database;
+ BOOST_CHECK_EQUAL(database.isOpen(), false);
+}
diff --git a/tools/build/test/qt5/qtsvg.cpp b/tools/build/test/qt5/qtsvg.cpp
new file mode 100644
index 0000000000..ccfd6b4d19
--- /dev/null
+++ b/tools/build/test/qt5/qtsvg.cpp
@@ -0,0 +1,21 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtSvg
+
+#include <QtSvg>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_SVG_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( generator_construct)
+{
+ QSvgGenerator generator;
+}
diff --git a/tools/build/test/qt5/qttest.cpp b/tools/build/test/qt5/qttest.cpp
new file mode 100644
index 0000000000..ddc8f686ae
--- /dev/null
+++ b/tools/build/test/qt5/qttest.cpp
@@ -0,0 +1,30 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include <QtTest>
+
+class QtTest: public QObject
+{
+ /*!
+ Test if the moc gets the #define
+ */
+#if defined(TEST_MOCK)
+ Q_OBJECT
+#endif
+
+private Q_SLOTS:
+ void toUpper();
+};
+
+void
+QtTest::toUpper()
+{
+ QString str = "Hello";
+ QCOMPARE(str.toUpper(), QString("HELLO"));
+}
+
+QTEST_MAIN(QtTest)
+#include "qttest.moc"
+
diff --git a/tools/build/test/qt5/qtwebkit.cpp b/tools/build/test/qt5/qtwebkit.cpp
new file mode 100644
index 0000000000..aa6fdc92f1
--- /dev/null
+++ b/tools/build/test/qt5/qtwebkit.cpp
@@ -0,0 +1,22 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebKit
+
+#include <QWebSettings>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBKIT_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( webkit )
+{
+ BOOST_CHECK(QWebSettings::globalSettings());
+}
diff --git a/tools/build/test/qt5/qtwebkitwidgets.cpp b/tools/build/test/qt5/qtwebkitwidgets.cpp
new file mode 100644
index 0000000000..52c05c9a98
--- /dev/null
+++ b/tools/build/test/qt5/qtwebkitwidgets.cpp
@@ -0,0 +1,23 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtWebKitWidgets
+
+#include <QWebPage>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WEBKITWIDGETS_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( webkit )
+{
+ QWebPage page;
+ BOOST_CHECK_EQUAL(page.isModified(), false);
+}
diff --git a/tools/build/test/qt5/qtwidgets.cpp b/tools/build/test/qt5/qtwidgets.cpp
new file mode 100644
index 0000000000..0020034483
--- /dev/null
+++ b/tools/build/test/qt5/qtwidgets.cpp
@@ -0,0 +1,43 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtGui
+
+#include <QtWidgets/QApplication>
+
+#include <boost/test/unit_test.hpp>
+
+struct Fixture
+{
+ Fixture()
+ : application(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv,
+ false)
+ {
+ BOOST_TEST_MESSAGE( "setup QApplication fixture" );
+ }
+
+ ~Fixture()
+ {
+ BOOST_TEST_MESSAGE( "teardown QApplication fixture" );
+ }
+
+ QApplication application;
+};
+
+BOOST_GLOBAL_FIXTURE( Fixture )
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_GUI_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WIDGETS_LIB), true);
+}
+
+
+BOOST_AUTO_TEST_CASE( qtgui_test)
+{
+ BOOST_CHECK_EQUAL(true, true);
+}
diff --git a/tools/build/test/qt5/qtxml.cpp b/tools/build/test/qt5/qtxml.cpp
new file mode 100644
index 0000000000..3df6dd2c19
--- /dev/null
+++ b/tools/build/test/qt5/qtxml.cpp
@@ -0,0 +1,29 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtXml
+
+#include <QtXml>
+
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), true);
+}
+
+BOOST_AUTO_TEST_CASE( reader_construct)
+{
+ QXmlStreamReader reader;
+ BOOST_CHECK_EQUAL(reader.atEnd(), false);
+}
+
+BOOST_AUTO_TEST_CASE( writer_construct)
+{
+ QXmlStreamWriter writer;
+ BOOST_CHECK_EQUAL(writer.device(), static_cast<QIODevice*>(0));
+}
+
diff --git a/tools/build/test/qt5/qtxmlpatterns.cpp b/tools/build/test/qt5/qtxmlpatterns.cpp
new file mode 100644
index 0000000000..9a9b854306
--- /dev/null
+++ b/tools/build/test/qt5/qtxmlpatterns.cpp
@@ -0,0 +1,76 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtXmlPatterns
+
+#include <QXmlQuery>
+#include <QXmlSerializer>
+
+#include <QCoreApplication>
+#include <QString>
+#include <QTextStream>
+#include <QBuffer>
+
+#include <boost/test/unit_test.hpp>
+
+
+struct Fixture
+{
+ Fixture()
+ : application(boost::unit_test::framework::master_test_suite().argc,
+ boost::unit_test::framework::master_test_suite().argv)
+ {
+ BOOST_TEST_MESSAGE( "setup QCoreApplication fixture" );
+ }
+
+ ~Fixture()
+ {
+ BOOST_TEST_MESSAGE( "teardown QCoreApplication fixture" );
+ }
+
+ QCoreApplication application;
+};
+
+BOOST_GLOBAL_FIXTURE( Fixture )
+
+QByteArray doc("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ "<html xmlns=\"http://www.w3.org/1999/xhtml/\" xml:lang=\"en\" lang=\"en\">"
+" <head>"
+" <title>Global variables report for globals.gccxml</title>"
+" </head>"
+"<body><p>Some Test text</p></body></html>");
+
+BOOST_AUTO_TEST_CASE( defines)
+{
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true);
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XMLPATTERNS_LIB), true);
+
+ BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_XML_LIB), false);
+}
+
+BOOST_AUTO_TEST_CASE( extract )
+{
+
+ QBuffer buffer(&doc); // This is a QIODevice.
+ buffer.open(QIODevice::ReadOnly);
+ QXmlQuery query;
+ query.bindVariable("myDocument", &buffer);
+ query.setQuery("declare variable $myDocument external;"
+ "doc($myDocument)");///p[1]");
+
+ BOOST_CHECK_EQUAL(query.isValid(), true);
+
+ QByteArray result;
+ QBuffer out(&result);
+ out.open(QIODevice::WriteOnly);
+
+ QXmlSerializer serializer(query, &out);
+ BOOST_CHECK_EQUAL(query.evaluateTo(&serializer), true);
+
+ QTextStream stream(stdout);
+ BOOST_CHECK_EQUAL(result.isEmpty(), false);
+ stream << "hallo" << result << endl;
+}
+
diff --git a/tools/build/test/qt5/rcc.cpp b/tools/build/test/qt5/rcc.cpp
new file mode 100644
index 0000000000..cae553bb1c
--- /dev/null
+++ b/tools/build/test/qt5/rcc.cpp
@@ -0,0 +1,20 @@
+// (c) Copyright Juergen Hunold 2012
+// Use, modification and distribution is subject to the Boost Software
+// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#define BOOST_TEST_MODULE QtCore
+#include <QtCore>
+
+#include <boost/test/unit_test.hpp>
+
+std::ostream& operator<<(std::ostream& out, QString const& text)
+{
+ out << text.toUtf8().constData();
+ return out;
+}
+
+BOOST_AUTO_TEST_CASE (check_exists)
+{
+ BOOST_CHECK(QFile::exists(":/test/rcc.cpp"));
+}
diff --git a/tools/build/test/qt5/rcc.qrc b/tools/build/test/qt5/rcc.qrc
new file mode 100644
index 0000000000..13ca38a5dc
--- /dev/null
+++ b/tools/build/test/qt5/rcc.qrc
@@ -0,0 +1,5 @@
+<!DOCTYPE RCC><RCC version="1.0">
+ <qresource prefix="/test/">
+ <file>rcc.cpp</file>
+ </qresource>
+</RCC>
diff --git a/tools/build/v2/test/railsys.py b/tools/build/test/railsys.py
index 7b7f8bd0bd..7b7f8bd0bd 100644
--- a/tools/build/v2/test/railsys.py
+++ b/tools/build/test/railsys.py
diff --git a/tools/build/v2/test/railsys/libx/include/test_libx.h b/tools/build/test/railsys/libx/include/test_libx.h
index fe573fc16f..fe573fc16f 100644
--- a/tools/build/v2/test/railsys/libx/include/test_libx.h
+++ b/tools/build/test/railsys/libx/include/test_libx.h
diff --git a/tools/build/v2/test/railsys/libx/jamroot.jam b/tools/build/test/railsys/libx/jamroot.jam
index d09982dd1a..d09982dd1a 100644
--- a/tools/build/v2/test/railsys/libx/jamroot.jam
+++ b/tools/build/test/railsys/libx/jamroot.jam
diff --git a/tools/build/v2/test/railsys/libx/src/jamfile.jam b/tools/build/test/railsys/libx/src/jamfile.jam
index 639e0cc90b..639e0cc90b 100644
--- a/tools/build/v2/test/railsys/libx/src/jamfile.jam
+++ b/tools/build/test/railsys/libx/src/jamfile.jam
diff --git a/tools/build/v2/test/railsys/libx/src/test_libx.cpp b/tools/build/test/railsys/libx/src/test_libx.cpp
index be1fbc27f7..be1fbc27f7 100644
--- a/tools/build/v2/test/railsys/libx/src/test_libx.cpp
+++ b/tools/build/test/railsys/libx/src/test_libx.cpp
diff --git a/tools/build/v2/test/railsys/program/include/test_a.h b/tools/build/test/railsys/program/include/test_a.h
index 8002859e15..8002859e15 100644
--- a/tools/build/v2/test/railsys/program/include/test_a.h
+++ b/tools/build/test/railsys/program/include/test_a.h
diff --git a/tools/build/test/railsys/program/jamfile.jam b/tools/build/test/railsys/program/jamfile.jam
new file mode 100644
index 0000000000..9e36f408fc
--- /dev/null
+++ b/tools/build/test/railsys/program/jamfile.jam
@@ -0,0 +1,45 @@
+# ================================================================
+#
+# Railsys
+# --------------
+#
+# Copyright (c) 2002 Institute of Transport,
+# Railway Construction and Operation,
+# University of Hanover, Germany
+# Copyright (c) 2006 Jürgen Hunold
+#
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+#
+# 02/21/02! Jürgen Hunold
+#
+# $Id$
+#
+# ================================================================
+
+local BOOST_ROOT = [ modules.peek : BOOST_ROOT ] ;
+
+use-project /libx : ../libx/src ;
+
+project program
+ : requirements
+ <include>$(BOOST_ROOT)
+ <threading>multi
+ <library>/qt3//qt
+ <hardcode-dll-paths>true
+ <stdlib>stlport
+ <use>/libx
+ <library>/libx//libx
+
+ : usage-requirements
+ <include>$(BOOST_ROOT)
+ :
+ default-build release
+ <threading>multi
+ <library>/qt3//qt
+ <hardcode-dll-paths>true
+ ;
+
+build-project main ;
+
diff --git a/tools/build/v2/test/railsys/program/jamroot.jam b/tools/build/test/railsys/program/jamroot.jam
index 23d42195f7..23d42195f7 100644
--- a/tools/build/v2/test/railsys/program/jamroot.jam
+++ b/tools/build/test/railsys/program/jamroot.jam
diff --git a/tools/build/v2/test/railsys/program/liba/jamfile.jam b/tools/build/test/railsys/program/liba/jamfile.jam
index f74311d0d0..f74311d0d0 100644
--- a/tools/build/v2/test/railsys/program/liba/jamfile.jam
+++ b/tools/build/test/railsys/program/liba/jamfile.jam
diff --git a/tools/build/v2/test/railsys/program/liba/test_a.cpp b/tools/build/test/railsys/program/liba/test_a.cpp
index f9e5388570..f9e5388570 100644
--- a/tools/build/v2/test/railsys/program/liba/test_a.cpp
+++ b/tools/build/test/railsys/program/liba/test_a.cpp
diff --git a/tools/build/v2/test/railsys/program/main/jamfile.jam b/tools/build/test/railsys/program/main/jamfile.jam
index 095978eaf9..095978eaf9 100644
--- a/tools/build/v2/test/railsys/program/main/jamfile.jam
+++ b/tools/build/test/railsys/program/main/jamfile.jam
diff --git a/tools/build/v2/test/railsys/program/main/main.cpp b/tools/build/test/railsys/program/main/main.cpp
index 3f13f4bfc4..3f13f4bfc4 100644
--- a/tools/build/v2/test/railsys/program/main/main.cpp
+++ b/tools/build/test/railsys/program/main/main.cpp
diff --git a/tools/build/v2/test/readme.txt b/tools/build/test/readme.txt
index 48459f805c..48459f805c 100644
--- a/tools/build/v2/test/readme.txt
+++ b/tools/build/test/readme.txt
diff --git a/tools/build/test/rebuilds.py b/tools/build/test/rebuilds.py
new file mode 100644
index 0000000000..89ad4d84f7
--- /dev/null
+++ b/tools/build/test/rebuilds.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+
+# Copyright 2005 Dave Abrahams
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+def wait_for_bar(t):
+ """
+ Wait so that the test system correctly recognizes the 'bar' file as
+ touched after the next build run. Without the wait, the next build run may
+ rebuild the 'bar' file with its new and old modification timestamps too
+ close to each other - which could, depending on the currently supported
+ file modification timestamp resolution, be detected as 'no change' by the
+ testing system.
+
+ """
+ t.wait_for_time_change("bar", touch=False)
+
+
+t = BoostBuild.Tester(["-ffile.jam", "-d+3", "-d+12", "-d+13"], pass_d0=False,
+ pass_toolset=0)
+
+t.write("file.jam", """\
+rule make
+{
+ DEPENDS $(<) : $(>) ;
+ DEPENDS all : $(<) ;
+}
+actions make
+{
+ echo "******" making $(<) from $(>) "******"
+ echo made from $(>) > $(<)
+}
+
+make aux1 : bar ;
+make foo : bar ;
+REBUILDS foo : bar ;
+make bar : baz ;
+make aux2 : bar ;
+""")
+
+t.write("baz", "nothing")
+
+t.run_build_system(["bar"])
+t.expect_addition("bar")
+t.expect_nothing_more()
+
+wait_for_bar(t)
+t.run_build_system(["foo"])
+t.expect_touch("bar")
+t.expect_addition("foo")
+t.expect_nothing_more()
+
+t.run_build_system()
+t.expect_addition(["aux1", "aux2"])
+t.expect_nothing_more()
+
+t.touch("bar")
+wait_for_bar(t)
+t.run_build_system()
+t.expect_touch(["foo", "bar", "aux1", "aux2"])
+t.expect_nothing_more()
+
+t.cleanup()
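The wait_for_bar() helper above guards against coarse file modification timestamp resolution. A minimal sketch of the effect, assuming a filesystem that only records 2-second ticks (an illustrative model, not Boost.Build's actual dependency check):

# Illustrative model only: a timestamp check that cannot see changes made
# within one filesystem "tick" (the 2-second resolution is an assumption).
def same_tick(old_mtime, new_mtime, resolution=2.0):
    # Two timestamps inside the same tick are indistinguishable.
    return int(old_mtime // resolution) == int(new_mtime // resolution)

print(same_tick(100.0, 100.9))  # True  - a rebuild 0.9s later looks like "no change"
print(same_tick(100.0, 103.0))  # False - waiting past the tick makes the change visible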
diff --git a/tools/build/test/regression.py b/tools/build/test/regression.py
new file mode 100644
index 0000000000..dde4d8151c
--- /dev/null
+++ b/tools/build/test/regression.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2003.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test for the regression testing framework.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("c.cpp", "\n")
+t.write("r.cpp", """\
+void helper();
+
+#include <iostream>
+int main( int ac, char * av[] )
+{
+ helper();
+ for ( int i = 1; i < ac; ++i )
+ std::cout << av[ i ] << '\\n';
+}
+""")
+t.write("c-f.cpp", "int\n")
+t.write("r-f.cpp", "int main() { return 1; }\n")
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """\
+import testing ;
+compile c.cpp ;
+compile-fail c-f.cpp ;
+run r.cpp libs//helper : foo bar ;
+run-fail r-f.cpp ;
+""")
+
+t.write("libs/jamfile.jam", "lib helper : helper.cpp ;")
+t.write("libs/helper.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+helper() {}
+""")
+
+# First test that when outcomes are expected, all .test files are created.
+t.run_build_system(["hardcode-dll-paths=false"], stderr=None, status=None)
+t.expect_addition("bin/c.test/$toolset/debug/c.test")
+t.expect_addition("bin/c-f.test/$toolset/debug/c-f.test")
+t.expect_addition("bin/r.test/$toolset/debug/r.test")
+t.expect_addition("bin/r-f.test/$toolset/debug/r-f.test")
+
+# Make sure args are handled.
+t.expect_content("bin/r.test/$toolset/debug/r.output",
+ "foo\nbar\n*\nEXIT STATUS: 0*\n", True)
+
+# Test that input file is handled as well.
+t.write("r.cpp", """\
+#include <iostream>
+#include <fstream>
+int main( int ac, char * av[] )
+{
+ for ( int i = 1; i < ac; ++i )
+ {
+ std::ifstream ifs( av[ i ] );
+ std::cout << ifs.rdbuf();
+ }
+}
+""")
+
+t.write("dir/input.txt", "test input")
+
+t.write("jamfile.jam", """\
+import testing ;
+compile c.cpp ;
+obj c-obj : c.cpp ;
+compile-fail c-f.cpp ;
+run r.cpp : : dir/input.txt ;
+run-fail r-f.cpp ;
+time execution : r ;
+time compilation : c-obj ;
+""")
+
+t.run_build_system(["hardcode-dll-paths=false"])
+t.expect_content("bin/r.test/$toolset/debug/r.output", """\
+test input
+EXIT STATUS: 0
+""")
+
+t.expect_addition('bin/$toolset/debug/execution.time')
+t.expect_addition('bin/$toolset/debug/compilation.time')
+
+# Make sure test failures are detected. Reverse expectation and see if .test
+# files are created or not.
+t.write("jamfile.jam", """\
+import testing ;
+compile-fail c.cpp ;
+compile c-f.cpp ;
+run-fail r.cpp : : dir/input.txt ;
+run r-f.cpp ;
+""")
+
+t.touch(BoostBuild.List("c.cpp c-f.cpp r.cpp r-f.cpp"))
+
+t.run_build_system(["hardcode-dll-paths=false"], stderr=None, status=1)
+t.expect_removal("bin/c.test/$toolset/debug/c.test")
+t.expect_removal("bin/c-f.test/$toolset/debug/c-f.test")
+t.expect_removal("bin/r.test/$toolset/debug/r.test")
+t.expect_removal("bin/r-f.test/$toolset/debug/r-f.test")
+
+t.cleanup()
diff --git a/tools/build/test/relative_sources.py b/tools/build/test/relative_sources.py
new file mode 100644
index 0000000000..bd4620fc60
--- /dev/null
+++ b/tools/build/test/relative_sources.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that we can specify sources using relative names.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Test that relative path to source, 'src', is preserved.
+t.write("jamroot.jam", "exe a : src/a.cpp ;")
+t.write("src/a.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/src/a.obj")
+
+# Test that the relative path to source is preserved
+# when using 'glob'.
+t.rm("bin")
+t.write("jamroot.jam", "exe a : [ glob src/*.cpp ] ;")
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/src/a.obj")
+
+
+# Test that relative path with ".." is *not* added to
+# target path.
+t.rm(".")
+t.write("jamroot.jam", "")
+t.write("a.cpp", "int main() { return 0; }\n")
+t.write("build/Jamfile", "exe a : ../a.cpp ; ")
+t.run_build_system(subdir="build")
+t.expect_addition("build/bin/$toolset/debug/a.obj")
+
+t.cleanup()
diff --git a/tools/build/test/remove_requirement.py b/tools/build/test/remove_requirement.py
new file mode 100644
index 0000000000..fa820c869f
--- /dev/null
+++ b/tools/build/test/remove_requirement.py
@@ -0,0 +1,89 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+t.write("jamroot.jam", """
+project : requirements <threading>multi <variant>debug:<link>static ;
+
+build-project sub ;
+build-project sub2 ;
+build-project sub3 ;
+build-project sub4 ;
+""")
+
+t.write("sub/jamfile.jam", """
+exe hello : hello.cpp : -<threading>multi ;
+""")
+
+t.write("sub/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub2/jamfile.jam", """
+project : requirements -<threading>multi ;
+exe hello : hello.cpp ;
+""")
+
+t.write("sub2/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub3/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub3/jamfile.jam", """
+exe hello : hello.cpp : -<variant>debug:<link>static ;
+""")
+
+t.write("sub4/hello.cpp", """
+int main() {}
+""")
+
+t.write("sub4/jamfile.jam", """
+project : requirements -<variant>debug:<link>static ;
+exe hello : hello.cpp ;
+""")
+
+t.run_build_system()
+
+t.expect_addition("sub/bin/$toolset/debug/link-static/hello.exe")
+t.expect_addition("sub2/bin/$toolset/debug/link-static/hello.exe")
+t.expect_addition("sub3/bin/$toolset/debug/threading-multi/hello.exe")
+t.expect_addition("sub4/bin/$toolset/debug/threading-multi/hello.exe")
+
+t.rm(".")
+
+# Now test that path requirements can be removed as well.
+t.write("jamroot.jam", """
+build-project sub ;
+""")
+
+t.write("sub/jamfile.jam", """
+project : requirements <include>broken ;
+exe hello : hello.cpp : -<include>broken ;
+""")
+
+t.write("sub/hello.cpp", """
+#include "math.h"
+int main() {}
+""")
+
+t.write("sub/broken/math.h", """
+Broken
+""")
+
+
+t.run_build_system()
+
+t.expect_addition("sub/bin/$toolset/debug/hello.exe")
+
+t.cleanup()
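A minimal sketch of the requirement-removal semantics this test exercises, assuming -&lt;feature&gt;value simply strips that exact property from the inherited requirement set (an illustrative model, not Boost.Build's property code):

# Illustrative model: "-<feature>value" in a target's or project's requirements
# removes that exact property inherited from the parent project.
def apply_removals(inherited, removals):
    return [p for p in inherited if p not in removals]

inherited = ["<threading>multi", "<variant>debug:<link>static"]
print(apply_removals(inherited, ["<threading>multi"]))
# ['<variant>debug:<link>static'] - matches sub/ and sub2/ landing in .../link-static/
print(apply_removals(inherited, ["<variant>debug:<link>static"]))
# ['<threading>multi'] - matches sub3/ and sub4/ landing in .../threading-multi/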
diff --git a/tools/build/test/rescan_header.py b/tools/build/test/rescan_header.py
new file mode 100755
index 0000000000..1f07acaa8b
--- /dev/null
+++ b/tools/build/test/rescan_header.py
@@ -0,0 +1,265 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Test a header loop that depends on (but does not contain) a generated header.
+t.write("test.cpp", '#include "header1.h"\n')
+
+t.write("header1.h", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.h", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header1.h"
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", "/* empty file */\n")
+
+t.write("jamroot.jam", """\
+import common ;
+make header3.h : header3.in : @common.copy ;
+obj test : test.cpp : <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2"])
+t.expect_addition("bin/$toolset/debug/header3.h")
+t.expect_addition("bin/$toolset/debug/test.obj")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test a linear sequence of generated headers.
+t.write("test.cpp", '#include "header1.h"\n')
+
+t.write("header1.in", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", "/* empty file */\n")
+
+t.write("jamroot.jam", """\
+import common ;
+make header1.h : header1.in : @common.copy ;
+make header2.h : header2.in : @common.copy ;
+make header3.h : header3.in : @common.copy ;
+obj test : test.cpp :
+ <implicit-dependency>header1.h
+ <implicit-dependency>header2.h
+ <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/$toolset/debug/header1.h")
+t.expect_addition("bin/$toolset/debug/header2.h")
+t.expect_addition("bin/$toolset/debug/header3.h")
+t.expect_addition("bin/$toolset/debug/test.obj")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test a loop in generated headers.
+t.write("test.cpp", '#include "header1.h"\n')
+
+t.write("header1.in", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header1.h"
+#endif
+""")
+
+t.write("jamroot.jam", """\
+import common ;
+
+actions copy {
+ sleep 1
+ cp $(>) $(<)
+}
+
+make header1.h : header1.in : @common.copy ;
+make header2.h : header2.in : @common.copy ;
+make header3.h : header3.in : @common.copy ;
+obj test : test.cpp :
+ <implicit-dependency>header1.h
+ <implicit-dependency>header2.h
+ <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/$toolset/debug/header1.h")
+t.expect_addition("bin/$toolset/debug/header2.h")
+t.expect_addition("bin/$toolset/debug/header3.h")
+t.expect_addition("bin/$toolset/debug/test.obj")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test that all the dependencies of a loop are updated before any of the
+# dependents.
+t.write("test1.cpp", '#include "header1.h"\n')
+
+t.write("test2.cpp", """\
+#include "header2.h"
+int main() {}
+""")
+
+t.write("header1.h", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.h", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header1.h"
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.in", "\n")
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("jamroot.jam", """\
+import common ;
+import os ;
+
+if [ os.name ] = NT
+{
+ SLEEP = call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+rule copy { common.copy $(<) : $(>) ; }
+actions copy { $(SLEEP) 1 }
+
+make header3.h : header3.in : @copy ;
+exe test : test2.cpp test1.cpp : <implicit-dependency>header3.h ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/$toolset/debug/header3.h")
+t.expect_addition("bin/$toolset/debug/test1.obj")
+t.expect_addition("bin/$toolset/debug/test2.obj")
+t.expect_addition("bin/$toolset/debug/test.exe")
+t.expect_nothing_more()
+
+t.touch("header3.in")
+t.run_build_system(["-j2", "test"])
+t.expect_touch("bin/$toolset/debug/header3.h")
+t.expect_touch("bin/$toolset/debug/test1.obj")
+t.expect_touch("bin/$toolset/debug/test2.obj")
+t.expect_touch("bin/$toolset/debug/test.exe")
+t.expect_nothing_more()
+
+t.rm(".")
+
+# Test a loop that includes a generated header
+t.write("test1.cpp", '#include "header1.h"\n')
+t.write("test2.cpp", """\
+#include "header2.h"
+int main() {}
+""")
+
+t.write("header1.h", """\
+#ifndef HEADER1_H
+#define HEADER1_H
+#include "header2.h"
+#endif
+""")
+
+t.write("header2.in", """\
+#ifndef HEADER2_H
+#define HEADER2_H
+#include "header3.h"
+#endif
+""")
+
+t.write("header3.h", """\
+#ifndef HEADER3_H
+#define HEADER3_H
+#include "header1.h"
+#endif
+""")
+
+t.write("sleep.bat", """\
+::@timeout /T %1 /NOBREAK >nul
+@ping 127.0.0.1 -n 2 -w 1000 >nul
+@ping 127.0.0.1 -n %1 -w 1000 >nul
+@exit /B 0
+""")
+
+t.write("jamroot.jam", """\
+import common ;
+import os ;
+
+if [ os.name ] = NT
+{
+ SLEEP = call sleep.bat ;
+}
+else
+{
+ SLEEP = sleep ;
+}
+
+rule copy { common.copy $(<) : $(>) ; }
+actions copy { $(SLEEP) 1 }
+
+make header2.h : header2.in : @copy ;
+exe test : test2.cpp test1.cpp : <implicit-dependency>header2.h <include>. ;
+""")
+
+t.run_build_system(["-j2", "test"])
+t.expect_addition("bin/$toolset/debug/header2.h")
+t.expect_addition("bin/$toolset/debug/test1.obj")
+t.expect_addition("bin/$toolset/debug/test2.obj")
+t.expect_addition("bin/$toolset/debug/test.exe")
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/test/resolution.py b/tools/build/test/resolution.py
new file mode 100644
index 0000000000..a970857328
--- /dev/null
+++ b/tools/build/test/resolution.py
@@ -0,0 +1,35 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2006.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests for the target id resolution process.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+# Create the needed files
+t.write("jamroot.jam", """\
+exe hello : hello.cpp ;
+# This should use the 'hello' target, even if there is a 'hello' file in the
+# current dir.
+install s : hello : <location>. ;
+""")
+
+t.write("hello.cpp", "int main() {}\n")
+
+t.run_build_system()
+
+t.expect_addition("bin/$toolset/debug/hello.obj")
+
+t.touch("hello.cpp")
+t.run_build_system(["s"])
+# If 'hello' in the 's' target resolved to a file in the current dir, nothing
+# would be rebuilt.
+t.expect_touch("bin/$toolset/debug/hello.obj")
+
+t.cleanup()
diff --git a/tools/build/test/results-python.txt b/tools/build/test/results-python.txt
new file mode 100644
index 0000000000..83b351b2ce
--- /dev/null
+++ b/tools/build/test/results-python.txt
@@ -0,0 +1,132 @@
+Note: skipping extra tests
+unit_tests : FAILED
+module_actions : PASSED
+startup_v2 : PASSED
+core_d12 : PASSED
+core_typecheck : PASSED
+core_delete_module : PASSED
+core_language : PASSED
+core_arguments : PASSED
+core_varnames : PASSED
+core_import_module : PASSED
+absolute_sources : PASSED
+alias : PASSED
+alternatives : PASSED
+bad_dirname : PASSED
+build_dir : PASSED
+build_file : PASSED
+build_no : PASSED
+builtin_echo : PASSED
+builtin_exit : PASSED
+builtin_split_by_characters : FAILED
+c_file : PASSED
+chain : PASSED
+clean : PASSED
+composite : PASSED
+conditionals : PASSED
+conditionals2 : PASSED
+conditionals3 : PASSED
+conditionals_multiple : PASSED
+configuration : FAILED
+copy_time : PASSED
+core_action_output : PASSED
+core_action_status : PASSED
+core_actions_quietly : PASSED
+core_at_file : PASSED
+core_bindrule : PASSED
+core_multifile_actions : PASSED
+core_nt_cmd_line : PASSED
+core_option_d2 : PASSED
+core_option_l : PASSED
+core_option_n : PASSED
+core_parallel_actions : PASSED
+core_parallel_multifile_actions_1 : PASSED
+core_parallel_multifile_actions_2 : PASSED
+core_source_line_tracking : PASSED
+core_update_now : PASSED
+core_variables_in_actions : PASSED
+custom_generator : PASSED
+default_build : PASSED
+default_features : PASSED
+dependency_property : PASSED
+dependency_test : FAILED
+direct_request_test : PASSED
+disambiguation : PASSED
+dll_path : PASSED
+double_loading : PASSED
+duplicate : PASSED
+example_libraries : PASSED
+example_make : PASSED
+exit_status : PASSED
+expansion : PASSED
+explicit : PASSED
+free_features_request : PASSED
+generator_selection : FAILED
+generators_test : FAILED
+implicit_dependency : PASSED
+indirect_conditional : FAILED
+inherit_toolset : FAILED
+inherited_dependency : PASSED
+inline : PASSED
+lib_source_property : PASSED
+library_chain : PASSED
+library_property : PASSED
+link : FAILED
+load_order : FAILED
+loop : PASSED
+make_rule : PASSED
+message : FAILED
+ndebug : PASSED
+no_type : PASSED
+notfile : PASSED
+ordered_include : PASSED
+out_of_tree : PASSED
+path_features : FAILED
+prebuilt : PASSED
+print : FAILED
+project_dependencies : PASSED
+project_glob : PASSED
+project_id : FAILED
+project_root_constants : PASSED
+project_root_rule : PASSED
+project_test3 : FAILED
+project_test4 : FAILED
+property_expansion : PASSED
+rebuilds : PASSED
+regression : PASSED
+relative_sources : PASSED
+remove_requirement : PASSED
+rescan_header : PASSED
+resolution : PASSED
+scanner_causing_rebuilds : FAILED
+searched_lib : PASSED
+skipping : PASSED
+sort_rule : PASSED
+source_locations : PASSED
+source_order : FAILED
+space_in_path : PASSED
+stage : PASSED
+standalone : PASSED
+static_and_shared_library : PASSED
+suffix : PASSED
+tag : PASSED
+test_result_dumping : PASSED
+test_rc : FAILED
+testing_support : PASSED
+timedata : FAILED
+unit_test : PASSED
+unused : FAILED
+use_requirements : PASSED
+using : PASSED
+wrapper : PASSED
+wrong_project : PASSED
+zlib : PASSED
+symlink : PASSED
+library_order : FAILED
+gcc_runtime : FAILED
+pch : PASSED
+
+ === Test summary ===
+ PASS: 103
+ FAIL: 23
+
diff --git a/tools/build/test/scanner_causing_rebuilds.py b/tools/build/test/scanner_causing_rebuilds.py
new file mode 100755
index 0000000000..2b5fc501c5
--- /dev/null
+++ b/tools/build/test/scanner_causing_rebuilds.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests for a bug causing Boost Build's scanner targets to be rebuilt
+# unnecessarily in the following scenario:
+# * We want to build target X requiring target A.
+# * We have a multi-file action generating targets A & B.
+# * Our action generates target B with a more recent timestamp than target A.
+# * Target A includes target B.
+# * Target A has a registered include scanner.
+# Now even if our targets A & B have already been built and are up-to-date
+# (e.g. in a state left by a previous successful build run), our scanner target
+# tasked with scanning target A will be marked for updating, thus causing any
+# targets depending on it to be updated/rebuilt as well.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("foo.jam", r"""
+import common ;
+import generators ;
+import modules ;
+import type ;
+import types/cpp ;
+
+type.register FOO : foo ;
+type.register BAR : bar ;
+generators.register-standard foo.foo : FOO : CPP BAR ;
+
+local rule sleep-cmd ( delay )
+{
+ if [ modules.peek : NT ]
+ {
+ return ping 127.0.0.1 -n $(delay) -w 1000 >NUL ;
+ }
+ else
+ {
+ return sleep $(delay) ;
+ }
+}
+
+.touch = [ common.file-creation-command ] ;
+.sleep = [ sleep-cmd 2 ] ;
+
+rule foo ( cpp bar : foo : properties * )
+{
+ # We add the INCLUDE relationship between our generated CPP & BAR targets
+ # explicitly instead of relying on Boost Jam's internal implementation
+ # detail - automatically adding such relationships between all files
+ # generated by the same action. This way our test will continue to function
+ # correctly even if the related Boost Jam implementation detail changes.
+ # Note that adding this relationship by adding an #include directive in our
+ # generated CPP file is not good enough as such a relationship would get
+ # added only after the scanner target's relationships have already been
+ # established and they (as affected by our initial INCLUDE relationship) are
+ # the original reason for this test failing.
+ INCLUDES $(cpp) : $(bar) ;
+}
+
+actions foo
+{
+ $(.touch) "$(<[1])"
+ $(.sleep)
+ $(.touch) "$(<[2])"
+}
+""")
+t.write("x.foo", "")
+t.write("jamroot.jam", """\
+import foo ;
+lib x : x.foo : <link>static ;
+""")
+
+
+# Get everything built once.
+t.run_build_system()
+
+# Simply rerunning the build without touching any of its source target files
+# should not cause any files to be affected.
+t.run_build_system()
+t.expect_nothing_more()
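A compact sketch of why the scenario described at the top of this test used to trigger spurious rebuilds, assuming a naive "any included file newer than the target" check (an illustrative model, not the actual scanner implementation):

# Illustrative model: the multi-file action writes A first and B second, so B's
# timestamp is always newer. If A includes B, a naive out-of-date check keeps
# reporting A (and everything scanned from it) as needing an update.
def needs_update(target_mtime, included_mtimes):
    return any(m > target_mtime for m in included_mtimes)

a_mtime, b_mtime = 100.0, 101.0          # B finished one second after A
print(needs_update(a_mtime, [b_mtime]))  # True on every run, even when nothing changed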
diff --git a/tools/build/test/searched_lib.py b/tools/build/test/searched_lib.py
new file mode 100644
index 0000000000..c9ad5852d0
--- /dev/null
+++ b/tools/build/test/searched_lib.py
@@ -0,0 +1,183 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test usage of searched libs: ones that are found via the -l switch passed to
+# the linker/compiler.
+
+import BoostBuild
+import os
+import string
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# To start with, we have to prepare a library to link with.
+t.write("lib/jamroot.jam", "")
+t.write("lib/jamfile.jam", "lib test_lib : test_lib.cpp ;")
+t.write("lib/test_lib.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+void foo() {}
+""");
+
+t.run_build_system(subdir="lib")
+t.expect_addition("lib/bin/$toolset/debug/test_lib.dll")
+
+
+# Auto adjusting of suffixes does not work, since we need to
+# change dll to lib.
+if ( ( os.name == "nt" ) or os.uname()[0].lower().startswith("cygwin") ) and \
+ ( BoostBuild.get_toolset() != "gcc" ):
+ t.copy("lib/bin/$toolset/debug/test_lib.implib", "lib/test_lib.implib")
+ t.copy("lib/bin/$toolset/debug/test_lib.dll", "lib/test_lib.dll")
+else:
+ t.copy("lib/bin/$toolset/debug/test_lib.dll", "lib/test_lib.dll")
+
+
+# Test that the simplest usage of searched library works.
+t.write("jamroot.jam", "")
+
+t.write("jamfile.jam", """\
+import path ;
+import project ;
+exe main : main.cpp helper ;
+lib helper : helper.cpp test_lib ;
+lib test_lib : : <name>test_lib <search>lib ;
+""")
+
+t.write("main.cpp", """\
+void helper();
+int main() { helper(); }
+""")
+
+t.write("helper.cpp", """\
+void foo();
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+helper() { foo(); }
+""")
+
+t.run_build_system(["-d2"])
+t.expect_addition("bin/$toolset/debug/main.exe")
+t.rm("bin/$toolset/debug/main.exe")
+
+
+# Test that 'unit-test' will correctly add runtime paths to searched libraries.
+t.write("jamfile.jam", """\
+import path ;
+import project ;
+import testing ;
+
+project : requirements <hardcode-dll-paths>false ;
+
+unit-test main : main.cpp helper ;
+lib helper : helper.cpp test_lib ;
+lib test_lib : : <name>test_lib <search>lib ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/main.passed")
+t.rm("bin/$toolset/debug/main.exe")
+
+
+# Now try using searched lib from static lib. Request shared version of searched
+# lib, since we do not have a static one handy.
+t.write("jamfile.jam", """\
+exe main : main.cpp helper ;
+lib helper : helper.cpp test_lib/<link>shared : <link>static ;
+lib test_lib : : <name>test_lib <search>lib ;
+""")
+
+t.run_build_system(stderr=None)
+t.expect_addition("bin/$toolset/debug/main.exe")
+t.expect_addition("bin/$toolset/debug/link-static/helper.lib")
+t.rm("bin/$toolset/debug/main.exe")
+
+# A regression test: a <library> property referring to a searched lib was being
+# mishandled. As a result, we were putting the target name on the command line!
+# Note that
+# g++ ...... <.>z
+# works nicely in some cases, sending the compiler output to file 'z'. This
+# problem shows up when searched libs are in usage requirements.
+t.write("jamfile.jam", "exe main : main.cpp d/d2//a ;")
+t.write("main.cpp", """\
+void foo();
+int main() { foo(); }
+""")
+
+t.write("d/d2/jamfile.jam", """\
+lib test_lib : : <name>test_lib <search>../../lib ;
+lib a : a.cpp : : : <library>test_lib ;
+""")
+
+t.write("d/d2/a.cpp", """\
+#ifdef _WIN32
+__declspec(dllexport) int force_library_creation_for_a;
+#endif
+""")
+
+t.run_build_system()
+
+
+# A regression test. Searched targets were not associated with any properties.
+# For that reason, if the same searched lib was generated with two different
+# properties, an error was reported saying they were actualized to the same Jam
+# target name.
+t.write("jamroot.jam", "")
+
+t.write("a.cpp", "")
+
+# The 'l' library will be built in two variants: 'debug' (directly requested)
+# and 'release' (requested from 'a').
+t.write("jamfile.jam", """\
+exe a : a.cpp l/<variant>release ;
+lib l : : <name>l_d <variant>debug ;
+lib l : : <name>l_r <variant>release ;
+""")
+
+t.run_build_system(["-n"])
+
+
+# A regression test. Two virtual targets with the same properties were created
+# for the 'l' target, which caused an error to be reported when actualizing
+# targets. The final error is correct, but we should not create two duplicated
+# targets. Thanks to Andre Hentz for finding this bug.
+t.write("jamroot.jam", "")
+t.write("a.cpp", "")
+t.write("jamfile.jam", """\
+project a : requirements <runtime-link>static ;
+static-lib a : a.cpp l ;
+lib l : : <name>l_f ;
+""")
+
+t.run_build_system(["-n"])
+
+
+# Make sure plain "lib foobar ; " works.
+t.write("jamfile.jam", """\
+exe a : a.cpp foobar ;
+lib foobar ;
+""")
+
+t.run_build_system(["-n", "-d2"])
+t.fail_test(string.find(t.stdout(), "foobar") == -1)
+
+
+# Make sure plain "lib foo bar ; " works.
+t.write("jamfile.jam", """\
+exe a : a.cpp foo bar ;
+lib foo bar ;
+""")
+
+t.run_build_system(["-n", "-d2"])
+t.fail_test(string.find(t.stdout(), "foo") == -1)
+t.fail_test(string.find(t.stdout(), "bar") == -1)
+
+t.cleanup()
diff --git a/tools/build/test/skipping.py b/tools/build/test/skipping.py
new file mode 100644
index 0000000000..a187a4bedd
--- /dev/null
+++ b/tools/build/test/skipping.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that V2 does not fail gracelessly when any target is skipped.
+
+import BoostBuild
+
+# Create a temporary working directory.
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+t.write("b.cpp", "int main() {}\n")
+t.write("c.cpp", "int main() {}\n")
+t.write("jamroot.jam", """\
+import feature ;
+feature.feature foo : 1 2 : link-incompatible ;
+exe a : a.cpp : <foo>1 ;
+exe b : b.cpp : <foo>2 ;
+exe c : c.cpp ;
+""")
+
+t.run_build_system(["foo=1"])
+
+t.cleanup()
diff --git a/tools/build/test/sort_rule.py b/tools/build/test/sort_rule.py
new file mode 100755
index 0000000000..31db771569
--- /dev/null
+++ b/tools/build/test/sort_rule.py
@@ -0,0 +1,96 @@
+#!/usr/bin/python
+
+# Copyright (C) 2008. Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests for the Boost Jam builtin SORT rule.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# testSORTCorrectness()
+# ---------------------
+#
+###############################################################################
+
+def testSORTCorrectness():
+ """Testing that Boost Jam's SORT builtin rule actually sorts correctly."""
+ t = BoostBuild.Tester(["-ftest.jam", "-d1"], pass_toolset=False,
+ use_test_config=False)
+
+ t.write("test.jam", """\
+NOCARE all ;
+source-data = 1 8 9 2 7 3 4 7 1 27 27 9 98 98 1 1 4 5 6 2 3 4 8 1 -2 -2 0 0 0 ;
+target-data = -2 -2 0 0 0 1 1 1 1 1 2 2 27 27 3 3 4 4 4 5 6 7 7 8 8 9 9 98 98 ;
+ECHO "starting up" ;
+sorted-data = [ SORT $(source-data) ] ;
+ECHO "done" ;
+if $(sorted-data) != $(target-data)
+{
+ ECHO "Source :" $(source-data) ;
+ ECHO "Expected :" $(target-data) ;
+ ECHO "SORT returned:" $(sorted-data) ;
+ EXIT "SORT error" : -2 ;
+}
+""")
+
+ t.run_build_system()
+ t.expect_output_lines("starting up")
+ t.expect_output_lines("done")
+ t.expect_output_lines("SORT error", False)
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# testSORTDuration()
+# ------------------
+#
+###############################################################################
+
+def testSORTDuration():
+ """
+ Regression test making sure Boost Jam's SORT builtin rule does not get
+ quadratic behaviour again in this use case.
+
+ """
+ t = BoostBuild.Tester(["-ftest.jam", "-d1"], pass_toolset=False,
+ use_test_config=False)
+
+ f = open(t.workpath("test.jam"), "w")
+ print >> f, "data = "
+ for i in range(0, 20000):
+ if i % 2:
+ print >> f, '"aaa"'
+ else:
+ print >> f, '"bbb"'
+ print >> f, """;
+
+ECHO "starting up" ;
+sorted = [ SORT $(data) ] ;
+ECHO "done" ;
+NOCARE all ;
+"""
+ f.close()
+
+ t.run_build_system(expected_duration=1)
+ t.expect_output_lines("starting up")
+ t.expect_output_lines("done")
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+testSORTCorrectness()
+testSORTDuration()
diff --git a/tools/build/test/source_locations.py b/tools/build/test/source_locations.py
new file mode 100644
index 0000000000..8123a1864b
--- /dev/null
+++ b/tools/build/test/source_locations.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+# Copyright (C) Craig Rodrigues 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that projects with multiple source-location directories are handled OK.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", """
+path-constant SRC1 : "./src1" ;
+path-constant SRC2 : "./src2" ;
+path-constant SRC3 : "./src3" ;
+path-constant BUILD : "build" ;
+
+project : requirements <include>$(SRC1)/include <threading>multi
+ : build-dir $(BUILD) ;
+
+build-project project1 ;
+""")
+
+t.write("project1/jamfile.jam", """
+project project1 : source-location $(SRC1) $(SRC2) $(SRC3) ;
+SRCS = s1.cpp s2.cpp testfoo.cpp ;
+exe test : $(SRCS) ;
+""")
+
+t.write("src1/s1.cpp", "int main() {}\n")
+t.write("src2/s2.cpp", "void hello() {}\n")
+t.write("src3/testfoo.cpp", "void testfoo() {}\n")
+
+# This file should not be picked up, because "src2" is before "src3" in the list
+# of source directories.
+t.write("src3/s2.cpp", "void hello() {}\n")
+
+t.run_build_system()
+
+t.cleanup()
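A short sketch of the first-match lookup that the shadowing comment above relies on, assuming source-location directories are searched in the order they are listed (illustrative; find_source() is a hypothetical helper, not a Boost.Build API):

import os

# Hypothetical helper illustrating first-match-wins resolution across several
# source-location directories; src3/s2.cpp is shadowed by src2/s2.cpp.
def find_source(name, source_locations):
    for directory in source_locations:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None

# find_source("s2.cpp", ["src1", "src2", "src3"])  ->  "src2/s2.cpp"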
diff --git a/tools/build/test/source_order.py b/tools/build/test/source_order.py
new file mode 100755
index 0000000000..f42f4ccae2
--- /dev/null
+++ b/tools/build/test/source_order.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+
+# Copyright 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests that action sources are not reordered
+
+import BoostBuild
+
+t = BoostBuild.Tester()
+
+t.write("check-order.jam", """\
+import type ;
+import generators ;
+
+type.register ORDER_TEST : order-test ;
+
+SPACE = " " ;
+nl = "\n" ;
+actions check-order
+{
+ echo$(SPACE)$(>[1])>$(<[1])
+ echo$(SPACE)$(>[2-])>>$(<[1])$(nl)
+}
+
+generators.register-composing check-order.check-order : C : ORDER_TEST ;
+""")
+
+# The aliases are necessary for this test, since
+# the targets were sorted by virtual target
+# id, not by file name.
+t.write("jamroot.jam", """\
+import check-order ;
+alias file1 : file1.c ;
+alias file2 : file2.c ;
+alias file3 : file3.c ;
+order-test check : file2 file1 file3 ;
+""")
+
+t.write("file1.c", "")
+t.write("file2.c", "")
+t.write("file3.c", "")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/check.order-test")
+t.expect_content("bin/$toolset/debug/check.order-test", """\
+file2.c
+file1.c
+file3.c
+""", True)
+
+t.cleanup()
diff --git a/tools/build/test/space_in_path.py b/tools/build/test/space_in_path.py
new file mode 100755
index 0000000000..4cd320c5b4
--- /dev/null
+++ b/tools/build/test/space_in_path.py
@@ -0,0 +1,21 @@
+#!/usr/bin/python
+
+# Copyright 2012 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that paths containing spaces are handled correctly by actions.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("has space/jamroot.jam", """\
+import testing ;
+unit-test test : test.cpp ;
+""")
+t.write("has space/test.cpp", "int main() {}\n")
+
+t.run_build_system(["has space"])
+
+t.cleanup()
diff --git a/tools/build/test/stage.py b/tools/build/test/stage.py
new file mode 100644
index 0000000000..4dd4e2f947
--- /dev/null
+++ b/tools/build/test/stage.py
@@ -0,0 +1,207 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test staging.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """\
+lib a : a.cpp ;
+stage dist : a a.h auxilliary/1 ;
+""")
+t.write("a.cpp", """\
+int
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+must_export_something;
+""")
+t.write("a.h", "")
+t.write("auxilliary/1", "")
+
+t.run_build_system()
+t.expect_addition(["dist/a.dll", "dist/a.h", "dist/1"])
+
+
+# Regression test: the following was causing a "duplicate target name" error.
+t.write("jamfile.jam", """\
+project : requirements <hardcode-dll-paths>true ;
+lib a : a.cpp ;
+stage dist : a a.h auxilliary/1 ;
+alias dist-alias : dist ;
+""")
+
+t.run_build_system()
+
+
+# Test the <location> property.
+t.write("jamfile.jam", """\
+lib a : a.cpp ;
+stage dist : a : <variant>debug:<location>ds <variant>release:<location>rs ;
+""")
+
+t.run_build_system()
+t.expect_addition("ds/a.dll")
+
+t.run_build_system(["release"])
+t.expect_addition("rs/a.dll")
+
+
+# Test the <location> property in subprojects. Thanks to Kirill Lapshin for the
+# bug report.
+
+t.write("jamroot.jam", "path-constant DIST : dist ;")
+t.write("jamfile.jam", "build-project d ;")
+t.write("d/jamfile.jam", """\
+exe a : a.cpp ;
+stage dist : a : <location>$(DIST) ;
+""")
+t.write("d/a.cpp", "int main() {}\n")
+
+t.run_build_system()
+t.expect_addition("dist/a.exe")
+
+t.rm("dist")
+
+# Work around a BIG BUG: the response file is not deleted, even if the
+# application *is* deleted. We would try to use the same response file when
+# building from the subdir, with very bad results.
+t.rm("d/bin")
+t.run_build_system(subdir="d")
+t.expect_addition("dist/a.exe")
+
+
+# Check that 'stage' does not incorrectly reset target suffixes.
+t.write("a.cpp", "int main() {}\n")
+t.write("jamroot.jam", """\
+import type ;
+type.register MYEXE : : EXE ;
+type.set-generated-target-suffix MYEXE : <optimization>off : myexe ;
+""")
+
+# Since <optimization>off is in properties when 'a' is built and staged, its
+# suffix should be "myexe".
+t.write("jamfile.jam", """\
+stage dist : a ;
+myexe a : a.cpp ;
+""")
+
+t.run_build_system()
+t.expect_addition("dist/a.myexe")
+
+# Test 'stage's ability to traverse dependencies.
+t.write("a.cpp", "int main() {}\n")
+t.write("l.cpp", """\
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+t.write("jamfile.jam", """\
+lib l : l.cpp ;
+exe a : a.cpp l ;
+stage dist : a : <install-dependencies>on <install-type>EXE <install-type>LIB ;
+""")
+t.write("jamroot.jam", "")
+t.rm("dist")
+
+t.run_build_system()
+t.expect_addition("dist/a.exe")
+t.expect_addition("dist/l.dll")
+
+# Check that <use> properties are ignored when traversing targets for staging.
+t.copy("l.cpp", "l2.cpp")
+t.copy("l.cpp", "l3.cpp")
+t.write("jamfile.jam", """\
+lib l2 : l2.cpp ;
+lib l3 : l3.cpp ;
+lib l : l.cpp : <use>l2 <dependency>l3 ;
+exe a : a.cpp l ;
+stage dist : a : <install-dependencies>on <install-type>EXE <install-type>LIB ;
+""")
+t.rm("dist")
+
+t.run_build_system()
+t.expect_addition("dist/l3.dll")
+t.expect_nothing("dist/l2.dll")
+
+# Check if <dependency> on 'stage' works.
+t.rm(".")
+t.write("jamroot.jam", """\
+stage a1 : a1.txt : <location>dist ;
+stage a2 : a2.txt : <location>dist <dependency>a1 ;
+""")
+t.write("a1.txt", "")
+t.write("a2.txt", "")
+t.run_build_system(["a2"])
+t.expect_addition(["dist/a1.txt", "dist/a2.txt"])
+
+# Regression test: check that <location>. works.
+t.rm(".")
+t.write("jamroot.jam", "stage a1 : d/a1.txt : <location>. ;")
+t.write("d/a1.txt", "")
+
+t.run_build_system()
+t.expect_addition("a1.txt")
+
+# Test that relative paths of sources can be preserved.
+t.rm(".")
+t.write("jamroot.jam", "install dist : a/b/c.h : <install-source-root>. ;")
+t.write("a/b/c.h", "")
+
+t.run_build_system()
+t.expect_addition("dist/a/b/c.h")
+
+t.write("jamroot.jam", "install dist : a/b/c.h : <install-source-root>a ;")
+t.write("a/b/c.h", "")
+
+t.run_build_system()
+t.expect_addition("dist/b/c.h")
+
+t.rm(".")
+t.write("build/jamroot.jam", """\
+install dist : ../a/b/c.h : <location>../dist <install-source-root>../a ;
+""")
+t.write("a/b/c.h", "")
+
+t.run_build_system(subdir="build")
+t.expect_addition("dist/b/c.h")
+
+t.write("jamroot.jam", "install dist2 : a/b/c.h : <install-source-root>a ;")
+t.write("a/b/c.h", "")
+t.write("sub/jamfile.jam", "alias h : ..//dist2 ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("dist2/b/c.h")
+
+# Test that when installing .cpp files, we do not scan include dependencies.
+t.rm(".")
+t.write("jamroot.jam", "install dist : a.cpp ;")
+t.write("a.cpp", '#include "a.h"')
+t.write("a.h", "")
+
+t.run_build_system()
+t.expect_addition("dist/a.cpp")
+
+t.touch("a.h")
+
+t.run_build_system()
+t.expect_nothing("dist/a.cpp")
+
+# Test that the <name> property works when there is just one file in sources.
+t.rm(".")
+t.write("jamroot.jam", "install dist : a.cpp : <name>b.cpp ;")
+t.write("a.cpp", "test file")
+
+t.run_build_system()
+t.expect_addition("dist/b.cpp")
+
+t.cleanup()
diff --git a/tools/build/test/standalone.py b/tools/build/test/standalone.py
new file mode 100644
index 0000000000..6d9e9e8622
--- /dev/null
+++ b/tools/build/test/standalone.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# Regression test: standalone projects were not able to refer to targets
+# declared within themselves.
+
+t.write("a.cpp", "int main() {}\n")
+t.write("jamroot.jam", "import standalone ;")
+t.write("standalone.jam", """\
+import alias ;
+import project ;
+
+project.initialize $(__name__) ;
+project standalone ;
+
+local pwd = [ PWD ] ;
+
+alias x : $(pwd)/../a.cpp ;
+alias runtime : x ;
+""")
+
+t.write("standalone.py", """\
+from b2.manager import get_manager
+
+# FIXME: this is ugly as death
+get_manager().projects().initialize(__name__)
+
+import os
+
+# This use of list as parameter is also ugly.
+project(['standalone'])
+
+pwd = os.getcwd()
+alias('x', [os.path.join(pwd, '../a.cpp')])
+alias('runtime', ['x'])
+""")
+
+
+t.write("sub/jamfile.jam", "stage bin : /standalone//runtime ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("sub/bin/a.cpp")
+
+t.cleanup()
diff --git a/tools/build/v2/test/startup/boost-root/boost-build.jam b/tools/build/test/startup/boost-root/boost-build.jam
index 098889f7b7..098889f7b7 100644
--- a/tools/build/v2/test/startup/boost-root/boost-build.jam
+++ b/tools/build/test/startup/boost-root/boost-build.jam
diff --git a/tools/build/v2/test/startup/boost-root/build/boost-build.jam b/tools/build/test/startup/boost-root/build/boost-build.jam
index 610ec79eeb..610ec79eeb 100644
--- a/tools/build/v2/test/startup/boost-root/build/boost-build.jam
+++ b/tools/build/test/startup/boost-root/build/boost-build.jam
diff --git a/tools/build/v2/test/startup/boost-root/build/bootstrap.jam b/tools/build/test/startup/boost-root/build/bootstrap.jam
index 2ee3507c30..2ee3507c30 100644
--- a/tools/build/v2/test/startup/boost-root/build/bootstrap.jam
+++ b/tools/build/test/startup/boost-root/build/bootstrap.jam
diff --git a/tools/build/v2/test/startup/bootstrap-env/boost-build.jam b/tools/build/test/startup/bootstrap-env/boost-build.jam
index 67a285e7cc..67a285e7cc 100644
--- a/tools/build/v2/test/startup/bootstrap-env/boost-build.jam
+++ b/tools/build/test/startup/bootstrap-env/boost-build.jam
diff --git a/tools/build/v2/test/startup/bootstrap-explicit/boost-build.jam b/tools/build/test/startup/bootstrap-explicit/boost-build.jam
index 27d9108b77..27d9108b77 100644
--- a/tools/build/v2/test/startup/bootstrap-explicit/boost-build.jam
+++ b/tools/build/test/startup/bootstrap-explicit/boost-build.jam
diff --git a/tools/build/v2/test/startup/bootstrap-implicit/readme.txt b/tools/build/test/startup/bootstrap-implicit/readme.txt
index 0278716e5b..0278716e5b 100644
--- a/tools/build/v2/test/startup/bootstrap-implicit/readme.txt
+++ b/tools/build/test/startup/bootstrap-implicit/readme.txt
diff --git a/tools/build/v2/test/startup/no-bootstrap1/boost-build.jam b/tools/build/test/startup/no-bootstrap1/boost-build.jam
index b1b4dc696e..b1b4dc696e 100644
--- a/tools/build/v2/test/startup/no-bootstrap1/boost-build.jam
+++ b/tools/build/test/startup/no-bootstrap1/boost-build.jam
diff --git a/tools/build/v2/test/startup/no-bootstrap1/subdir/readme.txt b/tools/build/test/startup/no-bootstrap1/subdir/readme.txt
index 00f428d443..00f428d443 100644
--- a/tools/build/v2/test/startup/no-bootstrap1/subdir/readme.txt
+++ b/tools/build/test/startup/no-bootstrap1/subdir/readme.txt
diff --git a/tools/build/v2/test/startup/no-bootstrap2/boost-build.jam b/tools/build/test/startup/no-bootstrap2/boost-build.jam
index 505dcd7759..505dcd7759 100644
--- a/tools/build/v2/test/startup/no-bootstrap2/boost-build.jam
+++ b/tools/build/test/startup/no-bootstrap2/boost-build.jam
diff --git a/tools/build/v2/test/startup/no-bootstrap3/boost-build.jam b/tools/build/test/startup/no-bootstrap3/boost-build.jam
index 252a3993ca..252a3993ca 100644
--- a/tools/build/v2/test/startup/no-bootstrap3/boost-build.jam
+++ b/tools/build/test/startup/no-bootstrap3/boost-build.jam
diff --git a/tools/build/test/startup_v2.py b/tools/build/test/startup_v2.py
new file mode 100644
index 0000000000..2dd867a1da
--- /dev/null
+++ b/tools/build/test/startup_v2.py
@@ -0,0 +1,94 @@
+#!/usr/bin/python
+
+# Copyright 2002 Dave Abrahams
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import os.path
+import re
+
+
+def check_for_existing_boost_build_jam(t):
+ """
+ This test depends on no boost-build.jam file existing in any of the
+ folders along the current folder's path. If it does exist, not only would
+ this test fail but it could point to a completely wrong Boost Build
+ installation, thus causing headaches when attempting to diagnose the
+ problem. That is why we explicitly check for this scenario.
+
+ """
+ problem = find_up_to_root(t.workdir, "boost-build.jam")
+ if problem:
+ BoostBuild.annotation("misconfiguration", """\
+This test expects to be run from a folder with no 'boost-build.jam' file in any
+of the folders along its path.
+
+Working folder:
+ '%s'
+
+Problematic boost-build.jam found at:
+ '%s'
+
+Please remove this file or change the test's working folder and rerun the test.
+""" % (t.workdir, problem))
+ t.fail_test(1, dump_stdio=False, dump_stack=False)
+
+
+def find_up_to_root(folder, name):
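+    """
+    Walk from 'folder' up to the file system root and return the path of the
+    first file named 'name' found along the way, or None if there is none.
+
+    """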
+ last = ""
+ while last != folder:
+ candidate = os.path.join(folder, name)
+ if os.path.exists(candidate):
+ return candidate
+ last = folder
+ folder = os.path.dirname(folder)
+
+
+def match_re(actual, expected):
+ return re.match(expected, actual, re.DOTALL) != None
+
+
+t = BoostBuild.Tester(match=match_re, boost_build_path="", pass_toolset=0)
+t.set_tree("startup")
+check_for_existing_boost_build_jam(t)
+
+t.run_build_system(status=1, stdout=
+r"""Unable to load Boost\.Build: could not find "boost-build\.jam"
+.*Attempted search from .* up to the root""")
+
+t.run_build_system(status=1, subdir="no-bootstrap1",
+ stdout=r"Unable to load Boost\.Build: could not find build system\."
+ r".*attempted to load the build system by invoking"
+ r".*'boost-build ;'"
+ r'.*but we were unable to find "bootstrap\.jam"')
+
+# Descend to a subdirectory which /does not/ contain a boost-build.jam file,
+# and try again to test the crawl-up behavior.
+t.run_build_system(status=1, subdir=os.path.join("no-bootstrap1", "subdir"),
+ stdout=r"Unable to load Boost\.Build: could not find build system\."
+ r".*attempted to load the build system by invoking"
+ r".*'boost-build ;'"
+ r'.*but we were unable to find "bootstrap\.jam"')
+
+t.run_build_system(status=1, subdir="no-bootstrap2",
+ stdout=r"Unable to load Boost\.Build: could not find build system\."
+ r".*attempted to load the build system by invoking"
+ r".*'boost-build \. ;'"
+ r'.*but we were unable to find "bootstrap\.jam"')
+
+t.run_build_system(status=1, subdir='no-bootstrap3', stdout=
+r"""Unable to load Boost.Build
+.*boost-build\.jam" was found.*
+However, it failed to call the "boost-build" rule""")
+
+# Test bootstrapping based on BOOST_BUILD_PATH.
+t.run_build_system(["-sBOOST_BUILD_PATH=../boost-root/build"],
+ subdir="bootstrap-env", stdout="build system bootstrapped")
+
+# Test bootstrapping based on an explicit path in boost-build.jam.
+t.run_build_system(subdir="bootstrap-explicit",
+ stdout="build system bootstrapped")
+
+t.cleanup()
diff --git a/tools/build/test/static_and_shared_library.py b/tools/build/test/static_and_shared_library.py
new file mode 100755
index 0000000000..ca50e26ad2
--- /dev/null
+++ b/tools/build/test/static_and_shared_library.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Dave Abrahams
+# Copyright 2002, 2003, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+t.write("jamroot.jam", "")
+t.write("lib/c.cpp", "int bar() { return 0; }\n")
+t.write("lib/jamfile.jam", """\
+static-lib auxilliary1 : c.cpp ;
+lib auxilliary2 : c.cpp ;
+""")
+
+def reset():
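+    # Remove the previously built binaries so each link variant starts clean.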
+ t.rm("lib/bin")
+
+t.run_build_system(subdir='lib')
+t.expect_addition("lib/bin/$toolset/debug/" * BoostBuild.List("c.obj "
+ "auxilliary1.lib auxilliary2.dll"))
+t.expect_nothing_more()
+
+reset()
+t.run_build_system(["link=shared"], subdir="lib")
+t.expect_addition("lib/bin/$toolset/debug/" * BoostBuild.List("c.obj "
+ "auxilliary1.lib auxilliary2.dll"))
+t.expect_nothing_more()
+
+reset()
+t.run_build_system(["link=static"], subdir="lib")
+t.expect_addition("lib/bin/$toolset/debug/link-static/" * BoostBuild.List(
+ "c.obj auxilliary1.lib auxilliary2.lib"))
+t.expect_nothing_more()
+
+t.cleanup()
diff --git a/tools/build/v2/test/suffix.py b/tools/build/test/suffix.py
index 386e36a9d5..386e36a9d5 100644
--- a/tools/build/v2/test/suffix.py
+++ b/tools/build/test/suffix.py
diff --git a/tools/build/test/symlink.py b/tools/build/test/symlink.py
new file mode 100644
index 0000000000..a4a7f34353
--- /dev/null
+++ b/tools/build/test/symlink.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the 'symlink' rule.
+
+import os
+import BoostBuild
+
+
+if os.name != 'posix':
+ print "The symlink tests can be run on posix only."
+ import sys
+ sys.exit(1)
+
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "import gcc ;")
+
+t.write("jamfile.jam", """
+exe hello : hello.cpp ;
+symlink hello_release : hello/<variant>release ;
+symlink hello_debug : hello/<variant>debug ;
+symlink links/hello_release : hello/<variant>release ;
+""")
+
+t.write("hello.cpp", """
+int main() {}
+""")
+
+t.run_build_system()
+t.expect_addition([
+ 'hello_debug.exe',
+ 'hello_release.exe',
+ 'links/hello_release.exe'])
+
+t.cleanup()
diff --git a/tools/build/test/tag.py b/tools/build/test/tag.py
new file mode 100644
index 0000000000..09b4308a5a
--- /dev/null
+++ b/tools/build/test/tag.py
@@ -0,0 +1,122 @@
+#!/usr/bin/python
+
+# Copyright (C) 2003. Pedro Ferreira
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+
+###############################################################################
+#
+# test_folder_with_dot_in_name()
+# ------------------------------
+#
+###############################################################################
+
+def test_folder_with_dot_in_name(t):
+ """
+ Regression test: the 'tag' feature did not work in directories that had a
+ dot in their name.
+
+ """
+ t.write("version-1.32.0/jamroot.jam", """\
+project test : requirements <tag>@$(__name__).tag ;
+
+rule tag ( name : type ? : property-set )
+{
+ # Do nothing, just make sure the rule is invoked OK.
+ ECHO The tag rule has been invoked. ;
+}
+exe a : a.cpp ;
+""")
+ t.write("version-1.32.0/a.cpp", "int main() {}\n")
+
+ t.run_build_system(subdir="version-1.32.0")
+ t.expect_addition("version-1.32.0/bin/$toolset/debug/a.exe")
+ t.expect_output_lines("The tag rule has been invoked.")
+
+
+###############################################################################
+#
+# test_tag_property()
+# -------------------
+#
+###############################################################################
+
+def test_tag_property(t):
+ """Basic tag property test."""
+
+ t.write("jamroot.jam", """\
+import virtual-target ;
+
+rule tag ( name : type ? : property-set )
+{
+ local tags ;
+ switch [ $(property-set).get <variant> ]
+ {
+ case debug : tags += d ;
+ case release : tags += r ;
+ }
+ switch [ $(property-set).get <link> ]
+ {
+ case shared : tags += s ;
+ case static : tags += t ;
+ }
+ if $(tags)
+ {
+ return [ virtual-target.add-prefix-and-suffix $(name)_$(tags:J="")
+ : $(type) : $(property-set) ] ;
+ }
+}
+
+# Test both fully-qualified and local name of the rule
+exe a : a.cpp : <tag>@$(__name__).tag ;
+lib b : a.cpp : <tag>@tag ;
+stage c : a ;
+""")
+
+ t.write("a.cpp", """\
+int main() {}
+#ifdef _MSC_VER
+__declspec (dllexport) void x () {}
+#endif
+""")
+
+ file_list = (
+ BoostBuild.List("bin/$toolset/debug/a_ds.exe") +
+ BoostBuild.List("bin/$toolset/debug/b_ds.dll") +
+ BoostBuild.List("c/a_ds.exe") +
+ BoostBuild.List("bin/$toolset/release/a_rs.exe") +
+ BoostBuild.List("bin/$toolset/release/b_rs.dll") +
+ BoostBuild.List("c/a_rs.exe") +
+ BoostBuild.List("bin/$toolset/debug/link-static/a_dt.exe") +
+ BoostBuild.List("bin/$toolset/debug/link-static/b_dt.lib") +
+ BoostBuild.List("c/a_dt.exe") +
+ BoostBuild.List("bin/$toolset/release/link-static/a_rt.exe") +
+ BoostBuild.List("bin/$toolset/release/link-static/b_rt.lib") +
+ BoostBuild.List("c/a_rt.exe"))
+
+ variants = ["debug", "release", "link=static,shared"]
+
+ t.run_build_system(variants)
+ t.expect_addition(file_list)
+
+ t.run_build_system(variants + ["clean"])
+ t.expect_removal(file_list)
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+t = BoostBuild.Tester(use_test_config=False)
+
+test_tag_property(t)
+test_folder_with_dot_in_name(t)
+
+t.cleanup()
diff --git a/tools/build/v2/test/template.py b/tools/build/test/template.py
index 1fbef07b8c..1fbef07b8c 100644
--- a/tools/build/v2/test/template.py
+++ b/tools/build/test/template.py
diff --git a/tools/build/v2/test/test-config-example.jam b/tools/build/test/test-config-example.jam
index 6cb813fa0d..6cb813fa0d 100644
--- a/tools/build/v2/test/test-config-example.jam
+++ b/tools/build/test/test-config-example.jam
diff --git a/tools/build/v2/test/test.jam b/tools/build/test/test.jam
index 1ae1a2059e..1ae1a2059e 100644
--- a/tools/build/v2/test/test.jam
+++ b/tools/build/test/test.jam
diff --git a/tools/build/v2/test/test1.py b/tools/build/test/test1.py
index 05b3966483..05b3966483 100644
--- a/tools/build/v2/test/test1.py
+++ b/tools/build/test/test1.py
diff --git a/tools/build/v2/test/test2.py b/tools/build/test/test2.py
index 938b36545a..938b36545a 100644
--- a/tools/build/v2/test/test2.py
+++ b/tools/build/test/test2.py
diff --git a/tools/build/v2/test/test2/foo.cpp b/tools/build/test/test2/foo.cpp
index 135fa90f6d..135fa90f6d 100644
--- a/tools/build/v2/test/test2/foo.cpp
+++ b/tools/build/test/test2/foo.cpp
diff --git a/tools/build/v2/test/test2/jamroot.jam b/tools/build/test/test2/jamroot.jam
index 4fb3f28865..4fb3f28865 100644
--- a/tools/build/v2/test/test2/jamroot.jam
+++ b/tools/build/test/test2/jamroot.jam
diff --git a/tools/build/test/test_all.py b/tools/build/test/test_all.py
new file mode 100644
index 0000000000..fd72b66513
--- /dev/null
+++ b/tools/build/test/test_all.py
@@ -0,0 +1,307 @@
+#!/usr/bin/python
+
+# Copyright 2002-2005 Dave Abrahams.
+# Copyright 2002-2006 Vladimir Prus.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+import os
+import os.path
+import sys
+
+xml = "--xml" in sys.argv
+toolset = BoostBuild.get_toolset()
+
+
+# Clear environment for testing.
+#
+for s in ("BOOST_ROOT", "BOOST_BUILD_PATH", "JAM_TOOLSET", "BCCROOT",
+ "MSVCDir", "MSVC", "MSVCNT", "MINGW", "watcom"):
+ try:
+ del os.environ[s]
+ except:
+ pass
+
+BoostBuild.set_defer_annotations(1)
+
+
+def run_tests(critical_tests, other_tests):
+ """
+ Runs first the critical_tests and then the other_tests.
+
+ Writes the name of the first failed test to test_results.txt. Critical
+ tests are run in the specified order, other tests are run starting with the
+ one that failed first on the last test run.
+
+ """
+ last_failed = last_failed_test()
+ other_tests = reorder_tests(other_tests, last_failed)
+ all_tests = critical_tests + other_tests
+
+ invocation_dir = os.getcwd()
+ max_test_name_len = 10
+ for x in all_tests:
+ if len(x) > max_test_name_len:
+ max_test_name_len = len(x)
+
+ pass_count = 0
+ failures_count = 0
+
+ for test in all_tests:
+ if not xml:
+ print("%%-%ds :" % max_test_name_len % test),
+
+ passed = 0
+ try:
+ __import__(test)
+ passed = 1
+ except KeyboardInterrupt:
+ """This allows us to abort the testing manually using Ctrl-C."""
+ raise
+ except SystemExit:
+ """This is the regular way our test scripts are supposed to report
+ test failures."""
+ except:
+ exc_type, exc_value, exc_tb = sys.exc_info()
+ try:
+ BoostBuild.annotation("failure - unhandled exception", "%s - "
+ "%s" % (exc_type.__name__, exc_value))
+ BoostBuild.annotate_stack_trace(exc_tb)
+ finally:
+ # Explicitly clear a hard-to-garbage-collect traceback
+ # related reference cycle as per documented sys.exc_info()
+ # usage suggestion.
+ del exc_tb
+
+ if passed:
+ pass_count += 1
+ else:
+ failures_count += 1
+ if failures_count == 1:
+ f = open(os.path.join(invocation_dir, "test_results.txt"), "w")
+ try:
+ f.write(test)
+ finally:
+ f.close()
+
+ # Restore the current directory, which might have been changed by the
+ # test.
+ os.chdir(invocation_dir)
+
+ if not xml:
+ if passed:
+ print("PASSED")
+ else:
+ print("FAILED")
+ else:
+ rs = "succeed"
+ if not passed:
+ rs = "fail"
+ print """
+<test-log library="build" test-name="%s" test-type="run" toolset="%s" test-program="%s" target-directory="%s">
+<run result="%s">""" % (test, toolset, "tools/build/v2/test/" + test + ".py",
+ "boost/bin.v2/boost.build.tests/" + toolset + "/" + test, rs)
+ if not passed:
+ BoostBuild.flush_annotations(1)
+ print """
+</run>
+</test-log>
+"""
+ sys.stdout.flush() # Makes testing under emacs more entertaining.
+ BoostBuild.clear_annotations()
+
+ # Erase the file on success.
+ if failures_count == 0:
+ open("test_results.txt", "w").close()
+
+ if not xml:
+ print """
+ === Test summary ===
+ PASS: %d
+ FAIL: %d
+ """ % (pass_count, failures_count)
+
+
+def last_failed_test():
+ "Returns the name of the last failed test or None."
+ try:
+ f = open("test_results.txt")
+ try:
+ return f.read().strip()
+ finally:
+ f.close()
+ except Exception:
+ return None
+
+
+def reorder_tests(tests, first_test):
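+    """
+    Move 'first_test' to the front of the 'tests' list, if present, so that
+    the test that failed on the previous run gets run first.
+
+    """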
+ try:
+ n = tests.index(first_test)
+ return [first_test] + tests[:n] + tests[n + 1:]
+ except ValueError:
+ return tests
+
+
+critical_tests = ["unit_tests", "module_actions", "startup_v2", "core_d12",
+ "core_typecheck", "core_delete_module", "core_language", "core_arguments",
+ "core_varnames", "core_import_module"]
+
+# We want to collect debug information about the test site before running any
+# of the tests, but only when not running the tests interactively. Then the
+# user can easily run this always-failing test directly to see what it would
+# have returned and there is no need to have it spoil a possible 'all tests
+# passed' result.
+if xml:
+ critical_tests.insert(0, "collect_debug_info")
+
+tests = ["absolute_sources",
+ "alias",
+ "alternatives",
+ "bad_dirname",
+ "build_dir",
+ "build_file",
+ "build_no",
+ "builtin_echo",
+ "builtin_exit",
+ "builtin_glob",
+ "builtin_split_by_characters",
+ "c_file",
+ "chain",
+ "clean",
+ "composite",
+ "conditionals",
+ "conditionals2",
+ "conditionals3",
+ "conditionals_multiple",
+ "configuration",
+ "copy_time",
+ "core_action_output",
+ "core_action_status",
+ "core_actions_quietly",
+ "core_at_file",
+ "core_bindrule",
+ "core_jamshell",
+ "core_multifile_actions",
+ "core_nt_cmd_line",
+ "core_option_d2",
+ "core_option_l",
+ "core_option_n",
+ "core_parallel_actions",
+ "core_parallel_multifile_actions_1",
+ "core_parallel_multifile_actions_2",
+ "core_source_line_tracking",
+ "core_update_now",
+ "core_variables_in_actions",
+ "custom_generator",
+ "default_build",
+ "default_features",
+# This test is known to be broken itself.
+# "default_toolset",
+ "dependency_property",
+ "dependency_test",
+ "direct_request_test",
+ "disambiguation",
+ "dll_path",
+ "double_loading",
+ "duplicate",
+ "example_libraries",
+ "example_make",
+ "exit_status",
+ "expansion",
+ "explicit",
+ "feature_cxxflags",
+ "free_features_request",
+ "generator_selection",
+ "generators_test",
+ "implicit_dependency",
+ "indirect_conditional",
+ "inherit_toolset",
+ "inherited_dependency",
+ "inline",
+ "lib_source_property",
+ "library_chain",
+ "library_property",
+ "link",
+ "load_order",
+ "loop",
+ "make_rule",
+ "message",
+ "ndebug",
+ "no_type",
+ "notfile",
+ "ordered_include",
+ "out_of_tree",
+ "path_features",
+ "prebuilt",
+ "print",
+ "project_dependencies",
+ "project_glob",
+ "project_id",
+ "project_root_constants",
+ "project_root_rule",
+ "project_test3",
+ "project_test4",
+ "property_expansion",
+ "rebuilds",
+ "regression",
+ "relative_sources",
+ "remove_requirement",
+ "rescan_header",
+ "resolution",
+ "scanner_causing_rebuilds",
+ "searched_lib",
+ "skipping",
+ "sort_rule",
+ "source_locations",
+ "source_order",
+ "space_in_path",
+ "stage",
+ "standalone",
+ "static_and_shared_library",
+ "suffix",
+ "tag",
+ "test_result_dumping",
+ "test_rc",
+ "testing_support",
+ "timedata",
+ "toolset_requirements",
+ "unit_test",
+ "unused",
+ "use_requirements",
+ "using",
+ "wrapper",
+ "wrong_project",
+ "zlib"
+ ]
+
+if os.name == "posix":
+ tests.append("symlink")
+ # On Windows, library order is not important, so skip this test. Besides,
+ # it fails ;-). Further, the test relies on the fact that on Linux, one can
+ # build a shared library with unresolved symbols. This is not true on
+ # Windows, even with cygwin gcc.
+ if "CYGWIN" not in os.uname()[0]:
+ tests.append("library_order")
+
+if toolset.startswith("gcc"):
+ tests.append("gcc_runtime")
+
+if toolset.startswith("gcc") or toolset.startswith("msvc"):
+ tests.append("pch")
+
+if "--extras" in sys.argv:
+ tests.append("boostbook")
+ tests.append("qt4")
+ tests.append("qt5")
+ tests.append("example_qt4")
+    # Requires ./whatever.py to work, so is not guaranteed to work everywhere.
+ tests.append("example_customization")
+ # Requires gettext tools.
+ tests.append("example_gettext")
+elif not xml:
+ print("Note: skipping extra tests")
+
+run_tests(critical_tests, tests)
diff --git a/tools/build/test/test_rc.py b/tools/build/test/test_rc.py
new file mode 100755
index 0000000000..510b47275b
--- /dev/null
+++ b/tools/build/test/test_rc.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests rc toolset behaviour.
+
+import BoostBuild
+
+
+def included_resource_newer_than_rc_script():
+ """
+    When a .rc script file includes another resource file, the included
+    resource file being newer than the .rc script file should not cause the
+    .rc script file to be considered out of date and force all of its
+    dependents to rebuild.
+
+ """
+ toolsetName = "__myDummyResourceCompilerToolset__"
+
+ # Used options rationale:
+ #
+ # -d4 & --debug-configuration
+ # Display additional information in case of test failure. In the past
+ # we have had testing system issues causing this test to fail
+ # sporadically for which -d+3 output had been instrumental in getting to
+ # the root cause (a touched file's timestamp was not as new as it should
+ # have been).
+ #
+ # --ignore-site-config --user-config=
+    #     Disable reading any external Boost Build configuration. This test is
+    #     self-sufficient so these options protect it from being adversely
+    #     affected by any local (mis)configuration.
+ t = BoostBuild.Tester(["-d4", "--debug-configuration",
+ "--ignore-site-config", "--user-config=", "toolset=%s" % toolsetName],
+ pass_d0=False, pass_toolset=False, use_test_config=False,
+ translate_suffixes=False)
+
+    # Prepare a dummy toolset so we do not get errors in case the default one
+    # is not found, and so that we can test rc.jam functionality without
+    # having to depend on the externally specified toolset supporting it
+    # exactly the way this test requires. E.g. the gcc toolset, under some
+    # circumstances, uses a quiet action for generating its null RC targets.
+ t.write(toolsetName + ".jam", """\
+import feature ;
+import rc ;
+import type ;
+local toolset-name = "%s" ;
+feature.extend toolset : $(toolset-name) ;
+rule init ( ) { }
+rc.configure dummy-rc-command : <toolset>$(toolset-name) : <rc-type>dummy ;
+module rc
+{
+ rule compile.resource.dummy ( targets * : sources * : properties * )
+ {
+ import common ;
+ .TOUCH on $(targets) = [ common.file-touch-command ] ;
+ }
+ actions compile.resource.dummy { $(.TOUCH) "$(<)" }
+}
+# Make OBJ files generated by our toolset use the "obj" suffix on all
+# platforms. We need to do this explicitly for <target-os> windows & cygwin to
+# override the default OBJ type configuration (otherwise we would get
+# 'ambiguous key' errors on those platforms).
+local rule set-generated-obj-suffix ( target-os ? )
+{
+ type.set-generated-target-suffix OBJ : <toolset>$(toolset-name)
+ <target-os>$(target-os) : obj ;
+}
+set-generated-obj-suffix ;
+set-generated-obj-suffix windows ;
+set-generated-obj-suffix cygwin ;
+""" % toolsetName)
+
+ # Prepare project source files.
+ t.write("jamroot.jam", """\
+ECHO {{{ [ modules.peek : XXX ] [ modules.peek : NOEXEC ] }}} ;
+obj xxx : xxx.rc ;
+""")
+ t.write("xxx.rc", '1 MESSAGETABLE "xxx.bin"\n')
+ t.write("xxx.bin", "foo")
+
+ def test1(n, expect, noexec=False):
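+        # Run the build with a varying XXX value and check whether the dummy
+        # resource compile action gets run and the object file added/touched
+        # as expected. With noexec=True only a dry run (-n) is performed.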
+ params = ["-sXXX=%d" % n]
+ if noexec:
+ params.append("-n")
+ params.append("-sNOEXEC=NOEXEC")
+ t.run_build_system(params)
+ t.expect_output_lines("*NOEXEC*", noexec)
+ obj_file = "xxx_res.obj"
+ t.expect_output_lines("compile.resource.dummy *%s" % obj_file, expect)
+ if expect and not noexec:
+ expect("bin/%s/debug/%s" % (toolsetName, obj_file))
+ t.expect_nothing_more()
+
+ def test(n, expect):
+ test1(n, expect, noexec=True)
+ test1(n, expect)
+
+ test(1, t.expect_addition)
+ test(2, None)
+ t.touch("xxx.bin")
+ test(3, t.expect_touch)
+ test(4, None)
+
+ t.cleanup()
+
+
+included_resource_newer_than_rc_script()
diff --git a/tools/build/test/test_result_dumping.py b/tools/build/test/test_result_dumping.py
new file mode 100755
index 0000000000..bdfc41ee22
--- /dev/null
+++ b/tools/build/test/test_result_dumping.py
@@ -0,0 +1,33 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests dumping Boost Build based testing results.
+
+import BoostBuild
+
+
+###############################################################################
+#
+# Test that dumping Boost Build based testing results works in case test code
+# is not located in a folder under the Jamroot folder.
+#
+###############################################################################
+
+t = BoostBuild.Tester(["--dump-tests"], use_test_config=False)
+
+t.write("TestBuild/jamroot.jam", """\
+import testing ;
+test-suite testit : [ run ../TestSource/test.cpp ] ;
+""")
+
+t.write("TestSource/test.cpp", "int main() {}\n")
+
+t.run_build_system(subdir="TestBuild")
+t.expect_output_lines('boost-test(RUN) "*/TestBuild/test" : '
+ '"../TestSource/test.cpp"')
+
+t.cleanup()
diff --git a/tools/build/v2/test/test_system.html b/tools/build/test/test_system.html
index e425ee1030..e425ee1030 100644
--- a/tools/build/v2/test/test_system.html
+++ b/tools/build/test/test_system.html
diff --git a/tools/build/test/testing_support.py b/tools/build/test/testing_support.py
new file mode 100755
index 0000000000..01a7c48268
--- /dev/null
+++ b/tools/build/test/testing_support.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+
+# Copyright 2008 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests different aspects of Boost Build's automated testing support.
+
+import BoostBuild
+
+
+################################################################################
+#
+# test_files_with_spaces_in_their_name()
+# --------------------------------------
+#
+################################################################################
+
+def test_files_with_spaces_in_their_name():
+ """Regression test making sure test result files get created correctly when
+ testing files with spaces in their name.
+ """
+
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("valid source.cpp", "int main() {}\n");
+
+ t.write("invalid source.cpp", "this is not valid source code");
+
+ t.write("jamroot.jam", """
+import testing ;
+testing.compile "valid source.cpp" ;
+testing.compile-fail "invalid source.cpp" ;
+""")
+
+ t.run_build_system(status=0)
+ t.expect_addition("bin/invalid source.test/$toolset/debug/invalid source.obj")
+ t.expect_addition("bin/invalid source.test/$toolset/debug/invalid source.test")
+ t.expect_addition("bin/valid source.test/$toolset/debug/valid source.obj")
+ t.expect_addition("bin/valid source.test/$toolset/debug/valid source.test")
+
+ t.expect_content("bin/valid source.test/$toolset/debug/valid source.test", \
+ "passed" )
+ t.expect_content( \
+ "bin/invalid source.test/$toolset/debug/invalid source.test", \
+ "passed" )
+ t.expect_content( \
+ "bin/invalid source.test/$toolset/debug/invalid source.obj", \
+ "failed as expected" )
+
+ t.cleanup()
+
+
+################################################################################
+#
+# main()
+# ------
+#
+################################################################################
+
+test_files_with_spaces_in_their_name()
diff --git a/tools/build/test/timedata.py b/tools/build/test/timedata.py
new file mode 100644
index 0000000000..20a95c2a22
--- /dev/null
+++ b/tools/build/test/timedata.py
@@ -0,0 +1,175 @@
+#!/usr/bin/python
+
+# Copyright 2005 David Abrahams
+# Copyright 2008, 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Tests the build step timing facilities.
+
+# TODO: Missing tests:
+# 1. 'time' target with a source target representing more than one virtual
+# target. This happens in practice, e.g. when using the time rule on a msvc
+# exe target whose generator actually constructs an EXE and a PDB target.
+# When this is done - only the main virtual target's constructing action
+# should be timed.
+# 2. 'time' target with a source target representing a virtual target that
+# actually gets built by multiple actions run in sequence. In that case a
+# separate timing result should be reported for each of those actions. This
+# happens in practice, e.g. when using the time rule on a msvc exe target
+# which first gets created as a result of some link action and then its
+# manifest gets embedded inside it as a resource using a separate action
+# (assuming an appropriate property has been set for this target - see the
+# msvc module for details).
+
+import BoostBuild
+import re
+
+
+###############################################################################
+#
+# basic_jam_action_test()
+# -----------------------
+#
+###############################################################################
+
+def basic_jam_action_test():
+ """Tests basic Jam action timing support."""
+
+ t = BoostBuild.Tester(pass_toolset=0)
+
+ t.write("file.jam", """\
+rule time
+{
+ DEPENDS $(<) : $(>) ;
+ __TIMING_RULE__ on $(>) = record_time $(<) ;
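+    # The engine calls the rule named in __TIMING_RULE__ with the timing data
+    # for the source's updating action; 'record_time' stores it on the target.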
+ DEPENDS all : $(<) ;
+}
+
+actions time
+{
+ echo $(>) user: $(__USER_TIME__) system: $(__SYSTEM_TIME__)
+ echo timed from $(>) >> $(<)
+}
+
+rule record_time ( target : source : start end user system )
+{
+ __USER_TIME__ on $(target) = $(user) ;
+ __SYSTEM_TIME__ on $(target) = $(system) ;
+}
+
+rule make
+{
+ DEPENDS $(<) : $(>) ;
+}
+
+actions make
+{
+ echo made from $(>) >> $(<)
+}
+
+time foo : bar ;
+make bar : baz ;
+""")
+
+ t.write("baz", "nothing")
+
+ expected_output = """\
+\.\.\.found 4 targets\.\.\.
+\.\.\.updating 2 targets\.\.\.
+make bar
+time foo
+bar +user: [0-9\.]+ +system: +[0-9\.]+ *
+\.\.\.updated 2 targets\.\.\.$
+"""
+
+ t.run_build_system(["-ffile.jam", "-d+1"], stdout=expected_output,
+ match=lambda actual, expected: re.search(expected, actual, re.DOTALL))
+ t.expect_addition("foo")
+ t.expect_addition("bar")
+ t.expect_nothing_more()
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# boost_build_testing_support_timing_rule():
+# ------------------------------------------
+#
+###############################################################################
+
+def boost_build_testing_support_timing_rule():
+ """
+ Tests the target build timing rule provided by the Boost Build testing
+ support system.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("aaa.cpp", "int main() {}\n")
+
+ t.write("jamroot.jam", """\
+import testing ;
+exe my-exe : aaa.cpp ;
+time my-time : my-exe ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/aaa.obj")
+ t.expect_addition("bin/$toolset/debug/my-exe.exe")
+ t.expect_addition("bin/$toolset/debug/my-time.time")
+
+ t.expect_content_lines("bin/$toolset/debug/my-time.time",
+ "user: *[0-9] seconds")
+ t.expect_content_lines("bin/$toolset/debug/my-time.time",
+ "system: *[0-9] seconds")
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# boost_build_testing_support_timing_rule_with_spaces_in_names()
+# --------------------------------------------------------------
+#
+###############################################################################
+
+def boost_build_testing_support_timing_rule_with_spaces_in_names():
+ """
+ Tests the target build timing rule provided by the Boost Build testing
+    support system when used with targets containing spaces in their names.
+
+ """
+ t = BoostBuild.Tester(use_test_config=False)
+
+ t.write("aaa bbb.cpp", "int main() {}\n")
+
+ t.write("jamroot.jam", """\
+import testing ;
+exe "my exe" : "aaa bbb.cpp" ;
+time "my time" : "my exe" ;
+""")
+
+ t.run_build_system()
+ t.expect_addition("bin/$toolset/debug/aaa bbb.obj")
+ t.expect_addition("bin/$toolset/debug/my exe.exe")
+ t.expect_addition("bin/$toolset/debug/my time.time")
+
+ t.expect_content_lines("bin/$toolset/debug/my time.time", "user: *")
+ t.expect_content_lines("bin/$toolset/debug/my time.time", "system: *")
+
+ t.cleanup()
+
+
+###############################################################################
+#
+# main()
+# ------
+#
+###############################################################################
+
+basic_jam_action_test()
+boost_build_testing_support_timing_rule()
+boost_build_testing_support_timing_rule_with_spaces_in_names() \ No newline at end of file
diff --git a/tools/build/test/toolset_requirements.py b/tools/build/test/toolset_requirements.py
new file mode 100644
index 0000000000..c9a8fa8ee9
--- /dev/null
+++ b/tools/build/test/toolset_requirements.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+
+# Copyright 2014 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the handling of toolset.add-requirements
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0, ignore_toolset_requirements=False)
+
+t.write('jamroot.jam', '''
+import toolset ;
+import errors ;
+
+rule test-rule ( properties * )
+{
+ return <define>TEST_INDIRECT_CONDITIONAL ;
+}
+
+toolset.add-requirements
+ <define>TEST_MACRO
+ <conditional>@test-rule
+ <link>shared:<define>TEST_CONDITIONAL
+;
+
+rule check-requirements ( target : sources * : properties * )
+{
+ local macros = TEST_MACRO TEST_CONDITIONAL TEST_INDIRECT_CONDITIONAL ;
+ for local m in $(macros)
+ {
+ if ! <define>$(m) in $(properties)
+ {
+ errors.error $(m) not defined ;
+ }
+ }
+}
+make test : : @check-requirements ;
+''')
+
+t.run_build_system()
+
+t.cleanup()
diff --git a/tools/build/test/tree.py b/tools/build/test/tree.py
new file mode 100644
index 0000000000..70c66a3dbb
--- /dev/null
+++ b/tools/build/test/tree.py
@@ -0,0 +1,243 @@
+# Copyright 2003 Dave Abrahams
+# Copyright 2001, 2002 Vladimir Prus
+# Copyright 2012 Jurko Gospodnetic
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+###############################################################################
+#
+# Based in part on an old Subversion tree.py source file (tools for comparing
+# directory trees). See http://subversion.tigris.org for more information.
+#
+# Copyright (c) 2001 Sam Tobin-Hochstadt. All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which you should
+# have received as part of this distribution. The terms are also available at
+# http://subversion.tigris.org/license-1.html. If newer versions of this
+# license are posted there, you may use a newer version instead, at your
+# option.
+#
+###############################################################################
+
+import os
+import os.path
+import stat
+import sys
+
+
+class TreeNode:
+ """
+ Fundamental data type used to build file system tree structures.
+
+ If CHILDREN is None, then the node represents a file. Otherwise, CHILDREN
+ is a list of the nodes representing that directory's children.
+
+ NAME is simply the name of the file or directory. CONTENTS is a string
+ holding the file's contents (if a file).
+
+ """
+
+ def __init__(self, name, children=None, contents=None):
+ assert children is None or contents is None
+ self.name = name
+ self.mtime = 0
+ self.children = children
+ self.contents = contents
+ self.path = name
+
+ def add_child(self, newchild):
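+        """
+        Merge NEWCHILD into this directory node. If a child with the same
+        name already exists, update its contents (for files) or recursively
+        merge its children (for directories); otherwise simply append it.
+
+        """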
+ assert not self.is_file()
+ for a in self.children:
+ if a.name == newchild.name:
+ if newchild.is_file():
+ a.contents = newchild.contents
+ a.path = os.path.join(self.path, newchild.name)
+ else:
+ for i in newchild.children:
+ a.add_child(i)
+ break
+ else:
+ self.children.append(newchild)
+ newchild.path = os.path.join(self.path, newchild.name)
+
+ def get_child(self, name):
+ """
+ If the given TreeNode directory NODE contains a child named NAME,
+ return the child; else, return None.
+
+ """
+ for n in self.children:
+ if n.name == name:
+ return n
+
+ def is_file(self):
+ return self.children is None
+
+ def pprint(self):
+ print(" * Node name: %s" % self.name)
+ print(" Path: %s" % self.path)
+ print(" Contents: %s" % self.contents)
+ if self.is_file():
+ print(" Children: is a file.")
+ else:
+ print(" Children: %d" % len(self.children))
+
+
+class TreeDifference:
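+    """
+    Holds the differences found between two file system trees as four lists
+    of paths: added, removed, modified (changed contents) and touched (only
+    the modification time changed).
+
+    """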
+ def __init__(self):
+ self.added_files = []
+ self.removed_files = []
+ self.modified_files = []
+ self.touched_files = []
+
+ def append(self, other):
+ self.added_files.extend(other.added_files)
+ self.removed_files.extend(other.removed_files)
+ self.modified_files.extend(other.modified_files)
+ self.touched_files.extend(other.touched_files)
+
+ def ignore_directories(self):
+ """Removes directories from our lists of found differences."""
+ not_dir = lambda x : x[-1] != "/"
+ self.added_files = filter(not_dir, self.added_files)
+ self.removed_files = filter(not_dir, self.removed_files)
+ self.modified_files = filter(not_dir, self.modified_files)
+ self.touched_files = filter(not_dir, self.touched_files)
+
+ def pprint(self, file=sys.stdout):
+ file.write("Added files : %s\n" % self.added_files)
+ file.write("Removed files : %s\n" % self.removed_files)
+ file.write("Modified files: %s\n" % self.modified_files)
+ file.write("Touched files : %s\n" % self.touched_files)
+
+ def empty(self):
+ return not (self.added_files or self.removed_files or
+ self.modified_files or self.touched_files)
+
+
+def build_tree(path):
+ """
+ Takes PATH as the folder path, walks the file system below that path, and
+ creates a tree structure based on any files and folders found there.
+ Returns the prepared tree structure plus the maximum file modification
+ timestamp under the given folder.
+
+ """
+ return _handle_dir(os.path.normpath(path))
+
+
+def tree_difference(a, b):
+ """Compare TreeNodes A and B, and create a TreeDifference instance."""
+ return _do_tree_difference(a, b, "", True)
+
+
+def _do_tree_difference(a, b, parent_path, root=False):
+ """Internal recursive worker function for tree_difference()."""
+
+ # We do not want to list root node names.
+ if root:
+ assert not parent_path
+ assert not a.is_file()
+ assert not b.is_file()
+ full_path = ""
+ else:
+ assert a.name == b.name
+ full_path = parent_path + a.name
+ result = TreeDifference()
+
+ # A and B are both files.
+ if a.is_file() and b.is_file():
+ if a.contents != b.contents:
+ result.modified_files.append(full_path)
+ elif a.mtime != b.mtime:
+ result.touched_files.append(full_path)
+ return result
+
+ # Directory converted to file.
+ if not a.is_file() and b.is_file():
+ result.removed_files.extend(_traverse_tree(a, parent_path))
+ result.added_files.append(full_path)
+
+ # File converted to directory.
+ elif a.is_file() and not b.is_file():
+ result.removed_files.append(full_path)
+ result.added_files.extend(_traverse_tree(b, parent_path))
+
+ # A and B are both directories.
+ else:
+ if full_path:
+ full_path += "/"
+ accounted_for = [] # Children present in both trees.
+ for a_child in a.children:
+ b_child = b.get_child(a_child.name)
+ if b_child:
+ accounted_for.append(b_child)
+ result.append(_do_tree_difference(a_child, b_child, full_path))
+ else:
+ result.removed_files.append(full_path + a_child.name)
+ for b_child in b.children:
+ if b_child not in accounted_for:
+ result.added_files.extend(_traverse_tree(b_child, full_path))
+
+ return result
+
+
+def _traverse_tree(t, parent_path):
+ """Returns a list of all names in a tree."""
+ assert not parent_path or parent_path[-1] == "/"
+ full_node_name = parent_path + t.name
+ if t.is_file():
+ result = [full_node_name]
+ else:
+ name_prefix = full_node_name + "/"
+ result = [name_prefix]
+ for i in t.children:
+ result.extend(_traverse_tree(i, name_prefix))
+ return result
+
+
+def _get_text(path):
+ """Return a string with the textual contents of a file at PATH."""
+ fp = open(path, 'r')
+ try:
+ return fp.read()
+ finally:
+ fp.close()
+
+
+def _handle_dir(path):
+ """
+ Main recursive worker function for build_tree(). Returns a newly created
+ tree node representing the given normalized folder path as well as the
+ maximum file/folder modification time detected under the same path.
+
+ """
+ files = []
+ dirs = []
+ node = TreeNode(os.path.basename(path), children=[])
+ max_mtime = node.mtime = os.stat(path).st_mtime
+
+ # List files & folders.
+ for f in os.listdir(path):
+ f = os.path.join(path, f)
+ if os.path.isdir(f):
+ dirs.append(f)
+ elif os.path.isfile(f):
+ files.append(f)
+
+ # Add a child node for each file.
+ for f in files:
+ fcontents = _get_text(f)
+ new_file_node = TreeNode(os.path.basename(f), contents=fcontents)
+ new_file_node.mtime = os.stat(f).st_mtime
+ max_mtime = max(max_mtime, new_file_node.mtime)
+ node.add_child(new_file_node)
+
+ # For each subdir, create a node, walk its tree, add it as a child.
+ for d in dirs:
+ new_dir_node, new_max_mtime = _handle_dir(d)
+ max_mtime = max(max_mtime, new_max_mtime)
+ node.add_child(new_dir_node)
+
+ return node, max_mtime
diff --git a/tools/build/test/unit_test.py b/tools/build/test/unit_test.py
new file mode 100644
index 0000000000..5b2c5dbc9d
--- /dev/null
+++ b/tools/build/test/unit_test.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+
+# Copyright 2003, 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test the unit_test rule.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+# Create the needed files.
+t.write("jamroot.jam", """
+using testing ;
+lib helper : helper.cpp ;
+unit-test test : test.cpp : <library>helper ;
+""")
+
+t.write("test.cpp", """
+void helper();
+int main() { helper(); }
+""")
+
+t.write("helper.cpp", """
+void
+#if defined(_WIN32)
+__declspec(dllexport)
+#endif
+helper() {}
+""")
+
+t.run_build_system(["link=static"])
+t.expect_addition("bin/$toolset/debug/link-static/test.passed")
+
+t.cleanup()
diff --git a/tools/build/test/unit_tests.py b/tools/build/test/unit_tests.py
new file mode 100644
index 0000000000..705764b6bd
--- /dev/null
+++ b/tools/build/test/unit_tests.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+
+# Copyright 2002, 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(pass_toolset=0)
+t.run_build_system(["--debug", "--build-system=test/test"])
+t.cleanup()
diff --git a/tools/build/test/unused.py b/tools/build/test/unused.py
new file mode 100644
index 0000000000..d7595547cb
--- /dev/null
+++ b/tools/build/test/unused.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that unused sources are at least reported.
+
+import BoostBuild
+
+t = BoostBuild.Tester(["-d+2"], use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+t.write("b.cpp", "\n")
+t.write("b.x", "")
+t.write("jamroot.jam", """\
+import "class" : new ;
+import modules ;
+import project ;
+import targets ;
+import type ;
+import virtual-target ;
+
+type.register X : x ;
+
+class test-target-class : basic-target
+{
+ rule construct ( name : source-targets * : property-set )
+ {
+ local result = [ property-set.empty ] ;
+ if ! [ modules.peek : GENERATE_NOTHING ]
+ {
+ result += [ virtual-target.from-file b.x : . : $(self.project) ] ;
+ if ! [ modules.peek : GENERATE_ONLY_UNUSABLE ]
+ {
+ result += [ virtual-target.from-file b.cpp : . : $(self.project)
+ ] ;
+ }
+ }
+ return $(result) ;
+ }
+
+ rule compute-usage-requirements ( rproperties : targets * )
+ {
+ return [ property-set.create <define>FOO ] ;
+ }
+}
+
+rule make-b-main-target
+{
+ local project = [ project.current ] ;
+ targets.main-target-alternative [ new test-target-class b : $(project) ] ;
+}
+
+exe a : a.cpp b c ;
+make-b-main-target ;
+alias c ; # Expands to nothing, intentionally.
+""")
+
+t.run_build_system()
+
+# The second invocation should do nothing, and produce no warning. The previous
+# invocation might have printed executed actions and other things, so it is not
+# easy to check if a warning was issued or not.
+t.run_build_system(stdout="")
+
+t.run_build_system(["-sGENERATE_ONLY_UNUSABLE=1"], stdout="")
+
+# Check that even if main target generates nothing, its usage requirements are
+# still propagated to dependants.
+t.write("a.cpp", """\
+#ifndef FOO
+ #error We refuse to compile without FOO being defined!
+ We_refuse_to_compile_without_FOO_being_defined
+#endif
+int main() {}
+""")
+t.run_build_system(["-sGENERATE_NOTHING=1"])
+
+t.cleanup()
diff --git a/tools/build/test/use_requirements.py b/tools/build/test/use_requirements.py
new file mode 100644
index 0000000000..7fe829c0bd
--- /dev/null
+++ b/tools/build/test/use_requirements.py
@@ -0,0 +1,283 @@
+#!/usr/bin/python
+
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+
+# Test that usage requirements on main targets work (and are propagated all the
+# way up, and not only to direct dependants).
+t.write("jamroot.jam", "")
+
+# Note: 'lib cc ...', not 'lib c ...'. If 'lib c : ...' were used, the HP-CXX
+# linker would confuse it with the system C runtime.
+t.write("jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : :
+ <define>FOO <link>shared:<define>SHARED_B ;
+lib cc : c.cpp b ;
+exe a : a.cpp cc ;
+""")
+
+t.write("b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.write("c.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+create_lib_please() {}
+""")
+
+t.write("a.cpp", """\
+#ifdef FOO
+void
+# if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+# endif
+foo() {}
+#endif
+int main() { foo(); }
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test that use requirements on a main target work when they are referred to
+# using 'dependency' features.
+
+t.write("jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
+ <link>shared:<define>SHARED_B ;
+exe a : a.cpp : <use>b ;
+""")
+
+t.write("b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.write("a.cpp", """\
+#ifdef FOO
+int main() {}
+#endif
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test that usage requirements on a project work.
+t.write("jamfile.jam", "exe a : a.cpp lib//b ;")
+
+t.write("lib/jamfile.jam", """\
+project
+ : requirements <link>shared:<define>SHARED_B
+ : usage-requirements <define>FOO <link>shared:<define>SHARED_B ;
+lib b : b.cpp ;
+""")
+
+t.write("lib/b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system()
+
+
+# Test that use requirements are inherited correctly.
+t.write("jamfile.jam", "exe a : a.cpp lib/1//b ;")
+
+t.write("a.cpp", """\
+#if defined(FOO) && defined(ZOO)
+void foo() {}
+#endif
+int main() { foo(); }
+""")
+
+t.write("lib/jamfile.jam", """\
+project : requirements : usage-requirements <define>FOO ;
+""")
+
+t.write("lib/1/jamfile.jam", """\
+project
+ : requirements <link>shared:<define>SHARED_B
+ : usage-requirements <define>ZOO <link>shared:<define>SHARED_B ;
+lib b : b.cpp ;
+""")
+
+t.write("lib/1/b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test that we correctly handle dependency features in usage requirements on a
+# target.
+t.write("jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
+ <link>shared:<define>SHARED_B ;
+
+# Here is the test: we should correctly handle the dependency feature and get
+# usage requirements from 'b'.
+lib cc : c.cpp : <link>shared:<define>SHARED_C : : <library>b ;
+
+# This will build only if <define>FOO was propagated from 'c'.
+exe a : a.cpp cc ;
+""")
+
+t.write("a.cpp", """\
+#ifdef FOO
+void
+# if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+# endif
+foo();
+#endif
+
+int main() { foo(); }
+""")
+
+t.write("c.cpp", """\
+int
+#if defined(_WIN32) && defined(SHARED_C)
+__declspec(dllexport)
+#endif
+must_export_something;
+""")
+
+t.run_build_system()
+t.run_build_system(["--clean"])
+
+
+# Test correct handling of dependency features in project requirements.
+t.write("jamfile.jam", "exe a : a.cpp lib1//cc ;")
+
+t.write("lib1/jamfile.jam", """\
+project
+ : requirements <link>shared:<define>SHARED_C
+ : usage-requirements <library>../lib2//b <link>shared:<define>SHARED_C ;
+lib cc : c.cpp ;
+""")
+
+t.write("lib1/c.cpp", """\
+int
+#if defined(_WIN32) && defined(SHARED_C)
+__declspec(dllexport)
+#endif
+must_export_something;
+""")
+
+t.write("lib2/jamfile.jam", """\
+lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
+ <link>shared:<define>SHARED_B ;
+""")
+
+t.copy("b.cpp", "lib2/b.cpp")
+
+t.run_build_system()
+
+
+# Test that targets listed in dependency features in usage requirements are
+# built with the correct properties.
+t.rm(".")
+
+t.write("jamroot.jam", "")
+t.write("jamfile.jam", """\
+lib main : main.cpp : <use>libs//lib1 : : <library>libs//lib1 ;
+exe hello : hello.cpp main : ;
+""")
+
+t.write("main.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_LIB1)
+__declspec(dllimport)
+#endif
+foo();
+
+int main() { foo(); }
+""")
+
+t.write("hello.cpp", "\n")
+t.write("libs/a.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_LIB1)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+
+# This library should be built with the same properties as 'main'. This is a
+# regression test for a bug where they were generated with empty properties,
+# causing ambiguities between variants.
+t.write("libs/jamfile.jam", """\
+lib lib1 : a_d.cpp : <variant>debug <link>shared:<define>SHARED_LIB1 : :
+ <link>shared:<define>SHARED_LIB1 ;
+lib lib1 : a.cpp : <variant>release <link>shared:<define>SHARED_LIB1 : :
+ <link>shared:<define>SHARED_LIB1 ;
+""")
+
+t.write("libs/a_d.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_LIB1)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system(["link=static"])
+t.expect_addition("libs/bin/$toolset/debug/link-static/a_d.obj")
+
+
+# Test that indirect conditionals are respected in usage requirements.
+t.rm(".")
+
+t.write("jamroot.jam", """\
+rule has-foo ( properties * ) { return <define>HAS_FOO ; }
+exe a : a.cpp b ;
+lib b : b.cpp : <link>static : : <conditional>@has-foo ;
+""")
+
+t.write("a.cpp", """\
+#ifdef HAS_FOO
+void foo();
+int main() { foo(); }
+#endif
+""")
+
+t.write("b.cpp", """\
+void
+#if defined(_WIN32) && defined(SHARED_B)
+__declspec(dllexport)
+#endif
+foo() {}
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.exe")
+
+t.cleanup()
diff --git a/tools/build/test/using.py b/tools/build/test/using.py
new file mode 100644
index 0000000000..31a07eb7bf
--- /dev/null
+++ b/tools/build/test/using.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python
+
+# Copyright (C) Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0. (See
+# accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamroot.jam", "using some_tool ;")
+t.write("some_tool.jam", """\
+import project ;
+project.initialize $(__name__) ;
+rule init ( ) { }
+""")
+
+t.write("some_tool.py", """\
+from b2.manager import get_manager
+get_manager().projects().initialize(__name__)
+def init():
+ pass
+""")
+
+t.write("sub/a.cpp", "int main() {}\n")
+t.write("sub/jamfile.jam", "exe a : a.cpp ;")
+
+t.run_build_system(subdir="sub")
+t.expect_addition("sub/bin/$toolset/debug/a.exe")
+
+t.cleanup()
diff --git a/tools/build/test/wrapper.py b/tools/build/test/wrapper.py
new file mode 100644
index 0000000000..8501a7340a
--- /dev/null
+++ b/tools/build/test/wrapper.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+# Copyright 2004 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
+# Test that the user can define their own rule that calls a built-in main
+# target rule and that this works.
+
+import BoostBuild
+
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("jamfile.jam", """
+my-test : test.cpp ;
+""")
+
+t.write("test.cpp", """
+int main() {}
+""")
+
+t.write("jamroot.jam", """
+using testing ;
+
+rule my-test ( name ? : sources + )
+{
+ name ?= test ;
+ unit-test $(name) : $(sources) ; # /site-config//cppunit /util//testMain ;
+}
+
+IMPORT $(__name__) : my-test : : my-test ;
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/test.passed")
+
+t.cleanup()
diff --git a/tools/build/test/wrong_project.py b/tools/build/test/wrong_project.py
new file mode 100644
index 0000000000..273ff5c97d
--- /dev/null
+++ b/tools/build/test/wrong_project.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+
+# Copyright Vladimir Prus 2005.
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+# Regression test. When a Jamfile contained "using whatever ;" and the
+# 'whatever' module declared a project, all targets in the Jamfile were
+# considered to be declared in the project associated with 'whatever', not
+# with the Jamfile.
+
+import BoostBuild
+
+t = BoostBuild.Tester(use_test_config=False)
+
+t.write("a.cpp", "int main() {}\n")
+
+t.write("jamroot.jam", """\
+using some_tool ;
+exe a : a.cpp ;
+""")
+
+t.write("some_tool.jam", """\
+import project ;
+project.initialize $(__name__) ;
+rule init ( ) { }
+""")
+
+t.write("some_tool.py", """\
+from b2.manager import get_manager
+get_manager().projects().initialize(__name__)
+def init():
+ pass
+""")
+
+t.run_build_system()
+t.expect_addition("bin/$toolset/debug/a.exe")
+
+t.cleanup()
diff --git a/tools/build/test/zlib.py b/tools/build/test/zlib.py
new file mode 100755
index 0000000000..2821bd7448
--- /dev/null
+++ b/tools/build/test/zlib.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+
+# Copyright (C) 2013 Steven Watanabe
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or copy at
+# http://www.boost.org/LICENSE_1_0.txt)
+
+import BoostBuild
+import MockToolset
+
+t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
+
+MockToolset.create(t)
+
+# Build from source
+t.write("zlib/zlib.h", 'zlib')
+t.write("zlib/deflate.c", 'deflate')
+
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <source>$(here)/zlib ;
+alias zlib : /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, '''
+source_file('deflate.c', 'deflate')
+action('-c -x c -I./zlib -o $deflate.o $deflate.c')
+action('--dll $deflate.o -o $deflate.so')
+action('--archive $deflate.o -o $deflate.a')
+''')
+
+t.run_build_system()
+t.expect_addition('bin/standalone/zlib/mock/debug/z.dll')
+t.expect_addition('bin/standalone/zlib/mock/debug/link-static/z.lib')
+
+t.rm('zlib')
+
+# Generic definitions that aren't configuration specific
+common_stuff = '''
+source_file('test.cpp', 'test.cpp')
+source_file('main.cpp', 'int main() {}')
+source_file('zlib.h.cpp', '#include <zlib.h>')
+action('-c -x c++ $main.cpp -o $main.o')
+'''
+t.write('test.cpp', 'test.cpp')
+
+# Default initialization - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --static-lib=z -o $config.exe')
+action('-c -x c++ $zlib.h.cpp -o $zlib.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --static-lib=z -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Default initialization - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o --shared-lib=z -o $config.exe')
+action('-c -x c++ $zlib.h.cpp -o $zlib.h.o')
+action('-c -x c++ $test.cpp -o $test.o')
+action('$test.o --shared-lib=z -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - static library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <name>myzlib <include>$(here)/zlib <search>$(here)/zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+t.write('zlib/zlib.h', 'zlib')
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zlib --static-lib=myzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./zlib -o $test.o')
+action('$test.o -L./zlib --static-lib=myzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+# Initialization in explicit location - shared library
+t.rm('bin')
+t.write("Jamroot.jam", """
+path-constant here : . ;
+using zlib : : <name>myzlib <include>$(here)/zlib <search>$(here)/zlib ;
+exe test : test.cpp /zlib//zlib : : <link>static <link>shared ;
+""")
+
+MockToolset.set_expected(t, common_stuff + '''
+action('$main.o -L./zlib --shared-lib=myzlib -o $config.exe')
+action('-c -x c++ $test.cpp -I./zlib -o $test.o')
+action('$test.o -L./zlib --shared-lib=myzlib -o $test')
+''')
+t.run_build_system()
+t.expect_addition('bin/mock/debug/test.exe')
+t.expect_addition('bin/mock/debug/link-static/test.exe')
+
+t.cleanup()
diff --git a/tools/build/v2/Jamroot.jam b/tools/build/v2/Jamroot.jam
deleted file mode 100644
index 73d86ab15b..0000000000
--- a/tools/build/v2/Jamroot.jam
+++ /dev/null
@@ -1,47 +0,0 @@
-
-path-constant SELF : . ;
-
-import path ;
-import package ;
-import os ;
-
-local ext = "" ;
-if [ os.on-windows ]
-{
- ext = ".exe" ;
-}
-
-
-package.install boost-build-engine boost-build
- : # properties
- : # binaries
- b2$(ext) bjam$(ext)
- ;
-
-local e1 = [ path.glob-tree $(SELF)/example : * : . .svn ] ;
-local e2 ;
-for e in $(e1)
-{
- e = [ path.native $(e) ] ;
- if [ CHECK_IF_FILE $(e) ]
- {
- e2 += $(e) ;
- }
-}
-
-package.install-data boost-build-core
- : # Which subdir of $prefix/share
- boost-build
- : # What to install
- $(SELF)/boost-build.jam
- $(SELF)/build-system.jam
- [ path.glob-tree $(SELF)/build : *.jam *.py ]
- [ path.glob-tree $(SELF)/kernel : *.jam *.py ]
- [ path.glob-tree $(SELF)/util : *.jam *.py ]
- [ path.glob-tree $(SELF)/tools : *.jam *.py *.xml *.xsl *.doxyfile *.hpp ]
- $(e2)
- : # What is the root of the directory
- <install-source-root>.
- ;
-
-alias install : boost-build-engine boost-build-core ; \ No newline at end of file
diff --git a/tools/build/v2/boost-build.jam b/tools/build/v2/boost-build.jam
deleted file mode 100644
index 73db0497be..0000000000
--- a/tools/build/v2/boost-build.jam
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2002 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-boost-build kernel ;
diff --git a/tools/build/v2/bootstrap.bat b/tools/build/v2/bootstrap.bat
deleted file mode 100644
index 58f7613983..0000000000
--- a/tools/build/v2/bootstrap.bat
+++ /dev/null
@@ -1,49 +0,0 @@
-@ECHO OFF
-
-REM Copyright (C) 2009 Vladimir Prus
-REM
-REM Distributed under the Boost Software License, Version 1.0.
-REM (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-ECHO Bootstrapping the build engine
-if exist ".\engine\bin.ntx86\bjam.exe" del engine\bin.ntx86\bjam.exe
-if exist ".\engine\bin.ntx86_64\bjam.exe" del engine\bin.ntx86_64\bjam.exe
-cd engine
-
-call .\build.bat %* > ..\bootstrap.log
-@ECHO OFF
-cd ..
-
-if exist ".\engine\bin.ntx86\b2.exe" (
- copy .\engine\bin.ntx86\b2.exe . > nul
- copy .\engine\bin.ntx86\bjam.exe . > nul
- goto :bjam_built)
-
-if exist ".\engine\bin.ntx86_64\b2.exe" (
- copy .\engine\bin.ntx86_64\b2.exe . > nul
- copy .\engine\bin.ntx86_64\bjam.exe . > nul
- goto :bjam_built)
-
-goto :bjam_failure
-
-:bjam_built
-
-ECHO.
-ECHO Bootstrapping is done. To build, run:
-ECHO.
-ECHO .\b2 --prefix=DIR install
-ECHO.
-
-goto :end
-
-:bjam_failure
-
-ECHO.
-ECHO Failed to bootstrap the build engine
-ECHO Please consult bootstrap.log for further diagnostics.
-ECHO.
-
-
-goto :end
-
-:end
diff --git a/tools/build/v2/bootstrap.sh b/tools/build/v2/bootstrap.sh
deleted file mode 100755
index 5083551736..0000000000
--- a/tools/build/v2/bootstrap.sh
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/sh
-# Copyright (C) 2005, 2006 Douglas Gregor.
-# Copyright (C) 2006 The Trustees of Indiana University
-# Copyright (C) 2010 Bryce Lelbach
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# boostinspect:notab - Tabs are required for the Makefile.
-
-BJAM=""
-TOOLSET=""
-BJAM_CONFIG=""
-
-for option
-do
- case $option in
-
- -help | --help | -h)
- want_help=yes ;;
-
- -with-toolset=* | --with-toolset=* )
- TOOLSET=`expr "x$option" : "x-*with-toolset=\(.*\)"`
- ;;
-
- -*)
- { echo "error: unrecognized option: $option
-Try \`$0 --help' for more information." >&2
- { (exit 1); exit 1; }; }
- ;;
-
- esac
-done
-
-if test "x$want_help" = xyes; then
- cat <<EOF
-`./bootstrap.sh' creates a minimal Boost.Build, which can install itself.
-
-Usage: $0 [OPTION]...
-
-Defaults for the options are specified in brackets.
-
-Configuration:
- -h, --help display this help and exit
- --with-bjam=BJAM use existing Boost.Jam executable (bjam)
- [automatically built]
- --with-toolset=TOOLSET use specific Boost.Build toolset
- [automatically detected]
-EOF
-fi
-test -n "$want_help" && exit 0
-
-# TBD: Determine where the script is located
-my_dir="."
-
-# Determine the toolset, if not already decided
-if test "x$TOOLSET" = x; then
- guessed_toolset=`$my_dir/engine/build.sh --guess-toolset`
- case $guessed_toolset in
- acc | darwin | gcc | como | mipspro | pathscale | pgi | qcc | vacpp )
- TOOLSET=$guessed_toolset
- ;;
-
- intel-* )
- TOOLSET=intel
- ;;
-
- mingw )
- TOOLSET=gcc
- ;;
-
- clang* )
- TOOLSET=clang
- ;;
-
- sun* )
- TOOLSET=sun
- ;;
-
- * )
- # Not supported by Boost.Build
- ;;
- esac
-fi
-
-case $TOOLSET in
- clang*)
- TOOLSET=clang
- ;;
-esac
-
-
-rm -f config.log
-
-# Build bjam
-if test "x$BJAM" = x; then
- echo -n "Bootstrapping the build engine with toolset $TOOLSET... "
- pwd=`pwd`
- (cd "$my_dir/engine" && ./build.sh "$TOOLSET") > bootstrap.log 2>&1
- if [ $? -ne 0 ]; then
- echo
- echo "Failed to bootstrap the build engine"
- echo "Consult 'bootstrap.log' for more details"
- exit 1
- fi
- cd "$pwd"
- arch=`cd $my_dir/engine && ./bootstrap/jam0 -d0 -f build.jam --toolset=$TOOLSET --toolset-root= --show-locate-target && cd ..`
- BJAM="$my_dir/engine/$arch/b2"
- echo "engine/$arch/bjam"
- cp "$BJAM" .
- cp "$my_dir/engine/$arch/bjam" .
-fi
-
-cat << EOF
-
-Bootstrapping is done. To build and install, run:
-
- ./b2 install --prefix=<DIR>
-
-EOF
diff --git a/tools/build/v2/build-system.jam b/tools/build/v2/build-system.jam
deleted file mode 100644
index 9f9c884cc6..0000000000
--- a/tools/build/v2/build-system.jam
+++ /dev/null
@@ -1,1008 +0,0 @@
-# Copyright 2003, 2005, 2007 Dave Abrahams
-# Copyright 2006, 2007 Rene Rivera
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This file is part of Boost Build version 2. You can think of it as forming the
-# main() routine. It is invoked by the bootstrapping code in bootstrap.jam.
-
-import build-request ;
-import builtin ;
-import "class" : new ;
-import errors ;
-import feature ;
-import make ;
-import modules ;
-import os ;
-import path ;
-import project ;
-import property-set ;
-import regex ;
-import sequence ;
-import targets ;
-import toolset ;
-import utility ;
-import version ;
-import virtual-target ;
-import generators ;
-import configure ;
-
-################################################################################
-#
-# Module global data.
-#
-################################################################################
-
-# Shortcut used in this module for accessing used command-line parameters.
-.argv = [ modules.peek : ARGV ] ;
-
-# Flag indicating we should display additional debugging information related to
-# locating and loading Boost Build configuration files.
-.debug-config = [ MATCH ^(--debug-configuration)$ : $(.argv) ] ;
-
-# Legacy option doing too many things, some of which are not even documented.
-# Should be phased out.
-# * Disables loading site and user configuration files.
-# * Disables auto-configuration for toolsets specified explicitly on the
-# command-line.
-# * Causes --toolset command-line options to be ignored.
-# * Prevents the default toolset from being used even if no toolset has been
-# configured at all.
-.legacy-ignore-config = [ MATCH ^(--ignore-config)$ : $(.argv) ] ;
-
-# The cleaning is tricky. Say, if the user says 'bjam --clean foo' where 'foo' is a
-# directory, then we want to clean targets which are in 'foo' as well as those
-# in any child Jamfiles under 'foo', but not those in any unrelated Jamfiles. To
-# achieve this we collect a list of projects under which cleaning is allowed.
-.project-targets = ;
-
-# Virtual targets obtained when building main targets referenced on the command
-# line. When running 'bjam --clean main_target' we want to clean only files
-# belonging to that main target so we need to record which targets are produced
-# for it.
-.results-of-main-targets = ;
-
-# Was an XML dump requested?
-.out-xml = [ MATCH ^--out-xml=(.*)$ : $(.argv) ] ;
-
-# Default toolset & version to be used in case no other toolset has been used
-# explicitly by either the loaded configuration files, the loaded project build
-# scripts or an explicit toolset request on the command line. If not specified,
-# an arbitrary default will be used based on the current host OS. This value,
-# while not strictly necessary, has been added to allow testing Boost-Build's
-# default toolset usage functionality.
-.default-toolset = ;
-.default-toolset-version = ;
-
-
-################################################################################
-#
-# Public rules.
-#
-################################################################################
-
-# Returns the property set with the free features from the currently processed
-# build request.
-#
-rule command-line-free-features ( )
-{
- return $(.command-line-free-features) ;
-}
-
-
-# Returns the location of the build system. The primary use case is building
-# Boost where it is sometimes needed to get the location of other components
-# (e.g. BoostBook files) and it is convenient to use locations relative to the
-# Boost Build path.
-#
-rule location ( )
-{
- local r = [ modules.binding build-system ] ;
- return $(r:P) ;
-}
-
-
-# Sets the default toolset & version to be used in case no other toolset has
-# been used explicitly by either the loaded configuration files, the loaded
-# project build scripts or an explicit toolset request on the command line. For
-# more detailed information see the comment related to used global variables.
-#
-rule set-default-toolset ( toolset : version ? )
-{
- .default-toolset = $(toolset) ;
- .default-toolset-version = $(version) ;
-}
-
-rule set-pre-build-hook ( function )
-{
- .pre-build-hook = $(function) ;
-}
-
-rule set-post-build-hook ( function )
-{
- .post-build-hook = $(function) ;
-}
-
-################################################################################
-#
-# Local rules.
-#
-################################################################################
-
-# Returns actual Jam targets to be used for executing a clean request.
-#
-local rule actual-clean-targets ( )
-{
- # Construct a list of projects explicitly detected as targets on this build
- # system run. These are the projects under which cleaning is allowed.
- for local t in $(targets)
- {
- if [ class.is-a $(t) : project-target ]
- {
- .project-targets += [ $(t).project-module ] ;
- }
- }
-
- # Construct a list of targets explicitly detected on this build system run
- # as a result of building main targets.
- local targets-to-clean ;
- for local t in $(.results-of-main-targets)
- {
- # Do not include roots or sources.
- targets-to-clean += [ virtual-target.traverse $(t) ] ;
- }
- targets-to-clean = [ sequence.unique $(targets-to-clean) ] ;
-
- local to-clean ;
- for local t in [ virtual-target.all-targets ]
- {
- local p = [ $(t).project ] ;
-
- # Remove only derived targets.
- if [ $(t).action ]
- {
- if $(t) in $(targets-to-clean) ||
- [ should-clean-project [ $(p).project-module ] ] = true
- {
- to-clean += $(t) ;
- }
- }
- }
-
- local to-clean-actual ;
- for local t in $(to-clean)
- {
- to-clean-actual += [ $(t).actualize ] ;
- }
- return $(to-clean-actual) ;
-}
-
-
-# Given a target id, try to find and return the corresponding target. This is
-# only invoked when there is no Jamfile in ".". This code somewhat duplicates
-# code in project-target.find but we can not reuse that code without a
-# project-targets instance.
-#
-local rule find-target ( target-id )
-{
- local split = [ MATCH (.*)//(.*) : $(target-id) ] ;
-
- local pm ;
- if $(split)
- {
- pm = [ project.find $(split[1]) : "." ] ;
- }
- else
- {
- pm = [ project.find $(target-id) : "." ] ;
- }
-
- local result ;
- if $(pm)
- {
- result = [ project.target $(pm) ] ;
- }
-
- if $(split)
- {
- result = [ $(result).find $(split[2]) ] ;
- }
-
- return $(result) ;
-}
-
-
-# Initializes a new configuration module.
-#
-local rule initialize-config-module ( module-name : location ? )
-{
- project.initialize $(module-name) : $(location) ;
- if USER_MODULE in [ RULENAMES ]
- {
- USER_MODULE $(module-name) ;
- }
-}
-
-
-# Helper rule used to load configuration files. Loads the first configuration
-# file with the given 'filename' at 'path' into module with name 'module-name'.
-# Not finding the requested file may or may not be treated as an error depending
-# on the must-find parameter. Returns a normalized path to the loaded
-# configuration file or nothing if no file was loaded.
-#
-local rule load-config ( module-name : filename : path + : must-find ? )
-{
- if $(.debug-config)
- {
- ECHO "notice: Searching" "$(path)" "for" "$(module-name)"
- "configuration file" "$(filename)" "." ;
- }
- local where = [ GLOB $(path) : $(filename) ] ;
- if $(where)
- {
- where = [ NORMALIZE_PATH $(where[1]) ] ;
- if $(.debug-config)
- {
- ECHO "notice: Loading" "$(module-name)" "configuration file"
- "$(filename)" "from" $(where) "." ;
- }
-
- # Set source location so that path-constant in config files
- # with relative paths work. This is of most importance
- # for project-config.jam, but may be used in other
- # config files as well.
- local attributes = [ project.attributes $(module-name) ] ;
- $(attributes).set source-location : $(where:D) : exact ;
- modules.load $(module-name) : $(filename) : $(path) ;
- project.load-used-projects $(module-name) ;
- }
- else
- {
- if $(must-find)
- {
- errors.user-error "Configuration file" "$(filename)" "not found in"
- "$(path)" "." ;
- }
- if $(.debug-config)
- {
- ECHO "notice:" "Configuration file" "$(filename)" "not found in"
- "$(path)" "." ;
- }
- }
- return $(where) ;
-}
-
-
-# Loads all the configuration files used by Boost Build in the following order:
-#
-# -- test-config --
-# Loaded only if specified on the command-line using the --test-config
-# command-line parameter. It is ok for this file not to exist even if specified.
-# If this configuration file is loaded, regular site and user configuration
-# files will not be. If a relative path is specified, file is searched for in
-# the current folder.
-#
-# -- site-config --
-# Always named site-config.jam. Will only be found if located on the system
-# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
-# path, in that order. Not loaded in case the test-config configuration file is
-# loaded or either the --ignore-site-config or the --ignore-config command-line
-# option is specified.
-#
-# -- user-config --
-# Named user-config.jam by default or may be named explicitly using the
-# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
-# variable. If named explicitly the file is looked for from the current working
-# directory and if the default one is used then it is searched for in the
-# user's home directory and the Boost Build path, in that order. Not loaded in
-# case either the test-config configuration file is loaded, --ignore-config
-# command-line option is specified or an empty file name is explicitly
-# specified. If the file name has been given explicitly then the file must
-# exist.
-#
-# Test configurations have been added primarily for use by Boost Build's
-# internal unit testing system but may be used freely in other places as well.
-#
-local rule load-configuration-files
-{
- # Flag indicating that site configuration should not be loaded.
- local ignore-site-config =
- [ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
-
- if $(.legacy-ignore-config) && $(.debug-config)
- {
- ECHO "notice: Regular site and user configuration files will be ignored" ;
- ECHO "notice: due to the --ignore-config command-line option." ;
- }
-
- initialize-config-module test-config ;
- local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
- local uq = [ MATCH \"(.*)\" : $(test-config) ] ;
- if $(uq)
- {
- test-config = $(uq) ;
- }
- if $(test-config)
- {
- local where =
- [ load-config test-config : $(test-config:BS) : $(test-config:D) ] ;
- if $(where)
- {
- if $(.debug-config) && ! $(.legacy-ignore-config)
- {
- ECHO "notice: Regular site and user configuration files will" ;
- ECHO "notice: be ignored due to the test configuration being"
- "loaded." ;
- }
- }
- else
- {
- test-config = ;
- }
- }
-
- local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
- local site-path = /etc $(user-path) ;
- if [ os.name ] in NT CYGWIN
- {
- site-path = [ modules.peek : SystemRoot ] $(user-path) ;
- }
-
- if $(ignore-site-config) && ! $(.legacy-ignore-config)
- {
- ECHO "notice: Site configuration files will be ignored due to the" ;
- ECHO "notice: --ignore-site-config command-line option." ;
- }
-
- initialize-config-module site-config ;
- if ! $(test-config) && ! $(ignore-site-config) && ! $(.legacy-ignore-config)
- {
- load-config site-config : site-config.jam : $(site-path) ;
- }
-
- initialize-config-module user-config ;
- if ! $(test-config) && ! $(.legacy-ignore-config)
- {
- local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
- user-config = $(user-config[-1]) ;
- user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
- # Special handling for the case when the OS does not strip the quotes
- # around the file name, as is the case when using Cygwin bash.
- user-config = [ utility.unquote $(user-config) ] ;
- local explicitly-requested = $(user-config) ;
- user-config ?= user-config.jam ;
-
- if $(user-config)
- {
- if $(explicitly-requested)
- {
- # Treat explicitly entered user paths as native OS path
- # references and, if non-absolute, root them at the current
- # working directory.
- user-config = [ path.make $(user-config) ] ;
- user-config = [ path.root $(user-config) [ path.pwd ] ] ;
- user-config = [ path.native $(user-config) ] ;
-
- if $(.debug-config)
- {
- ECHO "notice: Loading explicitly specified user"
- "configuration file:" ;
- ECHO " $(user-config)" ;
- }
-
- load-config user-config : $(user-config:BS) : $(user-config:D)
- : must-exist ;
- }
- else
- {
- load-config user-config : $(user-config) : $(user-path) ;
- }
- }
- else if $(.debug-config)
- {
- ECHO "notice: User configuration file loading explicitly disabled." ;
- }
- }
-
- # We look for project-config.jam from "." upward.
- # I am not sure this is the 100% right decision; we might as well check for
- # it only alongside the Jamroot file. However:
- #
- # - We need to load project-root.jam before Jamroot
- # - We probably would need to load project-root.jam even if there's no
- # Jamroot - e.g. to implement automake-style out-of-tree builds.
- local file = [ path.glob "." : project-config.jam ] ;
- if ! $(file)
- {
- file = [ path.glob-in-parents "." : project-config.jam ] ;
- }
- if $(file)
- {
- initialize-config-module project-config : $(file:D) ;
- load-config project-config : project-config.jam : $(file:D) ;
- }
-}
-
-
-# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
-# toolset=xx,yy,...zz in the command line. May return additional properties to
-# be processed as if they had been specified by the user.
-#
-local rule process-explicit-toolset-requests
-{
- local extra-properties ;
-
- local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*)$ : $(.argv) ] : "," ] ;
- local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*)$ : $(.argv) ] : "," ] ;
-
- for local t in $(option-toolsets) $(feature-toolsets)
- {
- # Parse toolset-version/properties.
- local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
- local toolset-version = $((t-v,t,v)[1]) ;
- local toolset = $((t-v,t,v)[2]) ;
- local version = $((t-v,t,v)[3]) ;
-
- if $(.debug-config)
- {
- ECHO notice: [cmdline-cfg] Detected command-line request for
- $(toolset-version): "toolset=" $(toolset) "version="
- $(version) ;
- }
-
- # If the toolset is not known, configure it now.
- local known ;
- if $(toolset) in [ feature.values <toolset> ]
- {
- known = true ;
- }
- if $(known) && $(version) && ! [ feature.is-subvalue toolset
- : $(toolset) : version : $(version) ]
- {
- known = ;
- }
- # TODO: we should do 'using $(toolset)' in case no version has been
- # specified and there are no versions defined for the given toolset to
- # allow the toolset to configure its default version. For this we need
- # to know how to detect whether a given toolset has any versions
- # defined. An alternative would be to do this whenever version is not
- # specified but that would require that toolsets correctly handle the
- # case when their default version is configured multiple times which
- # should be checked for all existing toolsets first.
-
- if ! $(known)
- {
- if $(.debug-config)
- {
- ECHO "notice: [cmdline-cfg] toolset $(toolset-version) not"
- "previously configured; attempting to auto-configure now" ;
- }
- toolset.using $(toolset) : $(version) ;
- }
- else
- {
- if $(.debug-config)
- {
- ECHO notice: [cmdline-cfg] toolset $(toolset-version) already
- configured ;
- }
- }
-
- # Make sure we get an appropriate property into the build request in
- # case toolset has been specified using the "--toolset=..." command-line
- # option form.
- if ! $(t) in $(.argv) && ! $(t) in $(feature-toolsets)
- {
- if $(.debug-config)
- {
- ECHO notice: [cmdline-cfg] adding toolset=$(t) to the build
- request. ;
- }
- extra-properties += toolset=$(t) ;
- }
- }
-
- return $(extra-properties) ;
-}
-
-
-# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
-# child to any of the projects requested to be cleaned in this build system run.
-# Returns 'false' otherwise. Expects the .project-targets list to have already
-# been constructed.
-#
-local rule should-clean-project ( project )
-{
- if ! $(.should-clean-project.$(project))
- {
- local r = false ;
- if $(project) in $(.project-targets)
- {
- r = true ;
- }
- else
- {
- local parent = [ project.attribute $(project) parent-module ] ;
- if $(parent) && $(parent) != user-config
- {
- r = [ should-clean-project $(parent) ] ;
- }
- }
- .should-clean-project.$(project) = $(r) ;
- }
-
- return $(.should-clean-project.$(project)) ;
-}
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-{
- if --version in $(.argv)
- {
- version.print ;
- EXIT ;
- }
-
- version.verify-engine-version ;
-
- load-configuration-files ;
-
- local extra-properties ;
- # Note that this causes --toolset options to be ignored if --ignore-config
- # is specified.
- if ! $(.legacy-ignore-config)
- {
- extra-properties = [ process-explicit-toolset-requests ] ;
- }
-
-
- # We always load the project in "." so that 'use-project' directives have any
- # chance of being seen. Otherwise, we would not be able to refer to
- # subprojects using target ids.
- local current-project ;
- if [ project.find "." : "." ]
- {
- current-project = [ project.target [ project.load "." ] ] ;
- }
-
-
- # In case there are no toolsets currently defined, make the build run using
- # the default toolset.
- if ! $(.legacy-ignore-config) && ! [ feature.values <toolset> ]
- {
- local default-toolset = $(.default-toolset) ;
- local default-toolset-version = ;
- if $(default-toolset)
- {
- default-toolset-version = $(.default-toolset-version) ;
- }
- else
- {
- default-toolset = gcc ;
- if [ os.name ] = NT
- {
- default-toolset = msvc ;
- }
- else if [ os.name ] = MACOSX
- {
- default-toolset = darwin ;
- }
- }
-
- ECHO "warning: No toolsets are configured." ;
- ECHO "warning: Configuring default toolset" \"$(default-toolset)\". ;
- ECHO "warning: If the default is wrong, your build may not work correctly." ;
- ECHO "warning: Use the \"toolset=xxxxx\" option to override our guess." ;
- ECHO "warning: For more configuration options, please consult" ;
- ECHO "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
-
- toolset.using $(default-toolset) : $(default-toolset-version) ;
- }
-
-
- # Parse command line for targets and properties. Note that this requires
- # that all project files already be loaded.
- local build-request = [ build-request.from-command-line $(.argv)
- $(extra-properties) ] ;
- local target-ids = [ $(build-request).get-at 1 ] ;
- local properties = [ $(build-request).get-at 2 ] ;
-
-
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if $(properties)
- {
- expanded = [ build-request.expand-no-defaults $(properties) ] ;
- local xexpanded ;
- for local e in $(expanded)
- {
- xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
- }
- expanded = $(xexpanded) ;
- }
- else
- {
- expanded = [ property-set.empty ] ;
- }
-
-
- # Check that we actually found something to build.
- if ! $(current-project) && ! $(target-ids)
- {
- errors.user-error "error: no Jamfile in current directory found, and no"
- "target references specified." ;
- EXIT ;
- }
-
-
- # Flags indicating that this build system run has been started in order to
- # clean existing targets instead of creating new ones. Note that these are not the
- # final flag values as they may get changed later on due to some special
- # targets being specified on the command line.
- local clean ; if "--clean" in $(.argv) { clean = true ; }
- local cleanall ; if "--clean-all" in $(.argv) { cleanall = true ; }
-
-
- # List of explicitly requested files to build. Any target reference read
- # from the command line that is not recognized as one of the targets
- # defined in the loaded Jamfiles will be interpreted as an explicitly
- # requested file to build. If any such files are explicitly requested then
- # only those files and the targets they depend on will be built and they
- # will be searched for among targets that would have been built had there
- # been no explicitly requested files.
- local explicitly-requested-files ;
-
-
- # List of Boost Build meta-targets, virtual-targets and actual Jam targets
- # constructed in this build system run.
- local targets ;
- local virtual-targets ;
- local actual-targets ;
-
-
- # Process each target specified on the command-line and convert it into
- # internal Boost Build target objects. Detect the special clean target. If no
- # main Boost Build targets were explicitly requested, use the current project
- # as the target.
- for local id in $(target-ids)
- {
- if $(id) = clean
- {
- clean = true ;
- }
- else
- {
- local t ;
- if $(current-project)
- {
- t = [ $(current-project).find $(id) : no-error ] ;
- }
- else
- {
- t = [ find-target $(id) ] ;
- }
-
- if ! $(t)
- {
- ECHO "notice: could not find main target" $(id) ;
- ECHO "notice: assuming it is a name of file to create." ;
- explicitly-requested-files += $(id) ;
- }
- else
- {
- targets += $(t) ;
- }
- }
- }
- if ! $(targets)
- {
- targets += [ project.target [ project.module-name "." ] ] ;
- }
-
- if [ option.get dump-generators : : true ]
- {
- generators.dump ;
- }
-
- # We wish to put config.log in the build directory corresponding
- # to Jamroot, so that the location does not differ depending on
- # the directory in which we build. The amount of indirection necessary
- # here is scary.
- local first-project = [ $(targets[0]).project ] ;
- local first-project-root-location = [ $(first-project).get project-root ] ;
- local first-project-root-module = [ project.load $(first-project-root-location) ] ;
- local first-project-root = [ project.target $(first-project-root-module) ] ;
- local first-build-build-dir = [ $(first-project-root).build-dir ] ;
- configure.set-log-file $(first-build-build-dir)/config.log ;
-
- # Now that we have a set of targets to build and a set of property sets to
- # build the targets with, we can start the main build process by using each
- # property set to generate virtual targets from all of our listed targets
- # and any of their dependants.
- for local p in $(expanded)
- {
- .command-line-free-features = [ property-set.create [ $(p).free ] ] ;
- for local t in $(targets)
- {
- local g = [ $(t).generate $(p) ] ;
- if ! [ class.is-a $(t) : project-target ]
- {
- .results-of-main-targets += $(g[2-]) ;
- }
- virtual-targets += $(g[2-]) ;
- }
- }
-
-
- # Convert collected virtual targets into actual raw Jam targets.
- for t in $(virtual-targets)
- {
- actual-targets += [ $(t).actualize ] ;
- }
-
-
- # If XML data output has been requested, prepare additional rules and targets
- # so we can hook into Jam to collect build data while it is building and have
- # it trigger the final XML report generation after all the planned targets
- # have been built.
- if $(.out-xml)
- {
- # Get a qualified virtual target name.
- rule full-target-name ( target )
- {
- local name = [ $(target).name ] ;
- local project = [ $(target).project ] ;
- local project-path = [ $(project).get location ] ;
- return $(project-path)//$(name) ;
- }
-
- # Generate an XML file containing build statistics for each constituent.
- #
- rule out-xml ( xml-file : constituents * )
- {
- # Prepare valid XML header and footer with some basic info.
- local nl = "
-" ;
- local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
- local timestamp = [ modules.peek : JAMDATE ] ;
- local cwd = [ PWD ] ;
- local command = $(.argv) ;
- local bb-version = [ version.boost-build ] ;
- .header on $(xml-file) =
- "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
- "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
- "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
- "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
- "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
- "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
- ;
- .footer on $(xml-file) =
- "$(nl)</build>" ;
-
- # Generate the target dependency graph.
- .contents on $(xml-file) +=
- "$(nl) <targets>" ;
- for local t in [ virtual-target.all-targets ]
- {
- local action = [ $(t).action ] ;
- if $(action)
- # If a target has no action, it has no dependencies.
- {
- local name = [ full-target-name $(t) ] ;
- local sources = [ $(action).sources ] ;
- local dependencies ;
- for local s in $(sources)
- {
- dependencies += [ full-target-name $(s) ] ;
- }
-
- local path = [ $(t).path ] ;
- local jam-target = [ $(t).actual-name ] ;
-
- .contents on $(xml-file) +=
- "$(nl) <target>"
- "$(nl) <name><![CDATA[$(name)]]></name>"
- "$(nl) <dependencies>"
- "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
- "$(nl) </dependencies>"
- "$(nl) <path><![CDATA[$(path)]]></path>"
- "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
- "$(nl) </target>"
- ;
- }
- }
- .contents on $(xml-file) +=
- "$(nl) </targets>" ;
-
- # Build $(xml-file) after $(constituents). Do so even if a
- # constituent action fails and regenerate the xml on every bjam run.
- INCLUDES $(xml-file) : $(constituents) ;
- ALWAYS $(xml-file) ;
- __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
- out-xml.generate $(xml-file) ;
- }
-
- # The actual build actions are here; if we did this work in the actions
- # clause we would have to form a valid command line containing the
- # result of @(...) below (the name of the XML file).
- #
- rule out-xml.generate-action ( args * : xml-file
- : command status start end user system : output ? )
- {
- local contents =
- [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
- local f = @($(xml-file):E=$(contents)) ;
- }
-
- # Nothing to do here; the *real* actions happen in
- # out-xml.generate-action.
- actions quietly out-xml.generate { }
-
- # Define the out-xml file target, which depends on all the targets so
- # that it runs the collection after the targets have run.
- out-xml $(.out-xml) : $(actual-targets) ;
-
- # Set up a global __ACTION_RULE__ that records all the available
- # statistics about each actual target in a variable "on" the --out-xml
- # target.
- #
- rule out-xml.collect ( xml-file : target : command status start end user
- system : output ? )
- {
- local nl = "
-" ;
- # Open the action with some basic info.
- .contents on $(xml-file) +=
- "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
-
- # If we have an action object we can print out more detailed info.
- local action = [ on $(target) return $(.action) ] ;
- if $(action)
- {
- local action-name = [ $(action).action-name ] ;
- local action-sources = [ $(action).sources ] ;
- local action-props = [ $(action).properties ] ;
-
- # The qualified name of the action with which we created the target.
- .contents on $(xml-file) +=
- "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
-
- # The sources that made up the target.
- .contents on $(xml-file) +=
- "$(nl) <sources>" ;
- for local source in $(action-sources)
- {
- local source-actual = [ $(source).actual-name ] ;
- .contents on $(xml-file) +=
- "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
- }
- .contents on $(xml-file) +=
- "$(nl) </sources>" ;
-
- # The properties that define the conditions under which the
- # target was built.
- .contents on $(xml-file) +=
- "$(nl) <properties>" ;
- for local prop in [ $(action-props).raw ]
- {
- local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
- .contents on $(xml-file) +=
- "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
- }
- .contents on $(xml-file) +=
- "$(nl) </properties>" ;
- }
-
- local locate = [ on $(target) return $(LOCATE) ] ;
- locate ?= "" ;
- .contents on $(xml-file) +=
- "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
- "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
- "$(nl) <command><![CDATA[$(command)]]></command>"
- "$(nl) <output><![CDATA[$(output)]]></output>" ;
- .contents on $(xml-file) +=
- "$(nl) </action>" ;
- }
-
- # When no __ACTION_RULE__ is set "on" a target, the search falls back to
- # the global module.
- module
- {
- __ACTION_RULE__ = build-system.out-xml.collect
- [ modules.peek build-system : .out-xml ] ;
- }
-
- IMPORT
- build-system :
- out-xml.collect
- out-xml.generate-action
- : :
- build-system.out-xml.collect
- build-system.out-xml.generate-action
- ;
- }
-
- local j = [ option.get jobs ] ;
- if $(j)
- {
- modules.poke : PARALLELISM : $(j) ;
- }
-
- local k = [ option.get keep-going : true : true ] ;
- if $(k) in "on" "yes" "true"
- {
- modules.poke : KEEP_GOING : 1 ;
- }
- else if $(k) in "off" "no" "false"
- {
- modules.poke : KEEP_GOING : 0 ;
- }
- else
- {
- ECHO "error: Invalid value for the --keep-going option" ;
- EXIT ;
- }
-
- # The 'all' pseudo target is not strictly needed except in the case when we
- # use it below, but people often assume they always have this target
- # available and do not declare it themselves before use, which may cause
- # build failures with an error message about not being able to build the
- # 'all' target.
- NOTFILE all ;
-
- # And now that all the actual raw Jam targets and all the dependencies
- # between them have been prepared all that is left is to tell Jam to update
- # those targets.
- if $(explicitly-requested-files)
- {
- # Note that this case can not be joined with the regular one when only
- # exact Boost Build targets are requested as here we do not build those
- # requested targets but only use them to construct the dependency tree
- # needed to build the explicitly requested files.
- UPDATE $(explicitly-requested-files:G=e) $(.out-xml) ;
- }
- else if $(cleanall)
- {
- UPDATE clean-all ;
- }
- else if $(clean)
- {
- common.Clean clean : [ actual-clean-targets ] ;
- UPDATE clean ;
- }
- else
- {
- configure.print-configure-checks-summary ;
-
- if $(.pre-build-hook)
- {
- $(.pre-build-hook) ;
- }
-
- DEPENDS all : $(actual-targets) ;
- if UPDATE_NOW in [ RULENAMES ]
- {
- local ok = [ UPDATE_NOW all $(.out-xml) ] ;
- if $(.post-build-hook)
- {
- $(.post-build-hook) $(ok) ;
- }
- # Prevent automatic update of the 'all' target, now that
- # we have explicitly updated what we wanted.
- UPDATE ;
- }
- else
- {
- UPDATE all $(.out-xml) ;
- }
- }
-}
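The comments above describe the configuration loading order: an optional test-config, then site-config, then user-config, then a project-config.jam found from "." upward. One way to observe it is a test script in the same style as the ones added by this patch, passing --user-config= and --debug-configuration (both handled above). This is only a rough sketch under that assumption; the file names are made up and nothing is asserted beyond a successful run:

import BoostBuild

t = BoostBuild.Tester(use_test_config=False)

t.write("jamroot.jam", "")
# An explicitly named user configuration file must exist (see the
# must-exist handling in load-configuration-files above).
t.write("my-config.jam", "# nothing to configure\n")

# --debug-configuration makes build-system.jam report which configuration
# files it searches for and which ones it loads.
t.run_build_system(["--user-config=my-config.jam", "--debug-configuration"])

t.cleanup()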
diff --git a/tools/build/v2/build/ac.jam b/tools/build/v2/build/ac.jam
deleted file mode 100644
index 6768f358c2..0000000000
--- a/tools/build/v2/build/ac.jam
+++ /dev/null
@@ -1,198 +0,0 @@
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property-set ;
-import path ;
-import modules ;
-import "class" ;
-import errors ;
-import configure ;
-
-rule find-include-path ( variable : properties : header
- : provided-path ? )
-{
- # FIXME: document which properties affect this function by
- # default.
- local target-os = [ $(properties).get <target-os> ] ;
- properties = [ property-set.create <target-os>$(toolset) ] ;
- if $($(variable)-$(properties))
- {
- return $($(variable)-$(properties)) ;
- }
- else
- {
- provided-path ?= [ modules.peek : $(variable) ] ;
- includes = $(provided-path) ;
- includes += [ $(properties).get <include> ] ;
- if [ $(properties).get <target-os> ] != windows
- {
- # FIXME: use sysroot
- includes += /usr/include ;
- }
-
- local result ;
- while ! $(result) && $(includes)
- {
- local f = [ path.root $(header) $(includes[1]) ] ;
- ECHO "Checking " $(f) ;
- if [ path.exists $(f) ]
- {
- result = $(includes[1]) ;
- }
- else if $(provided-path)
- {
- errors.user-error "Could not find header" $(header)
- : "in the user-specified directory" $(provided-path) ;
- }
- includes = $(includes[2-]) ;
- }
- $(variable)-$(properties) = $(result) ;
- return $(result) ;
- }
-}
-
-rule find-library ( variable : properties : names + : provided-path ? )
-{
- local target-os = [ $(properties).get <target-os> ] ;
- properties = [ property-set.create <target-os>$(toolset) ] ;
- if $($(variable)-$(properties))
- {
- return $($(variable)-$(properties)) ;
- }
- else
- {
- provided-path ?= [ modules.peek : $(variable) ] ;
- paths = $(provided-path) ;
- paths += [ $(properties).get <library-path> ] ;
- if [ $(properties).get <target-os> ] != windows
- {
- paths += /usr/lib /usr/lib32 /usr/lib64 ;
- }
-
- local result ;
- while ! $(result) && $(paths)
- {
- while ! $(result) && $(names)
- {
- local f ;
- if $(target-os) = windows
- {
- f = $(paths[1])/$(names[1]).lib ;
- if [ path.exists $(f) ]
- {
- result = $(f) ;
- }
- }
- else
- {
- # FIXME: check for .a as well, depending on
- # the 'link' feature.
- f = $(paths[1])/lib$(names[1]).so ;
- ECHO "CHECKING $(f) " ;
- if [ path.exists $(f) ]
- {
- result = $(f) ;
- }
- }
- if ! $(result) && $(provided-path)
- {
- errors.user-error "Could not find either of: " $(names)
- : "in the user-specified directory" $(provided-path) ;
-
- }
- names = $(names[2-]) ;
- }
- paths = $(paths[2-]) ;
- }
- $(variable)-$(properties) = $(result) ;
- return $(result) ;
- }
-}
-
-class ac-library : basic-target
-{
- import errors ;
- import indirect ;
- import virtual-target ;
- import ac ;
- import configure ;
-
- rule __init__ ( name : project : * : * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources)
- : $(requirements) ;
-
- reconfigure $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule set-header ( header )
- {
- self.header = $(header) ;
- }
-
- rule set-default-names ( names + )
- {
- self.default-names = $(names) ;
- }
-
- rule reconfigure ( * : * )
- {
- ECHO "XXX" $(1) ;
- if ! $(1)
- {
- # This is 'using xxx ;'. Nothing to configure, really.
- }
- else
- {
- for i in 1 2 3 4 5 6 7 8 9
- {
- # FIXME: this naming is inconsistent with XXX_INCLUDE/XXX_LIBRARY
- if ! ( $($(i)[1]) in root include-path library-path library-name condition )
- {
- errors.user-error "Invalid named parameter" $($(i)[1]) ;
- }
- local name = $($(i)[1]) ;
- local value = $($(i)[2-]) ;
- if $($(name)) && $($(name)) != $(value)
- {
- errors.user-error "Attempt to change value of '$(name)'" ;
- }
- $(name) = $(value) ;
- }
-
- include-path ?= $(root)/include ;
- library-path ?= $(root)/lib ;
- }
- }
-
- rule construct ( name : sources * : property-set )
- {
- # FIXME: log results.
- local libnames = $(library-name) ;
- if ! $(libnames) && ! $(include-path) && ! $(library-path)
- {
- libnames = [ modules.peek : $(name:U)_NAME ] ;
- # Backward compatibility only.
- libnames ?= [ modules.peek : $(name:U)_BINARY ] ;
- }
- libnames ?= $(self.default-names) ;
-
- local includes = [
- ac.find-include-path $(name:U)_INCLUDE : $(property-set) : $(self.header) : $(include-path) ] ;
- local library = [ ac.find-library $(name:U)_LIBRARY : $(property-set) : $(libnames) : $(library-path) ] ;
- if $(includes) && $(library)
- {
- library = [ virtual-target.from-file $(library) : . : $(self.project) ] ;
- configure.log-library-search-result $(name) : "found" ;
- return [ property-set.create <include>$(includes) <source>$(library) ] ;
- }
- else
- {
- configure.log-library-search-result $(name) : "not found" ;
- }
- }
-}
-
diff --git a/tools/build/v2/build/alias.jam b/tools/build/v2/build/alias.jam
deleted file mode 100644
index 48019cb988..0000000000
--- a/tools/build/v2/build/alias.jam
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'alias' rule and the associated target class.
-#
-# Alias is just a main target which returns its source targets without any
-# processing. For example:
-#
-# alias bin : hello test_hello ;
-# alias lib : helpers xml_parser ;
-#
-# Another important use of 'alias' is to conveniently group source files:
-#
-# alias platform-src : win.cpp : <os>NT ;
-# alias platform-src : linux.cpp : <os>LINUX ;
-# exe main : main.cpp platform-src ;
-#
-# Lastly, it is possible to create a local alias for some target, with different
-# properties:
-#
-# alias big_lib : : @/external_project/big_lib/<link>static ;
-#
-
-import "class" : new ;
-import project ;
-import property-set ;
-import targets ;
-
-
-class alias-target-class : basic-target
-{
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- return [ property-set.empty ] $(source-targets) ;
- }
-
- rule compute-usage-requirements ( subvariant )
- {
- local base = [ basic-target.compute-usage-requirements $(subvariant) ] ;
- return [ $(base).add [ $(subvariant).sources-usage-requirements ] ] ;
- }
-}
-
-
-# Declares the 'alias' target. It will process its source virtual-targets by
-# returning them unaltered as its own constructed virtual-targets.
-#
-rule alias ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new alias-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) : no-renaming ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project)
- ]
- : [ targets.main-target-usage-requirements $(usage-requirements) :
- $(project) ]
- ] ;
-}
-
-
-IMPORT $(__name__) : alias : : alias ;
diff --git a/tools/build/v2/build/build-request.jam b/tools/build/v2/build/build-request.jam
deleted file mode 100644
index 8a1f7b0ebe..0000000000
--- a/tools/build/v2/build/build-request.jam
+++ /dev/null
@@ -1,322 +0,0 @@
-# Copyright 2002 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import sequence ;
-import set ;
-import regex ;
-import feature ;
-import property ;
-import container ;
-import string ;
-
-
-# Transform property-set by applying f to each component property.
-#
-local rule apply-to-property-set ( f property-set )
-{
- local properties = [ feature.split $(property-set) ] ;
- return [ string.join [ $(f) $(properties) ] : / ] ;
-}
-
-
-# Expand the given build request by combining all property-sets which do not
-# specify conflicting non-free features. Expects all the project files to
-# already be loaded.
-#
-rule expand-no-defaults ( property-sets * )
-{
- # First make all features and subfeatures explicit.
- local expanded-property-sets = [ sequence.transform apply-to-property-set
- feature.expand-subfeatures : $(property-sets) ] ;
-
- # Now combine all of the expanded property-sets
- local product = [ x-product $(expanded-property-sets) : $(feature-space) ] ;
-
- return $(product) ;
-}
-
-
-# Implementation of x-product, below. Expects all the project files to already
-# be loaded.
-#
-local rule x-product-aux ( property-sets + )
-{
- local result ;
- local p = [ feature.split $(property-sets[1]) ] ;
- local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
- local seen ;
- # No conflict with things used at a higher level?
- if ! [ set.intersection $(f) : $(x-product-used) ]
- {
- local x-product-seen ;
- {
- # Do not mix in any conflicting features.
- local x-product-used = $(x-product-used) $(f) ;
-
- if $(property-sets[2])
- {
- local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
- result = $(property-sets[1])/$(rest) ;
- }
-
- result ?= $(property-sets[1]) ;
- }
-
- # If we did not encounter a conflicting feature lower down, do not
- # recurse again.
- if ! [ set.intersection $(f) : $(x-product-seen) ]
- {
- property-sets = ;
- }
-
- seen = $(x-product-seen) ;
- }
-
- if $(property-sets[2])
- {
- result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
- }
-
- # Note that we have seen these features so that higher levels will recurse
- # again without them set.
- x-product-seen += $(f) $(seen) ;
- return $(result) ;
-}
-
-
-# Return the cross-product of all elements of property-sets, less any that would
-# contain conflicting values for single-valued features. Expects all the project
-# files to already be loaded.
-#
-local rule x-product ( property-sets * )
-{
- if $(property-sets).non-empty
- {
- # Prepare some "scoped globals" that can be used by the implementation
- # function, x-product-aux.
- local x-product-seen x-product-used ;
- return [ x-product-aux $(property-sets) : $(feature-space) ] ;
- }
- # Otherwise return empty.
-}
-
-
-# Returns true if either 'v' or the part of 'v' before the first '-' symbol is
-# an implicit value. Expects all the project files to already be loaded.
-#
-local rule looks-like-implicit-value ( v )
-{
- if [ feature.is-implicit-value $(v) ]
- {
- return true ;
- }
- else
- {
- local split = [ regex.split $(v) - ] ;
- if [ feature.is-implicit-value $(split[1]) ]
- {
- return true ;
- }
- }
-}
-
-
-# Takes the command line tokens (such as taken from the ARGV rule) and
-# constructs a build request from them. Returns a vector of two vectors (where
-# "vector" means container.jam's "vector"). First is the set of targets
-# specified in the command line, and second is the set of requested build
-# properties. Expects all the project files to already be loaded.
-#
-rule from-command-line ( command-line * )
-{
- local targets ;
- local properties ;
-
- command-line = $(command-line[2-]) ;
- local skip-next = ;
- for local e in $(command-line)
- {
- if $(skip-next)
- {
- skip-next = ;
- }
- else if ! [ MATCH "^(-).*" : $(e) ]
- {
- # Build request spec either has "=" in it or completely consists of
- # implicit feature values.
- local fs = feature-space ;
- if [ MATCH "(.*=.*)" : $(e) ]
- || [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
- {
- properties += [ convert-command-line-element $(e) :
- $(feature-space) ] ;
- }
- else
- {
- targets += $(e) ;
- }
- }
- else if [ MATCH "^(-[-ldjfsto])$" : $(e) ]
- {
- skip-next = true ;
- }
- }
- return [ new vector
- [ new vector $(targets) ]
- [ new vector $(properties) ] ] ;
-}
-
-
-# Converts one element of command line build request specification into internal
-# form. Expects all the project files to already be loaded.
-#
-local rule convert-command-line-element ( e )
-{
- local result ;
- local parts = [ regex.split $(e) "/" ] ;
- while $(parts)
- {
- local p = $(parts[1]) ;
- local m = [ MATCH "([^=]*)=(.*)" : $(p) ] ;
- local lresult ;
- local feature ;
- local values ;
- if $(m)
- {
- feature = $(m[1]) ;
- values = [ regex.split $(m[2]) "," ] ;
- lresult = <$(feature)>$(values) ;
- }
- else
- {
- lresult = [ regex.split $(p) "," ] ;
- }
-
- if $(feature) && free in [ feature.attributes $(feature) ]
- {
- # If we have a free feature, then the value is everything
- # until the end of the command line token. Slashes in
- # the following string are not taken to mean separation
- # of properties. Commas are also not interpreted specially.
- values = $(values:J=,) ;
- values = $(values) $(parts[2-]) ;
- values = $(values:J=/) ;
- lresult = <$(feature)>$(values) ;
- parts = ;
- }
-
- if ! [ MATCH (.*-.*) : $(p) ]
- {
- # property.validate cannot handle subfeatures, so we avoid the check
- # here.
- for local p in $(lresult)
- {
- property.validate $(p) : $(feature-space) ;
- }
- }
-
- if ! $(result)
- {
- result = $(lresult) ;
- }
- else
- {
- result = $(result)/$(lresult) ;
- }
-
- parts = $(parts[2-]) ;
- }
-
- return $(result) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import feature ;
-
- feature.prepare-test build-request-test-temp ;
-
- import build-request ;
- import build-request : expand-no-defaults : build-request.expand-no-defaults ;
- import errors : try catch ;
- import feature : feature subfeature ;
-
- feature toolset : gcc msvc borland : implicit ;
- subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
- 3.0 3.0.1 3.0.2 : optional ;
-
- feature variant : debug release : implicit composite ;
- feature inlining : on off ;
- feature "include" : : free ;
-
- feature stdlib : native stlport : implicit ;
-
- feature runtime-link : dynamic static : symmetric ;
-
- # Empty build requests should expand to empty.
- assert.result
- : build-request.expand-no-defaults ;
-
- assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
- <toolset>msvc/<stdlib>stlport/<variant>debug
- <toolset>msvc/<variant>debug
- : build-request.expand-no-defaults gcc-3.0.1/stlport msvc/stlport msvc debug ;
-
- assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug
- <toolset>msvc/<variant>debug
- <variant>debug/<toolset>msvc/<stdlib>stlport
- : build-request.expand-no-defaults gcc-3.0.1/stlport msvc debug msvc/stlport ;
-
- assert.result
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<inlining>off
- <toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>release/<inlining>off
- : build-request.expand-no-defaults gcc-3.0.1/stlport debug release <inlining>off ;
-
- assert.result
- <include>a/b/c/<toolset>gcc/<toolset-gcc:version>3.0.1/<stdlib>stlport/<variant>debug/<include>x/y/z
- <include>a/b/c/<toolset>msvc/<stdlib>stlport/<variant>debug/<include>x/y/z
- <include>a/b/c/<toolset>msvc/<variant>debug/<include>x/y/z
- : build-request.expand-no-defaults <include>a/b/c gcc-3.0.1/stlport msvc/stlport msvc debug <include>x/y/z ;
-
- local r ;
-
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-
- try ;
- {
- build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
- }
- catch \"static\" is not a value of an implicit feature ;
-
- r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
- assert.equal [ $(r).get-at 1 ] : target ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
-
- r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
- gcc/<runtime-link>static ;
-
- r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
- borland/<runtime-link>static ;
-
- r = [ build-request.from-command-line bjam gcc-3.0 ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
-
- feature.finish-test build-request-test-temp ;
-}
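The rule deleted above turns one command-line token into property sets by splitting on slashes, then on '=' and commas, with free features swallowing the rest of the token. A minimal Python sketch of the same splitting rules (standalone, hypothetical helper name, free features not handled):

    def convert_element(e):
        # Split a token such as "gcc,msvc/runtime-link=static" on slashes,
        # then expand "feature=v1,v2" and comma-separated implicit values,
        # combining the alternatives as a cross product joined with "/".
        result = None
        for part in e.split("/"):
            if "=" in part:
                feature, values = part.split("=", 1)
                local = ["<%s>%s" % (feature, v) for v in values.split(",")]
            else:
                local = part.split(",")
            result = local if result is None else [a + "/" + b for a in result for b in local]
        return result

    print(convert_element("gcc,msvc/runtime-link=static"))
    # ['gcc/<runtime-link>static', 'msvc/<runtime-link>static']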
diff --git a/tools/build/v2/build/build_request.py b/tools/build/v2/build/build_request.py
deleted file mode 100644
index cc9f2400a9..0000000000
--- a/tools/build/v2/build/build_request.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# Status: being ported by Vladimir Prus
-# TODO: need to re-compare with mainline of .jam
-# Base revision: 40480
-#
-# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-import b2.build.feature
-feature = b2.build.feature
-
-from b2.util.utility import *
-import b2.build.property_set as property_set
-
-def expand_no_defaults (property_sets):
- """ Expand the given build request by combining all property_sets which don't
- specify conflicting non-free features.
- """
- # First make all features and subfeatures explicit
- expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
-
- # Now combine all of the expanded property_sets
- product = __x_product (expanded_property_sets)
-
- return [property_set.create(p) for p in product]
-
-
-def __x_product (property_sets):
- """ Return the cross-product of all elements of property_sets, less any
- that would contain conflicting values for single-valued features.
- """
- x_product_seen = set()
- return __x_product_aux (property_sets, x_product_seen)[0]
-
-def __x_product_aux (property_sets, seen_features):
- """Returns non-conflicting combinations of property sets.
-
- property_sets is a list of PropertySet instances. seen_features is a set of Property
- instances.
-
- Returns a tuple of:
- - list of lists of Property instances, such that within each list, no two Property instance
- have the same feature, and no Property is for feature in seen_features.
- - set of features we saw in property_sets
- """
- if not property_sets:
- return ([], set())
-
- properties = property_sets[0].all()
-
- these_features = set()
- for p in property_sets[0].non_free():
- these_features.add(p.feature())
-
-    # Note: the algorithm as implemented here, as in the original Jam code, appears to
-    # detect conflicts based on features, not properties. For example, if the command
-    # line build request says:
-    #
-    #    <a>1/<b>1 <c>1/<b>1
-    #
-    # it will decide that those two property sets conflict, because they both specify
-    # a value for 'b', and will not try building "<a>1 <b>1 <c>1", but rather two
-    # different property sets. This is a topic for future fixing, maybe.
- if these_features & seen_features:
-
- (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
- return (inner_result, inner_seen | these_features)
-
- else:
-
- result = []
- (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
- if inner_result:
- for inner in inner_result:
- result.append(properties + inner)
- else:
- result.append(properties)
-
- if inner_seen & these_features:
-            # Some elements of property_sets[1:] conflict with elements of property_sets[0].
-            # Try again, this time omitting the elements of property_sets[0].
- (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
- result.extend(inner_result2)
-
- return (result, inner_seen | these_features)
-
-
-
-def looks_like_implicit_value(v):
-    """Returns true if 'v' is either an implicit value, or
-    the part before the first '-' symbol is an implicit value."""
- if feature.is_implicit_value(v):
- return 1
- else:
- split = v.split("-")
- if feature.is_implicit_value(split[0]):
- return 1
-
- return 0
-
-def from_command_line(command_line):
-    """Takes the command line tokens (such as those taken from the ARGV rule)
-    and constructs a build request from them. Returns a list of two
-    lists. The first is the set of targets specified on the command line,
-    and the second is the set of requested build properties."""
-
- targets = []
- properties = []
-
- for e in command_line:
- if e[0] != "-":
- # Build request spec either has "=" in it, or completely
- # consists of implicit feature values.
- if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
- properties += convert_command_line_element(e)
- else:
- targets.append(e)
-
- return [targets, properties]
-
-# Converts one element of command line build request specification into
-# internal form.
-def convert_command_line_element(e):
-
- result = None
- parts = e.split("/")
- for p in parts:
- m = p.split("=")
- if len(m) > 1:
- feature = m[0]
- values = m[1].split(",")
- lresult = [("<%s>%s" % (feature, v)) for v in values]
- else:
- lresult = p.split(",")
-
- if p.find('-') == -1:
- # FIXME: first port property.validate
- # property.validate cannot handle subfeatures,
- # so we avoid the check here.
- #for p in lresult:
- # property.validate(p)
- pass
-
- if not result:
- result = lresult
- else:
- result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
-
- return [property_set.create(b2.build.feature.split(r)) for r in result]
-
-###
-### rule __test__ ( )
-### {
-### import assert feature ;
-###
-### feature.prepare-test build-request-test-temp ;
-###
-### import build-request ;
-### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
-### import errors : try catch ;
-### import feature : feature subfeature ;
-###
-### feature toolset : gcc msvc borland : implicit ;
-### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
-### 3.0 3.0.1 3.0.2 : optional ;
-###
-### feature variant : debug release : implicit composite ;
-### feature inlining : on off ;
-### feature "include" : : free ;
-###
-### feature stdlib : native stlport : implicit ;
-###
-### feature runtime-link : dynamic static : symmetric ;
-###
-###
-### local r ;
-###
-### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-###
-### try ;
-### {
-###
-### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
-### }
-### catch \"static\" is not a value of an implicit feature ;
-###
-###
-### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
-### assert.equal [ $(r).get-at 1 ] : target ;
-### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
-###
-### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
-###
-### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
-### gcc/<runtime-link>static ;
-###
-### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
-### borland/<runtime-link>static ;
-###
-### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
-### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
-###
-### feature.finish-test build-request-test-temp ;
-### }
-###
-###
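The cross-product helper deleted above combines property sets while skipping combinations that would set the same non-free feature twice; as its own comment notes, conflicts are detected per feature, not per value. A rough standalone sketch of that behaviour (hypothetical names, plain (feature, value) pairs instead of Property objects):

    def x_product(psets, seen=frozenset()):
        # psets: list of property sets, each a list of (feature, value) pairs.
        # Returns (combinations, features_seen); a set is dropped from a
        # combination whenever one of its features was already used.
        if not psets:
            return [], set()
        head = psets[0]
        these = {f for f, _ in head}
        if these & seen:
            rest, rest_seen = x_product(psets[1:], seen)
            return rest, rest_seen | these
        rest, rest_seen = x_product(psets[1:], seen | these)
        result = [head + r for r in rest] if rest else [head]
        if rest_seen & these:
            # later sets conflicted with this one, so also try combinations without it
            result += x_product(psets[1:], seen)[0]
        return result, rest_seen | these

    combos, _ = x_product([[("toolset", "gcc")], [("toolset", "msvc")], [("variant", "debug")]])
    # [[('toolset', 'gcc'), ('variant', 'debug')], [('toolset', 'msvc'), ('variant', 'debug')]]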
diff --git a/tools/build/v2/build/configure.jam b/tools/build/v2/build/configure.jam
deleted file mode 100644
index 14c1328aff..0000000000
--- a/tools/build/v2/build/configure.jam
+++ /dev/null
@@ -1,237 +0,0 @@
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines functions to help with two main tasks:
-#
-#  - Discovering build-time configuration for the purposes of adjusting the
-#    build process.
-# - Reporting what is built, and how it is configured.
-
-import targets ;
-import errors ;
-import targets ;
-import sequence ;
-import property ;
-import property-set ;
-import "class" : new ;
-import common ;
-import path ;
-
-rule log-summary ( )
-{
-
-}
-
-.width = 30 ;
-
-rule set-width ( width )
-{
- .width = $(width) ;
-}
-
-# Declare that the components specified by the parameter exist.
-rule register-components ( components * )
-{
- .components += $(components) ;
-}
-
-# Declare that the components specified by the parameters will
-# be built.
-rule components-building ( components * )
-{
- .built-components += $(components) ;
-}
-
-# Report something about the component configuration that the
-# user should know about.
-rule log-component-configuration ( component : message )
-{
- # FIXME: implement per-property-set logs
- .component-logs.$(component) += $(message) ;
-}
-
-
-
-rule log-check-result ( result )
-{
- if ! $(.announced-checks)
- {
- ECHO "Performing configuration checks\n" ;
- .announced-checks = 1 ;
- }
-
- ECHO $(result) ;
- #.check-results += $(result) ;
-}
-
-rule log-library-search-result ( library : result )
-{
- local x = [ PAD " - $(library) : $(result)" : $(.width) ] ;
- log-check-result "$(x)" ;
-}
-
-rule print-component-configuration ( )
-{
- local c = [ sequence.unique $(.components) ] ;
-
- ECHO "\nComponent configuration:\n" ;
- for c in $(.components)
- {
- local s ;
- if $(c) in $(.built-components)
- {
- s = "building" ;
- }
- else
- {
- s = "not building" ;
- }
- ECHO [ PAD " - $(c)" : $(.width) ] ": $(s)" ;
- for local m in $(.component-logs.$(c))
- {
- ECHO " -" $(m) ;
- }
- }
- ECHO ;
-}
-
-rule print-configure-checks-summary ( )
-{
-    # FIXME: the problem with that approach is that
-    # the user sees the checks summary only when all checks are
-    # done, and has no progress reporting while the
-    # checks are being executed.
- if $(.check-results)
- {
- ECHO "Configuration checks summary\n" ;
-
- for local r in $(.check-results)
- {
- ECHO $(r) ;
- }
- ECHO ;
- }
-}
-
-# Attempt to build a metatarget named by 'metatarget-reference'
-# in the context of 'project' with properties 'ps'.
-# Returns a non-empty value if the build succeeds.
-rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
-{
- local result ;
-
- if ! $(retry) && ! $(.$(what)-tested.$(ps))
- {
- .$(what)-tested.$(ps) = true ;
-
- local targets = [ targets.generate-from-reference
- $(metatarget-reference) : $(project) : $(ps) ] ;
-
- local jam-targets ;
- for local t in $(targets[2-])
- {
- jam-targets += [ $(t).actualize ] ;
- }
-
- if ! UPDATE_NOW in [ RULENAMES ]
- {
-            # Cannot determine. Assume existence.
- }
- else
- {
- local x = [ PAD " - $(what)" : $(.width) ] ;
- if [ UPDATE_NOW $(jam-targets) :
- $(.log-fd) : ignore-minus-n : ignore-minus-q ]
- {
- .$(what)-supported.$(ps) = yes ;
- result = true ;
- log-check-result "$(x) : yes" ;
- }
- else
- {
- log-check-result "$(x) : no" ;
- }
- }
- return $(result) ;
- }
- else
- {
- return $(.$(what)-supported.$(ps)) ;
- }
-}
-
-rule builds ( metatarget-reference : properties * : what ? : retry ? )
-{
- what ?= "$(metatarget-reference) builds" ;
-
- # FIXME: this should not be hardcoded. Other checks might
-    # want to consider a different set of features as relevant.
- local toolset = [ property.select <toolset> : $(properties) ] ;
- local toolset-version-property = "<toolset-$(toolset:G=):version>" ;
- local relevant = [ property.select <target-os> <toolset> $(toolset-version-property)
- <address-model> <architecture>
- : $(properties) ] ;
- local ps = [ property-set.create $(relevant) ] ;
- local t = [ targets.current ] ;
- local p = [ $(t).project ] ;
-
- return [ builds-raw $(metatarget-reference) : $(p) : $(ps) : $(what) : $(retry) ] ;
-}
-
-
-# Called by Boost.Build startup code to specify the name of the file
-# that will receive the results of configure checks. This rule
-# should never be called by users.
-rule set-log-file ( log-file )
-{
- path.makedirs [ path.parent $(log-file) ] ;
-
- .log-fd = [ FILE_OPEN $(log-file) : "w" ] ;
-}
-
-# Frontend rules
-
-class check-target-builds-worker
-{
- import configure ;
- import property-set ;
- import targets ;
- import property ;
-
- rule __init__ ( target message ? : true-properties * : false-properties * )
- {
- self.target = $(target) ;
- self.message = $(message) ;
- self.true-properties = $(true-properties) ;
- self.false-properties = $(false-properties) ;
- }
-
- rule check ( properties * )
- {
-        local chosen ;
-        if [ configure.builds $(self.target) : $(properties) : $(self.message) ]
-        {
-            chosen = $(self.true-properties) ;
-        }
-        else
-        {
-            chosen = $(self.false-properties) ;
-        }
-        return [ property.evaluate-conditionals-in-context $(chosen) : $(properties) ] ;
- }
-}
-
-
-rule check-target-builds ( target message ? : true-properties * : false-properties * )
-{
- local instance = [ new check-target-builds-worker $(target) $(message) : $(true-properties)
- : $(false-properties) ] ;
- return <conditional>@$(instance).check ;
-}
-
-IMPORT $(__name__) : check-target-builds : : check-target-builds ;
-
-
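The configure module deleted above runs a trial build at most once per (check, property set) pair and then keeps returning the remembered answer, which is what keeps repeated check-target-builds conditions cheap. A small Python sketch of that caching discipline (hypothetical names; try_build stands in for UPDATE_NOW on the generated jam targets):

    _tested = set()
    _supported = set()

    def builds(what, ps, try_build, retry=False):
        # Run the check once per (what, ps); later calls reuse the cached
        # verdict unless retry is requested, mirroring builds-raw above.
        key = (what, ps)
        if retry or key not in _tested:
            _tested.add(key)
            if try_build():
                _supported.add(key)
                print(" - %s : yes" % what)
            else:
                print(" - %s : no" % what)
        return key in _supported

    builds("zlib builds", "<toolset>gcc", lambda: True)   # prints " - zlib builds : yes"
    builds("zlib builds", "<toolset>gcc", lambda: False)  # cached: prints nothing, returns True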
diff --git a/tools/build/v2/build/engine.py b/tools/build/v2/build/engine.py
deleted file mode 100644
index 9e624dae82..0000000000
--- a/tools/build/v2/build/engine.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# Copyright Pedro Ferreira 2005.
-# Copyright Vladimir Prus 2007.
-# Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-bjam_interface = __import__('bjam')
-
-import operator
-import re
-
-import b2.build.property_set as property_set
-import b2.util
-
-class BjamAction:
- """Class representing bjam action defined from Python."""
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
-
- # Bjam actions defined from Python have only the command
- # to execute, and no associated jam procedural code. So
- # passing 'property_set' to it is not necessary.
- bjam_interface.call("set-update-action", self.action_name,
- targets, sources, [])
- if self.function:
- self.function(targets, sources, property_set)
-
-class BjamNativeAction:
- """Class representing bjam action defined by Jam code.
-
-    We still allow associating a Python callable that will
-    be called when this action is installed on any target.
- """
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
- if self.function:
- self.function(targets, sources, property_set)
-
- p = []
- if property_set:
- p = property_set.raw()
-
- b2.util.set_jam_action(self.action_name, targets, sources, p)
-
-action_modifiers = {"updated": 0x01,
- "together": 0x02,
- "ignore": 0x04,
- "quietly": 0x08,
- "piecemeal": 0x10,
- "existing": 0x20}
-
-class Engine:
- """ The abstract interface to a build engine.
-
- For now, the naming of targets, and special handling of some
- target variables like SEARCH and LOCATE make this class coupled
-    to the bjam engine.
- """
- def __init__ (self):
- self.actions = {}
-
- def add_dependency (self, targets, sources):
- """Adds a dependency from 'targets' to 'sources'
-
- Both 'targets' and 'sources' can be either list
- of target names, or a single target name.
- """
- if isinstance (targets, str):
- targets = [targets]
- if isinstance (sources, str):
- sources = [sources]
-
- for target in targets:
- for source in sources:
- self.do_add_dependency (target, source)
-
- def set_target_variable (self, targets, variable, value, append=0):
- """ Sets a target variable.
-
- The 'variable' will be available to bjam when it decides
-        where to generate targets, and will also be available to the
-        updating rule for that 'target'.
- """
- if isinstance (targets, str):
- targets = [targets]
-
- for target in targets:
- self.do_set_target_variable (target, variable, value, append)
-
- def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
- """ Binds a target to the corresponding update action.
-            If the target needs to be updated, the action registered
-            with action_name will be used.
-            The 'action_name' must have been previously registered with
-            either the 'register_action' or 'register_bjam_action'
- method.
- """
- assert(isinstance(properties, property_set.PropertySet))
- if isinstance (targets, str):
- targets = [targets]
- self.do_set_update_action (action_name, targets, sources, properties)
-
- def register_action (self, action_name, command, bound_list = [], flags = [],
- function = None):
- """Creates a new build engine action.
-
-        Creates, on the bjam side, an action named 'action_name', with
-        'command' as the command to be executed, 'bound_list'
-        naming the list of variables bound when the command is executed,
-        and the specified flags.
- If 'function' is not None, it should be a callable taking three
- parameters:
- - targets
- - sources
- - instance of the property_set class
- This function will be called by set_update_action, and can
- set additional target variables.
- """
- if self.actions.has_key(action_name):
-            raise Exception("Bjam action %s is already defined" % action_name)
-
- assert(isinstance(flags, list))
-
- bjam_flags = reduce(operator.or_,
- (action_modifiers[flag] for flag in flags), 0)
-
-        # We allow command to be empty so that we can define 'action' as a pure
-        # Python function that does some conditional logic and then relays
-        # to other actions.
- assert command or function
- if command:
- bjam_interface.define_action(action_name, command, bound_list, bjam_flags)
-
- self.actions[action_name] = BjamAction(action_name, function)
-
- def register_bjam_action (self, action_name, function=None):
- """Informs self that 'action_name' is declared in bjam.
-
- From this point, 'action_name' is a valid argument to the
- set_update_action method. The action_name should be callable
- in the global module of bjam.
- """
-
- # We allow duplicate calls to this rule for the same
- # action name. This way, jamfile rules that take action names
-        # can just register them without specially checking whether the
-        # action is already registered.
- if not self.actions.has_key(action_name):
- self.actions[action_name] = BjamNativeAction(action_name, function)
-
- # Overridables
-
-
- def do_set_update_action (self, action_name, targets, sources, property_set):
- action = self.actions.get(action_name)
- if not action:
- raise Exception("No action %s was registered" % action_name)
- action(targets, sources, property_set)
-
- def do_set_target_variable (self, target, variable, value, append):
- if append:
- bjam_interface.call("set-target-variable", target, variable, value, "true")
- else:
- bjam_interface.call("set-target-variable", target, variable, value)
-
- def do_add_dependency (self, target, source):
- bjam_interface.call("DEPENDS", target, source)
-
-
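register_action in the deleted engine above folds the textual action modifiers into a single bjam flag word with a bitwise OR. A one-line check of that folding (same table as above, standalone sketch):

    import operator
    from functools import reduce

    action_modifiers = {"updated": 0x01, "together": 0x02, "ignore": 0x04,
                        "quietly": 0x08, "piecemeal": 0x10, "existing": 0x20}

    def combine(flags):
        # Same reduction used by Engine.register_action above.
        return reduce(operator.or_, (action_modifiers[f] for f in flags), 0)

    print(hex(combine(["updated", "piecemeal"])))  # 0x11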
diff --git a/tools/build/v2/build/feature.jam b/tools/build/v2/build/feature.jam
deleted file mode 100644
index 6f54adefbc..0000000000
--- a/tools/build/v2/build/feature.jam
+++ /dev/null
@@ -1,1335 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import assert : * ;
-import "class" : * ;
-import errors : lol->list ;
-import indirect ;
-import modules ;
-import regex ;
-import sequence ;
-import set ;
-import utility ;
-
-
-local rule setup ( )
-{
- .all-attributes =
- implicit
- composite
- optional
- symmetric
- free
- incidental
- path
- dependency
- propagated
- link-incompatible
- subfeature
- order-sensitive
- ;
-
- .all-features = ;
- .all-subfeatures = ;
- .all-top-features = ; # non-subfeatures
- .all-implicit-values = ;
-}
-setup ;
-
-
-# Prepare a fresh space to test in by moving all global variable settings into
-# the given temporary module and erasing them here.
-#
-rule prepare-test ( temp-module )
-{
- DELETE_MODULE $(temp-module) ;
-
- # Transfer globals to temp-module.
- for local v in [ VARNAMES feature ]
- {
- if [ MATCH (\\.) : $(v) ]
- {
- modules.poke $(temp-module) : $(v) : $($(v)) ;
- $(v) = ;
- }
- }
- setup ;
-}
-
-
-# Clear out all global variables and recover all variables from the given
-# temporary module.
-#
-rule finish-test ( temp-module )
-{
- # Clear globals.
- for local v in [ VARNAMES feature ]
- {
- if [ MATCH (\\.) : $(v) ]
- {
- $(v) = ;
- }
- }
-
- for local v in [ VARNAMES $(temp-module) ]
- {
- $(v) = [ modules.peek $(temp-module) : $(v) ] ;
- }
- DELETE_MODULE $(temp-module) ;
-}
-
-
-# Transform features by bracketing any elements which are not already bracketed
-# by "<>".
-#
-local rule grist ( features * )
-{
- local empty = "" ;
- return $(empty:G=$(features)) ;
-}
-
-
-# Declare a new feature with the given name, values, and attributes.
-#
-rule feature (
- name # Feature name.
- : values * # Allowable values - may be extended later using feature.extend.
- : attributes * # Feature attributes (e.g. implicit, free, propagated...).
-)
-{
- name = [ grist $(name) ] ;
-
- local error ;
-
- # Check for any unknown attributes.
- if ! ( $(attributes) in $(.all-attributes) )
- {
- error = unknown attributes:
- [ set.difference $(attributes) : $(.all-attributes) ] ;
- }
- else if $(name) in $(.all-features)
- {
- error = feature already defined: ;
- }
- else if implicit in $(attributes) && free in $(attributes)
- {
- error = free features cannot also be implicit ;
- }
- else if free in $(attributes) && propagated in $(attributes)
- {
- error = free features cannot be propagated ;
- }
- else
- {
- local m = [ MATCH (.*=.*) : $(values) ] ;
- if $(m[1])
- {
- error = "feature value may not contain '='" ;
- }
- }
-
- if $(error)
- {
- errors.error $(error)
- : "in" feature declaration:
- : feature [ lol->list $(1) : $(2) : $(3) ] ;
- }
-
- $(name).values ?= ;
- $(name).attributes = $(attributes) ;
- $(name).subfeatures ?= ;
- $(attributes).features += $(name) ;
-
- .all-features += $(name) ;
- if subfeature in $(attributes)
- {
- .all-subfeatures += $(name) ;
- }
- else
- {
- .all-top-features += $(name) ;
- }
- extend $(name) : $(values) ;
-}
-
-
-# Sets the default value of the given feature, overriding any previous default.
-#
-rule set-default ( feature : value )
-{
- local f = [ grist $(feature) ] ;
- local a = $($(f).attributes) ;
- local bad-attribute = ;
- if free in $(a)
- {
- bad-attribute = free ;
- }
- else if optional in $(a)
- {
- bad-attribute = optional ;
- }
- if $(bad-attribute)
- {
- errors.error "$(bad-attribute) property $(f) cannot have a default." ;
- }
- if ! $(value) in $($(f).values)
- {
- errors.error "The specified default value, '$(value)' is invalid"
- : "allowed values are: " $($(f).values) ;
- }
- $(f).default = $(value) ;
-}
-
-
-# Returns the default property values for the given features.
-#
-rule defaults ( features * )
-{
- local result ;
- for local f in $(features)
- {
- local gf = $(:E=:G=$(f)) ;
- local a = $($(gf).attributes) ;
- if ( free in $(a) ) || ( optional in $(a) )
- {
- }
- else
- {
- result += $(gf)$($(gf).default) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns true iff all 'names' elements are valid features.
-#
-rule valid ( names + )
-{
- if $(names) in $(.all-features)
- {
- return true ;
- }
-}
-
-
-# Returns the attributes of the given feature.
-#
-rule attributes ( feature )
-{
- return $($(:E=:G=$(feature)).attributes) ;
-}
-
-
-# Returns the values of the given feature.
-#
-rule values ( feature )
-{
- return $($(:E=:G=$(feature)).values) ;
-}
-
-
-# Returns true iff 'value-string' is a value-string of an implicit feature.
-#
-rule is-implicit-value ( value-string )
-{
- local v = [ regex.split $(value-string) - ] ;
- local failed ;
- if ! $(v[1]) in $(.all-implicit-values)
- {
- failed = true ;
- }
- else
- {
- local feature = $($(v[1]).implicit-feature) ;
- for local subvalue in $(v[2-])
- {
- if ! [ find-implied-subfeature $(feature) $(subvalue) : $(v[1]) ]
- {
- failed = true ;
- }
- }
- }
-
- if ! $(failed)
- {
- return true ;
- }
-}
-
-
-# Returns the implicit feature associated with the given implicit value.
-#
-rule implied-feature ( implicit-value )
-{
- local components = [ regex.split $(implicit-value) "-" ] ;
-
- local feature = $($(components[1]).implicit-feature) ;
- if ! $(feature)
- {
- errors.error \"$(implicit-value)\" is not a value of an implicit feature ;
- feature = "" ; # Keep testing happy; it expects a result.
- }
- return $(feature) ;
-}
-
-
-local rule find-implied-subfeature ( feature subvalue : value-string ? )
-{
- # Feature should be of the form <feature-name>.
- if $(feature) != $(feature:G)
- {
- errors.error invalid feature $(feature) ;
- }
-
- return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
-}
-
-
-# Given a feature and a value of one of its subfeatures, find the name of the
-# subfeature. If value-string is supplied, looks for implied subfeatures that
-# are specific to that value of the feature.
-#
-rule implied-subfeature (
- feature # The main feature name.
- subvalue # The value of one of its subfeatures.
- : value-string ? # The value of the main feature.
-)
-{
- local subfeature = [ find-implied-subfeature $(feature) $(subvalue)
- : $(value-string) ] ;
- if ! $(subfeature)
- {
- value-string ?= "" ;
- errors.error \"$(subvalue)\" is not a known subfeature value of
- $(feature)$(value-string) ;
- }
- return $(subfeature) ;
-}
-
-
-# Generate an error if the feature is unknown.
-#
-local rule validate-feature ( feature )
-{
- if ! $(feature) in $(.all-features)
- {
- errors.error unknown feature \"$(feature)\" ;
- }
-}
-
-
-# Given a feature and its value or just a value corresponding to an implicit
-# feature, returns a property set consisting of all component subfeatures and
-# their values. For example all the following calls:
-#
-# expand-subfeatures-aux <toolset>gcc-2.95.2-linux-x86
-# expand-subfeatures-aux gcc-2.95.2-linux-x86
-#
-# return:
-#
-# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
-#
-local rule expand-subfeatures-aux (
- feature ? # Feature name or empty if value corresponds to an
- # implicit property.
- : value # Feature value.
- : dont-validate ? # If set, no value string validation will be done.
-)
-{
- if $(feature)
- {
- feature = $(feature) ;
- }
-
- if ! $(feature)
- {
- feature = [ implied-feature $(value) ] ;
- }
- else
- {
- validate-feature $(feature) ;
- }
- if ! $(dont-validate)
- {
- validate-value-string $(feature) $(value) ;
- }
-
- local components = [ regex.split $(value) "-" ] ;
-
- # Get the top-level feature's value.
- local value = $(components[1]:G=) ;
-
- local result = $(components[1]:G=$(feature)) ;
-
- local subvalues = $(components[2-]) ;
- while $(subvalues)
- {
- local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
- subvalues = $(subvalues[2-]) ;
-
- local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
- $(value) ] ;
-
- # If no subfeature was found reconstitute the value string and use that.
- if ! $(subfeature)
- {
- result = $(components:J=-) ;
- result = $(result:G=$(feature)) ;
- subvalues = ; # Stop looping.
- }
- else
- {
- local f = [ MATCH ^<(.*)>$ : $(feature) ] ;
- result += $(subvalue:G=$(f)-$(subfeature)) ;
- }
- }
-
- return $(result) ;
-}
-
-
-# Make all elements of properties corresponding to implicit features explicit,
-# and express all subfeature values as separate properties in their own right.
-# For example, all of the following properties
-#
-# gcc-2.95.2-linux-x86
-# <toolset>gcc-2.95.2-linux-x86
-#
-# might expand to
-#
-# <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
-#
-rule expand-subfeatures (
- properties * # Property set with elements of the form
- # <feature>value-string or just value-string in the case
- # of implicit features.
- : dont-validate ?
-)
-{
- local result ;
- for local p in $(properties)
- {
- # Don't expand subfeatures in subfeatures
- if ! [ MATCH "(:)" : $(p:G) ]
- {
- result += [ expand-subfeatures-aux $(p:G) : $(p:G=) : $(dont-validate) ] ;
- }
- else
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Helper for extend, below. Handles the feature case.
-#
-local rule extend-feature ( feature : values * )
-{
- feature = [ grist $(feature) ] ;
- validate-feature $(feature) ;
- if implicit in $($(feature).attributes)
- {
- for local v in $(values)
- {
- if $($(v).implicit-feature)
- {
- errors.error $(v) is already associated with the \"$($(v).implicit-feature)\" feature ;
- }
- $(v).implicit-feature = $(feature) ;
- }
-
- .all-implicit-values += $(values) ;
- }
- if ! $($(feature).values)
- {
-        # This is the first value specified for this feature, so make it the
-        # default.
- $(feature).default = $(values[1]) ;
- }
- $(feature).values += $(values) ;
-}
-
-
-# Checks that value-string is a valid value-string for the given feature.
-#
-rule validate-value-string ( feature value-string )
-{
- if ! (
- free in $($(feature).attributes)
- || ( $(value-string) in $(feature).values )
- )
- {
- local values = $(value-string) ;
-
- if $($(feature).subfeatures)
- {
- if ! ( $(value-string) in $($(feature).values) )
- && ! ( $(value-string) in $($(feature).subfeatures) )
- {
- values = [ regex.split $(value-string) - ] ;
- }
- }
-
- if ! ( $(values[1]) in $($(feature).values) ) &&
-
- # An empty value is allowed for optional features.
- ( $(values[1]) || ! ( optional in $($(feature).attributes) ) )
- {
- errors.error \"$(values[1])\" is not a known value of feature $(feature)
- : legal values: \"$($(feature).values)\" ;
- }
-
- for local v in $(values[2-])
- {
- # This will validate any subfeature values in value-string.
- implied-subfeature $(feature) $(v) : $(values[1]) ;
- }
- }
-}
-
-
-# A helper that computes:
-# * name(s) of module-local variable(s) used to record the correspondence
-# between subvalue(s) and a subfeature
-# * value of that variable when such a subfeature/subvalue has been defined and
-# returns a list consisting of the latter followed by the former.
-#
-local rule subvalue-var (
- feature # Main feature name.
- value-string ? # If supplied, specifies a specific value of the main
- # feature for which the subfeature values are valid.
- : subfeature # Subfeature name.
- : subvalues * # Subfeature values.
-)
-{
- feature = [ grist $(feature) ] ;
- validate-feature $(feature) ;
- if $(value-string)
- {
- validate-value-string $(feature) $(value-string) ;
- }
-
- local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
-
- return $(subfeature-name)
- $(feature)$(value-string:E="")<>$(subvalues).subfeature ;
-}
-
-
-# Extends the given subfeature with the subvalues. If the optional value-string
-# is provided, the subvalues are only valid for the given value of the feature.
-# Thus, you could say that <target-platform>mingw is specific to
-# <toolset>gcc-2.95.2 as follows:
-#
-# extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
-#
-rule extend-subfeature (
- feature # The feature whose subfeature is being extended.
-
- value-string ? # If supplied, specifies a specific value of the main
- # feature for which the new subfeature values are valid.
-
- : subfeature # Subfeature name.
- : subvalues * # Additional subfeature values.
-)
-{
- local subfeature-vars = [ subvalue-var $(feature) $(value-string)
- : $(subfeature) : $(subvalues) ] ;
-
- local f = [ utility.ungrist [ grist $(feature) ] ] ;
- extend $(f)-$(subfeature-vars[1]) : $(subvalues) ;
-
- # Provide a way to get from the given feature or property and subfeature
- # value to the subfeature name.
- $(subfeature-vars[2-]) = $(subfeature-vars[1]) ;
-}
-
-
-# Returns true iff the subvalues are valid for the feature. When the optional
-# value-string is provided, returns true iff the subvalues are valid for the
-# given value of the feature.
-#
-rule is-subvalue ( feature : value-string ? : subfeature : subvalue )
-{
- local subfeature-vars = [ subvalue-var $(feature) $(value-string)
- : $(subfeature) : $(subvalue) ] ;
-
- if $($(subfeature-vars[2])) = $(subfeature-vars[1])
- {
- return true ;
- }
-}
-
-
-# Can be called three ways:
-#
-# 1. extend feature : values *
-# 2. extend <feature> subfeature : values *
-# 3. extend <feature>value-string subfeature : values *
-#
-# * Form 1 adds the given values to the given feature.
-# * Forms 2 and 3 add subfeature values to the given feature.
-# * Form 3 adds the subfeature values as specific to the given property
-# value-string.
-#
-rule extend ( feature-or-property subfeature ? : values * )
-{
- local feature ; # If a property was specified this is its feature.
-    local value-string ; # E.g., the gcc-2.95.2 part of <toolset>gcc-2.95.2.
-
- # If a property was specified.
- if $(feature-or-property:G) && $(feature-or-property:G=)
- {
- # Extract the feature and value-string, if any.
- feature = $(feature-or-property:G) ;
- value-string = $(feature-or-property:G=) ;
- }
- else
- {
- feature = [ grist $(feature-or-property) ] ;
- }
-
- # Dispatch to the appropriate handler.
- if $(subfeature)
- {
- extend-subfeature $(feature) $(value-string) : $(subfeature)
- : $(values) ;
- }
- else
- {
- # If no subfeature was specified, we do not expect to see a
- # value-string.
- if $(value-string)
- {
- errors.error can only specify a property as the first argument when
- extending a subfeature
- : usage:
- : " extend" feature ":" values...
- : " | extend" <feature>value-string subfeature ":" values...
- ;
- }
-
- extend-feature $(feature) : $(values) ;
- }
-}
-
-
-local rule get-subfeature-name ( subfeature value-string ? )
-{
- local prefix = $(value-string): ;
- return $(prefix:E="")$(subfeature) ;
-}
-
-
-# Declares a subfeature.
-#
-rule subfeature (
- feature # Root feature that is not a subfeature.
- value-string ? # A value-string specifying which feature or subfeature
- # values this subfeature is specific to, if any.
- : subfeature # The name of the subfeature being declared.
- : subvalues * # The allowed values of this subfeature.
- : attributes * # The attributes of the subfeature.
-)
-{
- feature = [ grist $(feature) ] ;
- validate-feature $(feature) ;
-
- # Add grist to the subfeature name if a value-string was supplied.
- local subfeature-name = [ get-subfeature-name $(subfeature) $(value-string) ] ;
-
- if $(subfeature-name) in $($(feature).subfeatures)
- {
- errors.error \"$(subfeature)\" already declared as a subfeature of \"$(feature)\"
- "specific to "$(value-string) ;
- }
- $(feature).subfeatures += $(subfeature-name) ;
-
- # First declare the subfeature as a feature in its own right.
- local f = [ utility.ungrist $(feature) ] ;
- feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
-
- # Now make sure the subfeature values are known.
- extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
-}
-
-
-# Set components of the given composite property.
-#
-rule compose ( composite-property : component-properties * )
-{
- local feature = $(composite-property:G) ;
- if ! ( composite in [ attributes $(feature) ] )
- {
- errors.error "$(feature)" is not a composite feature ;
- }
-
- $(composite-property).components ?= ;
- if $($(composite-property).components)
- {
- errors.error components of "$(composite-property)" already set:
- $($(composite-property).components) ;
- }
-
- if $(composite-property) in $(component-properties)
- {
- errors.error composite property "$(composite-property)" cannot have itself as a component ;
- }
- $(composite-property).components = $(component-properties) ;
-}
-
-
-local rule expand-composite ( property )
-{
- return $(property)
- [ sequence.transform expand-composite : $($(property).components) ] ;
-}
-
-
-# Return all values of the given feature specified by the given property set.
-#
-rule get-values ( feature : properties * )
-{
- local result ;
-
- feature = $(:E=:G=$(feature)) ; # Add <> if necessary.
- for local p in $(properties)
- {
- if $(p:G) = $(feature)
- {
- # Use MATCH instead of :G= to get the value, in order to preserve
- # the value intact instead of having bjam treat it as a decomposable
- # path.
- result += [ MATCH ">(.*)" : $(p) ] ;
- }
- }
- return $(result) ;
-}
-
-
-rule free-features ( )
-{
- return $(free.features) ;
-}
-
-
-# Expand all composite properties in the set so that all components are
-# explicitly expressed.
-#
-rule expand-composites ( properties * )
-{
- local explicit-features = $(properties:G) ;
- local result ;
-
- # Now expand composite features.
- for local p in $(properties)
- {
- local expanded = [ expand-composite $(p) ] ;
-
- for local x in $(expanded)
- {
- if ! $(x) in $(result)
- {
- local f = $(x:G) ;
-
- if $(f) in $(free.features)
- {
- result += $(x) ;
- }
- else if ! $(x) in $(properties) # x is the result of expansion
- {
- if ! $(f) in $(explicit-features) # not explicitly-specified
- {
- if $(f) in $(result:G)
- {
- errors.error expansions of composite features result
- in conflicting values for $(f)
- : values: [ get-values $(f) : $(result) ] $(x:G=)
- : one contributing composite property was $(p) ;
- }
- else
- {
- result += $(x) ;
- }
- }
- }
- else if $(f) in $(result:G)
- {
- errors.error explicitly-specified values of non-free feature
- $(f) conflict :
- "existing values:" [ get-values $(f) : $(properties) ] :
- "value from expanding " $(p) ":" $(x:G=) ;
- }
- else
- {
- result += $(x) ;
- }
- }
- }
- }
- return $(result) ;
-}
-
-
-# Return true iff f is an ordinary subfeature of the parent-property's feature,
-# or if f is a subfeature of the parent-property's feature specific to the
-# parent-property's value.
-#
-local rule is-subfeature-of ( parent-property f )
-{
- if subfeature in $($(f).attributes)
- {
- local specific-subfeature = [ MATCH <(.*):(.*)> : $(f) ] ;
- if $(specific-subfeature)
- {
- # The feature has the form <topfeature-topvalue:subfeature>, e.g.
- # <toolset-msvc:version>.
- local feature-value = [ split-top-feature $(specific-subfeature[1])
- ] ;
- if <$(feature-value[1])>$(feature-value[2]) = $(parent-property)
- {
- return true ;
- }
- }
- else
- {
- # The feature has the form <topfeature-subfeature>, e.g.
- # <toolset-version>
- local top-sub = [ split-top-feature [ utility.ungrist $(f) ] ] ;
- if $(top-sub[2]) && <$(top-sub[1])> = $(parent-property:G)
- {
- return true ;
- }
- }
- }
-}
-
-
-# As for is-subfeature-of but for subproperties.
-#
-local rule is-subproperty-of ( parent-property p )
-{
- return [ is-subfeature-of $(parent-property) $(p:G) ] ;
-}
-
-
-# Given a property, return the subset of features consisting of all ordinary
-# subfeatures of the property's feature, and all specific subfeatures of the
-# property's feature which are conditional on the property's value.
-#
-local rule select-subfeatures ( parent-property : features * )
-{
- return [ sequence.filter is-subfeature-of $(parent-property) : $(features) ] ;
-}
-
-
-# As for select-subfeatures but for subproperties.
-#
-local rule select-subproperties ( parent-property : properties * )
-{
- return [ sequence.filter is-subproperty-of $(parent-property) : $(properties) ] ;
-}
-
-
-# Given a property set which may consist of composite and implicit properties
-# and combined subfeature values, returns an expanded, normalized property set
-# with all implicit features expressed explicitly, all subfeature values
-# individually expressed, and all components of composite properties expanded.
-# Non-free features directly expressed in the input properties cause any values
-# of those features due to composite feature expansion to be dropped. If two
-# values of a given non-free feature are directly expressed in the input, an
-# error is issued.
-#
-rule expand ( properties * )
-{
- local expanded = [ expand-subfeatures $(properties) ] ;
- return [ expand-composites $(expanded) ] ;
-}
-
-
-# Helper rule for minimize. Returns true iff property's feature is present in
-# the contents of the variable named by feature-set-var.
-#
-local rule in-features ( feature-set-var property )
-{
- if $(property:G) in $($(feature-set-var))
- {
- return true ;
- }
-}
-
-
-# Helper rule for minimize. Returns the list with the same properties, but with
-# all subfeatures moved to the end of the list.
-#
-local rule move-subfeatures-to-the-end ( properties * )
-{
- local x1 ;
- local x2 ;
- for local p in $(properties)
- {
- if subfeature in $($(p:G).attributes)
- {
- x2 += $(p) ;
- }
- else
- {
- x1 += $(p) ;
- }
- }
- return $(x1) $(x2) ;
-}
-
-
-# Given an expanded property set, eliminate all redundancy: properties that are
-# elements of other (composite) properties in the set will be eliminated.
-# Non-symmetric properties equal to default values will be eliminated unless
-# they override a value from some composite property. Implicit properties will
-# be expressed without feature grist, and sub-property values will be expressed
-# as elements joined to the corresponding main property.
-#
-rule minimize ( properties * )
-{
- # Precondition checking
-    local implicits = [ set.intersection $(properties:G=) : $(properties:G) ] ;
- if $(implicits)
- {
- errors.error minimize requires an expanded property set, but
- \"$(implicits[1])\" appears to be the value of an un-expanded
- implicit feature ;
- }
-
- # Remove properties implied by composite features.
- local components = $($(properties).components) ;
- local x = [ set.difference $(properties) : $(components) ] ;
-
- # Handle subfeatures and implicit features.
- x = [ move-subfeatures-to-the-end $(x) ] ;
- local result ;
- while $(x)
- {
- local p fullp = $(x[1]) ;
- local f = $(p:G) ;
- local v = $(p:G=) ;
-
- # Eliminate features in implicit properties.
- if implicit in [ attributes $(f) ]
- {
- p = $(v) ;
- }
-
- # Locate all subproperties of $(x[1]) in the property set.
- local subproperties = [ select-subproperties $(fullp) : $(x) ] ;
- if $(subproperties)
- {
- # Reconstitute the joined property name.
- local sorted = [ sequence.insertion-sort $(subproperties) ] ;
- result += $(p)-$(sorted:G="":J=-) ;
-
- x = [ set.difference $(x[2-]) : $(subproperties) ] ;
- }
- else
- {
- # Eliminate properties whose value is equal to feature's default,
- # which are not symmetric and which do not contradict values implied
- # by composite properties.
-
- # Since all component properties of composites in the set have been
- # eliminated, any remaining property whose feature is the same as a
- # component of a composite in the set must have a non-redundant
- # value.
- if $(fullp) != [ defaults $(f) ]
- || symmetric in [ attributes $(f) ]
- || $(fullp:G) in $(components:G)
- {
- result += $(p) ;
- }
-
- x = $(x[2-]) ;
- }
- }
- return $(result) ;
-}
-
-
-# Combine all subproperties into their parent properties
-#
-# Requires: for every subproperty, there is a parent property. All features are
-# explicitly expressed.
-#
-# This rule probably should not be needed, but build-request.expand-no-defaults
-# is being abused for unintended purposes and it needs help.
-#
-rule compress-subproperties ( properties * )
-{
- local all-subs ;
- local matched-subs ;
- local result ;
-
- for local p in $(properties)
- {
- if ! $(p:G)
- {
- # Expecting fully-gristed properties.
- assert.variable-not-empty p:G ;
- }
-
- if ! subfeature in $($(p:G).attributes)
- {
- local subs = [ sequence.insertion-sort
- [ sequence.filter is-subproperty-of $(p) : $(properties) ] ] ;
-
- matched-subs += $(subs) ;
-
- local subvalues = -$(subs:G=:J=-) ;
- subvalues ?= "" ;
- result += $(p)$(subvalues) ;
- }
- else
- {
- all-subs += $(p) ;
- }
- }
- assert.result true : set.equal $(all-subs) : $(matched-subs) ;
- return $(result) ;
-}
-
-
-# Given an ungristed string, finds the longest prefix which is a top-level
-# feature name followed by a dash, and returns a pair consisting of the parts
-# before and after that dash. More interesting than a simple split because
-# feature names may contain dashes.
-#
-local rule split-top-feature ( feature-plus )
-{
- local e = [ regex.split $(feature-plus) - ] ;
- local f = $(e[1]) ;
- local v ;
- while $(e)
- {
- if <$(f)> in $(.all-top-features)
- {
- v = $(f) $(e[2-]:J=-) ;
- }
- e = $(e[2-]) ;
- f = $(f)-$(e[1]) ;
- }
- return $(v) ;
-}
-
-
-# Given a set of properties, add default values for features not represented in
-# the set.
-#
-# Note: if there's an ordinary feature F1 and a composite feature F2 which
-# includes some value for F1, and both features have default values, then the
-# default value of F1 will be added (as opposed to the value in F2). This might
-# not be the right idea, e.g. consider:
-#
-# feature variant : debug ... ;
-# <variant>debug : .... <runtime-debugging>on
-# feature <runtime-debugging> : off on ;
-#
-# Here, when adding default for an empty property set, we'll get
-#
-#      <variant>debug <runtime-debugging>off
-#
-# and that's kind of strange.
-#
-rule add-defaults ( properties * )
-{
- for local v in $(properties:G=)
- {
- if $(v) in $(properties)
- {
- errors.error add-defaults requires explicitly specified features,
- but \"$(v)\" appears to be the value of an un-expanded implicit
- feature ;
- }
- }
-    # We don't add defaults for elements with ":" inside. This catches:
-    # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
-    #    to be taken as a specified value for <variant>
- # 2. Free properties with ":" in values. We don't care, since free
- # properties don't have defaults.
- local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
- local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
- local more = [ defaults $(missing-top) ] ;
- properties += $(more) ;
- xproperties += $(more) ;
-
- # Add defaults for subfeatures of features which are present.
- for local p in $(xproperties)
- {
- local s = $($(p:G).subfeatures) ;
- local f = [ utility.ungrist $(p:G) ] ;
- local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
- properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
- }
-
- return $(properties) ;
-}
-
-
-# Given a property-set of the form
-# v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
-#
-# Returns
-# v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
-#
-# Note that vN...vM may contain slashes. This needs to be resilient to the
-# substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps
-# slash direction on NT.
-#
-rule split ( property-set )
-{
- local pieces = [ regex.split $(property-set) [\\/] ] ;
- local result ;
-
- for local x in $(pieces)
- {
- if ( ! $(x:G) ) && $(result[-1]:G)
- {
- result = $(result[1--2]) $(result[-1])/$(x) ;
- }
- else
- {
- result += $(x) ;
- }
- }
-
- return $(result) ;
-}
-
-
-# Tests of module feature.
-#
-rule __test__ ( )
-{
- # Use a fresh copy of the feature module.
- prepare-test feature-test-temp ;
-
- import assert ;
- import errors : try catch ;
-
- # These are local rules and so must be explicitly reimported into the
- # testing module.
- import feature : extend-feature validate-feature select-subfeatures ;
-
- feature toolset : gcc : implicit ;
- feature define : : free ;
- feature runtime-link : dynamic static : symmetric ;
- feature optimization : on off ;
- feature variant : debug release profile : implicit composite symmetric ;
- feature stdlib : native stlport ;
- feature magic : : free ;
-
- compose <variant>debug : <define>_DEBUG <optimization>off ;
- compose <variant>release : <define>NDEBUG <optimization>on ;
-
- assert.result dynamic static : values <runtime-link> ;
- assert.result dynamic static : values runtime-link ;
-
- try ;
- {
- compose <variant>profile : <variant>profile ;
- }
- catch composite property <variant>profile cannot have itself as a component ;
-
- extend-feature toolset : msvc metrowerks ;
- subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1 3.0.2 ;
-
- assert.true is-subvalue toolset : gcc : version : 2.95.3 ;
- assert.false is-subvalue toolset : gcc : version : 1.1 ;
-
- assert.false is-subvalue toolset : msvc : version : 2.95.3 ;
- assert.false is-subvalue toolset : : version : yabba ;
-
- feature yabba ;
- subfeature yabba : version : dabba ;
- assert.true is-subvalue yabba : : version : dabba ;
-
- subfeature toolset gcc : platform : linux cygwin : optional ;
-
- assert.result <toolset-gcc:version>
- : select-subfeatures <toolset>gcc
- : <toolset-gcc:version>
- <toolset-msvc:version>
- <toolset-version>
- <stdlib> ;
-
- subfeature stdlib : version : 3 4 : optional ;
-
- assert.result <stdlib-version>
- : select-subfeatures <stdlib>native
- : <toolset-gcc:version>
- <toolset-msvc:version>
- <toolset-version>
- <stdlib-version> ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand-subfeatures <toolset>gcc-3.0.1 ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1 <toolset-gcc:platform>linux
- : expand-subfeatures <toolset>gcc-3.0.1-linux ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand <toolset>gcc <toolset-gcc:version>3.0.1 ;
-
- assert.result <define>foo=x-y
- : expand-subfeatures <define>foo=x-y ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- : expand-subfeatures gcc-3.0.1 ;
-
- assert.result a c e
- : get-values <x> : <x>a <y>b <x>c <y>d <x>e ;
-
- assert.result <toolset>gcc <toolset-gcc:version>3.0.1
- <variant>debug <define>_DEBUG <optimization>on
- : expand gcc-3.0.1 debug <optimization>on ;
-
- assert.result <variant>debug <define>_DEBUG <optimization>on
- : expand debug <optimization>on ;
-
- assert.result <optimization>on <variant>debug <define>_DEBUG
- : expand <optimization>on debug ;
-
- assert.result <runtime-link>dynamic <optimization>on
- : defaults <runtime-link> <define> <optimization> ;
-
- # Make sure defaults is resilient to missing grist.
- assert.result <runtime-link>dynamic <optimization>on
- : defaults runtime-link define optimization ;
-
- feature dummy : dummy1 dummy2 ;
- subfeature dummy : subdummy : x y z : optional ;
-
- feature fu : fu1 fu2 : optional ;
- subfeature fu : subfu : x y z : optional ;
- subfeature fu : subfu2 : q r s ;
-
- assert.result optional : attributes <fu> ;
- assert.result optional : attributes fu ;
-
- assert.result <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
- <dummy>dummy1 <toolset-gcc:version>2.95.2
- : add-defaults <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO ;
-
- assert.result <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
- <stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
- : add-defaults <runtime-link>static <define>foobar <optimization>on
- <toolset>gcc:<define>FOO <fu>fu1 ;
-
- set-default <runtime-link> : static ;
- assert.result <runtime-link>static : defaults <runtime-link> ;
-
- assert.result gcc-3.0.1 debug <optimization>on
- : minimize [ expand gcc-3.0.1 debug <optimization>on <stdlib>native ] ;
-
- assert.result gcc-3.0.1 debug <runtime-link>dynamic
- : minimize
- [ expand gcc-3.0.1 debug <optimization>off <runtime-link>dynamic ] ;
-
- assert.result gcc-3.0.1 debug
- : minimize [ expand gcc-3.0.1 debug <optimization>off ] ;
-
- assert.result debug <optimization>on
- : minimize [ expand debug <optimization>on ] ;
-
- assert.result gcc-3.0
- : minimize <toolset>gcc <toolset-gcc:version>3.0 ;
-
- assert.result gcc-3.0
- : minimize <toolset-gcc:version>3.0 <toolset>gcc ;
-
- assert.result <x>y/z <a>b/c <d>e/f
- : split <x>y/z/<a>b/c/<d>e/f ;
-
- assert.result <x>y/z <a>b/c <d>e/f
- : split <x>y\\z\\<a>b\\c\\<d>e\\f ;
-
- assert.result a b c <d>e/f/g <h>i/j/k
- : split a/b/c/<d>e/f/g/<h>i/j/k ;
-
- assert.result a b c <d>e/f/g <h>i/j/k
- : split a\\b\\c\\<d>e\\f\\g\\<h>i\\j\\k ;
-
- # Test error checking.
-
- try ;
- {
- expand release <optimization>off <optimization>on ;
- }
- catch explicitly-specified values of non-free feature <optimization> conflict ;
-
- try ;
- {
- validate-feature <foobar> ;
- }
- catch unknown feature ;
-
- validate-value-string <toolset> gcc ;
- validate-value-string <toolset> gcc-3.0.1 ;
-
- try ;
- {
- validate-value-string <toolset> digital_mars ;
- }
- catch \"digital_mars\" is not a known value of <toolset> ;
-
- try ;
- {
- feature foobar : : baz ;
- }
- catch unknown attributes: baz ;
-
- feature feature1 ;
- try ;
- {
- feature feature1 ;
- }
- catch feature already defined: ;
-
- try ;
- {
- feature feature2 : : free implicit ;
- }
- catch free features cannot also be implicit ;
-
- try ;
- {
- feature feature3 : : free propagated ;
- }
- catch free features cannot be propagated ;
-
- try ;
- {
- implied-feature lackluster ;
- }
- catch \"lackluster\" is not a value of an implicit feature ;
-
- try ;
- {
- implied-subfeature <toolset> 3.0.1 ;
- }
- catch \"3.0.1\" is not a known subfeature value of <toolset> ;
-
- try ;
- {
- implied-subfeature <toolset> not-a-version : gcc ;
- }
- catch \"not-a-version\" is not a known subfeature value of <toolset>gcc ;
-
- # Leave a clean copy of the features module behind.
- finish-test feature-test-temp ;
-}
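expand-subfeatures in the deleted module above splits an implicit value string on dashes and turns each trailing component into its own subfeature property, as the gcc-2.95.2-linux-x86 example in its comments shows. A tiny Python sketch of that expansion (hypothetical subfeature table; the real lookup also honours value-specific subfeatures):

    subfeature_of = {"2.95.2": "version", "linux": "os", "x86": "cpu"}

    def expand(value_string, feature="toolset"):
        # The first dash-separated component is the implicit value;
        # the remaining components name values of known subfeatures.
        parts = value_string.split("-")
        props = ["<%s>%s" % (feature, parts[0])]
        for sub in parts[1:]:
            props.append("<%s-%s>%s" % (feature, subfeature_of[sub], sub))
        return props

    print(expand("gcc-2.95.2-linux-x86"))
    # ['<toolset>gcc', '<toolset-version>2.95.2', '<toolset-os>linux', '<toolset-cpu>x86']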
diff --git a/tools/build/v2/build/feature.py b/tools/build/v2/build/feature.py
deleted file mode 100644
index 289aca14c5..0000000000
--- a/tools/build/v2/build/feature.py
+++ /dev/null
@@ -1,907 +0,0 @@
-# Status: ported, except for unit tests.
-# Base revision: 64488
-#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import re
-
-from b2.util import utility, bjam_signature
-import b2.util.set
-from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
-from b2.exceptions import *
-
-__re_split_subfeatures = re.compile ('<(.*):(.*)>')
-__re_no_hyphen = re.compile ('^([^:]+)$')
-__re_slash_or_backslash = re.compile (r'[\\/]')
-
-class Feature(object):
-
-    # Map from string attribute names to integer bit flags.
- # This will be initialized after declaration of the class.
- _attribute_name_to_integer = {}
-
- def __init__(self, name, values, attributes):
- self._name = name
- self._values = values
- self._default = None
- self._attributes = 0
- for a in attributes:
- self._attributes = self._attributes | Feature._attribute_name_to_integer[a]
- self._attributes_string_list = attributes
- self._subfeatures = []
- self._parent = None
-
- def name(self):
- return self._name
-
- def values(self):
- return self._values
-
- def add_values(self, values):
- self._values.extend(values)
-
- def attributes(self):
- return self._attributes
-
- def set_default(self, value):
- self._default = value
-
- def default(self):
- return self._default
-
- # FIXME: remove when we fully move to using classes for features/properties
- def attributes_string_list(self):
- return self._attributes_string_list
-
- def subfeatures(self):
- return self._subfeatures
-
- def add_subfeature(self, name):
- self._subfeatures.append(name)
-
- def parent(self):
- """For subfeatures, return pair of (parent_feature, value).
-
- Value may be None if this subfeature is not specific to any
- value of the parent feature.
- """
- return self._parent
-
- def set_parent(self, feature, value):
- self._parent = (feature, value)
-
- def __str__(self):
- return self._name
-
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __all_attributes, __all_features, __implicit_features, __composite_properties
- global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features
- global __all_subfeatures
-
- # The list with all attribute names.
- __all_attributes = [ 'implicit',
- 'composite',
- 'optional',
- 'symmetric',
- 'free',
- 'incidental',
- 'path',
- 'dependency',
- 'propagated',
- 'link-incompatible',
- 'subfeature',
- 'order-sensitive'
- ]
- i = 1
- for a in __all_attributes:
- setattr(Feature, a.upper(), i)
- Feature._attribute_name_to_integer[a] = i
- def probe(self, flag=i):
- return getattr(self, "_attributes") & flag
- setattr(Feature, a.replace("-", "_"), probe)
- i = i << 1
-
- # A map containing all features. The key is the feature name.
- # The value is an instance of Feature class.
- __all_features = {}
-
- # All non-subfeatures.
- __all_top_features = []
-
-    # Maps values to the corresponding implicit feature
- __implicit_features = {}
-
- # A map containing all composite properties. The key is a Property instance,
- # and the value is a list of Property instances
- __composite_properties = {}
-
- __features_with_attributes = {}
- for attribute in __all_attributes:
- __features_with_attributes [attribute] = []
-
- # Maps a value to the corresponding subfeature name.
- __subfeature_from_value = {}
-
- # All free features
- __free_features = []
-
- __all_subfeatures = []
-
-reset ()
-
-def enumerate ():
- """ Returns an iterator to the features map.
- """
- return __all_features.iteritems ()
-
-def get(name):
- """Return the Feature instance for the specified name.
-
-    Raises if no feature with such a name exists.
- """
- return __all_features[name]
-
-# FIXME: prepare-test/finish-test?
-
-@bjam_signature((["name"], ["values", "*"], ["attributes", "*"]))
-def feature (name, values, attributes = []):
- """ Declares a new feature with the given name, values, and attributes.
- name: the feature name
- values: a sequence of the allowable values - may be extended later with feature.extend
- attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
- """
- __validate_feature_attributes (name, attributes)
-
- feature = Feature(name, [], attributes)
- __all_features[name] = feature
- # Temporary measure while we have not fully moved from 'gristed strings'
- __all_features["<" + name + ">"] = feature
-
- for attribute in attributes:
- __features_with_attributes [attribute].append (name)
-
- name = add_grist(name)
-
- if 'subfeature' in attributes:
- __all_subfeatures.append(name)
- else:
- __all_top_features.append(feature)
-
- extend (name, values)
-
-    # FIXME: why this is needed.
- if 'free' in attributes:
- __free_features.append (name)
-
- return feature
-
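As an illustrative usage sketch of the declaration API above (it assumes Boost.Build's Python port is importable as the b2 package; the feature name and attribute set are hypothetical):

    # Hypothetical usage sketch of the functions defined in this module;
    # requires Boost.Build's Python port (the b2 package) on the import path.
    from b2.build import feature

    feature.reset()
    feature.feature('inlining', ['off', 'on', 'full'], ['propagated'])
    feature.extend('inlining', ['fast'])       # add another allowed value later
    feature.set_default('inlining', 'off')     # override the implicit first-value default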
-@bjam_signature((["feature"], ["value"]))
-def set_default (feature, value):
- """ Sets the default value of the given feature, overriding any previous default.
- feature: the name of the feature
- value: the default value to assign
- """
- f = __all_features[feature]
- attributes = f.attributes()
- bad_attribute = None
-
- if attributes & Feature.FREE:
- bad_attribute = "free"
- elif attributes & Feature.OPTIONAL:
- bad_attribute = "optional"
-
- if bad_attribute:
-        raise InvalidValue ("%s property %s cannot have a default" % (bad_attribute, feature))
-
- if not value in f.values():
-        raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % f.values())
-
- f.set_default(value)
-
-def defaults(features):
- """ Returns the default property values for the given features.
- """
- # FIXME: should merge feature and property modules.
- import property
-
- result = []
- for f in features:
- if not f.free() and not f.optional() and f.default():
- result.append(property.Property(f, f.default()))
-
- return result
-
-def valid (names):
- """ Returns true iff all elements of names are valid features.
- """
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return [ valid_one (name) for name in names ]
-
-def attributes (feature):
- """ Returns the attributes of the given feature.
- """
- return __all_features[feature].attributes_string_list()
-
-def values (feature):
- """ Return the values of the given feature.
- """
- validate_feature (feature)
- return __all_features[feature].values()
-
-def is_implicit_value (value_string):
-    """ Returns true iff 'value_string' is a value
- of an implicit feature.
- """
-
- if __implicit_features.has_key(value_string):
- return __implicit_features[value_string]
-
- v = value_string.split('-')
-
- if not __implicit_features.has_key(v[0]):
- return False
-
- feature = __implicit_features[v[0]]
-
- for subvalue in (v[1:]):
- if not __find_implied_subfeature(feature, subvalue, v[0]):
- return False
-
- return True
-
-def implied_feature (implicit_value):
- """ Returns the implicit feature associated with the given implicit value.
- """
- components = implicit_value.split('-')
-
- if not __implicit_features.has_key(components[0]):
- raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
-
- return __implicit_features[components[0]]
-
-def __find_implied_subfeature (feature, subvalue, value_string):
-
- #if value_string == None: value_string = ''
-
- if not __subfeature_from_value.has_key(feature) \
- or not __subfeature_from_value[feature].has_key(value_string) \
- or not __subfeature_from_value[feature][value_string].has_key (subvalue):
- return None
-
- return __subfeature_from_value[feature][value_string][subvalue]
-
-# Given a feature and a value of one of its subfeatures, find the name
-# of the subfeature. If value-string is supplied, looks for implied
-# subfeatures that are specific to that value of feature
-# feature # The main feature name
-# subvalue # The value of one of its subfeatures
-# value-string # The value of the main feature
-
-def implied_subfeature (feature, subvalue, value_string):
- result = __find_implied_subfeature (feature, subvalue, value_string)
- if not result:
- raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
-
- return result
-
-def validate_feature (name):
-    """ Checks that 'name' is a valid feature. Otherwise, raises an exception.
- """
- if not __all_features.has_key(name):
- raise InvalidFeature ("'%s' is not a valid feature name" % name)
- else:
- return __all_features[name]
-
-def valid (names):
- """ Returns true iff all elements of names are valid features.
- """
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return [ valid_one (name) for name in names ]
-
-# Uses Property
-def __expand_subfeatures_aux (property, dont_validate = False):
- """ Helper for expand_subfeatures.
- Given a feature and value, or just a value corresponding to an
- implicit feature, returns a property set consisting of all component
- subfeatures and their values. For example:
-
- expand_subfeatures <toolset>gcc-2.95.2-linux-x86
- -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
- equivalent to:
- expand_subfeatures gcc-2.95.2-linux-x86
-
- feature: The name of the feature, or empty if value corresponds to an implicit property
- value: The value of the feature.
- dont_validate: If True, no validation of value string will be done.
- """
- f = property.feature()
- v = property.value()
- if not dont_validate:
- validate_value_string(f, v)
-
- components = v.split ("-")
-
- v = components[0]
-
- import property
-
- result = [property.Property(f, components[0])]
-
- subvalues = components[1:]
-
- while len(subvalues) > 0:
- subvalue = subvalues [0] # pop the head off of subvalues
- subvalues = subvalues [1:]
-
- subfeature = __find_implied_subfeature (f, subvalue, v)
-
- # If no subfeature was found, reconstitute the value string and use that
- if not subfeature:
- return [property.Property(f, '-'.join(components))]
-
- result.append(property.Property(subfeature, subvalue))
-
- return result
-
-def expand_subfeatures(properties, dont_validate = False):
- """
- Make all elements of properties corresponding to implicit features
- explicit, and express all subfeature values as separate properties
- in their own right. For example, the property
-
- gcc-2.95.2-linux-x86
-
- might expand to
-
- <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
-
- properties: A sequence with elements of the form
- <feature>value-string or just value-string in the
- case of implicit features.
-        dont_validate: If True, no validation of the value string will be done.
- """
- result = []
- for p in properties:
- # Don't expand subfeatures in subfeatures
- if p.feature().subfeature():
- result.append (p)
- else:
- result.extend(__expand_subfeatures_aux (p, dont_validate))
-
- return result
-
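For illustration, a standalone sketch of the dash-splitting convention used by is_implicit_value and __expand_subfeatures_aux above (the helper name is hypothetical):

    # The part before the first '-' names the implicit feature's value; the
    # remaining components are candidate subfeature values.
    def split_implicit(value_string):
        components = value_string.split('-')
        return components[0], components[1:]

    print(split_implicit('gcc-2.95.2-linux-x86'))
    # ('gcc', ['2.95.2', 'linux', 'x86'])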
-
-
-# rule extend was defined as below:
- # Can be called three ways:
- #
- # 1. extend feature : values *
- # 2. extend <feature> subfeature : values *
- # 3. extend <feature>value-string subfeature : values *
- #
- # * Form 1 adds the given values to the given feature
- # * Forms 2 and 3 add subfeature values to the given feature
- # * Form 3 adds the subfeature values as specific to the given
- # property value-string.
- #
- #rule extend ( feature-or-property subfeature ? : values * )
-#
-# Now, the specific rule must be called, depending on the desired operation:
-# extend_feature
-# extend_subfeature
-
-def extend (name, values):
- """ Adds the given values to the given feature.
- """
- name = add_grist (name)
- __validate_feature (name)
- feature = __all_features [name]
-
- if feature.implicit():
- for v in values:
- if __implicit_features.has_key(v):
- raise BaseException ("'%s' is already associated with the feature '%s'" % (v, __implicit_features [v]))
-
- __implicit_features[v] = feature
-
- if len (feature.values()) == 0 and len (values) > 0:
- # This is the first value specified for this feature,
- # take it as default value
- feature.set_default(values[0])
-
- feature.add_values(values)
-
-def validate_value_string (f, value_string):
- """ Checks that value-string is a valid value-string for the given feature.
- """
- if f.free() or value_string in f.values():
- return
-
- values = [value_string]
-
- if f.subfeatures():
- if not value_string in f.values() and \
- not value_string in f.subfeatures():
- values = value_string.split('-')
-
- # An empty value is allowed for optional features
- if not values[0] in f.values() and \
- (values[0] or not f.optional()):
-        raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], f, f.values()))
-
- for v in values [1:]:
- # this will validate any subfeature values in value-string
- implied_subfeature(f, v, values[0])
-
-
-""" Extends the given subfeature with the subvalues. If the optional
- value-string is provided, the subvalues are only valid for the given
- value of the feature. Thus, you could say that
-    <target-platform>mingw is specific to <toolset>gcc-2.95.2 as follows:
-
- extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
-
- feature: The feature whose subfeature is being extended.
-
- value-string: If supplied, specifies a specific value of the
- main feature for which the new subfeature values
- are valid.
-
- subfeature: The name of the subfeature.
-
- subvalues: The additional values of the subfeature being defined.
-"""
-def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
-
- feature = validate_feature(feature_name)
-
- if value_string:
- validate_value_string(feature, value_string)
-
- subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
-
-    extend(subfeature_name, subvalues)
- subfeature = __all_features[subfeature_name]
-
- if value_string == None: value_string = ''
-
- if not __subfeature_from_value.has_key(feature):
- __subfeature_from_value [feature] = {}
-
- if not __subfeature_from_value[feature].has_key(value_string):
- __subfeature_from_value [feature][value_string] = {}
-
- for subvalue in subvalues:
- __subfeature_from_value [feature][value_string][subvalue] = subfeature
-
-@bjam_signature((["feature_name", "value_string", "?"], ["subfeature"],
- ["subvalues", "*"], ["attributes", "*"]))
-def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
- """ Declares a subfeature.
- feature_name: Root feature that is not a subfeature.
- value_string: An optional value-string specifying which feature or
- subfeature values this subfeature is specific to,
- if any.
- subfeature: The name of the subfeature being declared.
- subvalues: The allowed values of this subfeature.
- attributes: The attributes of the subfeature.
- """
- parent_feature = validate_feature (feature_name)
-
- # Add grist to the subfeature name if a value-string was supplied
- subfeature_name = __get_subfeature_name (subfeature, value_string)
-
- if subfeature_name in __all_features[feature_name].subfeatures():
- message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
- message += " specific to '%s'" % value_string
- raise BaseException (message)
-
- # First declare the subfeature as a feature in its own right
- f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
- f.set_parent(parent_feature, value_string)
-
- parent_feature.add_subfeature(f)
-
- # Now make sure the subfeature values are known.
- extend_subfeature (feature_name, value_string, subfeature, subvalues)
-
-
-@bjam_signature((["composite_property_s"], ["component_properties_s", "*"]))
-def compose (composite_property_s, component_properties_s):
- """ Sets the components of the given composite property.
-
-        All parameters are <feature>value strings.
- """
- import property
-
- component_properties_s = to_seq (component_properties_s)
- composite_property = property.create_from_string(composite_property_s)
- f = composite_property.feature()
-
- if len(component_properties_s) > 0 and isinstance(component_properties_s[0], property.Property):
- component_properties = component_properties_s
- else:
- component_properties = [property.create_from_string(p) for p in component_properties_s]
-
- if not f.composite():
- raise BaseException ("'%s' is not a composite feature" % f)
-
-    if __composite_properties.has_key(composite_property):
- raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties[composite_property])))
-
- if composite_property in component_properties:
- raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)
-
- __composite_properties[composite_property] = component_properties
-
-
-def expand_composite(property):
- result = [ property ]
- if __composite_properties.has_key(property):
- for p in __composite_properties[property]:
- result.extend(expand_composite(p))
- return result
-
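A standalone model of the recursive component expansion performed by expand_composite, with a plain dict standing in for __composite_properties (the table contents are hypothetical):

    # Hypothetical composite table: a property maps to its components, which
    # may themselves be composite and are expanded recursively.
    composites = {
        '<variant>debug': ['<optimization>off', '<debug-symbols>on'],
    }

    def expand(prop):
        result = [prop]
        for p in composites.get(prop, []):
            result.extend(expand(p))
        return result

    print(expand('<variant>debug'))
    # ['<variant>debug', '<optimization>off', '<debug-symbols>on']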
-@bjam_signature((['feature'], ['properties', '*']))
-def get_values (feature, properties):
- """ Returns all values of the given feature specified by the given property set.
- """
- if feature[0] != '<':
- feature = '<' + feature + '>'
- result = []
- for p in properties:
- if get_grist (p) == feature:
- result.append (replace_grist (p, ''))
-
- return result
-
-def free_features ():
- """ Returns all free features.
- """
- return __free_features
-
-def expand_composites (properties):
- """ Expand all composite properties in the set so that all components
- are explicitly expressed.
- """
- explicit_features = set(p.feature() for p in properties)
-
- result = []
-
- # now expand composite features
- for p in properties:
- expanded = expand_composite(p)
-
- for x in expanded:
- if not x in result:
- f = x.feature()
-
- if f.free():
- result.append (x)
- elif not x in properties: # x is the result of expansion
- if not f in explicit_features: # not explicitly-specified
- if any(r.feature() == f for r in result):
- raise FeatureConflict(
- "expansions of composite features result in "
- "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" %
- (f.name(), [r.value() for r in result if r.feature() == f] + [x.value()], p))
- else:
- result.append (x)
- elif any(r.feature() == f for r in result):
- raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
- "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
- [r.value() for r in result if r.feature() == f], p, x.value()))
- else:
- result.append (x)
-
- return result
-
-# Uses Property
-def is_subfeature_of (parent_property, f):
- """ Return true iff f is an ordinary subfeature of the parent_property's
- feature, or if f is a subfeature of the parent_property's feature
- specific to the parent_property's value.
- """
- if not f.subfeature():
- return False
-
- p = f.parent()
- if not p:
- return False
-
- parent_feature = p[0]
- parent_value = p[1]
-
- if parent_feature != parent_property.feature():
- return False
-
- if parent_value and parent_value != parent_property.value():
- return False
-
- return True
-
-def __is_subproperty_of (parent_property, p):
- """ As is_subfeature_of, for subproperties.
- """
- return is_subfeature_of (parent_property, p.feature())
-
-
-# Returns true iff the subvalue is valid for the feature. When the
-# optional value-string is provided, returns true iff the subvalues
-# are valid for the given value of the feature.
-def is_subvalue(feature, value_string, subfeature, subvalue):
-
- if not value_string:
- value_string = ''
-
- if not __subfeature_from_value.has_key(feature):
- return False
-
- if not __subfeature_from_value[feature].has_key(value_string):
- return False
-
- if not __subfeature_from_value[feature][value_string].has_key(subvalue):
- return False
-
- if __subfeature_from_value[feature][value_string][subvalue]\
- != subfeature:
- return False
-
- return True
-
-def implied_subfeature (feature, subvalue, value_string):
- result = __find_implied_subfeature (feature, subvalue, value_string)
- if not result:
- raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
-
- return result
-
-
-# Uses Property
-def expand (properties):
- """ Given a property set which may consist of composite and implicit
- properties and combined subfeature values, returns an expanded,
- normalized property set with all implicit features expressed
- explicitly, all subfeature values individually expressed, and all
- components of composite properties expanded. Non-free features
- directly expressed in the input properties cause any values of
- those features due to composite feature expansion to be dropped. If
- two values of a given non-free feature are directly expressed in the
- input, an error is issued.
- """
- expanded = expand_subfeatures(properties)
- return expand_composites (expanded)
-
-# Accepts list of Property objects
-def add_defaults (properties):
- """ Given a set of properties, add default values for features not
- represented in the set.
-        Note: if there's an ordinary feature F1 and a composite feature
-        F2 which includes some value for F1, and both features have default values,
-        then the default value of F1 will be added, not the value in F2. This might
-        not be the right idea: consider
-
- feature variant : debug ... ;
- <variant>debug : .... <runtime-debugging>on
- feature <runtime-debugging> : off on ;
-
- Here, when adding default for an empty property set, we'll get
-
- <variant>debug <runtime_debugging>off
-
- and that's kind of strange.
- """
- result = [x for x in properties]
-
- handled_features = set()
- for p in properties:
- # We don't add default for conditional properties. We don't want
-        # <variant>debug:<define>DEBUG to be taken as the specified value for <variant>
- if not p.condition():
- handled_features.add(p.feature())
-
- missing_top = [f for f in __all_top_features if not f in handled_features]
- more = defaults(missing_top)
- result.extend(more)
- for p in more:
- handled_features.add(p.feature())
-
- # Add defaults for subfeatures of features which are present
- for p in result[:]:
- s = p.feature().subfeatures()
- more = defaults([s for s in p.feature().subfeatures() if not s in handled_features])
- for p in more:
- handled_features.add(p.feature())
- result.extend(more)
-
- return result
-
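The behaviour described in the docstring above -- defaults are only filled in for features the property set does not mention -- can be modelled with plain dicts (a hypothetical standalone sketch, not this module's API):

    # Hypothetical standalone model: properties maps feature name to value,
    # and defaults are applied only for features that are absent.
    DEFAULTS = {'variant': 'debug', 'runtime-debugging': 'off', 'link': 'shared'}

    def with_defaults(properties):
        result = dict(properties)
        for f, v in DEFAULTS.items():
            result.setdefault(f, v)
        return result

    print(with_defaults({'variant': 'release'}))
    # {'variant': 'release', 'runtime-debugging': 'off', 'link': 'shared'}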
-def minimize (properties):
- """ Given an expanded property set, eliminate all redundancy: properties
- which are elements of other (composite) properties in the set will
- be eliminated. Non-symmetric properties equal to default values will be
-        eliminated, unless they override a value from some composite property.
- Implicit properties will be expressed without feature
- grist, and sub-property values will be expressed as elements joined
- to the corresponding main property.
- """
-
- # remove properties implied by composite features
- components = []
- for property in properties:
- if __composite_properties.has_key (property):
- components.extend(__composite_properties[property])
- properties = b2.util.set.difference (properties, components)
-
- # handle subfeatures and implicit features
-
- # move subfeatures to the end of the list
- properties = [p for p in properties if not p.feature().subfeature()] +\
- [p for p in properties if p.feature().subfeature()]
-
- result = []
- while properties:
- p = properties[0]
- f = p.feature()
-
-        # locate all subproperties of p in the property set
- subproperties = __select_subproperties (p, properties)
-
- if subproperties:
- # reconstitute the joined property name
- subproperties.sort ()
- joined = b2.build.property.Property(p.feature(), p.value() + '-' + '-'.join ([sp.value() for sp in subproperties]))
- result.append(joined)
-
- properties = b2.util.set.difference(properties[1:], subproperties)
-
- else:
- # eliminate properties whose value is equal to feature's
- # default and which are not symmetric and which do not
- # contradict values implied by composite properties.
-
- # since all component properties of composites in the set
- # have been eliminated, any remaining property whose
- # feature is the same as a component of a composite in the
- # set must have a non-redundant value.
- if p.value() != f.default() or f.symmetric():
- result.append (p)
- #\
- #or get_grist (fullp) in get_grist (components):
- # FIXME: restore above
-
-
- properties = properties[1:]
-
- return result
-
-
-def split (properties):
- """ Given a property-set of the form
- v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM
-
- Returns
- v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM
-
- Note that vN...vM may contain slashes. This is resilient to the
- substitution of backslashes for slashes, since Jam, unbidden,
- sometimes swaps slash direction on NT.
- """
-
- def split_one (properties):
- pieces = re.split (__re_slash_or_backslash, properties)
- result = []
-
- for x in pieces:
- if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
- result = result [0:-1] + [ result [-1] + '/' + x ]
- else:
- result.append (x)
-
- return result
-
- if isinstance (properties, str):
- return split_one (properties)
-
- result = []
- for p in properties:
- result += split_one (p)
- return result
-
-
-def compress_subproperties (properties):
- """ Combine all subproperties into their parent properties
-
- Requires: for every subproperty, there is a parent property. All
- features are explicitly expressed.
-
- This rule probably shouldn't be needed, but
- build-request.expand-no-defaults is being abused for unintended
- purposes and it needs help
- """
- result = []
- matched_subs = set()
- all_subs = set()
- for p in properties:
- f = p.feature()
-
- if not f.subfeature():
- subs = __select_subproperties (p, properties)
- if subs:
-
- matched_subs.update(subs)
-
- subvalues = '-'.join (sub.value() for sub in subs)
- result.append(b2.build.property.Property(
- p.feature(), p.value() + '-' + subvalues,
- p.condition()))
- else:
- result.append(p)
-
- else:
- all_subs.add(p)
-
-    # TODO: these variables are used just for debugging. What's the overhead?
- assert all_subs == matched_subs
-
- return result
-
-######################################################################################
-# Private methods
-
-def __select_subproperties (parent_property, properties):
- return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
-
-def __get_subfeature_name (subfeature, value_string):
- if value_string == None:
- prefix = ''
- else:
- prefix = value_string + ':'
-
- return prefix + subfeature
-
-
-def __validate_feature_attributes (name, attributes):
- for attribute in attributes:
- if not attribute in __all_attributes:
- raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
-
- if name in __all_features:
- raise AlreadyDefined ("feature '%s' already defined" % name)
- elif 'implicit' in attributes and 'free' in attributes:
- raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name)
- elif 'free' in attributes and 'propagated' in attributes:
- raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
-
-
-def __validate_feature (feature):
- """ Generates an error if the feature is unknown.
- """
- if not __all_features.has_key (feature):
- raise BaseException ('unknown feature "%s"' % feature)
-
-
-def __select_subfeatures (parent_property, features):
- """ Given a property, return the subset of features consisting of all
- ordinary subfeatures of the property's feature, and all specific
- subfeatures of the property's feature which are conditional on the
- property's value.
- """
- return [f for f in features if is_subfeature_of (parent_property, f)]
-
-# FIXME: copy over tests.
diff --git a/tools/build/v2/build/generators.jam b/tools/build/v2/build/generators.jam
deleted file mode 100644
index 333def5741..0000000000
--- a/tools/build/v2/build/generators.jam
+++ /dev/null
@@ -1,1380 +0,0 @@
-# Copyright Vladimir Prus 2002.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Manages 'generators' --- objects which can do transformations between different
-# target types and contain the algorithm for finding a transformation from sources
-# to targets.
-#
-# The main entry point to this module is the generators.construct rule. It is
-# given a list of source targets, the desired target type and a set of properties.
-# It starts by selecting 'viable generators', which have any chance of producing
-# the desired target type with the required properties. Generators are ranked
-# and a set of the most specific ones is selected.
-#
-# The most specific generators have their 'run' methods called, with the
-# properties and list of sources. Each one selects a target which can be
-# directly consumed, and tries to convert the remaining ones to the types it can
-# consume. This is done by recursively calling 'construct' with all consumable
-# types.
-#
-# If the generator has collected all the targets it needs, it creates targets
-# corresponding to the result and returns them. When all generators have been run,
-# the results of one of them are selected and returned as the result.
-#
-# It is quite possible for 'construct' to return more targets than it was asked
-# for. For example, it may be asked to generate a target of type EXE, but the
-# only generator found produces both EXE and TDS (debug information) files.
-# The extra target will be returned.
-#
-# Likewise, when a generator tries to convert sources to consumable types, it can
-# get more targets than it was asked for. The question is what to do with extra
-# targets. Boost.Build attempts to convert them to requested types, and attempts
-# that as early as possible. Specifically, this is done after invoking each
-# generator. TODO: An example is needed to document the rationale for trying
-# extra target conversion at that point.
-#
-# In order for the system to be able to use a specific generator instance 'when
-# needed', the instance needs to be registered with the system using
-# generators.register() or one of its related rules. Unregistered generators may
-# only be run explicitly and will not be considered by Boost.Build when
-# converting between given target types.
-
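A minimal standalone sketch of the search idea described above: generators are registered per produced type, and construction recurses from the requested type back toward the types the available sources already have. All ids, types and the flat chain model here are hypothetical simplifications (no ranking, properties or composing generators):

    # Hypothetical model: each generator is (id, source_type, target_type).
    GENERATORS = [
        ('gcc.compile', 'CPP', 'OBJ'),
        ('gcc.link',    'OBJ', 'EXE'),
    ]

    def construct(target_type, source_type, seen=()):
        # Found a source of the requested type: nothing left to generate.
        if target_type == source_type:
            return []
        for gid, src, tgt in GENERATORS:
            if tgt == target_type and gid not in seen:
                chain = construct(src, source_type, seen + (gid,))
                if chain is not None:
                    return chain + [gid]
        return None

    print(construct('EXE', 'CPP'))   # ['gcc.compile', 'gcc.link']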
-import "class" : new ;
-import errors ;
-import property-set ;
-import sequence ;
-import set ;
-import type ;
-import utility ;
-import virtual-target ;
-
-
-if "--debug-generators" in [ modules.peek : ARGV ]
-{
- .debug = true ;
-}
-
-
-# Updates cached viable source target type information as needed after a new
-# target type gets defined. This is needed because if a target type is a viable
-# source target type for some generator then all of the target type's derived
-# target types should automatically be considered as viable source target types
-# for the same generator as well. Does nothing if a non-derived target type is
-# passed to it.
-#
-rule update-cached-information-with-a-new-type ( type )
-{
- local base-type = [ type.base $(type) ] ;
- if $(base-type)
- {
- for local g in $(.vstg-cached-generators)
- {
- if $(base-type) in $(.vstg.$(g))
- {
- .vstg.$(g) += $(type) ;
- }
- }
-
- for local t in $(.vst-cached-types)
- {
- if $(base-type) in $(.vst.$(t))
- {
- .vst.$(t) += $(type) ;
- }
- }
- }
-}
-
-
-# Clears cached viable source target type information except for target types
-# and generators with all source types listed as viable. Should be called when
-# something invalidates those cached values by possibly causing some new source
-# types to become viable.
-#
-local rule invalidate-extendable-viable-source-target-type-cache ( )
-{
- local generators-with-cached-source-types = $(.vstg-cached-generators) ;
- .vstg-cached-generators = ;
- for local g in $(generators-with-cached-source-types)
- {
- if $(.vstg.$(g)) = *
- {
- .vstg-cached-generators += $(g) ;
- }
- else
- {
- .vstg.$(g) = ;
- }
- }
-
- local types-with-cached-source-types = $(.vst-cached-types) ;
- .vst-cached-types = ;
- for local t in $(types-with-cached-source-types)
- {
- if $(.vst.$(t)) = *
- {
- .vst-cached-types += $(t) ;
- }
- else
- {
- .vst.$(t) = ;
- }
- }
-}
-
-
-# Outputs a debug message if generators debugging is on. Each element of
-# 'message' is checked to see if it is a class instance. If so, instead of the
-# value, the result of 'str' call is output.
-#
-local rule generators.dout ( message * )
-{
- if $(.debug)
- {
- ECHO [ sequence.transform utility.str : $(message) ] ;
- }
-}
-
-
-local rule indent ( )
-{
- return $(.indent:J="") ;
-}
-
-
-local rule increase-indent ( )
-{
- .indent += " " ;
-}
-
-
-local rule decrease-indent ( )
-{
- .indent = $(.indent[2-]) ;
-}
-
-
-# Models a generator.
-#
-class generator
-{
- import generators : indent increase-indent decrease-indent generators.dout ;
- import set ;
- import utility ;
- import feature ;
- import errors ;
- import sequence ;
- import type ;
- import virtual-target ;
- import "class" : new ;
- import property ;
- import path ;
-
- EXPORT class@generator : indent increase-indent decrease-indent
- generators.dout ;
-
- rule __init__ (
- id # Identifies the generator - should be name
- # of the rule which sets up the build
- # actions.
-
- composing ? # Whether generator processes each source
- # target in turn, converting it to required
- # types. Ordinary generators pass all
- # sources together to the recursive
- # generators.construct-types call.
-
- : source-types * # Types that this generator can handle. If
- # empty, the generator can consume anything.
-
- : target-types-and-names + # Types the generator will create and,
- # optionally, names for created targets.
- # Each element should have the form
- # type["(" name-pattern ")"], for example,
- # obj(%_x). Generated target name will be
- # found by replacing % with the name of
- # source, provided an explicit name was not
- # specified.
-
- : requirements *
- )
- {
- self.id = $(id) ;
- self.rule-name = $(id) ;
- self.composing = $(composing) ;
- self.source-types = $(source-types) ;
- self.target-types-and-names = $(target-types-and-names) ;
- self.requirements = $(requirements) ;
-
- for local e in $(target-types-and-names)
- {
- # Create three parallel lists: one with the list of target types,
- # and two other with prefixes and postfixes to be added to target
- # name. We use parallel lists for prefix and postfix (as opposed to
-            # mapping), because a given target type might occur several times, for
- # example "H H(%_symbols)".
- local m = [ MATCH ([^\\(]*)(\\((.*)%(.*)\\))? : $(e) ] ;
- self.target-types += $(m[1]) ;
- self.name-prefix += $(m[3]:E="") ;
- self.name-postfix += $(m[4]:E="") ;
- }
-
-        # Note that 'transform' here is the same as 'for_each'.
- sequence.transform type.validate : $(self.source-types) ;
- sequence.transform type.validate : $(self.target-types) ;
- }
-
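The type["(" name-pattern ")"] convention parsed by the constructor above can be illustrated with an equivalent Python regex (the helper names here are hypothetical):

    import re

    # Equivalent of the MATCH expression in the constructor: an optional
    # parenthesised name pattern, with '%' standing for the source basename.
    _SPEC = re.compile(r'([^(]*)(\((.*)%(.*)\))?$')

    def generated_name(spec, source_basename):
        m = _SPEC.match(spec)
        type_, prefix, postfix = m.group(1), m.group(3) or '', m.group(4) or ''
        return type_, prefix + source_basename + postfix

    print(generated_name('OBJ', 'foo'))           # ('OBJ', 'foo')
    print(generated_name('H(%_symbols)', 'foo'))  # ('H', 'foo_symbols')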
- ################# End of constructor #################
-
- rule id ( )
- {
- return $(self.id) ;
- }
-
-    # Returns the list of target types the generator accepts.
- #
- rule source-types ( )
- {
- return $(self.source-types) ;
- }
-
- # Returns the list of target types that this generator produces. It is
- # assumed to be always the same -- i.e. it can not change depending on some
- # provided list of sources.
- #
- rule target-types ( )
- {
- return $(self.target-types) ;
- }
-
-    # Returns the required properties for this generator. Properties in the
-    # returned set must be present in the build properties if this generator is
-    # to be used. If the result has a grist-only element, then the build
-    # properties must include some value of that feature.
- #
- # XXX: remove this method?
- #
- rule requirements ( )
- {
- return $(self.requirements) ;
- }
-
- rule set-rule-name ( rule-name )
- {
- self.rule-name = $(rule-name) ;
- }
-
- rule rule-name ( )
- {
- return $(self.rule-name) ;
- }
-
- # Returns a true value if the generator can be run with the specified
- # properties.
- #
- rule match-rank ( property-set-to-match )
- {
- # See if generator requirements are satisfied by 'properties'. Treat a
- # feature name in requirements (i.e. grist-only element), as matching
- # any value of the feature.
- local all-requirements = [ requirements ] ;
-
- local property-requirements feature-requirements ;
- for local r in $(all-requirements)
- {
- if $(r:G=)
- {
- property-requirements += $(r) ;
- }
- else
- {
- feature-requirements += $(r) ;
- }
- }
-
- local properties-to-match = [ $(property-set-to-match).raw ] ;
- if $(property-requirements) in $(properties-to-match) &&
- $(feature-requirements) in $(properties-to-match:G)
- {
- return true ;
- }
- else
- {
- return ;
- }
- }
-
- # Returns another generator which differs from $(self) in
- # - id
- # - value to <toolset> feature in properties
- #
- rule clone ( new-id : new-toolset-properties + )
- {
- local g = [ new $(__class__) $(new-id) $(self.composing) :
- $(self.source-types) : $(self.target-types-and-names) :
- # Note: this does not remove any subfeatures of <toolset> which
- # might cause problems.
- [ property.change $(self.requirements) : <toolset> ]
- $(new-toolset-properties) ] ;
- return $(g) ;
- }
-
- # Creates another generator that is the same as $(self), except that if
-    # 'base' is in target types of $(self), 'type' will be in target types of the
- # new generator.
- #
- rule clone-and-change-target-type ( base : type )
- {
- local target-types ;
- for local t in $(self.target-types-and-names)
- {
- local m = [ MATCH ([^\\(]*)(\\(.*\\))? : $(t) ] ;
- if $(m) = $(base)
- {
- target-types += $(type)$(m[2]:E="") ;
- }
- else
- {
- target-types += $(t) ;
- }
- }
-
- local g = [ new $(__class__) $(self.id) $(self.composing) :
- $(self.source-types) : $(target-types) : $(self.requirements) ] ;
- if $(self.rule-name)
- {
- $(g).set-rule-name $(self.rule-name) ;
- }
- return $(g) ;
- }
-
- # Tries to invoke this generator on the given sources. Returns a list of
- # generated targets (instances of 'virtual-target') and optionally a set of
- # properties to be added to the usage-requirements for all the generated
- # targets. Returning nothing from run indicates that the generator was
- # unable to create the target.
- #
- rule run
- (
- project # Project for which the targets are generated.
- name ? # Used when determining the 'name' attribute for all
- # generated targets. See the 'generated-targets' method.
- : property-set # Desired properties for generated targets.
- : sources + # Source targets.
- )
- {
- generators.dout [ indent ] " ** generator" $(self.id) ;
- generators.dout [ indent ] " composing:" $(self.composing) ;
-
- if ! $(self.composing) && $(sources[2]) && $(self.source-types[2])
- {
- errors.error "Unsupported source/source-type combination" ;
- }
-
- # We do not run composing generators if no name is specified. The reason
- # is that composing generator combines several targets, which can have
- # different names, and it cannot decide which name to give for produced
- # target. Therefore, the name must be passed.
- #
-        # This, in effect, means that composing generators are runnable only at
-        # the top-level of a transformation graph, or if their name is passed
-        # explicitly. Thus, we disallow composing generators in the middle. For
- # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
- # will not be allowed as the OBJ -> STATIC_LIB generator is composing.
- if ! $(self.composing) || $(name)
- {
- run-really $(project) $(name) : $(property-set) : $(sources) ;
- }
- }
-
- rule run-really ( project name ? : property-set : sources + )
- {
- # Targets that this generator will consume directly.
- local consumed = ;
- # Targets that can not be consumed and will be returned as-is.
- local bypassed = ;
-
- if $(self.composing)
- {
- consumed = [ convert-multiple-sources-to-consumable-types $(project)
- : $(property-set) : $(sources) ] ;
- }
- else
- {
- consumed = [ convert-to-consumable-types $(project) $(name)
- : $(property-set) : $(sources) ] ;
- }
-
- local result ;
- if $(consumed)
- {
- result = [ construct-result $(consumed) : $(project) $(name) :
- $(property-set) ] ;
- }
-
- if $(result)
- {
- generators.dout [ indent ] " SUCCESS: " $(result) ;
- }
- else
- {
- generators.dout [ indent ] " FAILURE" ;
- }
- generators.dout ;
- return $(result) ;
- }
-
- # Constructs the dependency graph to be returned by this generator.
- #
- rule construct-result
- (
- consumed + # Already prepared list of consumable targets.
- # Composing generators may receive multiple sources
- # all of which will have types matching those in
- # $(self.source-types). Non-composing generators with
- # multiple $(self.source-types) will receive exactly
- # len $(self.source-types) sources with types matching
- # those in $(self.source-types). And non-composing
- # generators with only a single source type may
- # receive multiple sources with all of them of the
- # type listed in $(self.source-types).
- : project name ?
- : property-set # Properties to be used for all actions created here.
- )
- {
- local result ;
- # If this is 1->1 transformation, apply it to all consumed targets in
- # order.
- if ! $(self.source-types[2]) && ! $(self.composing)
- {
- for local r in $(consumed)
- {
- result += [ generated-targets $(r) : $(property-set) :
- $(project) $(name) ] ;
- }
- }
- else if $(consumed)
- {
- result += [ generated-targets $(consumed) : $(property-set) :
- $(project) $(name) ] ;
- }
- return $(result) ;
- }
-
- # Determine target name from fullname (maybe including path components)
- # Place optional prefix and postfix around basename
- #
- rule determine-target-name ( fullname : prefix ? : postfix ? )
- {
- # See if we need to add directory to the target name.
- local dir = $(fullname:D) ;
- local name = $(fullname:B) ;
-
- name = $(prefix:E=)$(name) ;
- name = $(name)$(postfix:E=) ;
-
- if $(dir) &&
- # Never append '..' to target path.
- ! [ MATCH .*(\\.\\.).* : $(dir) ]
- &&
- ! [ path.is-rooted $(dir) ]
- {
- # Relative path is always relative to the source
- # directory. Retain it, so that users can have files
-            # with the same name in two different subdirectories.
- name = $(dir)/$(name) ;
- }
- return $(name) ;
- }
-
- # Determine the name of the produced target from the names of the sources.
- #
- rule determine-output-name ( sources + )
- {
-        # The simple case is when the name of a source has a single dot. Then, we take
- # the part before dot. Several dots can be caused by:
- # - using source file like a.host.cpp, or
-        #   - a type whose suffix has a dot. Say, we can have a type 'host_cpp' with
- # extension 'host.cpp'.
- # In the first case, we want to take the part up to the last dot. In the
- # second case -- not sure, but for now take the part up to the last dot
- # too.
- name = [ utility.basename [ $(sources[1]).name ] ] ;
-
- for local s in $(sources[2])
- {
- local n2 = [ utility.basename [ $(s).name ] ] ;
- if $(n2) != $(name)
- {
- errors.error "$(self.id): source targets have different names: cannot determine target name" ;
- }
- }
- name = [ determine-target-name [ $(sources[1]).name ] ] ;
- return $(name) ;
- }
-
- # Constructs targets that are created after consuming 'sources'. The result
-    # will be a list of virtual-targets with the same length as the
-    # 'target-types' attribute and with corresponding types.
- #
- # When 'name' is empty, all source targets must have the same 'name'
- # attribute value, which will be used instead of the 'name' argument.
- #
- # The 'name' attribute value for each generated target will be equal to
- # the 'name' parameter if there is no name pattern for this type. Otherwise,
- # the '%' symbol in the name pattern will be replaced with the 'name'
- # parameter to obtain the 'name' attribute.
- #
- # For example, if targets types are T1 and T2 (with name pattern "%_x"),
- # suffixes for T1 and T2 are .t1 and .t2, and source is foo.z, then created
- # files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually
- # determines the basename of a file.
- #
- # Note that this pattern mechanism has nothing to do with implicit patterns
- # in make. It is a way to produce a target whose name is different than the
- # name of its source.
- #
- rule generated-targets ( sources + : property-set : project name ? )
- {
- if ! $(name)
- {
- name = [ determine-output-name $(sources) ] ;
- }
-
- # Assign an action for each target.
- local action = [ action-class ] ;
- local a = [ class.new $(action) $(sources) : $(self.rule-name) :
- $(property-set) ] ;
-
- # Create generated target for each target type.
- local targets ;
- local pre = $(self.name-prefix) ;
- local post = $(self.name-postfix) ;
- for local t in $(self.target-types)
- {
- local generated-name = $(pre[1])$(name:BS)$(post[1]) ;
- generated-name = $(generated-name:R=$(name:D)) ;
- pre = $(pre[2-]) ;
- post = $(post[2-]) ;
-
- targets += [ class.new file-target $(generated-name) : $(t) :
- $(project) : $(a) ] ;
- }
-
- return [ sequence.transform virtual-target.register : $(targets) ] ;
- }
-
- # Attempts to convert 'sources' to targets of types that this generator can
- # handle. The intention is to produce the set of targets that can be used
- # when the generator is run.
- #
- rule convert-to-consumable-types
- (
- project name ?
- : property-set
- : sources +
-        : only-one ?   # Convert 'source' to only one of the source types. If
-                       # there is more than one possibility, report an error.
- )
- {
- local _consumed ;
- local missing-types ;
-
- if $(sources[2])
- {
- # Do not know how to handle several sources yet. Just try to pass
- # the request to other generator.
- missing-types = $(self.source-types) ;
- }
- else
- {
- local temp = [ consume-directly $(sources) ] ;
- if $(temp[1])
- {
- _consumed = $(temp[1]) ;
- }
- missing-types = $(temp[2-]) ;
- }
-
- # No need to search for transformation if some source type has consumed
- # source and no more source types are needed.
- if $(only-one) && $(_consumed)
- {
- missing-types = ;
- }
-
-        # TODO: we should check that only one source type is consumed when
-        # 'only-one' is true.
-
- if $(missing-types)
- {
- local transformed = [ generators.construct-types $(project) $(name)
- : $(missing-types) : $(property-set) : $(sources) ] ;
-
- # Add targets of right type to 'consumed'. Add others to 'bypassed'.
- # The 'generators.construct' rule has done its best to convert
- # everything to the required type. There is no need to rerun it on
- # targets of different types.
-
- # NOTE: ignoring usage requirements.
- for local t in $(transformed[2-])
- {
- if [ $(t).type ] in $(missing-types)
- {
- _consumed += $(t) ;
- }
- }
- }
-
- return [ sequence.unique $(_consumed) ] ;
- }
-
- # Converts several files to consumable types. Called for composing
- # generators only.
- #
- rule convert-multiple-sources-to-consumable-types ( project : property-set :
- sources * )
- {
- local result ;
- # We process each source one-by-one, trying to convert it to a usable
- # type.
- for local source in $(sources)
- {
- local _c = [ convert-to-consumable-types $(project) : $(property-set)
- : $(source) : true ] ;
- if ! $(_c)
- {
- generators.dout [ indent ] " failed to convert " $(source) ;
- }
- result += $(_c) ;
- }
- return $(result) ;
- }
-
- rule consume-directly ( source )
- {
- local real-source-type = [ $(source).type ] ;
-
- # If there are no source types, we can consume anything.
- local source-types = $(self.source-types) ;
- source-types ?= $(real-source-type) ;
-
- local result = "" ;
- local missing-types ;
-
- for local st in $(source-types)
- {
-            # The 'source' is of the right type already.
- if $(real-source-type) = $(st) || [ type.is-derived
- $(real-source-type) $(st) ]
- {
- result = $(source) ;
- }
- else
- {
- missing-types += $(st) ;
- }
- }
- return $(result) $(missing-types) ;
- }
-
-    # Returns the class to be used for actions. The default implementation returns
- # "action".
- #
- rule action-class ( )
- {
- return "action" ;
- }
-}
-
-
-# Registers a new generator instance 'g'.
-#
-rule register ( g )
-{
- .all-generators += $(g) ;
-
-    # A generator can produce several targets of the same type. We want a unique
-    # occurrence of that generator in .generators.$(t) in that case; otherwise,
-    # it would be tried twice and we would get a false ambiguity.
- for local t in [ sequence.unique [ $(g).target-types ] ]
- {
- .generators.$(t) += $(g) ;
- }
-
- # Update the set of generators for toolset.
-
-    # TODO: should we check that a generator with this id is not already
-    # registered? For example, the fop.jam module intentionally declares two
-    # generators with the same id, so such a check would break it.
- local id = [ $(g).id ] ;
-
- # Some generators have multiple periods in their name, so a simple $(id:S=)
- # will not generate the right toolset name. E.g. if id = gcc.compile.c++,
- # then .generators-for-toolset.$(id:S=) will append to
- # .generators-for-toolset.gcc.compile, which is a separate value from
- # .generators-for-toolset.gcc. Correcting this makes generator inheritance
- # work properly. See also inherit-generators in the toolset module.
- local base = $(id) ;
- while $(base:S)
- {
- base = $(base:B) ;
- }
- .generators-for-toolset.$(base) += $(g) ;
-
-
- # After adding a new generator that can construct new target types, we need
- # to clear the related cached viable source target type information for
- # constructing a specific target type or using a specific generator. Cached
- # viable source target type lists affected by this are those containing any
- # of the target types constructed by the new generator or any of their base
- # target types.
- #
- # A more advanced alternative to clearing that cached viable source target
- # type information would be to expand it with additional source types or
- # even better - mark it as needing to be expanded on next use.
- #
- # Also see the http://thread.gmane.org/gmane.comp.lib.boost.build/19077
- # mailing list thread for an even more advanced idea of how we could convert
- # Boost Build's Jamfile processing, target selection and generator selection
- # into separate steps which would prevent these caches from ever being
- # invalidated.
- #
- # For now we just clear all the cached viable source target type information
- # that does not simply state 'all types' and may implement a more detailed
- # algorithm later on if it becomes needed.
-
- invalidate-extendable-viable-source-target-type-cache ;
-}
-
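The toolset-name reduction performed by the $(base:B)/$(base:S) loop in register above (gcc.compile.c++ becomes gcc) looks like this in Python (the function name is hypothetical):

    # Strip dotted suffixes one at a time, as the while loop above does.
    def toolset_base(generator_id):
        base = generator_id
        while '.' in base:
            base = base.rsplit('.', 1)[0]
        return base

    print(toolset_base('gcc.compile.c++'))  # gcc
    print(toolset_base('msvc.link'))        # msvc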
-
-# Creates a new non-composing 'generator' class instance and registers it.
-# Returns the created instance. Rationale: the instance is returned so that it
-# is possible to first register a generator and then call its 'run' method,
-# bypassing the whole generator selection process.
-#
-rule register-standard ( id : source-types * : target-types + : requirements * )
-{
- local g = [ new generator $(id) : $(source-types) : $(target-types) :
- $(requirements) ] ;
- register $(g) ;
- return $(g) ;
-}
-
-
-# Creates a new composing 'generator' class instance and registers it.
-#
-rule register-composing ( id : source-types * : target-types + : requirements *
- )
-{
- local g = [ new generator $(id) true : $(source-types) : $(target-types) :
- $(requirements) ] ;
- register $(g) ;
- return $(g) ;
-}
-
-
-# Returns all generators belonging to the given 'toolset', i.e. whose ids are
-# '$(toolset).<something>'.
-#
-rule generators-for-toolset ( toolset )
-{
- return $(.generators-for-toolset.$(toolset)) ;
-}
-
-
-# Make generator 'overrider-id' be preferred to 'overridee-id'. If, when
-# searching for generators that could produce a target of a certain type, both
-# those generators are among viable generators, the overridden generator is
-# immediately discarded.
-#
-# The overridden generators are discarded immediately after computing the list
-# of viable generators but before running any of them.
-#
-rule override ( overrider-id : overridee-id )
-{
- .override.$(overrider-id) += $(overridee-id) ;
-}
-
-
-# Returns a list of source types which can possibly be converted to 'target-type'
-# by some chain of generator invocation.
-#
-# More formally, takes all generators for 'target-type' and returns a union of
-# source types for those generators and result of calling itself recursively on
-# source types.
-#
-# Returns '*' in case any type should be considered a viable source type for the
-# given type.
-#
-local rule viable-source-types-real ( target-type )
-{
- local result ;
-
- # 't0' is the initial list of target types we need to process to get a list
- # of their viable source target types. New target types will not be added to
- # this list.
- local t0 = [ type.all-bases $(target-type) ] ;
-
- # 't' is the list of target types which have not yet been processed to get a
- # list of their viable source target types. This list will get expanded as
- # we locate more target types to process.
- local t = $(t0) ;
-
- while $(t)
- {
- # Find all generators for the current type. Unlike
- # 'find-viable-generators' we do not care about the property-set.
- local generators = $(.generators.$(t[1])) ;
- t = $(t[2-]) ;
-
- while $(generators)
- {
- local g = $(generators[1]) ;
- generators = $(generators[2-]) ;
-
- if ! [ $(g).source-types ]
- {
- # Empty source types -- everything can be accepted.
- result = * ;
- # This will terminate this loop.
- generators = ;
- # This will terminate the outer loop.
- t = ;
- }
-
- for local source-type in [ $(g).source-types ]
- {
- if ! $(source-type) in $(result)
- {
- # If a generator accepts a 'source-type' it will also
- # happily accept any type derived from it.
- for local n in [ type.all-derived $(source-type) ]
- {
- if ! $(n) in $(result)
- {
- # Here there is no point in adding target types to
- # the list of types to process in case they are or
- # have already been on that list. We optimize this
- # check by realizing that we only need to avoid the
- # original target type's base types. Other target
- # types that are or have been on the list of target
- # types to process have been added to the 'result'
- # list as well and have thus already been eliminated
- # by the previous if.
- if ! $(n) in $(t0)
- {
- t += $(n) ;
- }
- result += $(n) ;
- }
- }
- }
- }
- }
- }
-
- return $(result) ;
-}
-
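The fixed-point search in viable-source-types-real can be sketched with toy Python data: generators indexed by the type they produce, plus a derived-types relation (all names and tables here are hypothetical, and base-type handling is omitted):

    # generators_for maps a target type to the source-type lists of the
    # generators producing it; derived maps a type to types derived from it.
    generators_for = {'EXE': [['OBJ']], 'OBJ': [['CPP'], ['ASM']]}
    derived = {'CPP': ['MOCCABLE_CPP']}

    def viable_source_types(target_type):
        result, queue = [], [target_type]
        while queue:
            t, queue = queue[0], queue[1:]
            for source_types in generators_for.get(t, []):
                for s in source_types:
                    for n in [s] + derived.get(s, []):
                        if n not in result:
                            result.append(n)
                            queue.append(n)   # newly found types are processed too
        return result

    print(viable_source_types('EXE'))  # ['OBJ', 'CPP', 'MOCCABLE_CPP', 'ASM']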
-
-# Helper rule, caches the result of 'viable-source-types-real'.
-#
-rule viable-source-types ( target-type )
-{
- local key = .vst.$(target-type) ;
- if ! $($(key))
- {
- .vst-cached-types += $(target-type) ;
- local v = [ viable-source-types-real $(target-type) ] ;
- if ! $(v)
- {
- v = none ;
- }
- $(key) = $(v) ;
- }
-
- if $($(key)) != none
- {
- return $($(key)) ;
- }
-}
-
-
-# Returns the list of source types which, when passed to the 'run' method of
-# 'generator', have some chance of being eventually used (probably after
-# conversion by other generators).
-#
-# Returns '*' in case any type should be considered a viable source type for the
-# given generator.
-#
-rule viable-source-types-for-generator-real ( generator )
-{
- local source-types = [ $(generator).source-types ] ;
- if ! $(source-types)
- {
- # If generator does not specify any source types, it might be a special
- # generator like builtin.lib-generator which just relays to other
- # generators. Return '*' to indicate that any source type is possibly
- # OK, since we do not know for sure.
- return * ;
- }
- else
- {
- local result ;
- while $(source-types)
- {
- local s = $(source-types[1]) ;
- source-types = $(source-types[2-]) ;
- local viable-sources = [ generators.viable-source-types $(s) ] ;
- if $(viable-sources) = *
- {
- result = * ;
- source-types = ; # Terminate the loop.
- }
- else
- {
- result += [ type.all-derived $(s) ] $(viable-sources) ;
- }
- }
- return [ sequence.unique $(result) ] ;
- }
-}
-
-
-# Helper rule, caches the result of 'viable-source-types-for-generator'.
-#
-local rule viable-source-types-for-generator ( generator )
-{
- local key = .vstg.$(generator) ;
- if ! $($(key))
- {
- .vstg-cached-generators += $(generator) ;
- local v = [ viable-source-types-for-generator-real $(generator) ] ;
- if ! $(v)
- {
- v = none ;
- }
- $(key) = $(v) ;
- }
-
- if $($(key)) != none
- {
- return $($(key)) ;
- }
-}
-
-
-# Returns usage requirements + list of created targets.
-#
-local rule try-one-generator-really ( project name ? : generator : target-type
- : property-set : sources * )
-{
- local targets =
- [ $(generator).run $(project) $(name) : $(property-set) : $(sources) ] ;
-
- local usage-requirements ;
- local success ;
-
- generators.dout [ indent ] returned $(targets) ;
-
- if $(targets)
- {
- success = true ;
-
- if [ class.is-a $(targets[1]) : property-set ]
- {
- usage-requirements = $(targets[1]) ;
- targets = $(targets[2-]) ;
- }
- else
- {
- usage-requirements = [ property-set.empty ] ;
- }
- }
-
- generators.dout [ indent ] " generator" [ $(generator).id ] " spawned " ;
- generators.dout [ indent ] " " $(targets) ;
- if $(usage-requirements)
- {
-        generators.dout [ indent ] "  with usage requirements:" $(usage-requirements) ;
- }
-
- if $(success)
- {
- return $(usage-requirements) $(targets) ;
- }
-}
-
-
-# Checks if generator invocation can be pruned, because it is guaranteed to
-# fail. If so, quickly returns an empty list. Otherwise, calls
-# try-one-generator-really.
-#
-local rule try-one-generator ( project name ? : generator : target-type
- : property-set : sources * )
-{
- local source-types ;
- for local s in $(sources)
- {
- source-types += [ $(s).type ] ;
- }
- local viable-source-types = [ viable-source-types-for-generator $(generator)
- ] ;
-
- if $(source-types) && $(viable-source-types) != * &&
- ! [ set.intersection $(source-types) : $(viable-source-types) ]
- {
- local id = [ $(generator).id ] ;
- generators.dout [ indent ] " ** generator '$(id)' pruned" ;
- #generators.dout [ indent ] "source-types" '$(source-types)' ;
- #generators.dout [ indent ] "viable-source-types" '$(viable-source-types)' ;
- }
- else
- {
- return [ try-one-generator-really $(project) $(name) : $(generator) :
- $(target-type) : $(property-set) : $(sources) ] ;
- }
-}
-
-
-rule construct-types ( project name ? : target-types + : property-set
- : sources + )
-{
- local result ;
- local usage-requirements = [ property-set.empty ] ;
- for local t in $(target-types)
- {
- local r = [ construct $(project) $(name) : $(t) : $(property-set) :
- $(sources) ] ;
- if $(r)
- {
- usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
- result += $(r[2-]) ;
- }
- }
- # TODO: have to introduce parameter controlling if several types can be
- # matched and add appropriate checks.
-
- # TODO: need to review the documentation for 'construct' to see if it should
-    # return $(source) even if nothing can be done with it. Current docs seem
- # to imply that, contrary to the behaviour.
- if $(result)
- {
- return $(usage-requirements) $(result) ;
- }
- else
- {
- return $(usage-requirements) $(sources) ;
- }
-}
-
-
-# Ensures all 'targets' have their type. If this is not so, exits with an error.
-#
-local rule ensure-type ( targets * )
-{
- for local t in $(targets)
- {
- if ! [ $(t).type ]
- {
- errors.error "target" [ $(t).str ] "has no type" ;
- }
- }
-}
-
-
-# Returns generators which can be used to construct target of specified type
-# with specified properties. Uses the following algorithm:
-# - iterates over requested target-type and all its bases (in the order returned
-# by type.all-bases).
-# - for each type find all generators that generate that type and whose
-# requirements are satisfied by properties.
-# - if the set of generators is not empty, returns that set.
-#
-# Note: this algorithm explicitly ignores generators for base classes if there
-# is at least one generator for the requested target-type.
-#
-local rule find-viable-generators-aux ( target-type : property-set )
-{
- # Select generators that can create the required target type.
- local viable-generators = ;
- local generator-rank = ;
-
- import type ;
- local t = [ type.all-bases $(target-type) ] ;
-
- generators.dout [ indent ] find-viable-generators target-type= $(target-type)
- property-set= [ $(property-set).as-path ] ;
-
- # Get the list of generators for the requested type. If no generator is
- # registered, try base type, and so on.
- local generators ;
- while $(t[1])
- {
- generators.dout [ indent ] "trying type" $(t[1]) ;
- if $(.generators.$(t[1]))
- {
- generators.dout [ indent ] "there are generators for this type" ;
- generators = $(.generators.$(t[1])) ;
-
- if $(t[1]) != $(target-type)
- {
- # We are here because there were no generators found for
- # target-type but there are some generators for its base type.
- # We will try to use them, but they will produce targets of
- # base type, not of 'target-type'. So, we clone the generators
- # and modify the list of target types.
- local generators2 ;
- for local g in $(generators)
- {
- # generators.register adds a generator to the list of
- # generators for toolsets, which is a bit strange, but
- # should work. That list is only used when inheriting a
- # toolset, which should have been done before running
- # generators.
- generators2 += [ $(g).clone-and-change-target-type $(t[1]) :
- $(target-type) ] ;
- generators.register $(generators2[-1]) ;
- }
- generators = $(generators2) ;
- }
- t = ;
- }
- t = $(t[2-]) ;
- }
-
- for local g in $(generators)
- {
- generators.dout [ indent ] "trying generator" [ $(g).id ] "(" [ $(g).source-types ] -> [ $(g).target-types ] ")" ;
-
- local m = [ $(g).match-rank $(property-set) ] ;
- if $(m)
- {
- generators.dout [ indent ] " is viable" ;
- viable-generators += $(g) ;
- }
- }
-
- return $(viable-generators) ;
-}
-
-
-rule find-viable-generators ( target-type : property-set )
-{
- local key = $(target-type).$(property-set) ;
- local l = $(.fv.$(key)) ;
- if ! $(l)
- {
- l = [ find-viable-generators-aux $(target-type) : $(property-set) ] ;
- if ! $(l)
- {
- l = none ;
- }
- .fv.$(key) = $(l) ;
- }
-
- if $(l) = none
- {
- l = ;
- }
-
- local viable-generators ;
- for local g in $(l)
- {
- # Avoid trying the same generator twice on different levels.
- if ! $(g) in $(.active-generators)
- {
- viable-generators += $(g) ;
- }
- else
- {
-            generators.dout [ indent ] "  generator " [ $(g).id ] "is active, discarding" ;
- }
- }
-
- # Generators which override 'all'.
- local all-overrides ;
-    # Generators which are overridden.
- local overriden-ids ;
- for local g in $(viable-generators)
- {
- local id = [ $(g).id ] ;
- local this-overrides = $(.override.$(id)) ;
- overriden-ids += $(this-overrides) ;
- if all in $(this-overrides)
- {
- all-overrides += $(g) ;
- }
- }
- if $(all-overrides)
- {
- viable-generators = $(all-overrides) ;
- }
- local result ;
- for local g in $(viable-generators)
- {
- if ! [ $(g).id ] in $(overriden-ids)
- {
- result += $(g) ;
- }
- }
-
- return $(result) ;
-}
-
-
-.construct-stack = ;
-
-
-# Attempts to construct a target by finding viable generators, running them and
-# selecting the dependency graph.
-#
-local rule construct-really ( project name ? : target-type : property-set :
- sources * )
-{
- viable-generators = [ find-viable-generators $(target-type) :
- $(property-set) ] ;
-
- generators.dout [ indent ] "*** " [ sequence.length $(viable-generators) ]
- " viable generators" ;
-
- local result ;
- local generators-that-succeeded ;
- for local g in $(viable-generators)
- {
- # This variable will be restored on exit from this scope.
- local .active-generators = $(g) $(.active-generators) ;
-
- local r = [ try-one-generator $(project) $(name) : $(g) : $(target-type)
- : $(property-set) : $(sources) ] ;
-
- if $(r)
- {
- generators-that-succeeded += $(g) ;
- if $(result)
- {
- ECHO "Error: ambiguity found when searching for best transformation" ;
- ECHO "Trying to produce type '$(target-type)' from: " ;
- for local s in $(sources)
- {
- ECHO " - " [ $(s).str ] ;
- }
- ECHO "Generators that succeeded:" ;
- for local g in $(generators-that-succeeded)
- {
- ECHO " - " [ $(g).id ] ;
- }
- ECHO "First generator produced: " ;
- for local t in $(result[2-])
- {
- ECHO " - " [ $(t).str ] ;
- }
- ECHO "Second generator produced: " ;
- for local t in $(r[2-])
- {
- ECHO " - " [ $(t).str ] ;
- }
- EXIT ;
- }
- else
- {
- result = $(r) ;
- }
- }
- }
-
- return $(result) ;
-}
-
-
-# Attempts to create a target of 'target-type' with 'properties' from 'sources'.
-# The 'sources' are treated as a collection of *possible* ingredients, i.e.
-# there is no obligation to consume them all.
-#
-# Returns a list of targets. When this invocation is the first instance of
-# 'construct' on the stack, returns only targets of the requested 'target-type';
-# otherwise, it also returns unused sources and additionally generated targets.
-#
-# If 'top-level' is set, does not suppress generators that are already
-# used in the stack. This may be useful in cases where a generator
-# has to build a metatarget -- for example, a target corresponding to a
-# built tool.
-#
-rule construct ( project name ? : target-type : property-set * : sources * : top-level ? )
-{
-    local saved-active ;
- if $(top-level)
- {
- saved-active = $(.active-generators) ;
- .active-generators = ;
- }
-
-    if ! $(.construct-stack)
- {
- ensure-type $(sources) ;
- }
-
- .construct-stack += 1 ;
-
- increase-indent ;
-
- if $(.debug)
- {
- generators.dout [ indent ] "*** construct" $(target-type) ;
-
- for local s in $(sources)
- {
- generators.dout [ indent ] " from" $(s) ;
- }
- generators.dout [ indent ] " properties:" [ $(property-set).raw ] ;
- }
-
- local result = [ construct-really $(project) $(name) : $(target-type) :
- $(property-set) : $(sources) ] ;
-
- decrease-indent ;
-
- .construct-stack = $(.construct-stack[2-]) ;
-
- if $(top-level)
- {
- .active-generators = $(saved-active) ;
- }
-
- return $(result) ;
-}
-
-# Given 'result', obtained from some generator or generators.construct, adds
-# 'raw-properties' as usage requirements to it. If the result already contains usage
-# requirements -- that is, if the first element of the result is an instance of the
-# property-set class -- the existing usage requirements and 'raw-properties' are
-# combined.
-#
-rule add-usage-requirements ( result * : raw-properties * )
-{
- if $(result)
- {
- if [ class.is-a $(result[1]) : property-set ]
- {
- return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
- }
- else
- {
- return [ property-set.create $(raw-properties) ] $(result) ;
- }
- }
-}
-
-rule dump ( )
-{
- for local g in $(.all-generators)
- {
- ECHO [ $(g).id ] ":" [ $(g).source-types ] -> [ $(g).target-types ] ;
- }
-}
-
diff --git a/tools/build/v2/build/generators.py b/tools/build/v2/build/generators.py
deleted file mode 100644
index 7c82645388..0000000000
--- a/tools/build/v2/build/generators.py
+++ /dev/null
@@ -1,1097 +0,0 @@
-# Status: being ported by Vladimir Prus
-# Base revision: 48649
-# TODO: replace the logging with dout
-
-# Copyright Vladimir Prus 2002.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Manages 'generators' --- objects which can do transformations between different
-# target types and contain the algorithm for finding a transformation from sources
-# to targets.
-#
-# The main entry point to this module is the generators.construct rule. It is given
-# a list of source targets, a desired target type and a set of properties.
-# It starts by selecting 'viable generators', which have any chance of producing
-# the desired target type with the required properties. Generators are ranked and
-# a set of the most specific ones is selected.
-#
-# The most specific generators have their 'run' methods called, with the properties
-# and the list of sources. Each one selects the targets which can be directly
-# consumed, and tries to convert the remaining ones to the types it can consume.
-# This is done by recursively calling 'construct' with all consumable types.
-#
-# If the generator has collected all the targets it needs, it creates targets
-# corresponding to the result, and returns them. When all generators have been run,
-# the results of one of them are selected and returned as the result.
-#
-# It's quite possible that 'construct' returns more targets than it was asked for.
-# For example, it was asked for target type EXE, but the only generator found produces
-# both EXE and TDS (a file with debug information). The extra target will be returned.
-#
-# Likewise, when a generator tries to convert sources to consumable types, it can get
-# more targets than it was asked for. The question is what to do with the extra targets.
-# Boost.Build attempts to convert them to requested types, and attempts that as early as
-# possible. Specifically, this is done after invoking each generator. (Later I'll
-# document the rationale for trying extra target conversion at that point).
-#
-# That early conversion is not always desirable. Suppose a generator got a source of
-# type Y and must consume one target of type X_1 and one target of type X_2.
-# When converting Y to X_1, an extra target of type Y_2 is created. We should not try
-# to convert it to type X_1, because if we do so, the generator will get two targets
-# of type X_1, and will be at a loss as to which one to use. Because of that, the
-# 'construct' rule has a parameter telling whether multiple targets can be returned. If
-# the parameter is false, conversion of extra targets is not performed.
-
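As a rough orientation for the algorithm sketched in the comment above, here is a minimal, hypothetical Python sketch of the selection-and-run loop. The ToyGenerator attributes (target_types, matches, run) are invented for illustration and are not the Boost.Build API; ranking, recursion into 'construct' for non-consumable sources, and caching are omitted.

    # Hypothetical sketch only; not part of Boost.Build.
    def toy_construct(target_type, properties, sources, generators):
        # Keep only generators that can produce 'target_type' and whose
        # requirements are satisfied by 'properties'.
        viable = [g for g in generators
                  if target_type in g.target_types and g.matches(properties)]
        results = []
        for g in viable:
            # Run each viable generator; one that cannot consume the sources
            # returns nothing.
            produced = g.run(properties, sources)
            if produced:
                results.append(produced)
        # The real code reports an ambiguity if more than one generator
        # succeeded; here we simply return the first result, if any.
        return results[0] if results else None
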
-
-import re
-import cStringIO
-import os.path
-
-from virtual_target import Subvariant
-import virtual_target, type, property_set, property
-from b2.util.logger import *
-from b2.util.utility import *
-from b2.util import set
-from b2.util.sequence import unique
-import b2.util.sequence as sequence
-from b2.manager import get_manager
-import b2.build.type
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
- global __overrides, __active_generators
- global __viable_generators_cache, __viable_source_types_cache
- global __vstg_cached_generators, __vst_cached_types
-
- __generators = {}
- __type_to_generators = {}
- __generators_for_toolset = {}
- __overrides = {}
-
- # TODO: can these be global?
- __construct_stack = []
- __viable_generators_cache = {}
- __viable_source_types_cache = {}
- __active_generators = []
-
- __vstg_cached_generators = []
- __vst_cached_types = []
-
-reset ()
-
-_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
-_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?')
-
-
-__debug = None
-__indent = ""
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-generators" in bjam.variable("ARGV")
- return __debug
-
-def increase_indent():
- global __indent
- __indent += " "
-
-def decrease_indent():
- global __indent
- __indent = __indent[0:-4]
-
-
-# Updates cached viable source target type information as needed after a new
-# derived target type gets added. This is needed because if a target type is a
-# viable source target type for some generator then all of the target type's
-# derived target types are automatically viable as source target types for the
-# same generator. Does nothing if a non-derived target type is passed to it.
-#
-def update_cached_information_with_a_new_type(type):
-
- base_type = b2.build.type.base(type)
-
- if base_type:
- for g in __vstg_cached_generators:
- if base_type in __viable_source_types_cache.get(g, []):
- __viable_source_types_cache[g].append(type)
-
- for t in __vst_cached_types:
- if base_type in __viable_source_types_cache.get(t, []):
- __viable_source_types_cache[t].append(type)
-
-# Clears cached viable source target type information except for target types
-# and generators with all source types listed as viable. Should be called when
-# something invalidates those cached values by possibly causing some new source
-# types to become viable.
-#
-def invalidate_extendable_viable_source_target_type_cache():
-
- global __vstg_cached_generators
- generators_with_cached_source_types = __vstg_cached_generators
- __vstg_cached_generators = []
-
- for g in generators_with_cached_source_types:
- if __viable_source_types_cache.has_key(g):
- if __viable_source_types_cache[g] == ["*"]:
- __vstg_cached_generators.append(g)
- else:
- del __viable_source_types_cache[g]
-
- global __vst_cached_types
- types_with_cached_sources_types = __vst_cached_types
- __vst_cached_types = []
- for t in types_with_cached_sources_types:
- if __viable_source_types_cache.has_key(t):
- if __viable_source_types_cache[t] == ["*"]:
- __vst_cached_types.append(t)
- else:
- del __viable_source_types_cache[t]
-
-def dout(message):
- if debug():
- print __indent + message
-
-class Generator:
- """ Creates a generator.
- manager: the build manager.
- id: identifies the generator
-
- rule: the rule which sets up build actions.
-
- composing: whether generator processes each source target in
- turn, converting it to required types.
- Ordinary generators pass all sources together to
-                            a recursive generators.construct_types call.
-
- source_types (optional): types that this generator can handle
-
- target_types_and_names: types the generator will create and, optionally, names for
- created targets. Each element should have the form
- type["(" name-pattern ")"]
-                             for example, obj(%_x). The name of the generated target will be
-                             found by replacing % with the name of the source, provided an
-                             explicit name was not specified.
-
- requirements (optional)
-
- NOTE: all subclasses must have a similar signature for clone to work!
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
- assert(not isinstance(source_types, str))
- assert(not isinstance(target_types_and_names, str))
- self.id_ = id
- self.composing_ = composing
- self.source_types_ = source_types
- self.target_types_and_names_ = target_types_and_names
- self.requirements_ = requirements
-
- self.target_types_ = []
- self.name_prefix_ = []
- self.name_postfix_ = []
-
- for e in target_types_and_names:
- # Create three parallel lists: one with the list of target types,
-            # and two others with prefixes and postfixes to be added to the target
-            # name. We use parallel lists for prefix and postfix (as opposed
-            # to a mapping), because a given target type might occur several times,
- # for example "H H(%_symbols)".
- m = _re_separate_types_prefix_and_postfix.match (e)
-
- if not m:
- raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))
-
- target_type = m.group (1)
- if not target_type: target_type = ''
- prefix = m.group (3)
- if not prefix: prefix = ''
- postfix = m.group (4)
- if not postfix: postfix = ''
-
- self.target_types_.append (target_type)
- self.name_prefix_.append (prefix)
- self.name_postfix_.append (postfix)
-
- for x in self.source_types_:
- type.validate (x)
-
- for x in self.target_types_:
- type.validate (x)
-
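To make the type/name pattern handled above concrete, the following illustrative snippet applies the same regular expression to one example value ('H(%_symbols)' is just a sample pattern, not a required type):

    import re
    _pat = re.compile('([^\\(]*)(\\((.*)%(.*)\\))?')   # same regex as above
    m = _pat.match('H(%_symbols)')
    # m.group(1) == 'H'          -> the target type
    # m.group(3) == ''           -> prefix placed before the source's basename
    # m.group(4) == '_symbols'   -> postfix placed after the source's basename
    # so a source named 'foo' yields a generated target named 'foo_symbols'
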
- def clone (self, new_id, new_toolset_properties):
-        """ Returns another generator which differs from $(self) in
- - id
- - value to <toolset> feature in properties
- """
- return self.__class__ (new_id,
- self.composing_,
- self.source_types_,
- self.target_types_and_names_,
- # Note: this does not remove any subfeatures of <toolset>
- # which might cause problems
- property.change (self.requirements_, '<toolset>') + new_toolset_properties)
-
- def clone_and_change_target_type(self, base, type):
- """Creates another generator that is the same as $(self), except that
-        if 'base' is in the target types of $(self), 'type' will be in the target types
- of the new generator."""
- target_types = []
- for t in self.target_types_and_names_:
- m = _re_match_type.match(t)
- assert m
-
- if m.group(1) == base:
- if m.group(2):
- target_types.append(type + m.group(2))
- else:
- target_types.append(type)
- else:
- target_types.append(t)
-
- return self.__class__(self.id_, self.composing_,
- self.source_types_,
- target_types,
- self.requirements_)
-
-
- def id(self):
- return self.id_
-
- def source_types (self):
-        """ Returns the list of target types the generator accepts.
- """
- return self.source_types_
-
- def target_types (self):
- """ Returns the list of target types that this generator produces.
-            It is assumed to always be the same -- i.e. it cannot change depending
-            on the list of sources.
- """
- return self.target_types_
-
- def requirements (self):
- """ Returns the required properties for this generator. Properties
-            in the returned set must be present in the build properties if this
-            generator is to be used. If the result has a grist-only element,
-            the build properties must include some value of that feature.
- """
- return self.requirements_
-
- def match_rank (self, ps):
- """ Returns true if the generator can be run with the specified
- properties.
- """
- # See if generator's requirements are satisfied by
- # 'properties'. Treat a feature name in requirements
- # (i.e. grist-only element), as matching any value of the
- # feature.
- all_requirements = self.requirements ()
-
- property_requirements = []
- feature_requirements = []
-        # This uses strings because generator requirements allow
- # the '<feature>' syntax without value and regular validation
- # is not happy about that.
- for r in all_requirements:
- if get_value (r):
- property_requirements.append (r)
-
- else:
- feature_requirements.append (r)
-
- return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
- and all(ps.get(get_grist(s)) for s in feature_requirements)
-
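The distinction drawn above between a full property and a grist-only feature name can be illustrated with a small stand-alone sketch; a plain dict stands in for the property-set class, and the feature names are invented:

    # Hypothetical illustration of the matching rule used by match_rank.
    requirements = ['<toolset>gcc', '<variant>']          # value + grist-only
    build_properties = {'<toolset>': ['gcc'], '<variant>': ['debug']}

    def toy_match(requirements, props):
        for r in requirements:
            grist, _, value = r.partition('>')
            grist += '>'
            if value:                      # '<toolset>gcc': value must match
                if props.get(grist) != [value]:
                    return False
            elif not props.get(grist):     # '<variant>': any value will do
                return False
        return True

    # toy_match(requirements, build_properties) -> True
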
- def run (self, project, name, prop_set, sources):
- """ Tries to invoke this generator on the given sources. Returns a
- list of generated targets (instances of 'virtual-target').
-
- project: Project for which the targets are generated.
-
-            name:            Determines the value of the 'name' attribute for
-                             all generated targets. See the 'generated_targets' method.
-
- prop_set: Desired properties for generated targets.
-
- sources: Source targets.
- """
-
- if project.manager ().logger ().on ():
- project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
- project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
-
- if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
- raise BaseException ("Unsupported source/source_type combination")
-
- # We don't run composing generators if no name is specified. The reason
- # is that composing generator combines several targets, which can have
- # different names, and it cannot decide which name to give for produced
- # target. Therefore, the name must be passed.
- #
-        # This, in effect, means that composing generators are runnable only
-        # at the top level of the transformation graph, or if a name is passed
-        # explicitly. Thus, we disallow composing generators in the middle. For
-        # example, the transformation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE
-        # won't be allowed (the OBJ -> STATIC_LIB generator is composing).
- if not self.composing_ or name:
- return self.run_really (project, name, prop_set, sources)
- else:
- return []
-
- def run_really (self, project, name, prop_set, sources):
-
- # consumed: Targets that this generator will consume directly.
- # bypassed: Targets that can't be consumed and will be returned as-is.
-
- if self.composing_:
- (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
- else:
- (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources)
-
- result = []
- if consumed:
- result = self.construct_result (consumed, project, name, prop_set)
- result.extend (bypassed)
-
- if result:
- if project.manager ().logger ().on ():
- project.manager ().logger ().log (__name__, " SUCCESS: ", result)
-
- else:
- project.manager ().logger ().log (__name__, " FAILURE")
-
- return result
-
- def construct_result (self, consumed, project, name, prop_set):
- """ Constructs the dependency graph that will be returned by this
- generator.
-            consumed: Already prepared list of consumable targets.
-                      If the generator requires several source files, this will
-                      contain exactly len(self.source_types_) targets with
-                      matching types. Otherwise, it might contain several targets
-                      with the type of self.source_types_[0].
-            project:
-            name:
-            prop_set: Properties to be used for all actions created here
- """
- result = []
- # If this is 1->1 transformation, apply it to all consumed targets in order.
- if len (self.source_types_) < 2 and not self.composing_:
-
- for r in consumed:
- result.extend (self.generated_targets ([r], prop_set, project, name))
-
- else:
-
- if consumed:
- result.extend (self.generated_targets (consumed, prop_set, project, name))
-
- return result
-
- def determine_target_name(self, fullname):
- # Determine target name from fullname (maybe including path components)
- # Place optional prefix and postfix around basename
-
- dir = os.path.dirname(fullname)
- name = os.path.basename(fullname)
- idx = name.find(".")
- if idx != -1:
- name = name[:idx]
-
- if dir and not ".." in dir and not os.path.isabs(dir):
- # Relative path is always relative to the source
- # directory. Retain it, so that users can have files
-            # with the same name in two different subdirectories.
- name = dir + "/" + name
-
- return name
-
- def determine_output_name(self, sources):
- """Determine the name of the produced target from the
- names of the sources."""
-
-        # The simple case is when the name
-        # of the source has a single dot. Then, we take the part before the
-        # dot. Several dots can be caused by:
-        #  - Using a source file like a.host.cpp
-        #  - A type whose suffix has a dot. Say, we can have
-        #    type 'host_cpp' with extension 'host.cpp'.
-        # In the first case, we want to take the part up to the last
-        # dot. In the second case -- not sure, but for now take
-        # the part up to the last dot too.
- name = os.path.splitext(sources[0].name())[0]
-
- for s in sources[1:]:
-            n2 = os.path.splitext(s.name())[0]
- if n2 != name:
- get_manager().errors()(
- "%s: source targets have different names: cannot determine target name"
- % (self.id_))
-
- # Names of sources might include directory. We should strip it.
- return self.determine_target_name(sources[0].name())
-
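A tiny check of the splitext behaviour that the comment above relies on (illustrative only):

    import os.path
    # Only the final extension is stripped, so a multi-dotted source keeps
    # everything up to the last dot.
    assert os.path.splitext('a.host.cpp')[0] == 'a.host'
    assert os.path.splitext('foo.cpp')[0] == 'foo'
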
-
- def generated_targets (self, sources, prop_set, project, name):
- """ Constructs targets that are created after consuming 'sources'.
-        The result will be a list of virtual targets, which has the same length
-        as the 'target_types' attribute, with corresponding types.
-
- When 'name' is empty, all source targets must have the same value of
- the 'name' attribute, which will be used instead of the 'name' argument.
-
- The value of 'name' attribute for each generated target will be equal to
- the 'name' parameter if there's no name pattern for this type. Otherwise,
- the '%' symbol in the name pattern will be replaced with the 'name' parameter
- to obtain the 'name' attribute.
-
-        For example, if the target types are T1 and T2 (with name pattern "%_x"), suffixes
-        for T1 and T2 are .t1 and .t2, and the source is foo.z, then the created files would
-        be "foo.t1" and "foo_x.t2". The 'name' attribute actually determines the
-        basename of a file.
-
- Note that this pattern mechanism has nothing to do with implicit patterns
-        in make. It's a way to produce a target whose name is different from the name
-        of the source.
- """
- if not name:
- name = self.determine_output_name(sources)
-
- # Assign an action for each target
- action = self.action_class()
- a = action(project.manager(), sources, self.id_, prop_set)
-
- # Create generated target for each target type.
- targets = []
- pre = self.name_prefix_
- post = self.name_postfix_
- for t in self.target_types_:
- basename = os.path.basename(name)
- generated_name = pre[0] + basename + post[0]
- generated_name = os.path.join(os.path.dirname(name), generated_name)
- pre = pre[1:]
- post = post[1:]
-
- targets.append(virtual_target.FileTarget(generated_name, t, project, a))
-
- return [ project.manager().virtual_targets().register(t) for t in targets ]
-
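To restate the docstring's example in runnable form, here is a hypothetical sketch of the basename computation performed above for target types T1 (no pattern) and T2 with pattern '%_x' and a source 'foo.z'; the .t1/.t2 suffixes are added later by the virtual-target layer, so only basenames are computed here:

    import os.path

    name = 'foo'                 # determined from the source 'foo.z'
    prefixes = ['', '']          # nothing before '%' in either pattern
    postfixes = ['', '_x']       # '' for T1, '_x' for T2

    generated = []
    for pre, post in zip(prefixes, postfixes):
        basename = os.path.basename(name)
        generated.append(os.path.join(os.path.dirname(name), pre + basename + post))

    # generated == ['foo', 'foo_x'] -> files 'foo.t1' and 'foo_x.t2' once suffixes apply
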
- def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
- """ Attempts to convert 'source' to the types that this generator can
-            handle. The intention is to produce the set of targets that should be
-            used when the generator is run.
-            only_one:   convert 'source' to only one of the source types;
-                        if there's more than one possibility, report an
- error.
-
- Returns a pair:
- consumed: all targets that can be consumed.
- bypassed: all targets that cannot be consumed.
- """
- consumed = []
- bypassed = []
- missing_types = []
-
- if len (sources) > 1:
- # Don't know how to handle several sources yet. Just try
-            # to pass the request to another generator.
- missing_types = self.source_types_
-
- else:
- (c, m) = self.consume_directly (sources [0])
- consumed += c
- missing_types += m
-
- # No need to search for transformation if
- # some source type has consumed source and
- # no more source types are needed.
- if only_one and consumed:
- missing_types = []
-
-        # TODO: we should check that there is only one source type
-        # if 'only_one' is true.
-        # TODO: consider if the consumed/bypassed separation should
-        # be done by 'construct_types'.
-
- if missing_types:
- transformed = construct_types (project, name, missing_types, prop_set, sources)
-
- # Add targets of right type to 'consumed'. Add others to
- # 'bypassed'. The 'generators.construct' rule has done
- # its best to convert everything to the required type.
- # There's no need to rerun it on targets of different types.
-
- # NOTE: ignoring usage requirements
- for t in transformed[1]:
- if t.type() in missing_types:
- consumed.append(t)
-
- else:
- bypassed.append(t)
-
- consumed = unique(consumed)
- bypassed = unique(bypassed)
-
- # remove elements of 'bypassed' that are in 'consumed'
-
- # Suppose the target type of current generator, X is produced from
- # X_1 and X_2, which are produced from Y by one generator.
- # When creating X_1 from Y, X_2 will be added to 'bypassed'
- # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
- # But they are also in 'consumed'. We have to remove them from
- # bypassed, so that generators up the call stack don't try to convert
- # them.
-
- # In this particular case, X_1 instance in 'consumed' and X_1 instance
- # in 'bypassed' will be the same: because they have the same source and
- # action name, and 'virtual-target.register' won't allow two different
- # instances. Therefore, it's OK to use 'set.difference'.
-
- bypassed = set.difference(bypassed, consumed)
-
- return (consumed, bypassed)
-
-
- def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
- """ Converts several files to consumable types.
- """
- consumed = []
- bypassed = []
-
- # We process each source one-by-one, trying to convert it to
- # a usable type.
- for s in sources:
- # TODO: need to check for failure on each source.
- (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], True)
- if not c:
- project.manager ().logger ().log (__name__, " failed to convert ", s)
-
- consumed.extend (c)
- bypassed.extend (b)
-
- return (consumed, bypassed)
-
- def consume_directly (self, source):
- real_source_type = source.type ()
-
- # If there are no source types, we can consume anything
- source_types = self.source_types()
- if not source_types:
- source_types = [real_source_type]
-
- consumed = []
- missing_types = []
- for st in source_types:
-            # The 'source' is of the right type already.
- if real_source_type == st or type.is_derived (real_source_type, st):
- consumed.append (source)
-
- else:
- missing_types.append (st)
-
- return (consumed, missing_types)
-
- def action_class (self):
-        """ Returns the class to be used for actions. The default implementation
- returns "action".
- """
- return virtual_target.Action
-
-
-def find (id):
- """ Finds the generator with id. Returns None if not found.
- """
- return __generators.get (id, None)
-
-def register (g):
- """ Registers new generator instance 'g'.
- """
- id = g.id()
-
- __generators [id] = g
-
- # A generator can produce several targets of the
-    # same type. We want a unique occurrence of that generator
-    # in __type_to_generators[t] in that case, otherwise it will
-    # be tried twice and we'll get a false ambiguity.
- for t in sequence.unique(g.target_types()):
- __type_to_generators.setdefault(t, []).append(g)
-
- # Update the set of generators for toolset
-
-    # TODO: should we check that a generator with this id
-    # is not already registered? For example, the fop.jam
-    # module intentionally declares two generators with the
-    # same id, so such a check would break it.
-
- # Some generators have multiple periods in their name, so the
- # normal $(id:S=) won't generate the right toolset name.
- # e.g. if id = gcc.compile.c++, then
- # .generators-for-toolset.$(id:S=) will append to
- # .generators-for-toolset.gcc.compile, which is a separate
- # value from .generators-for-toolset.gcc. Correcting this
- # makes generator inheritance work properly.
- # See also inherit-generators in module toolset
- base = id.split ('.', 100) [0]
-
- __generators_for_toolset.setdefault(base, []).append(g)
-
- # After adding a new generator that can construct new target types, we need
- # to clear the related cached viable source target type information for
- # constructing a specific target type or using a specific generator. Cached
- # viable source target type lists affected by this are those containing any
- # of the target types constructed by the new generator or any of their base
- # target types.
- #
- # A more advanced alternative to clearing that cached viable source target
- # type information would be to expand it with additional source types or
- # even better - mark it as needing to be expanded on next use.
- #
- # For now we just clear all the cached viable source target type information
- # that does not simply state 'all types' and may implement a more detailed
- # algorithm later on if it becomes needed.
-
- invalidate_extendable_viable_source_target_type_cache()
-
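The multi-dot id handling described in the comment above can be checked directly (illustrative only):

    # Only the first dotted component of a generator id names the toolset.
    assert 'gcc.compile.c++'.split('.', 100)[0] == 'gcc'
    assert 'msvc.link'.split('.', 100)[0] == 'msvc'
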
-
-def register_standard (id, source_types, target_types, requirements = []):
-    """ Creates a new instance of the Generator class and registers it.
-        Returns the created instance.
- Rationale: the instance is returned so that it's possible to first register
- a generator and then call 'run' method on that generator, bypassing all
- generator selection.
- """
- g = Generator (id, False, source_types, target_types, requirements)
- register (g)
- return g
-
-def register_composing (id, source_types, target_types, requirements = []):
- g = Generator (id, True, source_types, target_types, requirements)
- register (g)
- return g
-
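A hypothetical usage sketch of the two helpers above; the toolset and generator ids are invented, and the CPP/OBJ/LIB/EXE types must already have been declared through the type module for the validation in Generator.__init__ to accept them:

    # Hypothetical usage only; normally done from a toolset module.
    #
    #   g1 = register_standard('mytool.compile', ['CPP'], ['OBJ'],
    #                          ['<toolset>mytool'])
    #   g2 = register_composing('mytool.link', ['OBJ', 'LIB'], ['EXE'],
    #                           ['<toolset>mytool'])
    #
    # generators_for_toolset('mytool') would then return [g1, g2]; a request to
    # construct an EXE could consume OBJ targets directly and convert CPP
    # sources to OBJ through g1.
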
-def generators_for_toolset (toolset):
- """ Returns all generators which belong to 'toolset'.
- """
- return __generators_for_toolset.get(toolset, [])
-
-def override (overrider_id, overridee_id):
- """Make generator 'overrider-id' be preferred to
- 'overridee-id'. If, when searching for generators
-    that could produce a target of a certain type,
-    both those generators are among the viable generators,
-    the overridden generator is immediately discarded.
-
- The overridden generators are discarded immediately
- after computing the list of viable generators, before
- running any of them."""
-
- __overrides.setdefault(overrider_id, []).append(overridee_id)
-
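A toy, self-contained illustration of the filtering described above (generator ids are invented); it mirrors the way find_viable_generators drops overridden generators before any of them are run:

    overrides = {'mytool.fancy-link': ['mytool.link']}   # overrider -> overridden

    viable = ['mytool.link', 'mytool.fancy-link']
    overridden = []
    for g in viable:
        overridden.extend(overrides.get(g, []))
    surviving = [g for g in viable if g not in overridden]
    # surviving == ['mytool.fancy-link']
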
-def __viable_source_types_real (target_type):
-    """ Returns a list of source types which can possibly be converted
-    to 'target_type' by some chain of generator invocations.
-
-    More formally, takes all generators for 'target_type' and
-    returns the union of the source types for those generators and the result
-    of calling itself recursively on those source types.
- """
- generators = []
-
- # 't0' is the initial list of target types we need to process to get a list
- # of their viable source target types. New target types will not be added to
- # this list.
- t0 = type.all_bases (target_type)
-
-
- # 't' is the list of target types which have not yet been processed to get a
- # list of their viable source target types. This list will get expanded as
- # we locate more target types to process.
- t = t0
-
- result = []
- while t:
- # Find all generators for current type.
- # Unlike 'find_viable_generators' we don't care about prop_set.
- generators = __type_to_generators.get (t [0], [])
- t = t[1:]
-
- for g in generators:
- if not g.source_types():
- # Empty source types -- everything can be accepted
- result = "*"
- # This will terminate outer loop.
- t = None
- break
-
- for source_type in g.source_types ():
- if not source_type in result:
- # If generator accepts 'source_type' it
- # will happily accept any type derived from it
- all = type.all_derived (source_type)
- for n in all:
- if not n in result:
-
- # Here there is no point in adding target types to
- # the list of types to process in case they are or
- # have already been on that list. We optimize this
- # check by realizing that we only need to avoid the
- # original target type's base types. Other target
- # types that are or have been on the list of target
- # types to process have been added to the 'result'
- # list as well and have thus already been eliminated
- # by the previous if.
- if not n in t0:
- t.append (n)
- result.append (n)
-
- return result
-
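A toy illustration of the transitive closure computed above, with invented types and without the derived-type and '*' handling of the real code: with generators CPP->OBJ and OBJ->EXE registered, both OBJ and CPP end up as viable source types for EXE, because CPP reaches EXE through OBJ.

    toy_generators = {'EXE': [['OBJ']], 'OBJ': [['CPP']]}   # target -> source lists

    def toy_viable_sources(target):
        result, queue = [], [target]
        while queue:
            t = queue.pop(0)
            for sources in toy_generators.get(t, []):
                for s in sources:
                    if s not in result:
                        result.append(s)
                        queue.append(s)
        return result

    # toy_viable_sources('EXE') == ['OBJ', 'CPP']
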
-
-def viable_source_types (target_type):
- """ Helper rule, caches the result of '__viable_source_types_real'.
- """
- if not __viable_source_types_cache.has_key(target_type):
- __vst_cached_types.append(target_type)
- __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
- return __viable_source_types_cache [target_type]
-
-def viable_source_types_for_generator_real (generator):
- """ Returns the list of source types, which, when passed to 'run'
-        method of 'generator', has some chance of being eventually used
- (probably after conversion by other generators)
- """
- source_types = generator.source_types ()
-
- if not source_types:
- # If generator does not specify any source types,
- # it might be special generator like builtin.lib-generator
- # which just relays to other generators. Return '*' to
- # indicate that any source type is possibly OK, since we don't
- # know for sure.
- return ['*']
-
- else:
- result = []
- for s in source_types:
- viable_sources = viable_source_types(s)
- if viable_sources == "*":
- result = ["*"]
- break
- else:
- result.extend(type.all_derived(s) + viable_sources)
- return unique(result)
-
-def viable_source_types_for_generator (generator):
-    """ Caches the result of 'viable_source_types_for_generator_real'.
- """
- if not __viable_source_types_cache.has_key(generator):
- __vstg_cached_generators.append(generator)
- __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
-
- return __viable_source_types_cache[generator]
-
-def try_one_generator_really (project, name, generator, target_type, properties, sources):
- """ Returns usage requirements + list of created targets.
- """
- targets = generator.run (project, name, properties, sources)
-
- usage_requirements = []
- success = False
-
- dout("returned " + str(targets))
-
- if targets:
-        success = True
-
- if isinstance (targets[0], property_set.PropertySet):
- usage_requirements = targets [0]
-            targets = targets[1:]
-
- else:
- usage_requirements = property_set.empty ()
-
- dout( " generator" + generator.id() + " spawned ")
- # generators.dout [ indent ] " " $(targets) ;
-# if $(usage-requirements)
-# {
-# generators.dout [ indent ] " with usage requirements:" $(x) ;
-# }
-
- if success:
- return (usage_requirements, targets)
- else:
- return None
-
-def try_one_generator (project, name, generator, target_type, properties, sources):
- """ Checks if generator invocation can be pruned, because it's guaranteed
-        to fail. If so, quickly returns an empty list. Otherwise, calls
- try_one_generator_really.
- """
- source_types = []
-
- for s in sources:
- source_types.append (s.type ())
-
- viable_source_types = viable_source_types_for_generator (generator)
-
- if source_types and viable_source_types != ['*'] and\
- not set.intersection (source_types, viable_source_types):
- if project.manager ().logger ().on ():
- id = generator.id ()
- project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
-            project.manager ().logger ().log (__name__, "source_types '%s'" % source_types)
- project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
-
- return []
-
- else:
- return try_one_generator_really (project, name, generator, target_type, properties, sources)
-
-
-def construct_types (project, name, target_types, prop_set, sources):
-
- result = []
- usage_requirements = property_set.empty()
-
- for t in target_types:
- r = construct (project, name, t, prop_set, sources)
-
- if r:
- (ur, targets) = r
- usage_requirements = usage_requirements.add(ur)
- result.extend(targets)
-
- # TODO: have to introduce parameter controlling if
- # several types can be matched and add appropriate
- # checks
-
- # TODO: need to review the documentation for
- # 'construct' to see if it should return $(source) even
- # if nothing can be done with it. Currents docs seem to
- # imply that, contrary to the behaviour.
- if result:
- return (usage_requirements, result)
-
- else:
- return (usage_requirements, sources)
-
-def __ensure_type (targets):
-    """ Ensures all 'targets' have types. If this is not so, exits with an
-        error.
- """
- for t in targets:
- if not t.type ():
- get_manager().errors()("target '%s' has no type" % str (t))
-
-def find_viable_generators_aux (target_type, prop_set):
-    """ Returns generators which can be used to construct a target of the specified type
-    with the specified properties. Uses the following algorithm:
-    - iterates over the requested target_type and all its bases (in the order returned by
-    type.all_bases).
-    - for each type, finds all generators that generate that type and whose requirements
-    are satisfied by the properties.
-    - if the set of generators is not empty, returns that set.
-
-    Note: this algorithm explicitly ignores generators for base classes if there's
-    at least one generator for the requested target_type.
- """
- # Select generators that can create the required target type.
- viable_generators = []
- initial_generators = []
-
- import type
-
- # Try all-type generators first. Assume they have
- # quite specific requirements.
- all_bases = type.all_bases(target_type)
-
- for t in all_bases:
-
- initial_generators = __type_to_generators.get(t, [])
-
- if initial_generators:
- dout("there are generators for this type")
- if t != target_type:
-                # We are here when no generators for target-type were found,
- # but there are some generators for a base type.
- # We'll try to use them, but they will produce targets of
- # base type, not of 'target-type'. So, we clone the generators
- # and modify the list of target types.
- generators2 = []
- for g in initial_generators[:]:
- # generators.register adds generator to the list of generators
- # for toolsets, which is a bit strange, but should work.
-                    # That list is only used when inheriting a toolset, which
-                    # should have been done before generators are run.
- ng = g.clone_and_change_target_type(t, target_type)
- generators2.append(ng)
- register(ng)
-
- initial_generators = generators2
- break
-
- for g in initial_generators:
- dout("trying generator " + g.id()
- + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
-
- m = g.match_rank(prop_set)
- if m:
- dout(" is viable")
- viable_generators.append(g)
-
- return viable_generators
-
-def find_viable_generators (target_type, prop_set):
- key = target_type + '.' + str (prop_set)
-
- l = __viable_generators_cache.get (key, None)
- if not l:
- l = []
-
- if not l:
- l = find_viable_generators_aux (target_type, prop_set)
-
- __viable_generators_cache [key] = l
-
- viable_generators = []
- for g in l:
- # Avoid trying the same generator twice on different levels.
- # TODO: is this really used?
- if not g in __active_generators:
- viable_generators.append (g)
- else:
- dout(" generator %s is active, discarding" % g.id())
-
- # Generators which override 'all'.
- all_overrides = []
-
-    # Generators which are overridden
- overriden_ids = []
-
- for g in viable_generators:
- id = g.id ()
-
- this_overrides = __overrides.get (id, [])
-
- if this_overrides:
- overriden_ids.extend (this_overrides)
- if 'all' in this_overrides:
- all_overrides.append (g)
-
- if all_overrides:
- viable_generators = all_overrides
-
- return [g for g in viable_generators if not g.id() in overriden_ids]
-
-def __construct_really (project, name, target_type, prop_set, sources):
- """ Attempts to construct target by finding viable generators, running them
- and selecting the dependency graph.
- """
- viable_generators = find_viable_generators (target_type, prop_set)
-
- result = []
-
- dout(" *** %d viable generators" % len (viable_generators))
-
- generators_that_succeeded = []
-
- for g in viable_generators:
- __active_generators.append(g)
- r = try_one_generator (project, name, g, target_type, prop_set, sources)
- del __active_generators[-1]
-
- if r:
- generators_that_succeeded.append(g)
- if result:
- output = cStringIO.StringIO()
- print >>output, "ambiguity found when searching for best transformation"
- print >>output, "Trying to produce type '%s' from: " % (target_type)
- for s in sources:
- print >>output, " - " + s.str()
- print >>output, "Generators that succeeded:"
- for g in generators_that_succeeded:
- print >>output, " - " + g.id()
- print >>output, "First generator produced: "
- for t in result[1:]:
- print >>output, " - " + str(t)
- print >>output, "Second generator produced:"
- for t in r[1:]:
- print >>output, " - " + str(t)
- get_manager().errors()(output.getvalue())
- else:
-                result = r
-
-    return result
-
-
-def construct (project, name, target_type, prop_set, sources, top_level=False):
-    """ Attempts to create a target of 'target-type' with 'properties'
-        from 'sources'. The 'sources' are treated as a collection of
-        *possible* ingredients -- i.e. it is not required to consume
-        them all.
-
-        Returns a list of targets. When this invocation is the first instance of
-        'construct' on the stack, returns only targets of the requested 'target-type';
-        otherwise, it also returns unused sources and additionally generated
-        targets.
-
- If 'top-level' is set, does not suppress generators that are already
- used in the stack. This may be useful in cases where a generator
- has to build a metatarget -- for example a target corresponding to
- built tool.
- """
-
- global __active_generators
- if top_level:
- saved_active = __active_generators
- __active_generators = []
-
- global __construct_stack
- if not __construct_stack:
- __ensure_type (sources)
-
- __construct_stack.append (1)
-
- if project.manager().logger().on():
- increase_indent ()
-
- dout( "*** construct " + target_type)
-
- for s in sources:
- dout(" from " + str(s))
-
- project.manager().logger().log (__name__, " properties: ", prop_set.raw ())
-
- result = __construct_really(project, name, target_type, prop_set, sources)
-
- project.manager().logger().decrease_indent()
-
- __construct_stack = __construct_stack [1:]
-
- if top_level:
- __active_generators = saved_active
-
- return result
-
-def add_usage_requirements (result, raw_properties):
- if result:
- if isinstance (result[0], property_set.PropertySet):
- return (result[0].add_raw(raw_properties), result[1])
- else:
-            return (property_set.create(raw_properties), result)
- #if [ class.is-a $(result[1]) : property-set ]
- #{
- # return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
- #}
- #else
- #{
- # return [ property-set.create $(raw-properties) ] $(result) ;
- #}
diff --git a/tools/build/v2/build/modifiers.jam b/tools/build/v2/build/modifiers.jam
deleted file mode 100644
index 6b00934333..0000000000
--- a/tools/build/v2/build/modifiers.jam
+++ /dev/null
@@ -1,232 +0,0 @@
-# Copyright 2003 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Modifiers are generalized generators that mutate targets in specific ways.
-# This structure allows for grouping a variety of functionality in an
-# orthogonal way to the functionality in toolsets, and without specifying
-# more target variations. In turn the modifiers can be used as building
-# blocks to implement simple requests, like the <version> feature.
-
-import modules ;
-import feature ;
-import errors ;
-import type ;
-import "class" : new ;
-import generators ;
-import property ;
-import virtual-target ;
-import numbers ;
-import sequence ;
-import symlink ;
-import property-set ;
-
-# Base generator for creating targets that are modifications of existing
-# targets.
-#
-class modifier : generator
-{
- rule __init__ (
- id
- composing ?
- : source-types *
- : target-types-and-names +
- : requirements *
- )
- {
- generator.__init__ $(id) $(composing)
- : $(source-types)
- : $(target-types-and-names)
- : $(requirements) ;
-
- self.targets-in-progress = ;
- }
-
- # Wraps the generation of the target to call before and after rules to
- # affect the real target.
- #
- rule run ( project name ? : property-set : sources + )
- {
- local result ;
- local current-target = $(project)^$(name) ;
- if ! $(current-target) in $(self.targets-in-progress)
- {
- # Before modifications...
- local project_ =
- [ modify-project-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- local name_ =
- [ modify-name-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- local property-set_ =
- [ modify-properties-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- local sources_ =
- [ modify-sources-before
- $(project) $(name) : $(property-set) : $(sources) ] ;
- project = $(project_) ;
- name = $(name_) ;
- property-set = $(property-set_) ;
- sources = $(sources_) ;
-
- # Generate the real target...
- local target-type-p =
- [ property.select <main-target-type> : [ $(property-set).raw ] ] ;
- self.targets-in-progress += $(current-target) ;
- result =
- [ generators.construct $(project) $(name)
- : $(target-type-p:G=)
- : $(property-set)
- : $(sources) ] ;
- self.targets-in-progress = $(self.targets-in-progress[1--2]) ;
-
- # After modifications...
- result =
- [ modify-target-after $(result)
- : $(project) $(name)
- : $(property-set)
- : $(sources) ] ;
- }
- return $(result) ;
- }
-
- rule modify-project-before ( project name ? : property-set : sources + )
- {
- return $(project) ;
- }
-
- rule modify-name-before ( project name ? : property-set : sources + )
- {
- return $(name) ;
- }
-
- rule modify-properties-before ( project name ? : property-set : sources + )
- {
- return $(property-set) ;
- }
-
- rule modify-sources-before ( project name ? : property-set : sources + )
- {
- return $(sources) ;
- }
-
- rule modify-target-after ( target : project name ? : property-set : sources + )
- {
- return $(target) ;
- }
-
- # Utility, clones a file-target with optional changes to the name, type and
- # project of the target.
- # NOTE: This functionality should be moved, and generalized, to
- # virtual-targets.
- #
- rule clone-file-target ( target : new-name ? : new-type ? : new-project ? )
- {
-        # Need a MUCH better way to clone a target...
- new-name ?= [ $(target).name ] ;
- new-type ?= [ $(target).type ] ;
- new-project ?= [ $(target).project ] ;
- local result = [ new file-target $(new-name) : $(new-type) : $(new-project) ] ;
-
- if [ $(target).dependencies ] { $(result).depends [ $(target).dependencies ] ; }
- $(result).root [ $(target).root ] ;
- $(result).set-usage-requirements [ $(target).usage-requirements ] ;
-
- local action = [ $(target).action ] ;
- local action-class = [ modules.peek $(action) : __class__ ] ;
-
- local ps = [ $(action).properties ] ;
- local cloned-action = [ new $(action-class) $(result) :
- [ $(action).sources ] : [ $(action).action-name ] : $(ps) ] ;
- $(result).action $(cloned-action) ;
-
- return $(result) ;
- }
-}
-
-
-# A modifier that changes the name of a target, after it's generated, given a
-# regular expression to split the name, and a set of tokens to insert between the
-# split tokens of the name. This also exposes the target for other uses with a
-# symlink to the original name (optionally).
-#
-class name-modifier : modifier
-{
- rule __init__ ( )
- {
- # Apply ourselves to EXE targets, for now.
- modifier.__init__ name.modifier : : EXE LIB : <name-modify>yes ;
- }
-
- # Modifies the name, by cloning the target with the new name.
- #
- rule modify-target-after ( target : project name ? : property-set : sources + )
- {
- local result = $(target) ;
-
- local name-mod-p = [ property.select <name-modifier> : [ $(property-set).raw ] ] ;
- if $(name-mod-p)
- {
- local new-name = [ modify-name [ $(target).name ] : $(name-mod-p:G=) ] ;
- if $(new-name) != [ $(target).name ]
- {
- result = [ clone-file-target $(target) : $(new-name) ] ;
- }
- local expose-original-as-symlink = [ MATCH "<symlink>(.*)" : $(name-mod-p) ] ;
- if $(expose-original-as-symlink)
- {
- local symlink-t = [ new symlink-targets $(project) : $(name) : [ $(result).name ] ] ;
- result = [ $(symlink-t).construct $(result)
- : [ property-set.create [ $(property-set).raw ] <symlink-location>build-relative ] ] ;
- }
- }
-
- return $(result) ;
- }
-
- # Do the transformation of the name.
- #
- rule modify-name ( name : modifier-spec + )
- {
- local match = [ MATCH "<match>(.*)" : $(modifier-spec) ] ;
- local name-parts = [ MATCH $(match) : $(name) ] ;
- local insertions = [ sequence.insertion-sort [ MATCH "(<[0123456789]+>.*)" : $(modifier-spec) ] ] ;
- local new-name-parts ;
- local insert-position = 1 ;
- while $(insertions)
- {
- local insertion = [ MATCH "<$(insert-position)>(.*)" : $(insertions[1]) ] ;
- if $(insertion)
- {
- new-name-parts += $(insertion) ;
- insertions = $(insertions[2-]) ;
- }
- new-name-parts += $(name-parts[1]) ;
- name-parts = $(name-parts[2-]) ;
- insert-position = [ numbers.increment $(insert-position) ] ;
- }
- new-name-parts += $(name-parts) ;
- return [ sequence.join $(new-name-parts) ] ;
- }
-
- rule optional-properties ( )
- {
- return <name-modify>yes ;
- }
-}
-feature.feature name-modifier : : free ;
-feature.feature name-modify : no yes : incidental optional ;
-generators.register [ new name-modifier ] ;
-
-# Translates <version> property to a set of modification properties
-# that are applied by the name-modifier, and symlink-modifier.
-#
-rule version-to-modifier ( property : properties * )
-{
- return
- <name-modify>yes
- <name-modifier><match>"^([^.]*)(.*)" <name-modifier><2>.$(property:G=)
- <name-modifier><symlink>yes
- ;
-}
-feature.action <version> : version-to-modifier ;
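To see what the properties returned above do in practice, here is a hedged Python re-statement of the name transformation that name-modifier.modify-name performs with them (illustrative only; the real logic is the Jam rule above). With <version>1.2.3 and a target named 'libfoo.so', the "^([^.]*)(.*)" match splits the name into 'libfoo' and '.so', and the '<2>' insertion places '.1.2.3' between them.

    import re

    def toy_modify_name(name, version):
        m = re.match('^([^.]*)(.*)', name)
        return m.group(1) + '.' + version + m.group(2)

    # toy_modify_name('libfoo.so', '1.2.3') == 'libfoo.1.2.3.so'
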
diff --git a/tools/build/v2/build/project.jam b/tools/build/v2/build/project.jam
deleted file mode 100644
index 0fae4d2004..0000000000
--- a/tools/build/v2/build/project.jam
+++ /dev/null
@@ -1,1121 +0,0 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements project representation and loading. Each project is represented by:
-# - a module where all the Jamfile content lives.
-# - an instance of 'project-attributes' class.
-# (given a module name, can be obtained using the 'attributes' rule)
-# - an instance of 'project-target' class (from targets.jam)
-# (given a module name, can be obtained using the 'target' rule)
-#
-# Typically, projects are created as a result of loading a Jamfile, which is done
-# by the rules 'load' and 'initialize', below. First, the module for the Jamfile is
-# loaded and a new project-attributes instance is created. Some rules necessary for
-# the project are added to the module (see the 'project-rules' module) at the bottom
-# of this file. Default project attributes are set (inheriting the attributes of the
-# parent project, if it exists). After that the Jamfile is read. It can declare its own
-# attributes using the 'project' rule which will be combined with any already
-# set attributes.
-#
-# The 'project' rule can also declare a project id which will be associated with
-# the project module.
-#
-# There can also be 'standalone' projects. They are created by calling
-# 'initialize' on an arbitrary module and not specifying their location. After
-# the call, the module can call the 'project' rule, declare main targets and
-# behave as a regular project except that, since it is not associated with any
-# location, it should not declare targets that are not prebuilt.
-#
-# The list of all loaded Jamfiles is stored in the .project-locations variable.
-# It is possible to obtain a module name for a location using the 'module-name'
-# rule. Standalone projects are not recorded and can only be referenced using
-# their project id.
-
-import "class" : new ;
-import errors ;
-import modules ;
-import path ;
-import print ;
-import property-set ;
-import sequence ;
-
-
-# Loads the Jamfile at the given location. After loading, project global file
-# and Jamfiles needed by the requested one will be loaded recursively. If the
-# Jamfile at that location is loaded already, does nothing. Returns the project
-# module for the Jamfile.
-#
-rule load ( jamfile-location )
-{
- if --debug-loading in [ modules.peek : ARGV ]
- {
- ECHO "Loading Jamfile at" '$(jamfile-location)' ;
- }
-
- local module-name = [ module-name $(jamfile-location) ] ;
- # If Jamfile is already loaded, don't try again.
- if ! $(module-name) in $(.jamfile-modules)
- {
- load-jamfile $(jamfile-location) : $(module-name) ;
-
-        # We want to make sure that child projects are loaded only after their
-        # parent projects. In particular, because parent projects define
-        # attributes which are inherited by children, we don't want children to
-        # be loaded before the parent has defined everything.
- #
- # While "build-project" and "use-project" can potentially refer to child
- # projects from parent projects, we don't immediately load child
- # projects when seeing those attributes. Instead, we record the minimal
- # information to be used only later.
- load-used-projects $(module-name) ;
- }
- return $(module-name) ;
-}
-
-
-rule load-used-projects ( module-name )
-{
- local used = [ modules.peek $(module-name) : .used-projects ] ;
- local location = [ attribute $(module-name) location ] ;
- import project ;
- while $(used)
- {
- local id = $(used[1]) ;
- local where = $(used[2]) ;
-
- project.use $(id) : [ path.root [ path.make $(where) ] $(location) ] ;
- used = $(used[3-]) ;
- }
-}
-
-
-# Note the use of character groups, as opposed to listing 'Jamroot' and
-# 'jamroot'. With the latter, we would get duplicate matches on Windows and
-# would have to eliminate duplicates.
-JAMROOT ?= [ modules.peek : JAMROOT ] ;
-JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
-
-
-# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
-#
-rule load-parent ( location )
-{
- local found = [ path.glob-in-parents $(location) : $(JAMROOT) $(JAMFILE) ] ;
-
- if ! $(found)
- {
- ECHO error: Could not find parent for project at '$(location)' ;
- EXIT error: Did not find Jamfile.jam or Jamroot.jam in any parent
- directory. ;
- }
-
- return [ load $(found[1]:D) ] ;
-}
-
-
-# Makes the specified 'module' act as if it were a regularly loaded Jamfile at
-# 'location'. Reports an error if a Jamfile has already been loaded for that
-# location.
-#
-rule act-as-jamfile ( module : location )
-{
- if [ module-name $(location) ] in $(.jamfile-modules)
- {
- errors.error "Jamfile was already loaded for '$(location)'" ;
- }
- # Set up non-default mapping from location to module.
- .module.$(location) = $(module) ;
-
- # Add the location to the list of project locations so that we don't try to
- # reload the same Jamfile in the future.
- .jamfile-modules += [ module-name $(location) ] ;
-
- initialize $(module) : $(location) ;
-}
-
-
-# Returns the project module corresponding to the given project-id or plain
-# directory name. Returns nothing if such a project can not be found.
-#
-rule find ( name : current-location )
-{
- local project-module ;
-
- # Try interpreting name as project id.
- if [ path.is-rooted $(name) ]
- {
- project-module = $($(name).jamfile-module) ;
- }
-
- if ! $(project-module)
- {
- local location = [ path.root [ path.make $(name) ] $(current-location) ]
- ;
-
- # If no project is registered for the given location, try to load it.
- # First see if we have a Jamfile. If not, then see if we might have a
- # project root willing to act as a Jamfile. In that case, project root
- # must be placed in the directory referred to by the id.
-
- project-module = [ module-name $(location) ] ;
- if ! $(project-module) in $(.jamfile-modules)
- {
- if [ path.glob $(location) : $(JAMROOT) $(JAMFILE) ]
- {
- project-module = [ load $(location) ] ;
- }
- else
- {
- project-module = ;
- }
- }
- }
-
- return $(project-module) ;
-}
-
-
-# Returns the name of the module corresponding to 'jamfile-location'. If no
-# module corresponds to that location yet, associates the default module name
-# with that location.
-#
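-# For example (the path is hypothetical), [ module-name libs/regex/build ]
-# first roots the location at the current directory and then yields a module
-# name of the form Jamfile</absolute/path/to/libs/regex/build>.
-#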
-rule module-name ( jamfile-location )
-{
- if ! $(.module.$(jamfile-location))
- {
- # Root the path, so that locations are always unambiguous. Without this,
- # we can't decide if '../../exe/program1' and '.' are the same paths.
- jamfile-location = [ path.root $(jamfile-location) [ path.pwd ] ] ;
- .module.$(jamfile-location) = Jamfile<$(jamfile-location)> ;
- }
- return $(.module.$(jamfile-location)) ;
-}
-
-
-# Default patterns to search for the Jamfiles to use for build declarations.
-#
-JAMFILE = [ modules.peek : JAMFILE ] ;
-JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
-
-
-# Find the Jamfile at the given location. This returns the exact names of all
-# the Jamfiles in the given directory. The optional parent-root argument causes
-# this to search not the given directory but the ones above it up to the
-# directory given in it.
-#
-rule find-jamfile (
- dir # The directory (or directories) in which to look for a Jamfile.
- parent-root ? # Optional flag; if set, search the parent directories instead.
- : no-errors ?
- )
-{
- # Glob for all the possible Jamfiles according to the match pattern.
- #
- local jamfile-glob = ;
- if $(parent-root)
- {
- if ! $(.parent-jamfile.$(dir))
- {
- .parent-jamfile.$(dir) = [ path.glob-in-parents $(dir) : $(JAMFILE)
- ] ;
- }
- jamfile-glob = $(.parent-jamfile.$(dir)) ;
- }
- else
- {
- if ! $(.jamfile.$(dir))
- {
- .jamfile.$(dir) = [ path.glob $(dir) : $(JAMFILE) ] ;
- }
- jamfile-glob = $(.jamfile.$(dir)) ;
-
- }
-
- local jamfile-to-load = $(jamfile-glob) ;
- # Multiple Jamfiles found in the same place. Warn about this and ensure we
- # use only one of them. As a temporary convenience measure, if there is a
- # Jamfile.v2 among the found files, suppress the warning and use it.
- #
- if $(jamfile-to-load[2-])
- {
- local v2-jamfiles = [ MATCH (.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam) : $(jamfile-to-load) ] ;
-
- if $(v2-jamfiles) && ! $(v2-jamfiles[2])
- {
- jamfile-to-load = $(v2-jamfiles) ;
- }
- else
- {
- local jamfile = [ path.basename $(jamfile-to-load[1]) ] ;
- ECHO "warning: Found multiple Jamfiles at '"$(dir)"'!"
- "Loading the first one: '$(jamfile)'." ;
- }
-
- jamfile-to-load = $(jamfile-to-load[1]) ;
- }
-
- # Could not find it, error.
- #
- if ! $(no-errors) && ! $(jamfile-to-load)
- {
- errors.error Unable to load Jamfile.
- : Could not find a Jamfile in directory '$(dir)'.
- : Attempted to find it with pattern '"$(JAMFILE:J= )"'.
- : Please consult the documentation at 'http://www.boost.org'. ;
- }
-
- return $(jamfile-to-load) ;
-}
-
-
-# Load a Jamfile at the given directory. Returns nothing. Will attempt to load
-# the file as indicated by the JAMFILE patterns. Effect of calling this rule
-# twice with the same 'dir' is undefined.
-#
-local rule load-jamfile (
- dir # The directory of the project Jamfile.
- : jamfile-module
- )
-{
- # See if the Jamfile is where it should be.
- #
- local jamfile-to-load = [ path.glob $(dir) : $(JAMROOT) ] ;
- if ! $(jamfile-to-load)
- {
- jamfile-to-load = [ find-jamfile $(dir) ] ;
- }
-
- if $(jamfile-to-load[2])
- {
- errors.error "Multiple Jamfiles found at '$(dir)'"
- : "Filenames are: " $(jamfile-to-load:D=) ;
- }
-
- # Now load the Jamfile in its own context.
- # The call to 'initialize' may load the parent Jamfile, which might have
- # a 'use-project' statement that causes a second attempt to load the
- # same project we are loading now. Checking inside .jamfile-modules
- # prevents that second attempt from messing things up.
- if ! $(jamfile-module) in $(.jamfile-modules)
- {
-
- # Initialize the Jamfile module before loading.
- #
- initialize $(jamfile-module) : [ path.parent $(jamfile-to-load) ]
- : $(jamfile-to-load:BS) ;
-
- if ! $(jamfile-module) in $(.jamfile-modules)
- {
- .jamfile-modules += $(jamfile-module) ;
-
- local saved-project = $(.current-project) ;
-
- mark-as-user $(jamfile-module) ;
- modules.load $(jamfile-module) : [ path.native $(jamfile-to-load) ] : . ;
- if [ MATCH ($(JAMROOT)) : $(jamfile-to-load:BS) ]
- {
- jamfile = [ find-jamfile $(dir) : no-errors ] ;
- if $(jamfile)
- {
- load-aux $(jamfile-module) : [ path.native $(jamfile) ] ;
- }
- }
-
- # Now do some checks.
- if $(.current-project) != $(saved-project)
- {
- errors.error "The value of the .current-project variable has magically"
- : "changed after loading a Jamfile. This means some of the targets"
- : "might be defined in the wrong project."
- : "after loading" $(jamfile-module)
- : "expected value" $(saved-project)
- : "actual value" $(.current-project) ;
- }
-
- if $(.global-build-dir)
- {
- local id = [ attribute $(jamfile-module) id ] ;
- local project-root = [ attribute $(jamfile-module) project-root ] ;
- local location = [ attribute $(jamfile-module) location ] ;
-
- if $(location) && $(project-root) = $(dir)
- {
- # This is Jamroot.
- if ! $(id)
- {
- ECHO "warning: the --build-dir option was specified" ;
- ECHO "warning: but Jamroot at '$(dir)'" ;
- ECHO "warning: specified no project id" ;
- ECHO "warning: the --build-dir option will be ignored" ;
- }
- }
- }
- }
- }
-}
-
-
-rule mark-as-user ( module-name )
-{
- if USER_MODULE in [ RULENAMES ]
- {
- USER_MODULE $(module-name) ;
- }
-}
-
-
-rule load-aux ( module-name : file )
-{
- mark-as-user $(module-name) ;
-
- module $(module-name)
- {
- include $(2) ;
- local rules = [ RULENAMES $(1) ] ;
- IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
- }
-}
-
-
-.global-build-dir = [ MATCH --build-dir=(.*) : [ modules.peek : ARGV ] ] ;
-if $(.global-build-dir)
-{
- # If the option is specified several times, take the last value.
- .global-build-dir = [ path.make $(.global-build-dir[-1]) ] ;
-}
-
-
-# Initialize the module for a project.
-#
-rule initialize (
- module-name # The name of the project module.
- : location ? # The location (directory) of the project to initialize. If
- # not specified, a standalone project will be initialized.
- : basename ?
- )
-{
- if --debug-loading in [ modules.peek : ARGV ]
- {
- ECHO "Initializing project '$(module-name)'" ;
- }
-
- local jamroot ;
-
- local parent-module ;
- if $(module-name) = test-config
- {
- # No parent.
- }
- else if $(module-name) = site-config
- {
- parent-module = test-config ;
- }
- else if $(module-name) = user-config
- {
- parent-module = site-config ;
- }
- else if $(module-name) = project-config
- {
- parent-module = user-config ;
- }
- else
- {
- # We search for parent/project-root only if a Jamfile was specified, i.e.
- # if the project is not standalone.
- if $(location) && ! [ MATCH ($(JAMROOT)) : $(basename) ]
- {
- parent-module = [ load-parent $(location) ] ;
- }
- else
- {
- # It is either a Jamroot or a standalone project. If it is a Jamroot,
- # inherit from user-config.
- if $(location)
- {
- # If the project-config module exists, inherit from it.
- if $(project-config.attributes)
- {
- parent-module = project-config ;
- }
- else
- {
- parent-module = user-config ;
- }
- jamroot = true ;
- }
- }
- }
-
- # TODO: need to consider if standalone projects can do anything but define
- # prebuilt targets. If so, we need to give them a more sensible "location", so
- # that source paths are correct.
- location ?= "" ;
- # Create the module for the Jamfile first.
- module $(module-name)
- {
- }
-
- # load-parent can end up loading this module again.
- # Make sure this isn't duplicated.
- if ! $($(module-name).attributes) {
-
- $(module-name).attributes = [ new project-attributes $(location)
- $(module-name) ] ;
- local attributes = $($(module-name).attributes) ;
-
- if $(location)
- {
- $(attributes).set source-location : [ path.make $(location) ] : exact ;
- }
- else if ! $(module-name) in test-config site-config user-config project-config
- {
- # This is a standalone project with known location. Set source location
- # so that it can declare targets. This is intended so that you can put
- # a .jam file in your sources and use it via 'using'. Standard modules
- # (in 'tools' subdir) may not assume source dir is set.
- local s = [ modules.binding $(module-name) ] ;
- if ! $(s)
- {
- errors.error "Could not determine project location $(module-name)" ;
- }
- $(attributes).set source-location : $(s:D) : exact ;
- }
-
- $(attributes).set requirements : [ property-set.empty ] : exact ;
- $(attributes).set usage-requirements : [ property-set.empty ] : exact ;
-
- # Import rules common to all project modules from project-rules module,
- # defined at the end of this file.
- local rules = [ RULENAMES project-rules ] ;
- IMPORT project-rules : $(rules) : $(module-name) : $(rules) ;
-
- if $(parent-module)
- {
- inherit-attributes $(module-name) : $(parent-module) ;
- $(attributes).set parent-module : $(parent-module) : exact ;
- }
-
- if $(jamroot)
- {
- $(attributes).set project-root : $(location) : exact ;
- }
-
- local parent ;
- if $(parent-module)
- {
- parent = [ target $(parent-module) ] ;
- }
-
- if ! $(.target.$(module-name))
- {
- .target.$(module-name) = [ new project-target $(module-name)
- : $(module-name) $(parent)
- : [ attribute $(module-name) requirements ] ] ;
-
- if --debug-loading in [ modules.peek : ARGV ]
- {
- ECHO "Assigned project target" $(.target.$(module-name))
- "to '$(module-name)'" ;
- }
- }
- }
-
- .current-project = [ target $(module-name) ] ;
-}
-
-
-# Make 'project-module' inherit attributes of project root and parent module.
-#
-rule inherit-attributes ( project-module : parent-module )
-{
- local attributes = $($(project-module).attributes) ;
- local pattributes = [ attributes $(parent-module) ] ;
- # The parent module might be a locationless configuration module.
- if [ modules.binding $(parent-module) ]
- {
- $(attributes).set parent : [ path.parent
- [ path.make [ modules.binding $(parent-module) ] ] ] ;
- }
- local v = [ $(pattributes).get project-root ] ;
- $(attributes).set project-root : $(v) : exact ;
- $(attributes).set default-build
- : [ $(pattributes).get default-build ] ;
- $(attributes).set requirements
- : [ $(pattributes).get requirements ] : exact ;
- $(attributes).set usage-requirements
- : [ $(pattributes).get usage-requirements ] : exact ;
-
- local parent-build-dir = [ $(pattributes).get build-dir ] ;
- if $(parent-build-dir)
- {
- # We have to compute the relative path from the parent dir to our dir.
- # Convert both paths to absolute, since we cannot find a relative path
- # from ".." to ".".
-
- local location = [ attribute $(project-module) location ] ;
- local parent-location = [ attribute $(parent-module) location ] ;
-
- local pwd = [ path.pwd ] ;
- local parent-dir = [ path.root $(parent-location) $(pwd) ] ;
- local our-dir = [ path.root $(location) $(pwd) ] ;
- $(attributes).set build-dir : [ path.join $(parent-build-dir)
- [ path.relative $(our-dir) $(parent-dir) ] ] : exact ;
- }
-}
-
-
-# Associate the given id with the given project module.
-#
-rule register-id ( id : module )
-{
- $(id).jamfile-module = $(module) ;
-}
-
-
-# Class keeping all the attributes of a project.
-#
-# The standard attributes are "id", "location", "project-root", "parent",
-# "requirements", "default-build", "source-location" and "projects-to-build".
-#
-class project-attributes
-{
- import property ;
- import property-set ;
- import errors ;
- import path ;
- import print ;
- import sequence ;
- import project ;
-
- rule __init__ ( location project-module )
- {
- self.location = $(location) ;
- self.project-module = $(project-module) ;
- }
-
- # Set the named attribute from the specification given by the user. The
- # value actually set may be different.
- #
- rule set ( attribute : specification *
- : exact ? # Sets value from 'specification' without any processing.
- )
- {
- if $(exact)
- {
- self.$(attribute) = $(specification) ;
- }
- else if $(attribute) = "requirements"
- {
- local result = [ property-set.refine-from-user-input
- $(self.requirements) : $(specification)
- : $(self.project-module) : $(self.location) ] ;
-
- if $(result[1]) = "@error"
- {
- errors.error Requirements for project at '$(self.location)'
- conflict with parent's. : Explanation: $(result[2-]) ;
- }
- else
- {
- self.requirements = $(result) ;
- }
- }
- else if $(attribute) = "usage-requirements"
- {
- local unconditional ;
- for local p in $(specification)
- {
- local split = [ property.split-conditional $(p) ] ;
- split ?= nothing $(p) ;
- unconditional += $(split[2]) ;
- }
-
- local non-free = [ property.remove free : $(unconditional) ] ;
- if $(non-free)
- {
- errors.error usage-requirements $(specification) have non-free
- properties $(non-free) ;
- }
- local t = [ property.translate-paths $(specification)
- : $(self.location) ] ;
- if $(self.usage-requirements)
- {
- self.usage-requirements = [ property-set.create
- [ $(self.usage-requirements).raw ] $(t) ] ;
- }
- else
- {
- self.usage-requirements = [ property-set.create $(t) ] ;
- }
- }
- else if $(attribute) = "default-build"
- {
- self.default-build = [ property.make $(specification) ] ;
- }
- else if $(attribute) = "source-location"
- {
- self.source-location = ;
- for local src-path in $(specification)
- {
- self.source-location += [ path.root [ path.make $(src-path) ]
- $(self.location) ] ;
- }
- }
- else if $(attribute) = "build-dir"
- {
- self.build-dir = [ path.root
- [ path.make $(specification) ] $(self.location) ] ;
- }
- else if $(attribute) = "id"
- {
- id = [ path.root $(specification) / ] ;
- project.register-id $(id) : $(self.project-module) ;
- self.id = $(id) ;
- }
- else if ! $(attribute) in "default-build" "location" "parent"
- "projects-to-build" "project-root" "source-location"
- {
- errors.error Invalid project attribute '$(attribute)' specified for
- project at '$(self.location)' ;
- }
- else
- {
- self.$(attribute) = $(specification) ;
- }
- }
-
- # Returns the value of the given attribute.
- #
- rule get ( attribute )
- {
- return $(self.$(attribute)) ;
- }
-
- # Prints the project attributes.
- #
- rule print ( )
- {
- local id = $(self.id) ; id ?= (none) ;
- local parent = $(self.parent) ; parent ?= (none) ;
- print.section "'"$(id)"'" ;
- print.list-start ;
- print.list-item "Parent project:" $(parent) ;
- print.list-item "Requirements:" [ $(self.requirements).raw ] ;
- print.list-item "Default build:" $(self.default-build) ;
- print.list-item "Source location:" $(self.source-location) ;
- print.list-item "Projects to build:"
- [ sequence.insertion-sort $(self.projects-to-build) ] ;
- print.list-end ;
- }
-}
-
-
-# Returns the project which is currently being loaded.
-#
-rule current ( )
-{
- return $(.current-project) ;
-}
-
-
-# Temporarily changes the current project to 'project'. Should be followed by
-# 'pop-current'.
-#
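-# Typical usage (illustrative; 'some-module' is a placeholder):
-#
-#   project.push-current [ project.target $(some-module) ] ;
-#   # ... declare targets on behalf of that project ...
-#   project.pop-current ;
-#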
-rule push-current ( project )
-{
- .saved-current-project += $(.current-project) ;
- .current-project = $(project) ;
-}
-
-
-rule pop-current ( )
-{
- .current-project = $(.saved-current-project[-1]) ;
- .saved-current-project = $(.saved-current-project[1--2]) ;
-}
-
-
-# Returns the project-attributes instance for the specified Jamfile module.
-#
-rule attributes ( project )
-{
- return $($(project).attributes) ;
-}
-
-
-# Returns the value of the specified attribute in the specified Jamfile module.
-#
-rule attribute ( project attribute )
-{
- return [ $($(project).attributes).get $(attribute) ] ;
-}
-
-
-# Returns the project target corresponding to the 'project-module'.
-#
-rule target ( project-module )
-{
- if ! $(.target.$(project-module))
- {
- .target.$(project-module) = [ new project-target $(project-module)
- : $(project-module)
- : [ attribute $(project-module) requirements ] ] ;
- }
- return $(.target.$(project-module)) ;
-}
-
-
-# Use/load a project.
-#
-rule use ( id : location )
-{
- local saved-project = $(.current-project) ;
- local project-module = [ project.load $(location) ] ;
- local declared-id = [ project.attribute $(project-module) id ] ;
-
- if ! $(declared-id) || $(declared-id) != $(id)
- {
- # The project at 'location' either has no id or that id is not equal to
- # the 'id' parameter.
- if $($(id).jamfile-module) && ( $($(id).jamfile-module) !=
- $(project-module) )
- {
- errors.user-error Attempt to redeclare already existing project id
- '$(id)'
- location '$(location)' ;
- }
- $(id).jamfile-module = $(project-module) ;
- }
- .current-project = $(saved-project) ;
-}
-
-
-# Defines a Boost.Build extension project. Such extensions usually contain
-# library targets and features that can be used by many people. Even though
-# extensions are really projects, they can be initialized as a module would be
-# with the "using" (project.project-rules.using) mechanism.
-#
-rule extension ( id : options * : * )
-{
- # The caller is a standalone module for the extension.
- local mod = [ CALLER_MODULE ] ;
-
- # We need to do the rest within the extension module.
- module $(mod)
- {
- import path ;
-
- # Find the root project.
- local root-project = [ project.current ] ;
- root-project = [ $(root-project).project-module ] ;
- while
- [ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config
- {
- root-project = [ project.attribute $(root-project) parent-module ] ;
- }
-
- # Create the project data, and bring in the project rules into the
- # module.
- project.initialize $(__name__) : [ path.join [ project.attribute
- $(root-project) location ] ext $(1:L) ] ;
-
- # Create the project itself, i.e. the attributes. All extensions are
- # created in the "/ext" project space.
- project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
- $(9) ;
- local attributes = [ project.attributes $(__name__) ] ;
-
- # Inherit from the root project of whomever is defining us.
- project.inherit-attributes $(__name__) : $(root-project) ;
- $(attributes).set parent-module : $(root-project) : exact ;
- }
-}
-
-
-rule glob-internal ( project : wildcards + : excludes * : rule-name )
-{
- local location = [ $(project).get source-location ] ;
-
- local result ;
- local paths = [ path.$(rule-name) $(location) :
- [ sequence.transform path.make : $(wildcards) ] :
- [ sequence.transform path.make : $(excludes) ] ] ;
- if $(wildcards:D) || $(rule-name) != glob
- {
- # The paths we have found are relative to the current directory, but the
- # names specified in the sources list are assumed to be relative to the
- # source directory of the corresponding project. So, just make the names
- # absolute.
- for local p in $(paths)
- {
- # If the path is below source location, use relative path.
- # Otherwise, use full path just to avoid any ambiguities.
- local rel = [ path.relative $(p) $(location) : no-error ] ;
- if $(rel) = not-a-child
- {
- result += [ path.root $(p) [ path.pwd ] ] ;
- }
- else
- {
- result += $(rel) ;
- }
- }
- }
- else
- {
- # There were no wildcards in the directory path, so the files are all in
- # the source directory of the project. Just drop the directory, instead
- # of making paths absolute.
- result = $(paths:D="") ;
- }
-
- return $(result) ;
-}
-
-
-# This module defines rules common to all projects.
-#
-module project-rules
-{
- rule using ( toolset-module : * )
- {
- import toolset ;
- import modules ;
- import project ;
-
- # Temporarily change the search path so the module referred to by
- # 'using' can be placed in the same directory as the Jamfile. Users will
- # expect the module to be found even though the directory is not in
- # BOOST_BUILD_PATH.
- local x = [ modules.peek : BOOST_BUILD_PATH ] ;
- local caller = [ CALLER_MODULE ] ;
- local caller-location = [ modules.binding $(caller) ] ;
- modules.poke : BOOST_BUILD_PATH : $(caller-location:D) $(x) ;
- toolset.using $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- modules.poke : BOOST_BUILD_PATH : $(x) ;
-
- # The above might have clobbered .current-project. Restore the correct
- # value.
- modules.poke project : .current-project
- : [ project.target $(caller) ] ;
- }
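-
- # For illustration (the toolset name and path are hypothetical): a Jamfile
- # containing
- #
- #   using qt : /usr/local/qt ;
- #
- # will look for qt.jam next to that Jamfile first, and only then on the
- # regular BOOST_BUILD_PATH, because the Jamfile's directory is temporarily
- # prepended to the search path.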
-
- import modules ;
-
- rule import ( * : * : * )
- {
- modules.import project ;
-
- local caller = [ CALLER_MODULE ] ;
- local saved = [ modules.peek project : .current-project ] ;
- module $(caller)
- {
- modules.import $(1) : $(2) : $(3) ;
- }
- modules.poke project : .current-project : $(saved) ;
- }
-
- rule project ( id ? : options * : * )
- {
- import errors ;
- import path ;
- import project ;
-
- local caller = [ CALLER_MODULE ] ;
- local attributes = [ project.attributes $(caller) ] ;
- if $(id)
- {
- $(attributes).set id : $(id) ;
- }
-
- local explicit-build-dir ;
-
- for n in 2 3 4 5 6 7 8 9
- {
- local option = $($(n)) ;
- if $(option)
- {
- $(attributes).set $(option[1]) : $(option[2-]) ;
- }
- if $(option[1]) = "build-dir"
- {
- explicit-build-dir = [ path.make $(option[2-]) ] ;
- }
- }
-
- # If '--build-dir' is specified, change the build dir for the project.
- local global-build-dir =
- [ modules.peek project : .global-build-dir ] ;
-
- if $(global-build-dir)
- {
- local location = [ $(attributes).get location ] ;
- # A project with an empty location is a 'standalone' project, such as
- # user-config or qt. It has no build dir. If we tried to set a build
- # dir for user-config, we would then try to inherit it, with weird or
- # wrong consequences.
- if $(location) && $(location) = [ $(attributes).get project-root ]
- {
- # Re-read the project id, since it might have been changed in
- # the project's attributes.
- id = [ $(attributes).get id ] ;
- # This is Jamroot.
- if $(id)
- {
- if $(explicit-build-dir) &&
- [ path.is-rooted $(explicit-build-dir) ]
- {
- errors.user-error Absolute directory specified via
- 'build-dir' project attribute : Do not know how to
- combine that with the --build-dir option. ;
- }
- # Strip the leading slash from id.
- local rid = [ MATCH /(.*) : $(id) ] ;
- local p = [ path.join
- $(global-build-dir) $(rid) $(explicit-build-dir) ] ;
-
- $(attributes).set build-dir : $(p) : exact ;
- }
- }
- else
- {
- # Not Jamroot.
- if $(explicit-build-dir)
- {
- errors.user-error When --build-dir is specified, the
- 'build-dir' project : attribute is allowed only for
- top-level 'project' invocations ;
- }
- }
- }
- }
-
- # Declare and set a project global constant. Project global constants are
- # normal variables but should not be changed. They are applied to every
- # child Jamfile.
- #
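- # For illustration (the value is hypothetical), a Jamroot might declare:
- #
- #   constant VERSION : 1.57.0 ;
- #
- # after which every child Jamfile can refer to $(VERSION).
- #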
- rule constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- )
- {
- import project ;
- local caller = [ CALLER_MODULE ] ;
- local p = [ project.target $(caller) ] ;
- $(p).add-constant $(name) : $(value) ;
- }
-
- # Declare and set a project global constant, whose value is a path. The path
- # is adjusted to be relative to the invocation directory. The given value
- # path is taken to be either absolute, or relative to this project root.
- #
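- # For illustration (the name is hypothetical): a Jamroot containing
- #
- #   path-constant TOP : . ;
- #
- # makes $(TOP) expand, in every child Jamfile, to the Jamroot directory
- # expressed relative to the directory from which the build was invoked.
- #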
- rule path-constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- )
- {
- import project ;
- local caller = [ CALLER_MODULE ] ;
- local p = [ project.target $(caller) ] ;
- $(p).add-constant $(name) : $(value) : path ;
- }
-
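- # For illustration, a Jamroot might contain (the id and path are
- # hypothetical):
- #
- #   use-project /png : ../libpng ;
- #   exe viewer : viewer.cpp /png//png ;
- #
- # The (id, location) pair is only recorded here; the referenced project is
- # loaded later, by load-used-projects, once the current Jamfile has been read
- # completely.
- #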
- rule use-project ( id : where )
- {
- import modules ;
- # See comment in 'load' for explanation.
- local caller = [ CALLER_MODULE ] ;
- modules.poke $(caller) : .used-projects :
- [ modules.peek $(caller) : .used-projects ]
- $(id) $(where) ;
- }
-
- rule build-project ( dir )
- {
- import project ;
- local caller = [ CALLER_MODULE ] ;
- local attributes = [ project.attributes $(caller) ] ;
-
- local now = [ $(attributes).get projects-to-build ] ;
- $(attributes).set projects-to-build : $(now) $(dir) ;
- }
-
- rule explicit ( target-names * )
- {
- import project ;
- # If 'explicit' is used in a helper rule defined in Jamroot and
- # inherited by children, then most of the time we want 'explicit' to
- # operate on the Jamfile where the helper rule is invoked.
- local t = [ project.current ] ;
- for local n in $(target-names)
- {
- $(t).mark-target-as-explicit $(n) ;
- }
- }
-
- rule always ( target-names * )
- {
- import project ;
- local t = [ project.current ] ;
- for local n in $(target-names)
- {
- $(t).mark-target-as-always $(n) ;
- }
- }
-
- rule glob ( wildcards + : excludes * )
- {
- import project ;
- return [ project.glob-internal [ project.current ] : $(wildcards) :
- $(excludes) : glob ] ;
- }
-
- rule glob-tree ( wildcards + : excludes * )
- {
- import project ;
- import errors ;
-
- if $(wildcards:D) || $(excludes:D)
- {
- errors.user-error The patterns to 'glob-tree' may not include
- directory ;
- }
- return [ project.glob-internal [ project.current ] : $(wildcards) :
- $(excludes) : glob-tree ] ;
- }
-
- # Calculates conditional requirements for multiple requirements at once.
- # This is a shorthand to reduce duplication and to keep an inline
- # declarative syntax. For example:
- #
- # lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
- # <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
- #
- rule conditional ( condition + : requirements * )
- {
- local condition = $(condition:J=,) ;
- if [ MATCH (:) : $(condition) ]
- {
- return $(condition)$(requirements) ;
- }
- else
- {
- return $(condition):$(requirements) ;
- }
- }
-
- rule option ( name : value )
- {
- local m = [ CALLER_MODULE ] ;
- if $(m) != site-config && $(m) != user-config && $(m) != project-config
- {
- import errors ;
- errors.error "The 'option' rule may be used only in site-config or user-config" ;
- }
- import option ;
- option.set $(name) : $(value) ;
- }
-}
diff --git a/tools/build/v2/build/project.py b/tools/build/v2/build/project.py
deleted file mode 100644
index 1e1e16faed..0000000000
--- a/tools/build/v2/build/project.py
+++ /dev/null
@@ -1,1120 +0,0 @@
-# Status: ported.
-# Base revision: 64488
-
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements project representation and loading.
-# Each project is represented by
-# - a module where all the Jamfile content lives.
-# - an instance of the 'project-attributes' class.
-# (given a module name, it can be obtained by the 'attributes' rule)
-# - an instance of the 'project-target' class (from targets.jam)
-# (given a module name, it can be obtained by the 'target' rule)
-#
-# Typically, projects are created as a result of loading a Jamfile, which is
-# done by the 'load' and 'initialize' rules below. First, the module for the
-# Jamfile is loaded and a new project-attributes instance is created. Some
-# rules necessary for the project are added to the module (see the
-# 'project-rules' module at the bottom of this file).
-# Default project attributes are set (inheriting the attributes of the parent
-# project, if it exists). After that, the Jamfile is read. It can declare its
-# own attributes, via the 'project' rule, which will be combined with the
-# already set attributes.
-#
-# The 'project' rule can also declare a project id, which will be associated
-# with the project module.
-#
-# There can also be 'standalone' projects. They are created by calling
-# 'initialize' on an arbitrary module without specifying a location. After the
-# call, the module can call the 'project' rule, declare main targets and
-# behave like a regular project. However, since it is not associated with any
-# location, it should declare only prebuilt targets.
-#
-# The list of all loaded Jamfiles is stored in the variable .project-locations.
-# It is possible to obtain the module name for a location using the
-# 'module-name' rule. Standalone projects are not recorded; the only way to
-# use them is by project id.
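-#
-# A minimal sketch of how these pieces are typically driven (the 'manager'
-# object and the paths are assumptions, not defined in this file):
-#
-#   registry = ProjectRegistry(manager, global_build_dir=None)
-#   module = registry.load("libs/foo")     # loads libs/foo/Jamfile* and its Jamroot
-#   attrs = registry.attributes(module)    # the ProjectAttributes instance
-#   target = registry.target(module)       # the ProjectTarget for the project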
-
-import b2.util.path
-from b2.build import property_set, property
-from b2.build.errors import ExceptionWithUserContext
-import b2.build.targets
-
-import bjam
-
-import re
-import sys
-import os
-import string
-import imp
-import traceback
-import b2.util.option as option
-
-from b2.util import record_jam_to_value_mapping, qualify_jam_action
-from b2.manager import get_manager
-
-class ProjectRegistry:
-
- def __init__(self, manager, global_build_dir):
- self.manager = manager
- self.global_build_dir = global_build_dir
- self.project_rules_ = ProjectRules(self)
-
- # The target corresponding to the project being loaded now
- self.current_project = None
-
- # The set of names of loaded project modules
- self.jamfile_modules = {}
-
- # Mapping from location to module name
- self.location2module = {}
-
- # Mapping from project id to project module
- self.id2module = {}
-
- # Map from Jamfile directory to parent Jamfile/Jamroot
- # location.
- self.dir2parent_jamfile = {}
-
- # Map from directory to the name of Jamfile in
- # that directory (or None).
- self.dir2jamfile = {}
-
- # Map from project module to attributes object.
- self.module2attributes = {}
-
- # Map from project module to target for the project
- self.module2target = {}
-
- # Map from names to Python modules, for modules loaded
- # via 'using' and 'import' rules in Jamfiles.
- self.loaded_tool_modules_ = {}
-
- self.loaded_tool_module_path_ = {}
-
- # Map from project target to the list of
- # (id,location) pairs corresponding to all 'use-project'
- # invocations.
- # TODO: should not have a global map, keep this
- # in ProjectTarget.
- self.used_projects = {}
-
- self.saved_current_project = []
-
- self.JAMROOT = self.manager.getenv("JAMROOT");
-
- # Note the use of character groups, as opposed to listing
- # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
- # matches on Windows and would have to eliminate duplicates.
- if not self.JAMROOT:
- self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]
-
- # Default patterns to search for the Jamfiles to use for build
- # declarations.
- self.JAMFILE = self.manager.getenv("JAMFILE")
-
- if not self.JAMFILE:
- self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
- "[Jj]amfile.jam"]
-
-
- def load (self, jamfile_location):
- """Loads jamfile at the given location. After loading, project global
- file and jamfile needed by the loaded one will be loaded recursively.
- If the jamfile at that location is loaded already, does nothing.
- Returns the project module for the Jamfile."""
-
- absolute = os.path.join(os.getcwd(), jamfile_location)
- absolute = os.path.normpath(absolute)
- jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)
-
- if "--debug-loading" in self.manager.argv():
- print "Loading Jamfile at '%s'" % jamfile_location
-
-
- mname = self.module_name(jamfile_location)
- # If Jamfile is already loaded, don't try again.
- if not mname in self.jamfile_modules:
-
- self.load_jamfile(jamfile_location, mname)
-
- # We want to make sure that child projects are loaded only
- # after their parent projects, because parent projects define
- # attributes which are inherited by children, and we don't
- # want children to be loaded before the parent has defined everything.
- #
- # While "build-project" and "use-project" can potentially refer
- # to child projects from parent projects, we don't immediately
- # load child projects when seeing those attributes. Instead,
- # we record the minimal information that will be used only later.
-
- self.load_used_projects(mname)
-
- return mname
-
- def load_used_projects(self, module_name):
- # local used = [ modules.peek $(module-name) : .used-projects ] ;
- used = self.used_projects[module_name]
-
- location = self.attribute(module_name, "location")
- for u in used:
- id = u[0]
- where = u[1]
-
- self.use(id, os.path.join(location, where))
-
- def load_parent(self, location):
- """Loads parent of Jamfile at 'location'.
- Issues an error if nothing is found."""
-
- found = b2.util.path.glob_in_parents(
- location, self.JAMROOT + self.JAMFILE)
-
- if not found:
- print "error: Could not find parent for project at '%s'" % location
- print "error: Did not find Jamfile or project-root.jam in any parent directory."
- sys.exit(1)
-
- return self.load(os.path.dirname(found[0]))
-
- def act_as_jamfile(self, module, location):
- """Makes the specified 'module' act as if it were a regularly loaded Jamfile
- at 'location'. If Jamfile is already located for that location, it's an
- error."""
-
- if self.module_name(location) in self.jamfile_modules:
- self.manager.errors()(
- "Jamfile was already loaded for '%s'" % location)
-
- # Set up non-default mapping from location to module.
- self.location2module[location] = module
-
- # Record the module name for this location so that we don't try to
- # load the Jamfile again in the future (jamfile_modules is a dict
- # keyed by module name).
- self.jamfile_modules[self.module_name(location)] = True
-
- self.initialize(module, location)
-
- def find(self, name, current_location):
- """Given 'name' which can be project-id or plain directory name,
- return project module corresponding to that id or directory.
- Returns nothing of project is not found."""
-
- project_module = None
-
- # Try interpreting name as project id.
- if name[0] == '/':
- project_module = self.id2module.get(name)
-
- if not project_module:
- location = os.path.join(current_location, name)
- # If no project is registered for the given location, try to
- # load it. First see if we have a Jamfile. If not, we might have a
- # project root willing to act as a Jamfile. In that case, the project
- # root must be placed in the directory referred to by the id.
-
- project_module = self.module_name(location)
- if not project_module in self.jamfile_modules:
- if b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
- project_module = self.load(location)
- else:
- project_module = None
-
- return project_module
-
- def module_name(self, jamfile_location):
- """Returns the name of module corresponding to 'jamfile-location'.
- If no module corresponds to location yet, associates default
- module name with that location."""
- module = self.location2module.get(jamfile_location)
- if not module:
- # Root the path, so that locations are always unambiguous.
- # Without this, we cannot decide whether '../../exe/program1' and
- # '.' are the same path or not.
- jamfile_location = os.path.realpath(
- os.path.join(os.getcwd(), jamfile_location))
- module = "Jamfile<%s>" % jamfile_location
- self.location2module[jamfile_location] = module
- return module
-
- def find_jamfile (self, dir, parent_root=0, no_errors=0):
- """Find the Jamfile at the given location. This returns the
- exact names of all the Jamfiles in the given directory. The optional
- parent-root argument causes this to search not the given directory
- but the ones above it up to the directory given in it."""
-
- # Glob for all the possible Jamfiles according to the match pattern.
- #
- jamfile_glob = None
- if parent_root:
- parent = self.dir2parent_jamfile.get(dir)
- if not parent:
- parent = b2.util.path.glob_in_parents(dir,
- self.JAMFILE)
- self.dir2parent_jamfile[dir] = parent
- jamfile_glob = parent
- else:
- jamfile = self.dir2jamfile.get(dir)
- if not jamfile:
- jamfile = b2.util.path.glob([dir], self.JAMFILE)
- self.dir2jamfile[dir] = jamfile
- jamfile_glob = jamfile
-
- if len(jamfile_glob) > 1:
- # Multiple Jamfiles found in the same place. Warn about this and
- # ensure we use only one of them.
- # As a temporary convenience measure, if there is a Jamfile.v2 among
- # the found files, suppress the warning and use it.
- #
- pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
- v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
- if len(v2_jamfiles) == 1:
- jamfile_glob = v2_jamfiles
- else:
- print """warning: Found multiple Jamfiles at '%s'!""" % (dir)
- for j in jamfile_glob:
- print " -", j
- print "Loading the first one"
-
- # Could not find it, error.
- if not no_errors and not jamfile_glob:
- self.manager.errors()(
- """Unable to load Jamfile.
-Could not find a Jamfile in directory '%s'
-Attempted to find it with pattern '%s'.
-Please consult the documentation at 'http://boost.org/boost-build2'."""
- % (dir, string.join(self.JAMFILE)))
-
- if jamfile_glob:
- return jamfile_glob[0]
-
- def load_jamfile(self, dir, jamfile_module):
- """Load a Jamfile at the given directory. Returns nothing.
- Will attempt to load the file as indicated by the JAMFILE patterns.
- Effect of calling this rule twice with the same 'dir' is undefined."""
-
- # See if the Jamfile is where it should be.
- is_jamroot = False
- jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
- if not jamfile_to_load:
- jamfile_to_load = self.find_jamfile(dir)
- else:
- if len(jamfile_to_load) > 1:
- get_manager().errors()("Multiple Jamfiles found at '%s'\n" +\
- "Filenames are: %s"
- % (dir, [os.path.basename(j) for j in jamfile_to_load]))
-
- is_jamroot = True
- jamfile_to_load = jamfile_to_load[0]
-
- dir = os.path.dirname(jamfile_to_load)
- if not dir:
- dir = "."
-
- self.used_projects[jamfile_module] = []
-
- # Now load the Jamfile in its own context.
- # The call to 'initialize' may load parent Jamfile, which might have
- # 'use-project' statement that causes a second attempt to load the
- # same project we're loading now. Checking inside .jamfile-modules
- # prevents that second attempt from messing up.
- if not jamfile_module in self.jamfile_modules:
- self.jamfile_modules[jamfile_module] = True
-
- # Initialize the jamfile module before loading.
- #
- self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))
-
- saved_project = self.current_project
-
- bjam.call("load", jamfile_module, jamfile_to_load)
- basename = os.path.basename(jamfile_to_load)
-
- if is_jamroot:
- jamfile = self.find_jamfile(dir, no_errors=True)
- if jamfile:
- bjam.call("load", jamfile_module, jamfile)
-
- # Now do some checks
- if self.current_project != saved_project:
- self.manager.errors()(
-"""The value of the .current-project variable
-has magically changed after loading a Jamfile.
-This means some of the targets might be defined in the wrong project.
-after loading %s
-expected value %s
-actual value %s""" % (jamfile_module, saved_project, self.current_project))
-
- if self.global_build_dir:
- id = self.attributeDefault(jamfile_module, "id", None)
- project_root = self.attribute(jamfile_module, "project-root")
- location = self.attribute(jamfile_module, "location")
-
- if location and project_root == dir:
- # This is Jamroot
- if not id:
- # FIXME: go via errors module, so that contexts are
- # shown?
- print "warning: the --build-dir option was specified"
- print "warning: but Jamroot at '%s'" % dir
- print "warning: specified no project id"
- print "warning: the --build-dir option will be ignored"
-
-
- def load_standalone(self, jamfile_module, file):
- """Loads 'file' as standalone project that has no location
- associated with it. This is mostly useful for user-config.jam,
- which should be able to define targets, but although it has
- some location in filesystem, we don't want any build to
- happen in user's HOME, for example.
-
- The caller is required to never call this method twice on
- the same file.
- """
-
- self.used_projects[jamfile_module] = []
- bjam.call("load", jamfile_module, file)
- self.load_used_projects(jamfile_module)
-
- def is_jamroot(self, basename):
- match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
- if match:
- return 1
- else:
- return 0
-
- def initialize(self, module_name, location=None, basename=None):
- """Initialize the module for a project.
-
- module-name is the name of the project module.
- location is the location (directory) of the project to initialize.
- If not specified, a standalone project will be initialized.
- """
-
- if "--debug-loading" in self.manager.argv():
- print "Initializing project '%s'" % module_name
-
- # TODO: need to consider if standalone projects can do anything but define
- # prebuilt targets. If so, we need to give them a more sensible "location",
- # so that source paths are correct.
- if not location:
- location = ""
-
- attributes = ProjectAttributes(self.manager, location, module_name)
- self.module2attributes[module_name] = attributes
-
- python_standalone = False
- if location:
- attributes.set("source-location", [location], exact=1)
- elif not module_name in ["test-config", "site-config", "user-config", "project-config"]:
- # This is a standalone project with known location. Set source location
- # so that it can declare targets. This is intended so that you can put
- # a .jam file in your sources and use it via 'using'. Standard modules
- # (in 'tools' subdir) may not assume source dir is set.
- module = sys.modules[module_name]
- attributes.set("source-location", self.loaded_tool_module_path_[module_name], exact=1)
- python_standalone = True
-
- attributes.set("requirements", property_set.empty(), exact=True)
- attributes.set("usage-requirements", property_set.empty(), exact=True)
- attributes.set("default-build", property_set.empty(), exact=True)
- attributes.set("projects-to-build", [], exact=True)
- attributes.set("project-root", None, exact=True)
- attributes.set("build-dir", None, exact=True)
-
- self.project_rules_.init_project(module_name, python_standalone)
-
- jamroot = False
-
- parent_module = None;
- if module_name == "test-config":
- # No parent
- pass
- elif module_name == "site-config":
- parent_module = "test-config"
- elif module_name == "user-config":
- parent_module = "site-config"
- elif module_name == "project-config":
- parent_module = "user-config"
- elif location and not self.is_jamroot(basename):
- # We search for parent/project-root only if a Jamfile was specified,
- # i.e. if the project is not standalone.
- parent_module = self.load_parent(location)
- else:
- # It is either a Jamroot or a standalone project.
- # If it is a Jamroot, inherit from user-config.
- if location:
- # If project-config module exist, inherit from it.
- if self.module2attributes.has_key("project-config"):
- parent_module = "project-config"
- else:
- parent_module = "user-config" ;
-
- jamroot = True ;
-
- if parent_module:
- self.inherit_attributes(module_name, parent_module)
- attributes.set("parent-module", parent_module, exact=1)
-
- if jamroot:
- attributes.set("project-root", location, exact=1)
-
- parent = None
- if parent_module:
- parent = self.target(parent_module)
-
- if not self.module2target.has_key(module_name):
- target = b2.build.targets.ProjectTarget(self.manager,
- module_name, module_name, parent,
- self.attribute(module_name,"requirements"),
- # FIXME: why we need to pass this? It's not
- # passed in jam code.
- self.attribute(module_name, "default-build"))
- self.module2target[module_name] = target
-
- self.current_project = self.target(module_name)
-
- def inherit_attributes(self, project_module, parent_module):
- """Make 'project-module' inherit attributes of project
- root and parent module."""
-
- attributes = self.module2attributes[project_module]
- pattributes = self.module2attributes[parent_module]
-
- # Parent module might be locationless user-config.
- # FIXME:
- #if [ modules.binding $(parent-module) ]
- #{
- # $(attributes).set parent : [ path.parent
- # [ path.make [ modules.binding $(parent-module) ] ] ] ;
- # }
-
- attributes.set("project-root", pattributes.get("project-root"), exact=True)
- attributes.set("default-build", pattributes.get("default-build"), exact=True)
- attributes.set("requirements", pattributes.get("requirements"), exact=True)
- attributes.set("usage-requirements",
- pattributes.get("usage-requirements"), exact=1)
-
- parent_build_dir = pattributes.get("build-dir")
-
- if parent_build_dir:
- # Have to compute relative path from parent dir to our dir
- # Convert both paths to absolute, since we cannot
- # find relative path from ".." to "."
-
- location = attributes.get("location")
- parent_location = pattributes.get("location")
-
- our_dir = os.path.join(os.getcwd(), location)
- parent_dir = os.path.join(os.getcwd(), parent_location)
-
- build_dir = os.path.join(parent_build_dir,
- os.path.relpath(our_dir, parent_dir))
- attributes.set("build-dir", build_dir, exact=True)
-
- def register_id(self, id, module):
- """Associate the given id with the given project module."""
- self.id2module[id] = module
-
- def current(self):
- """Returns the project which is currently being loaded."""
- return self.current_project
-
- def set_current(self, c):
- self.current_project = c
-
- def push_current(self, project):
- """Temporary changes the current project to 'project'. Should
- be followed by 'pop-current'."""
- self.saved_current_project.append(self.current_project)
- self.current_project = project
-
- def pop_current(self):
- self.current_project = self.saved_current_project[-1]
- del self.saved_current_project[-1]
-
- def attributes(self, project):
- """Returns the project-attribute instance for the
- specified jamfile module."""
- return self.module2attributes[project]
-
- def attribute(self, project, attribute):
- """Returns the value of the specified attribute in the
- specified jamfile module."""
- try:
- return self.module2attributes[project].get(attribute)
- except KeyError:
- raise BaseException("No attribute '%s' for project '%s'"
- % (attribute, project))
-
- def attributeDefault(self, project, attribute, default):
- """Returns the value of the specified attribute in the
- specified jamfile module."""
- return self.module2attributes[project].getDefault(attribute, default)
-
- def target(self, project_module):
- """Returns the project target corresponding to the 'project-module'."""
- if not self.module2target.has_key(project_module):
- self.module2target[project_module] = \
- b2.build.targets.ProjectTarget(project_module, project_module,
- self.attribute(project_module, "requirements"))
-
- return self.module2target[project_module]
-
- def use(self, id, location):
- # Use/load a project.
- saved_project = self.current_project
- project_module = self.load(location)
- declared_id = self.attributeDefault(project_module, "id", "")
-
- if not declared_id or declared_id != id:
- # The project at 'location' either has no id or
- # that id is not equal to the 'id' parameter.
- if self.id2module.has_key(id) and self.id2module[id] != project_module:
- self.manager.errors()(
-"""Attempt to redeclare already existing project id '%s' at location '%s'""" % (id, location))
- self.id2module[id] = project_module
-
- # Restore the project that was current before the load.
- self.current_project = saved_project
-
- def add_rule(self, name, callable):
- """Makes rule 'name' available to all subsequently loaded Jamfiles.
-
- Calling that rule will relay to 'callable'."""
- self.project_rules_.add_rule(name, callable)
-
- def project_rules(self):
- return self.project_rules_
-
- def glob_internal(self, project, wildcards, excludes, rule_name):
- location = project.get("source-location")[0]
-
- result = []
- callable = b2.util.path.__dict__[rule_name]
-
- paths = callable([location], wildcards, excludes)
- has_dir = 0
- for w in wildcards:
- if os.path.dirname(w):
- has_dir = 1
- break
-
- if has_dir or rule_name != "glob":
- result = []
- # The paths we've found are relative to the current directory,
- # but the names specified in the sources list are assumed to
- # be relative to the source directory of the corresponding
- # project. Either translate them or make them absolute.
-
- for p in paths:
- rel = os.path.relpath(p, location)
- # If the path is below source location, use relative path.
- if not ".." in rel:
- result.append(rel)
- else:
- # Otherwise, use full path just to avoid any ambiguities.
- result.append(os.path.abspath(p))
-
- else:
- # There was no directory in the wildcard, so the files are all
- # in the source directory of the project. Just drop the
- # directory, instead of making paths absolute.
- result = [os.path.basename(p) for p in paths]
-
- return result
-
- def load_module(self, name, extra_path=None):
- """Load a Python module that should be useable from Jamfiles.
-
- There are generally two types of modules Jamfiles might want to
- use:
- - Core Boost.Build. Those are imported using plain names, e.g.
- 'toolset', so this function checks if we have module named
- b2.package.module already.
- - Python modules in the same directory as Jamfile. We don't
- want to even temporary add Jamfile's directory to sys.path,
- since then we might get naming conflicts between standard
- Python modules and those.
- """
-
- # See if we loaded module of this name already
- existing = self.loaded_tool_modules_.get(name)
- if existing:
- return existing
-
- # See if we have a module b2.whatever.<name>, where <name>
- # is what is passed to this function
- modules = sys.modules
- for class_name in modules:
- parts = class_name.split('.')
- if name == class_name or (parts[0] == "b2"
- and parts[-1] == name.replace("-", "_")):
- module = modules[class_name]
- self.loaded_tool_modules_[name] = module
- return module
-
- # Lookup a module in BOOST_BUILD_PATH
- path = extra_path
- if not path:
- path = []
- path.extend(self.manager.boost_build_path())
- location = None
- for p in path:
- l = os.path.join(p, name + ".py")
- if os.path.exists(l):
- location = l
- break
-
- if not location:
- self.manager.errors()("Cannot find module '%s'" % name)
-
- mname = name + "__for_jamfile"
- file = open(location)
- try:
- # TODO: this means we'll never make use of .pyc module,
- # which might be a problem, or not.
- self.loaded_tool_module_path_[mname] = location
- module = imp.load_module(mname, file, os.path.basename(location),
- (".py", "r", imp.PY_SOURCE))
- self.loaded_tool_modules_[name] = module
- return module
- finally:
- file.close()
-
-
-
-# FIXME:
-# Defines a Boost.Build extension project. Such extensions usually
-# contain library targets and features that can be used by many people.
-# Even though extensions are really projects, they can be initialize as
-# a module would be with the "using" (project.project-rules.using)
-# mechanism.
-#rule extension ( id : options * : * )
-#{
-# # The caller is a standalone module for the extension.
-# local mod = [ CALLER_MODULE ] ;
-#
-# # We need to do the rest within the extension module.
-# module $(mod)
-# {
-# import path ;
-#
-# # Find the root project.
-# local root-project = [ project.current ] ;
-# root-project = [ $(root-project).project-module ] ;
-# while
-# [ project.attribute $(root-project) parent-module ] &&
-# [ project.attribute $(root-project) parent-module ] != user-config
-# {
-# root-project = [ project.attribute $(root-project) parent-module ] ;
-# }
-#
-# # Create the project data, and bring in the project rules
-# # into the module.
-# project.initialize $(__name__) :
-# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
-#
-# # Create the project itself, i.e. the attributes.
-# # All extensions are created in the "/ext" project space.
-# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-# local attributes = [ project.attributes $(__name__) ] ;
-#
-# # Inherit from the root project of whomever is defining us.
-# project.inherit-attributes $(__name__) : $(root-project) ;
-# $(attributes).set parent-module : $(root-project) : exact ;
-# }
-#}
-
-
-class ProjectAttributes:
- """Class keeping all the attributes of a project.
-
- The standard attributes are "id", "location", "project-root", "parent",
- "requirements", "default-build", "source-location" and "projects-to-build".
- """
-
- def __init__(self, manager, location, project_module):
- self.manager = manager
- self.location = location
- self.project_module = project_module
- self.attributes = {}
- self.usage_requirements = None
-
- def set(self, attribute, specification, exact=False):
- """Set the named attribute from the specification given by the user.
- The value actually set may be different."""
-
- if exact:
- self.__dict__[attribute] = specification
-
- elif attribute == "requirements":
- self.requirements = property_set.refine_from_user_input(
- self.requirements, specification,
- self.project_module, self.location)
-
- elif attribute == "usage-requirements":
- unconditional = []
- for p in specification:
- split = property.split_conditional(p)
- if split:
- unconditional.append(split[1])
- else:
- unconditional.append(p)
-
- non_free = property.remove("free", unconditional)
- if non_free:
- get_manager().errors()("usage-requirements %s have non-free properties %s" \
- % (specification, non_free))
-
- t = property.translate_paths(
- property.create_from_strings(specification, allow_condition=True),
- self.location)
-
- existing = self.__dict__.get("usage-requirements")
- if existing:
- new = property_set.create(existing.all() + t)
- else:
- new = property_set.create(t)
- self.__dict__["usage-requirements"] = new
-
-
- elif attribute == "default-build":
- self.__dict__["default-build"] = property_set.create(specification)
-
- elif attribute == "source-location":
- source_location = []
- for path in specification:
- source_location.append(os.path.join(self.location, path))
- self.__dict__["source-location"] = source_location
-
- elif attribute == "build-dir":
- self.__dict__["build-dir"] = os.path.join(self.location, specification[0])
-
- elif attribute == "id":
- id = specification[0]
- if id[0] != '/':
- id = "/" + id
- self.manager.projects().register_id(id, self.project_module)
- self.__dict__["id"] = id
-
- elif not attribute in ["default-build", "location",
- "source-location", "parent",
- "projects-to-build", "project-root"]:
- self.manager.errors()(
-"""Invalid project attribute '%s' specified
-for project at '%s'""" % (attribute, self.location))
- else:
- self.__dict__[attribute] = specification
-
- def get(self, attribute):
- return self.__dict__[attribute]
-
- def getDefault(self, attribute, default):
- return self.__dict__.get(attribute, default)
-
- def dump(self):
- """Prints the project attributes."""
- id = self.get("id")
- if not id:
- id = "(none)"
- else:
- id = id[0]
-
- parent = self.get("parent")
- if not parent:
- parent = "(none)"
- else:
- parent = parent[0]
-
- print "'%s'" % id
- print "Parent project:%s", parent
- print "Requirements:%s", self.get("requirements")
- print "Default build:%s", string.join(self.get("debuild-build"))
- print "Source location:%s", string.join(self.get("source-location"))
- print "Projects to build:%s", string.join(self.get("projects-to-build").sort());
-
-class ProjectRules:
- """Class keeping all rules that are made available to Jamfile."""
-
- def __init__(self, registry):
- self.registry = registry
- self.manager_ = registry.manager
- self.rules = {}
- self.local_names = [x for x in self.__class__.__dict__
- if x not in ["__init__", "init_project", "add_rule",
- "error_reporting_wrapper", "add_rule_for_type", "reverse"]]
- self.all_names_ = [x for x in self.local_names]
-
- def _import_rule(self, bjam_module, name, callable):
- if hasattr(callable, "bjam_signature"):
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature)
- else:
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable))
-
-
- def add_rule_for_type(self, type):
- rule_name = type.lower().replace("_", "-")
-
- def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []):
- return self.manager_.targets().create_typed_target(
- type, self.registry.current(), name[0], sources,
- requirements, default_build, usage_requirements)
-
- self.add_rule(rule_name, xpto)
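-
- # For illustration (the type name is hypothetical): add_rule_for_type("EXE")
- # registers a Jamfile rule named 'exe' whose invocation creates a typed EXE
- # target in the project that is current at the time of the call.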
-
- def add_rule(self, name, callable):
- self.rules[name] = callable
- self.all_names_.append(name)
-
- # Add the new rule at global bjam scope. This might not be ideal;
- # it was added because if a Jamroot does 'import foo' where foo calls
- # add_rule, we need to import the new rule into the Jamroot scope, and
- # I'm too lazy to do this now.
- self._import_rule("", name, callable)
-
- def all_names(self):
- return self.all_names_
-
- def call_and_report_errors(self, callable, *args, **kw):
- result = None
- try:
- self.manager_.errors().push_jamfile_context()
- result = callable(*args, **kw)
- except ExceptionWithUserContext, e:
- e.report()
- except Exception, e:
- try:
- self.manager_.errors().handle_stray_exception (e)
- except ExceptionWithUserContext, e:
- e.report()
- finally:
- self.manager_.errors().pop_jamfile_context()
-
- return result
-
- def make_wrapper(self, callable):
- """Given a free-standing function 'callable', return a new
- callable that will call 'callable' and report all exceptins,
- using 'call_and_report_errors'."""
- def wrapper(*args, **kw):
- return self.call_and_report_errors(callable, *args, **kw)
- return wrapper
-
- def init_project(self, project_module, python_standalone=False):
-
- if python_standalone:
- m = sys.modules[project_module]
-
- for n in self.local_names:
- if n != "import_":
- setattr(m, n, getattr(self, n))
-
- for n in self.rules:
- setattr(m, n, self.rules[n])
-
- return
-
- for n in self.local_names:
- # Using 'getattr' here gives us a bound method,
- # while using self.__dict__[n] would give an unbound one.
- v = getattr(self, n)
- if callable(v):
- if n == "import_":
- n = "import"
- else:
- n = string.replace(n, "_", "-")
-
- self._import_rule(project_module, n, v)
-
- for n in self.rules:
- self._import_rule(project_module, n, self.rules[n])
-
- def project(self, *args):
-
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
-
- id = None
- if args and args[0]:
- id = args[0][0]
- args = args[1:]
-
- if id:
- attributes.set('id', [id])
-
- explicit_build_dir = None
- for a in args:
- if a:
- attributes.set(a[0], a[1:], exact=0)
- if a[0] == "build-dir":
- explicit_build_dir = a[1]
-
- # If '--build-dir' is specified, change the build dir for the project.
- if self.registry.global_build_dir:
-
- location = attributes.get("location")
- # A project with an empty location is a 'standalone' project, like
- # user-config or qt, and has no build dir. If we tried to set a
- # build dir for user-config, it would then be inherited, with
- # either weird or wrong consequences.
- if location and location == attributes.get("project-root"):
- # Re-read the project id, since it might have been changed in
- # the project's attributes.
- id = attributes.get('id')
-
- # This is Jamroot.
- if id:
- if explicit_build_dir and os.path.isabs(explicit_build_dir):
- self.registry.manager.errors()(
-"""Absolute directory specified via 'build-dir' project attribute
-Don't know how to combine that with the --build-dir option.""")
-
- rid = id
- if rid[0] == '/':
- rid = rid[1:]
-
- p = os.path.join(self.registry.global_build_dir, rid)
- if explicit_build_dir:
- p = os.path.join(p, explicit_build_dir)
- attributes.set("build-dir", p, exact=1)
- elif explicit_build_dir:
- self.registry.manager.errors()(
-"""When --build-dir is specified, the 'build-dir'
-attribute is allowed only for top-level 'project' invocations""")
-
- def constant(self, name, value):
- """Declare and set a project global constant.
- Project global constants are normal variables but should
- not be changed. They are applied to every child Jamfile."""
- m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>"
- self.registry.current().add_constant(name[0], value)
-
- def path_constant(self, name, value):
- """Declare and set a project global constant, whose value is a path. The
- path is adjusted to be relative to the invocation directory. The given
- value path is taken to be either absolute, or relative to this project
- root."""
- if len(value) > 1:
- self.registry.manager.errors()("path constant should have exactly one element")
- self.registry.current().add_constant(name[0], value[0], path=1)
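-
- # A hedged usage sketch (Jamfile syntax; the constant name is hypothetical):
- #   path-constant TOP : . ;
- # records TOP as the project root directory, adjusted to be relative to the
- # directory from which the build was invoked, as described in the docstring.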
-
- def use_project(self, id, where):
- # See comment in 'load' for explanation why we record the
- # parameters as opposed to loading the project now.
- m = self.registry.current().project_module()
- self.registry.used_projects[m].append((id[0], where[0]))
-
- def build_project(self, dir):
- assert(isinstance(dir, list))
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
- now = attributes.get("projects-to-build")
- attributes.set("projects-to-build", now + dir, exact=True)
-
- def explicit(self, target_names):
- self.registry.current().mark_targets_as_explicit(target_names)
-
- def always(self, target_names):
- self.registry.current().mark_targets_as_alays(target_names)
-
- def glob(self, wildcards, excludes=None):
- return self.registry.glob_internal(self.registry.current(),
- wildcards, excludes, "glob")
-
- def glob_tree(self, wildcards, excludes=None):
- bad = 0
- for p in wildcards:
- if os.path.dirname(p):
- bad = 1
-
- if excludes:
- for p in excludes:
- if os.path.dirname(p):
- bad = 1
-
- if bad:
- self.registry.manager.errors()(
-"The patterns to 'glob-tree' may not include directory")
- return self.registry.glob_internal(self.registry.current(),
- wildcards, excludes, "glob_tree")
-
-
- def using(self, toolset, *args):
- # The module referred to by 'using' can be placed in the same
- # directory as the Jamfile, and the user will expect it to be found
- # even though that directory is not in BOOST_BUILD_PATH.
- # So temporarily change the search path.
- current = self.registry.current()
- location = current.get('location')
-
- m = self.registry.load_module(toolset[0], [location])
- if not m.__dict__.has_key("init"):
- self.registry.manager.errors()(
- "Tool module '%s' does not define the 'init' method" % toolset[0])
- m.init(*args)
-
- # The above might have clobbered .current-project. Restore the correct
- # value.
- self.registry.set_current(current)
-
- def import_(self, name, names_to_import=None, local_names=None):
-
- name = name[0]
- py_name = name
- if py_name == "os":
- py_name = "os_j"
- jamfile_module = self.registry.current().project_module()
- attributes = self.registry.attributes(jamfile_module)
- location = attributes.get("location")
-
- saved = self.registry.current()
-
- m = self.registry.load_module(py_name, [location])
-
- for f in m.__dict__:
- v = m.__dict__[f]
- f = f.replace("_", "-")
- if callable(v):
- qn = name + "." + f
- self._import_rule(jamfile_module, qn, v)
- record_jam_to_value_mapping(qualify_jam_action(qn, jamfile_module), v)
-
-
- if names_to_import:
- if not local_names:
- local_names = names_to_import
-
- if len(names_to_import) != len(local_names):
- self.registry.manager.errors()(
-"""The number of names to import and local names do not match.""")
-
- for n, l in zip(names_to_import, local_names):
- self._import_rule(jamfile_module, l, m.__dict__[n])
-
- self.registry.set_current(saved)
-
- def conditional(self, condition, requirements):
- """Calculates conditional requirements for multiple requirements
- at once. This is a shorthand to be reduce duplication and to
- keep an inline declarative syntax. For example:
-
- lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
- <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
- """
-
- c = string.join(condition, ",")
- if c.find(":") != -1:
- return [c + r for r in requirements]
- else:
- return [c + ":" + r for r in requirements]
-
- def option(self, name, value):
- name = name[0]
- if not name in ["site-config", "user-config", "project-config"]:
- get_manager().errors()("The 'option' rule may be used only in site-config or user-config")
-
- option.set(name, value[0])
diff --git a/tools/build/v2/build/property-set.jam b/tools/build/v2/build/property-set.jam
deleted file mode 100644
index 51e2b20de4..0000000000
--- a/tools/build/v2/build/property-set.jam
+++ /dev/null
@@ -1,489 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import feature ;
-import path ;
-import project ;
-import property ;
-import sequence ;
-import set ;
-import option ;
-
-# Class for storing a set of properties.
-#
-# There is a 1<->1 correspondence between identity and value. No two instances
-# of the class are equal. To maintain this property, the 'property-set.create'
-# rule should be used to create new instances. Instances are immutable.
-#
-# Each property is classified with regard to its effect on build results.
-# Incidental properties have no effect on build results, from Boost.Build's
-# point of view. Others are either free, or non-free and we refer to non-free
-# ones as 'base'. Each property belongs to exactly one of those categories.
-#
-# It is possible to get a list of properties belonging to each category as
-# well as a list of properties with a specific attribute.
-#
-# Several operations, like refine and as-path, are provided. They all use
-# caching whenever possible.
-#
-class property-set
-{
- import errors ;
- import feature ;
- import path ;
- import property ;
- import property-set ;
- import set ;
-
- rule __init__ ( raw-properties * )
- {
- self.raw = $(raw-properties) ;
-
- for local p in $(raw-properties)
- {
- if ! $(p:G)
- {
- errors.error "Invalid property: '$(p)'" ;
- }
- }
- }
-
- # Returns Jam list of stored properties.
- #
- rule raw ( )
- {
- return $(self.raw) ;
- }
-
- rule str ( )
- {
- return "[" $(self.raw) "]" ;
- }
-
- # Returns properties that are neither incidental nor free.
- #
- rule base ( )
- {
- if ! $(self.base-initialized)
- {
- init-base ;
- }
- return $(self.base) ;
- }
-
- # Returns free properties which are not incidental.
- #
- rule free ( )
- {
- if ! $(self.base-initialized)
- {
- init-base ;
- }
- return $(self.free) ;
- }
-
- # Returns dependency properties.
- #
- rule dependency ( )
- {
- if ! $(self.dependency-initialized)
- {
- init-dependency ;
- }
- return $(self.dependency) ;
- }
-
- rule non-dependency ( )
- {
- if ! $(self.dependency-initialized)
- {
- init-dependency ;
- }
- return $(self.non-dependency) ;
- }
-
- rule conditional ( )
- {
- if ! $(self.conditional-initialized)
- {
- init-conditional ;
- }
- return $(self.conditional) ;
- }
-
- rule non-conditional ( )
- {
- if ! $(self.conditional-initialized)
- {
- init-conditional ;
- }
- return $(self.non-conditional) ;
- }
-
- # Returns incidental properties.
- #
- rule incidental ( )
- {
- if ! $(self.base-initialized)
- {
- init-base ;
- }
- return $(self.incidental) ;
- }
-
- rule refine ( ps )
- {
- if ! $(self.refined.$(ps))
- {
- local r = [ property.refine $(self.raw) : [ $(ps).raw ] ] ;
- if $(r[1]) != "@error"
- {
- self.refined.$(ps) = [ property-set.create $(r) ] ;
- }
- else
- {
- self.refined.$(ps) = $(r) ;
- }
- }
- return $(self.refined.$(ps)) ;
- }
-
- rule expand ( )
- {
- if ! $(self.expanded)
- {
- self.expanded = [ property-set.create [ feature.expand $(self.raw) ] ] ;
- }
- return $(self.expanded) ;
- }
-
- rule expand-composites ( )
- {
- if ! $(self.composites)
- {
- self.composites = [ property-set.create
- [ feature.expand-composites $(self.raw) ] ] ;
- }
- return $(self.composites) ;
- }
-
- rule evaluate-conditionals ( context ? )
- {
- context ?= $(__name__) ;
- if ! $(self.evaluated.$(context))
- {
- self.evaluated.$(context) = [ property-set.create
- [ property.evaluate-conditionals-in-context $(self.raw) : [ $(context).raw ] ] ] ;
- }
- return $(self.evaluated.$(context)) ;
- }
-
- rule propagated ( )
- {
- if ! $(self.propagated-ps)
- {
- local result ;
- for local p in $(self.raw)
- {
- if propagated in [ feature.attributes $(p:G) ]
- {
- result += $(p) ;
- }
- }
- self.propagated-ps = [ property-set.create $(result) ] ;
- }
- return $(self.propagated-ps) ;
- }
-
- rule add-defaults ( )
- {
- if ! $(self.defaults)
- {
- self.defaults = [ property-set.create
- [ feature.add-defaults $(self.raw) ] ] ;
- }
- return $(self.defaults) ;
- }
-
- rule as-path ( )
- {
- if ! $(self.as-path)
- {
- self.as-path = [ property.as-path [ base ] ] ;
- }
- return $(self.as-path) ;
- }
-
- # Computes the path to be used for a target with the given properties.
- # Returns a list of
- # - the computed path
- # - if the path is relative to the build directory, a value of 'true'.
- #
- rule target-path ( )
- {
- if ! $(self.target-path)
- {
- # The <location> feature can be used to explicitly change the
- # location of generated targets.
- local l = [ get <location> ] ;
- if $(l)
- {
- self.target-path = $(l) ;
- }
- else
- {
- local p = [ as-path ] ;
- p = [ property-set.hash-maybe $(p) ] ;
-
- # A really ugly hack. The Boost regression test system requires
- # specific target paths, and it seems that changing it to handle
- # another directory layout is really hard. For that reason, we
- # teach V2 to do the things the regression system requires. The
- # value of '<location-prefix>' is prepended to the path.
- local prefix = [ get <location-prefix> ] ;
- if $(prefix)
- {
- self.target-path = [ path.join $(prefix) $(p) ] ;
- }
- else
- {
- self.target-path = $(p) ;
- }
- if ! $(self.target-path)
- {
- self.target-path = . ;
- }
- # The path is relative to build dir.
- self.target-path += true ;
- }
- }
- return $(self.target-path) ;
- }
-
- rule add ( ps )
- {
- if ! $(self.added.$(ps))
- {
- self.added.$(ps) = [ property-set.create $(self.raw) [ $(ps).raw ] ] ;
- }
- return $(self.added.$(ps)) ;
- }
-
- rule add-raw ( properties * )
- {
- return [ add [ property-set.create $(properties) ] ] ;
- }
-
- # Returns all values of 'feature'.
- #
- rule get ( feature )
- {
- if ! $(self.map-built)
- {
- # For each feature, create a member var and assign all values to it.
- # Since all regular member vars start with 'self', there will be no
- # conflicts between names.
- self.map-built = true ;
- for local v in $(self.raw)
- {
- $(v:G) += $(v:G=) ;
- }
- }
- return $($(feature)) ;
- }
-
- # private
-
- rule init-base ( )
- {
- for local p in $(self.raw)
- {
- local att = [ feature.attributes $(p:G) ] ;
- # A feature can be both incidental and free, in which case we add it
- # to incidental.
- if incidental in $(att)
- {
- self.incidental += $(p) ;
- }
- else if free in $(att)
- {
- self.free += $(p) ;
- }
- else
- {
- self.base += $(p) ;
- }
- }
- self.base-initialized = true ;
- }
-
- rule init-dependency ( )
- {
- for local p in $(self.raw)
- {
- local att = [ feature.attributes $(p:G) ] ;
-
- if dependency in $(att)
- {
- self.dependency += $(p) ;
- }
- else
- {
- self.non-dependency += $(p) ;
- }
- }
- self.dependency-initialized = true ;
- }
-
- rule init-conditional ( )
- {
- for local p in $(self.raw)
- {
- if [ MATCH (:) : $(p:G=) ]
- {
- self.conditional += $(p) ;
- }
- else
- {
- self.non-conditional += $(p) ;
- }
- }
- self.conditional-initialized = true ;
- }
-}
-
-
-# Creates a new 'property-set' instance for the given raw properties or returns
-# an already existing one.
-#
-rule create ( raw-properties * )
-{
- raw-properties = [ sequence.unique
- [ sequence.insertion-sort $(raw-properties) ] ] ;
-
- local key = $(raw-properties:J=-:E=) ;
-
- if ! $(.ps.$(key))
- {
- .ps.$(key) = [ new property-set $(raw-properties) ] ;
- }
- return $(.ps.$(key)) ;
-}
-NATIVE_RULE property-set : create ;
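-
-# A minimal usage sketch (not part of the original source): because instances
-# are cached by their sorted, de-duplicated raw properties, two 'create' calls
-# given equal property lists are expected to return the same instance:
-#
-#   local p1 = [ property-set.create <toolset>gcc <variant>debug ] ;
-#   local p2 = [ property-set.create <variant>debug <toolset>gcc ] ;
-#   # $(p1) and $(p2) refer to the same property-set object.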
-
-
-# Creates a new 'property-set' instance after checking that all properties are
-# valid and converting incidental properties into gristed form.
-#
-rule create-with-validation ( raw-properties * )
-{
- property.validate $(raw-properties) ;
- return [ create [ property.make $(raw-properties) ] ] ;
-}
-
-
-# Creates a property-set from the input given by the user, in the context of
-# 'jamfile-module' at 'location'.
-#
-rule create-from-user-input ( raw-properties * : jamfile-module location )
-{
- local specification = [ property.translate-paths $(raw-properties)
- : $(location) ] ;
- specification = [ property.translate-indirect $(specification)
- : $(jamfile-module) ] ;
- local project-id = [ project.attribute $(jamfile-module) id ] ;
- project-id ?= [ path.root $(location) [ path.pwd ] ] ;
- specification = [ property.translate-dependencies
- $(specification) : $(project-id) : $(location) ] ;
- specification =
- [ property.expand-subfeatures-in-conditions $(specification) ] ;
- specification = [ property.make $(specification) ] ;
- return [ property-set.create $(specification) ] ;
-}
-
-
-# Refines requirements with requirements provided by the user. Specially handles
-# "-<property>value" syntax in specification to remove given requirements.
-# - parent-requirements -- property-set object with requirements to refine.
-# - specification -- string list of requirements provided by the user.
-# - project-module -- module to which context indirect features will be
-# bound.
-# - location -- path to which path features are relative.
-#
-rule refine-from-user-input ( parent-requirements : specification * :
- project-module : location )
-{
- if ! $(specification)
- {
- return $(parent-requirements) ;
- }
- else
- {
- local add-requirements ;
- local remove-requirements ;
-
- for local r in $(specification)
- {
- local m = [ MATCH "^-(.*)" : $(r) ] ;
- if $(m)
- {
- remove-requirements += $(m) ;
- }
- else
- {
- add-requirements += $(r) ;
- }
- }
-
- if $(remove-requirements)
- {
- # Need to create a property set, so that path features and indirect
- # features are translated just like they are in project
- # requirements.
- local ps = [ property-set.create-from-user-input
- $(remove-requirements) : $(project-module) $(location) ] ;
-
- parent-requirements = [ property-set.create
- [ set.difference [ $(parent-requirements).raw ]
- : [ $(ps).raw ] ] ] ;
- specification = $(add-requirements) ;
- }
-
- local requirements = [ property-set.create-from-user-input
- $(specification) : $(project-module) $(location) ] ;
-
- return [ $(parent-requirements).refine $(requirements) ] ;
- }
-}
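-
-# A hedged example of the "-<property>value" handling above (values are
-# hypothetical, not from the original source): with parent requirements
-# [ property-set.create <define>FOO <threading>multi ] and the user
-# specification "-<define>FOO <define>BAR", <define>FOO is removed first and
-# the remaining <define>BAR is then used to refine the result, which should
-# therefore contain <threading>multi and <define>BAR.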
-
-
-# Returns a property-set with an empty set of properties.
-#
-rule empty ( )
-{
- if ! $(.empty)
- {
- .empty = [ create ] ;
- }
- return $(.empty) ;
-}
-
-if [ option.get hash : : yes ] = yes
-{
- rule hash-maybe ( path ? )
- {
- path ?= "" ;
- return [ MD5 $(path) ] ;
- }
-}
-else
-{
- rule hash-maybe ( path ? )
- {
- return $(path) ;
- }
-}
-
diff --git a/tools/build/v2/build/property.jam b/tools/build/v2/build/property.jam
deleted file mode 100644
index a2ad5226b9..0000000000
--- a/tools/build/v2/build/property.jam
+++ /dev/null
@@ -1,788 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-import feature ;
-import indirect ;
-import path ;
-import regex ;
-import string ;
-import sequence ;
-import set ;
-import utility ;
-
-
-# Refines 'properties' by overriding any non-free and non-conditional properties
-# for which a different value is specified in 'requirements'. Returns the
-# resulting list of properties.
-#
-rule refine ( properties * : requirements * )
-{
- local result ;
- local error ;
-
- # All the 'requirements' elements should be present in the result. Record
- # them so that we can handle 'properties'.
- for local r in $(requirements)
- {
- # Do not consider conditional requirements.
- if ! [ MATCH (:) : $(r:G=) ]
- {
- # Note: cannot use a local variable here, so use an ugly name.
- __require__$(r:G) = $(r:G=) ;
- }
- }
-
- for local p in $(properties)
- {
- if [ MATCH (:) : $(p:G=) ]
- {
- # Do not modify conditional properties.
- result += $(p) ;
- }
- else if free in [ feature.attributes $(p:G) ]
- {
- # Do not modify free properties.
- result += $(p) ;
- }
- else
- {
- local required-value = $(__require__$(p:G)) ;
- if $(required-value)
- {
- if $(p:G=) != $(required-value)
- {
- result += $(p:G)$(required-value) ;
- }
- else
- {
- result += $(p) ;
- }
- }
- else
- {
- result += $(p) ;
- }
- }
- }
-
- # Unset our ugly map.
- for local r in $(requirements)
- {
- __require__$(r:G) = ;
- }
-
- if $(error)
- {
- return $(error) ;
- }
- else
- {
- return [ sequence.unique $(result) $(requirements) ] ;
- }
-}
-
-
-# Removes all conditional properties whose conditions are not met. For those
-# with met conditions, removes the condition. Properties in conditions are
-# looked up in 'context'.
-#
-rule evaluate-conditionals-in-context ( properties * : context * )
-{
- local base ;
- local conditionals ;
- for local p in $(properties)
- {
- if [ MATCH (:<) : $(p) ]
- {
- conditionals += $(p) ;
- }
- else
- {
- base += $(p) ;
- }
- }
-
- local result = $(base) ;
- for local p in $(conditionals)
- {
- # Separate condition and property.
- local s = [ MATCH (.*):(<.*) : $(p) ] ;
- # Split condition into individual properties.
- local condition = [ regex.split $(s[1]) "," ] ;
- # Evaluate condition.
- if ! [ MATCH (!).* : $(condition:G=) ]
- {
- # Only positive checks
- if $(condition) in $(context)
- {
- result += $(s[2]) ;
- }
- }
- else
- {
- # Have negative checks
- local fail ;
- while $(condition)
- {
- local c = $(condition[1]) ;
- local m = [ MATCH !(.*) : $(c) ] ;
- if $(m)
- {
- local p = $(m:G=$(c:G)) ;
- if $(p) in $(context)
- {
- fail = true ;
- c = ;
- }
- }
- else
- {
- if ! $(c) in $(context)
- {
- fail = true ;
- c = ;
- }
- }
- condition = $(condition[2-]) ;
- }
- if ! $(fail)
- {
- result += $(s[2]) ;
- }
- }
- }
- return $(result) ;
-}
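-
-# Example (mirrors the __test__ rule at the end of this file): evaluating
-# <variant>release,<rtti>off:<define>MY_RELEASE in the context
-# <toolset>gcc <variant>release <rtti>off satisfies the condition, so the
-# result contains the bare property <define>MY_RELEASE.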
-
-
-rule expand-subfeatures-in-conditions ( properties * )
-{
- local result ;
- for local p in $(properties)
- {
- local s = [ MATCH (.*):(<.*) : $(p) ] ;
- if ! $(s)
- {
- result += $(p) ;
- }
- else
- {
- local condition = $(s[1]) ;
- local value = $(s[2]) ;
- # Condition might include several elements.
- condition = [ regex.split $(condition) "," ] ;
- local e ;
- for local c in $(condition)
- {
- # It is common for a condition to include a toolset or
- # subfeatures that have not been defined. In that case we want
- # the condition to simply 'never be satisfied' and validation
- # would only produce a spurious error so we prevent it by
- # passing 'true' as the second parameter.
- e += [ feature.expand-subfeatures $(c) : true ] ;
- }
- if $(e) = $(condition)
- {
- # (todo)
- # This is just an optimization and possibly a premature one at
- # that.
- # (todo) (12.07.2008.) (Jurko)
- result += $(p) ;
- }
- else
- {
- result += $(e:J=,):$(value) ;
- }
- }
- }
- return $(result) ;
-}
-
-
-# Helper for as-path, below. Orders properties with the implicit ones first, and
-# within the two sections in alphabetical order of feature name.
-#
-local rule path-order ( x y )
-{
- if $(y:G) && ! $(x:G)
- {
- return true ;
- }
- else if $(x:G) && ! $(y:G)
- {
- return ;
- }
- else
- {
- if ! $(x:G)
- {
- x = [ feature.expand-subfeatures $(x) ] ;
- y = [ feature.expand-subfeatures $(y) ] ;
- }
-
- if $(x[1]) < $(y[1])
- {
- return true ;
- }
- }
-}
-
-
-local rule abbreviate-dashed ( string )
-{
- local r ;
- for local part in [ regex.split $(string) - ]
- {
- r += [ string.abbreviate $(part) ] ;
- }
- return $(r:J=-) ;
-}
-
-
-local rule identity ( string )
-{
- return $(string) ;
-}
-
-
-if --abbreviate-paths in [ modules.peek : ARGV ]
-{
- .abbrev = abbreviate-dashed ;
-}
-else
-{
- .abbrev = identity ;
-}
-
-
-# Returns a path representing the given expanded property set.
-#
-rule as-path ( properties * )
-{
- local entry = .result.$(properties:J=-) ;
-
- if ! $($(entry))
- {
- # Trim redundancy.
- properties = [ feature.minimize $(properties) ] ;
-
- # Sort according to path-order.
- properties = [ sequence.insertion-sort $(properties) : path-order ] ;
-
- local components ;
- for local p in $(properties)
- {
- if $(p:G)
- {
- local f = [ utility.ungrist $(p:G) ] ;
- p = $(f)-$(p:G=) ;
- }
- components += [ $(.abbrev) $(p) ] ;
- }
-
- $(entry) = $(components:J=/) ;
- }
-
- return $($(entry)) ;
-}
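-
-# Example (mirrors the __test__ rule at the end of this file): for the set
-# <toolset>gcc <optimization>off <rtti>off <variant>debug the resulting path
-# is gcc/debug/rtti-off -- implicit values come first, and redundant values
-# such as <optimization>off (implied by the debug variant) are trimmed by
-# feature.minimize.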
-
-
-# Exit with error if property is not valid.
-#
-local rule validate1 ( property )
-{
- local msg ;
- if $(property:G)
- {
- local feature = $(property:G) ;
- local value = $(property:G=) ;
-
- if ! [ feature.valid $(feature) ]
- {
- # Ungrist for better error messages.
- feature = [ utility.ungrist $(property:G) ] ;
- msg = "unknown feature '$(feature)'" ;
- }
- else if $(value) && ! free in [ feature.attributes $(feature) ]
- {
- feature.validate-value-string $(feature) $(value) ;
- }
- else if ! ( $(value) || ( optional in [ feature.attributes $(feature) ] ) )
- {
- # Ungrist for better error messages.
- feature = [ utility.ungrist $(property:G) ] ;
- msg = "No value specified for feature '$(feature)'" ;
- }
- }
- else
- {
- local feature = [ feature.implied-feature $(property) ] ;
- feature.validate-value-string $(feature) $(property) ;
- }
- if $(msg)
- {
- errors.error "Invalid property "'$(property:J=" ")'": "$(msg:J=" "). ;
- }
-}
-
-
-rule validate ( properties * )
-{
- for local p in $(properties)
- {
- validate1 $(p) ;
- }
-}
-
-
-rule validate-property-sets ( property-sets * )
-{
- for local s in $(property-sets)
- {
- validate [ feature.split $(s) ] ;
- }
-}
-
-
-# Expands any implicit property values in the given property 'specification' so
-# they explicitly state their feature.
-#
-rule make ( specification * )
-{
- local result ;
- for local e in $(specification)
- {
- if $(e:G)
- {
- result += $(e) ;
- }
- else if [ feature.is-implicit-value $(e) ]
- {
- local feature = [ feature.implied-feature $(e) ] ;
- result += $(feature)$(e) ;
- }
- else
- {
- errors.error "'$(e)' is not a valid property specification" ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns a property set containing all the elements in 'properties' that do not
-# have their attributes listed in 'attributes'.
-#
-rule remove ( attributes + : properties * )
-{
- local result ;
- for local e in $(properties)
- {
- if ! [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
- {
- result += $(e) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns a property set containing all the elements in 'properties' that have
-# their attributes listed in 'attributes'.
-#
-rule take ( attributes + : properties * )
-{
- local result ;
- for local e in $(properties)
- {
- if [ set.intersection $(attributes) : [ feature.attributes $(e:G) ] ]
- {
- result += $(e) ;
- }
- }
- return $(result) ;
-}
-
-
-# Selects properties corresponding to any of the given features.
-#
-rule select ( features * : properties * )
-{
- local result ;
-
- # Add any missing angle brackets.
- local empty = "" ;
- features = $(empty:G=$(features)) ;
-
- for local p in $(properties)
- {
- if $(p:G) in $(features)
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns a modified version of properties with all values of the given feature
-# replaced by the given value. If 'value' is empty the feature will be removed.
-#
-rule change ( properties * : feature value ? )
-{
- local result ;
- for local p in $(properties)
- {
- if $(p:G) = $(feature)
- {
- result += $(value:G=$(feature)) ;
- }
- else
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# If 'property' is a conditional property, returns the condition and the
-# property. E.g. <variant>debug,<toolset>gcc:<inlining>full will become
-# <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns an empty
-# string.
-#
-rule split-conditional ( property )
-{
- local m = [ MATCH "(.+):<(.+)" : $(property) ] ;
- if $(m)
- {
- return $(m[1]) <$(m[2]) ;
- }
-}
-
-
-# Interpret all path properties in 'properties' as relative to 'path'. The
-# property values are assumed to be in system-specific form, and will be
-# translated into normalized form.
-#
-rule translate-paths ( properties * : path )
-{
- local result ;
- for local p in $(properties)
- {
- local split = [ split-conditional $(p) ] ;
- local condition = "" ;
- if $(split)
- {
- condition = $(split[1]): ;
- p = $(split[2]) ;
- }
-
- if path in [ feature.attributes $(p:G) ]
- {
- local values = [ regex.split $(p:TG=) "&&" ] ;
- local t ;
- for local v in $(values)
- {
- t += [ path.root [ path.make $(v) ] $(path) ] ;
- }
- t = $(t:J="&&") ;
- result += $(condition)$(t:TG=$(p:G)) ;
- }
- else
- {
- result += $(condition)$(p) ;
- }
- }
- return $(result) ;
-}
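-
-# A hedged illustration (hypothetical values, not from the original source):
-# given the property <include>detail and the path ../libs/foo, the path-valued
-# property is rebased to roughly <include>../libs/foo/detail, while non-path
-# properties and any attached conditions are passed through unchanged.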
-
-
-# Assumes that all feature values that start with '@' are names of rules, used
-# in 'context-module'. Such rules can be either local to the module or global.
-# Converts such values into 'indirect-rule' format (see indirect.jam), so they
-# can be called from other modules. Does nothing for such values that are
-# already in the 'indirect-rule' format.
-#
-rule translate-indirect ( specification * : context-module )
-{
- local result ;
- for local p in $(specification)
- {
- local m = [ MATCH ^@(.+) : $(p:G=) ] ;
- if $(m)
- {
- local v ;
- if [ MATCH "^([^%]*)%([^%]+)$" : $(m) ]
- {
- # Rule is already in the 'indirect-rule' format.
- v = $(m) ;
- }
- else
- {
- if ! [ MATCH ".*([.]).*" : $(m) ]
- {
- # This is an unqualified rule name. The user might want to
- # set flags on this rule name and toolset.flag
- # auto-qualifies it. Need to do the same here so flag
- # setting works. We can arrange for toolset.flag to *not*
- # auto-qualify the argument but then two rules defined in
- # two Jamfiles would conflict.
- m = $(context-module).$(m) ;
- }
- v = [ indirect.make $(m) : $(context-module) ] ;
- }
-
- v = @$(v) ;
- result += $(v:G=$(p:G)) ;
- }
- else
- {
- result += $(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Binds all dependency properties in a list relative to the given project.
-# Targets with absolute paths will be left unchanged and targets which have a
-# project specified will have the path to the project interpreted relative to
-# the specified location.
-#
-rule translate-dependencies ( specification * : project-id : location )
-{
- local result ;
- for local p in $(specification)
- {
- local split = [ split-conditional $(p) ] ;
- local condition = "" ;
- if $(split)
- {
- condition = $(split[1]): ;
- p = $(split[2]) ;
- }
- if dependency in [ feature.attributes $(p:G) ]
- {
- local split-target = [ regex.match (.*)//(.*) : $(p:G=) ] ;
- if $(split-target)
- {
- local rooted = [ path.root [ path.make $(split-target[1]) ]
- [ path.root $(location) [ path.pwd ] ] ] ;
- result += $(condition)$(p:G)$(rooted)//$(split-target[2]) ;
- }
- else if [ path.is-rooted $(p:G=) ]
- {
- result += $(condition)$(p) ;
- }
- else
- {
- result += $(condition)$(p:G)$(project-id)//$(p:G=) ;
- }
- }
- else
- {
- result += $(condition)$(p) ;
- }
- }
- return $(result) ;
-}
-
-
-# Class maintaining a property set -> string mapping.
-#
-class property-map
-{
- import errors ;
- import numbers ;
- import sequence ;
-
- rule __init__ ( )
- {
- self.next-flag = 1 ;
- }
-
- # Associate 'value' with 'properties'.
- #
- rule insert ( properties + : value )
- {
- self.all-flags += $(self.next-flag) ;
- self.properties.$(self.next-flag) = $(properties) ;
- self.value.$(self.next-flag) = $(value) ;
-
- self.next-flag = [ numbers.increment $(self.next-flag) ] ;
- }
-
- # Returns the value associated with 'properties' or any subset of it. If
- # more than one subset has a value assigned to it, returns the value for the
- # longest subset, if it is unique.
- #
- rule find ( properties + )
- {
- return [ find-replace $(properties) ] ;
- }
-
- # Returns the value associated with 'properties'. If 'value' parameter is
- # given, replaces the found value.
- #
- rule find-replace ( properties + : value ? )
- {
- # First find all matches.
- local matches ;
- local match-ranks ;
- for local i in $(self.all-flags)
- {
- if $(self.properties.$(i)) in $(properties)
- {
- matches += $(i) ;
- match-ranks += [ sequence.length $(self.properties.$(i)) ] ;
- }
- }
- local best = [ sequence.select-highest-ranked $(matches)
- : $(match-ranks) ] ;
- if $(best[2])
- {
- errors.error "Ambiguous key $(properties:J= :E=)" ;
- }
- local original = $(self.value.$(best)) ;
- if $(value)
- {
- self.value.$(best) = $(value) ;
- }
- return $(original) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
- import errors : try catch ;
- import feature ;
-
- # Local rules must be explicitly re-imported.
- import property : path-order abbreviate-dashed ;
-
- feature.prepare-test property-test-temp ;
-
- feature.feature toolset : gcc : implicit symmetric ;
- feature.subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 3.0 3.0.1
- 3.0.2 : optional ;
- feature.feature define : : free ;
- feature.feature runtime-link : dynamic static : symmetric link-incompatible ;
- feature.feature optimization : on off ;
- feature.feature variant : debug release : implicit composite symmetric ;
- feature.feature rtti : on off : link-incompatible ;
-
- feature.compose <variant>debug : <define>_DEBUG <optimization>off ;
- feature.compose <variant>release : <define>NDEBUG <optimization>on ;
-
- validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
-
- assert.true path-order $(test-space) debug <define>foo ;
- assert.false path-order $(test-space) <define>foo debug ;
- assert.true path-order $(test-space) gcc debug ;
- assert.false path-order $(test-space) debug gcc ;
- assert.true path-order $(test-space) <optimization>on <rtti>on ;
- assert.false path-order $(test-space) <rtti>on <optimization>on ;
-
- assert.result-set-equal <toolset>gcc <rtti>off <define>FOO
- : refine <toolset>gcc <rtti>off
- : <define>FOO
- : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc <optimization>on
- : refine <toolset>gcc <optimization>off
- : <optimization>on
- : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc <rtti>off
- : refine <toolset>gcc : <rtti>off : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc <rtti>off <rtti>off:<define>FOO
- : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
- : $(test-space) ;
-
- assert.result-set-equal <toolset>gcc:<define>foo <toolset>gcc:<define>bar
- : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
- : $(test-space) ;
-
- assert.result <define>MY_RELEASE
- : evaluate-conditionals-in-context
- <variant>release,<rtti>off:<define>MY_RELEASE
- : <toolset>gcc <variant>release <rtti>off ;
-
- assert.result debug
- : as-path <optimization>off <variant>debug
- : $(test-space) ;
-
- assert.result gcc/debug/rtti-off
- : as-path <toolset>gcc <optimization>off <rtti>off <variant>debug
- : $(test-space) ;
-
- assert.result optmz-off : abbreviate-dashed optimization-off ;
- assert.result rntm-lnk-sttc : abbreviate-dashed runtime-link-static ;
-
- try ;
- validate <feature>value : $(test-space) ;
- catch "Invalid property '<feature>value': unknown feature 'feature'." ;
-
- try ;
- validate <rtti>default : $(test-space) ;
- catch \"default\" is not a known value of feature <rtti> ;
-
- validate <define>WHATEVER : $(test-space) ;
-
- try ;
- validate <rtti> : $(test-space) ;
- catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
-
- try ;
- validate value : $(test-space) ;
- catch "value" is not a value of an implicit feature ;
-
- assert.result-set-equal <rtti>on
- : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
-
- assert.result-set-equal <include>a
- : select include : <include>a <toolset>gcc ;
-
- assert.result-set-equal <include>a
- : select include bar : <include>a <toolset>gcc ;
-
- assert.result-set-equal <include>a <toolset>gcc
- : select include <bar> <toolset> : <include>a <toolset>gcc ;
-
- assert.result-set-equal <toolset>kylix <include>a
- : change <toolset>gcc <include>a : <toolset> kylix ;
-
- pm = [ new property-map ] ;
- $(pm).insert <toolset>gcc : o ;
- $(pm).insert <toolset>gcc <os>NT : obj ;
- $(pm).insert <toolset>gcc <os>CYGWIN : obj ;
-
- assert.equal o : [ $(pm).find <toolset>gcc ] ;
-
- assert.equal obj : [ $(pm).find <toolset>gcc <os>NT ] ;
-
- try ;
- $(pm).find <toolset>gcc <os>NT <os>CYGWIN ;
- catch "Ambiguous key <toolset>gcc <os>NT <os>CYGWIN" ;
-
- # Test ordinary properties.
- assert.result : split-conditional <toolset>gcc ;
-
- # Test properties with ":".
- assert.result : split-conditional <define>FOO=A::B ;
-
- # Test conditional feature.
- assert.result-set-equal <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
- : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO ;
-
- feature.finish-test property-test-temp ;
-}
diff --git a/tools/build/v2/build/property.py b/tools/build/v2/build/property.py
deleted file mode 100644
index c8bbdb29f5..0000000000
--- a/tools/build/v2/build/property.py
+++ /dev/null
@@ -1,593 +0,0 @@
-# Status: ported, except for tests and --abbreviate-paths.
-# Base revision: 64070
-#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import re
-from b2.util.utility import *
-from b2.build import feature
-from b2.util import sequence, qualify_jam_action
-import b2.util.set
-from b2.manager import get_manager
-
-__re_two_ampersands = re.compile ('&&')
-__re_comma = re.compile (',')
-__re_split_condition = re.compile ('(.*):(<.*)')
-__re_split_conditional = re.compile (r'(.+):<(.+)')
-__re_colon = re.compile (':')
-__re_has_condition = re.compile (r':<')
-__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
-
-class Property(object):
-
- __slots__ = ('_feature', '_value', '_condition')
-
- def __init__(self, f, value, condition = []):
- if type(f) == type(""):
- f = feature.get(f)
- # At present, single property has a single value.
- assert type(value) != type([])
- assert(f.free() or value.find(':') == -1)
- self._feature = f
- self._value = value
- self._condition = condition
-
- def feature(self):
- return self._feature
-
- def value(self):
- return self._value
-
- def condition(self):
- return self._condition
-
- def to_raw(self):
- result = "<" + self._feature.name() + ">" + str(self._value)
- if self._condition:
- result = ",".join(str(p) for p in self._condition) + ':' + result
- return result
-
- def __str__(self):
- return self.to_raw()
-
- def __hash__(self):
- # FIXME: consider if this class should be value-is-identity one
- return hash((self._feature, self._value, tuple(self._condition)))
-
- def __cmp__(self, other):
- return cmp((self._feature, self._value, self._condition),
- (other._feature, other._value, other._condition))
-
-
-def create_from_string(s, allow_condition=False,allow_missing_value=False):
-
- condition = []
- import types
- if not isinstance(s, types.StringType):
- print type(s)
- if __re_has_condition.search(s):
-
- if not allow_condition:
- raise BaseException("Conditional property is not allowed in this context")
-
- m = __re_separate_condition_and_property.match(s)
- condition = m.group(1)
- s = m.group(2)
-
- # FIXME: break dependency cycle
- from b2.manager import get_manager
-
- feature_name = get_grist(s)
- if not feature_name:
- if feature.is_implicit_value(s):
- f = feature.implied_feature(s)
- value = s
- else:
- raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
- else:
- f = feature.get(feature_name)
-
- value = get_value(s)
- if not value and not allow_missing_value:
- get_manager().errors()("Invalid property '%s' -- no value specified" % s)
-
-
- if condition:
- condition = [create_from_string(x) for x in condition.split(',')]
-
- return Property(f, value, condition)
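-
-# Hedged examples (hypothetical feature names, not from the original source):
-#   create_from_string('<toolset>gcc') -> a Property for feature 'toolset' with value 'gcc'
-#   create_from_string('<variant>debug:<define>X', allow_condition=True)
-#       -> a Property for '<define>X' whose condition holds the '<variant>debug' Property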
-
-def create_from_strings(string_list, allow_condition=False):
-
- return [create_from_string(s, allow_condition) for s in string_list]
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __results
-
- # A cache of results from as_path
- __results = {}
-
-reset ()
-
-
-def path_order (x, y):
- """ Helper for as_path, below. Orders properties with the implicit ones
- first, and within the two sections in alphabetical order of feature
- name.
- """
- if x == y:
- return 0
-
- xg = get_grist (x)
- yg = get_grist (y)
-
- if yg and not xg:
- return -1
-
- elif xg and not yg:
- return 1
-
- else:
- if not xg:
- x = feature.expand_subfeatures([x])
- y = feature.expand_subfeatures([y])
-
- if x < y:
- return -1
- elif x > y:
- return 1
- else:
- return 0
-
-def identify(string):
- return string
-
-# Uses Property
-def refine (properties, requirements):
- """ Refines 'properties' by overriding any non-free properties
- for which a different value is specified in 'requirements'.
- Conditional requirements are just added without modification.
- Returns the resulting list of properties.
- """
- # The result has no duplicates, so we store it in a set
- result = set()
-
- # Records all requirements.
- required = {}
-
- # All the elements of requirements should be present in the result
- # Record them so that we can handle 'properties'.
- for r in requirements:
- # Don't consider conditional requirements.
- if not r.condition():
- required[r.feature()] = r
-
- for p in properties:
- # Skip conditional properties
- if p.condition():
- result.add(p)
- # No processing for free properties
- elif p.feature().free():
- result.add(p)
- else:
- if required.has_key(p.feature()):
- result.add(required[p.feature()])
- else:
- result.add(p)
-
- return sequence.unique(list(result) + requirements)
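-
-# Example (mirrors the Jam __test__ rule earlier in this patch): refining
-# [<toolset>gcc, <rtti>off] with the free requirement [<define>FOO] keeps the
-# base properties and adds the free one, giving <toolset>gcc <rtti>off
-# <define>FOO; a non-free requirement such as <optimization>on would instead
-# override an existing <optimization>off.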
-
-def translate_paths (properties, path):
- """ Interpret all path properties in 'properties' as relative to 'path'
- The property values are assumed to be in system-specific form, and
- will be translated into normalized form.
- """
- result = []
-
- for p in properties:
-
- if p.feature().path():
- values = __re_two_ampersands.split(p.value())
-
- new_value = "&&".join(os.path.join(path, v) for v in values)
-
- if new_value != p.value():
- result.append(Property(p.feature(), new_value, p.condition()))
- else:
- result.append(p)
-
- else:
- result.append (p)
-
- return result
-
-def translate_indirect(properties, context_module):
- """Assumes that all feature values that start with '@' are
- names of rules, used in 'context-module'. Such rules can be
- either local to the module or global. Qualifies local rules
- with the name of the module."""
- result = []
- for p in properties:
- if p.value()[0] == '@':
- q = qualify_jam_action(p.value()[1:], context_module)
- get_manager().engine().register_bjam_action(q)
- result.append(Property(p.feature(), '@' + q, p.condition()))
- else:
- result.append(p)
-
- return result
-
-def validate (properties):
- """ Exit with error if any of the properties is not valid.
- properties may be a single property or a sequence of properties.
- """
-
- if isinstance (properties, str):
- __validate1 (properties)
- else:
- for p in properties:
- __validate1 (p)
-
-def expand_subfeatures_in_conditions (properties):
-
- result = []
- for p in properties:
-
- if not p.condition():
- result.append(p)
- else:
- expanded = []
- for c in p.condition():
-
- if c.feature().name().startswith("toolset") or c.feature().name() == "os":
- # It is common that a condition includes a toolset which
- # was never defined, or mentions subfeatures which
- # were never defined. In that case, validation would
- # only produce a spurious error, so don't validate.
- expanded.extend(feature.expand_subfeatures ([c], True))
- else:
- expanded.extend(feature.expand_subfeatures([c]))
-
- result.append(Property(p.feature(), p.value(), expanded))
-
- return result
-
-# FIXME: this should go
-def split_conditional (property):
- """ If 'property' is conditional property, returns
- condition and the property, e.g
- <variant>debug,<toolset>gcc:<inlining>full will become
- <variant>debug,<toolset>gcc <inlining>full.
- Otherwise, returns empty string.
- """
- m = __re_split_conditional.match (property)
-
- if m:
- return (m.group (1), '<' + m.group (2))
-
- return None
-
-
-def select (features, properties):
- """ Selects properties which correspond to any of the given features.
- """
- result = []
-
- # add any missing angle brackets
- features = add_grist (features)
-
- return [p for p in properties if get_grist(p) in features]
-
-def validate_property_sets (sets):
- for s in sets:
- validate(s.all())
-
-def evaluate_conditionals_in_context (properties, context):
- """ Removes all conditional properties which conditions are not met
- For those with met conditions, removes the condition. Properies
- in conditions are looked up in 'context'
- """
- base = []
- conditional = []
-
- for p in properties:
- if p.condition():
- conditional.append (p)
- else:
- base.append (p)
-
- result = base[:]
- for p in conditional:
-
- # Evaluate condition
- # FIXME: probably inefficient
- if all(x in context for x in p.condition()):
- result.append(Property(p.feature(), p.value()))
-
- return result
-
-
-def change (properties, feature, value = None):
- """ Returns a modified version of properties with all values of the
- given feature replaced by the given value.
- If 'value' is None the feature will be removed.
- """
- result = []
-
- feature = add_grist (feature)
-
- for p in properties:
- if get_grist (p) == feature:
- if value:
- result.append (replace_grist (value, feature))
-
- else:
- result.append (p)
-
- return result
-
-
-################################################################
-# Private functions
-
-def __validate1 (property):
- """ Exit with error if property is not valid.
- """
- msg = None
-
- if not property.feature().free():
- feature.validate_value_string (property.feature(), property.value())
-
-
-###################################################################
-# Still to port.
-# Original lines are prefixed with "# "
-#
-#
-# import utility : ungrist ;
-# import sequence : unique ;
-# import errors : error ;
-# import feature ;
-# import regex ;
-# import sequence ;
-# import set ;
-# import path ;
-# import assert ;
-#
-#
-
-
-# rule validate-property-sets ( property-sets * )
-# {
-# for local s in $(property-sets)
-# {
-# validate [ feature.split $(s) ] ;
-# }
-# }
-#
-
-def remove(attributes, properties):
- """Returns a property sets which include all the elements
- in 'properties' that do not have attributes listed in 'attributes'."""
-
- result = []
- for e in properties:
- attributes_new = feature.attributes(get_grist(e))
- has_common_features = 0
- for a in attributes_new:
- if a in attributes:
- has_common_features = 1
- break
-
- if not has_common_features:
- result.append(e)
-
- return result
-
-
-def take(attributes, properties):
- """Returns a property set which include all
- properties in 'properties' that have any of 'attributes'."""
- result = []
- for e in properties:
- if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
- result.append(e)
- return result
-
-def translate_dependencies(properties, project_id, location):
-
- result = []
- for p in properties:
-
- if not p.feature().dependency():
- result.append(p)
- else:
- v = p.value()
- m = re.match("(.*)//(.*)", v)
- if m:
- rooted = m.group(1)
- if rooted[0] == '/':
- # Either project id or absolute Linux path, do nothing.
- pass
- else:
- rooted = os.path.join(os.getcwd(), location, rooted)
-
- result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition()))
-
- elif os.path.isabs(v):
- result.append(p)
- else:
- result.append(Property(p.feature(), project_id + "//" + v, p.condition()))
-
- return result
-
-
-class PropertyMap:
- """ Class which maintains a property set -> string mapping.
- """
- def __init__ (self):
- self.__properties = []
- self.__values = []
-
- def insert (self, properties, value):
- """ Associate value with properties.
- """
- self.__properties.append(properties)
- self.__values.append(value)
-
- def find (self, properties):
- """ Return the value associated with properties
- or any subset of it. If more than one
- subset has value assigned to it, return the
- value for the longest subset, if it's unique.
- """
- return self.find_replace (properties)
-
- def find_replace(self, properties, value=None):
- matches = []
- match_ranks = []
-
- for i in range(0, len(self.__properties)):
- p = self.__properties[i]
-
- if b2.util.set.contains (p, properties):
- matches.append (i)
- match_ranks.append(len(p))
-
- best = sequence.select_highest_ranked (matches, match_ranks)
-
- if not best:
- return None
-
- if len (best) > 1:
- raise NoBestMatchingAlternative ()
-
- best = best [0]
-
- original = self.__values[best]
-
- if value:
- self.__values[best] = value
-
- return original
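-
-# A small usage sketch (values mirror the commented-out Jam test below):
-#   pm = PropertyMap()
-#   pm.insert(['<toolset>gcc'], 'o')
-#   pm.insert(['<toolset>gcc', '<os>NT'], 'obj')
-#   pm.find(['<toolset>gcc'])            # -> 'o'
-#   pm.find(['<toolset>gcc', '<os>NT'])  # -> 'obj' (longest matching subset wins)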
-
-# local rule __test__ ( )
-# {
-# import errors : try catch ;
-# import feature ;
-# import feature : feature subfeature compose ;
-#
-# # local rules must be explicitly re-imported
-# import property : path-order ;
-#
-# feature.prepare-test property-test-temp ;
-#
-# feature toolset : gcc : implicit symmetric ;
-# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
-# 3.0 3.0.1 3.0.2 : optional ;
-# feature define : : free ;
-# feature runtime-link : dynamic static : symmetric link-incompatible ;
-# feature optimization : on off ;
-# feature variant : debug release : implicit composite symmetric ;
-# feature rtti : on off : link-incompatible ;
-#
-# compose <variant>debug : <define>_DEBUG <optimization>off ;
-# compose <variant>release : <define>NDEBUG <optimization>on ;
-#
-# import assert ;
-# import "class" : new ;
-#
-# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
-#
-# assert.result <toolset>gcc <rtti>off <define>FOO
-# : refine <toolset>gcc <rtti>off
-# : <define>FOO
-# : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc <optimization>on
-# : refine <toolset>gcc <optimization>off
-# : <optimization>on
-# : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc <rtti>off
-# : refine <toolset>gcc : <rtti>off : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
-# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
-# : $(test-space)
-# ;
-#
-# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
-# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
-# : $(test-space)
-# ;
-#
-# assert.result <define>MY_RELEASE
-# : evaluate-conditionals-in-context
-# <variant>release,<rtti>off:<define>MY_RELEASE
-# : <toolset>gcc <variant>release <rtti>off
-#
-# ;
-#
-# try ;
-# validate <feature>value : $(test-space) ;
-# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
-#
-# try ;
-# validate <rtti>default : $(test-space) ;
-# catch \"default\" is not a known value of feature <rtti> ;
-#
-# validate <define>WHATEVER : $(test-space) ;
-#
-# try ;
-# validate <rtti> : $(test-space) ;
-# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
-#
-# try ;
-# validate value : $(test-space) ;
-# catch "value" is not a value of an implicit feature ;
-#
-#
-# assert.result <rtti>on
-# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
-#
-# assert.result <include>a
-# : select include : <include>a <toolset>gcc ;
-#
-# assert.result <include>a
-# : select include bar : <include>a <toolset>gcc ;
-#
-# assert.result <include>a <toolset>gcc
-# : select include <bar> <toolset> : <include>a <toolset>gcc ;
-#
-# assert.result <toolset>kylix <include>a
-# : change <toolset>gcc <include>a : <toolset> kylix ;
-#
-# # Test ordinary properties
-# assert.result
-# : split-conditional <toolset>gcc
-# ;
-#
-# # Test properties with ":"
-# assert.result
-# : split-conditional <define>FOO=A::B
-# ;
-#
-# # Test conditional feature
-# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
-# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
-# ;
-#
-# feature.finish-test property-test-temp ;
-# }
-#
-
diff --git a/tools/build/v2/build/property_set.py b/tools/build/v2/build/property_set.py
deleted file mode 100644
index f12eb90c14..0000000000
--- a/tools/build/v2/build/property_set.py
+++ /dev/null
@@ -1,449 +0,0 @@
-# Status: ported.
-# Base revision: 40480
-
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-from b2.util.utility import *
-import property, feature, string
-import b2.build.feature
-from b2.exceptions import *
-from b2.util.sequence import unique
-from b2.util.set import difference
-from b2.util import cached
-
-from b2.manager import get_manager
-
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __cache
-
- # A cache of property sets
- # TODO: use a map of weak refs?
- __cache = {}
-
-reset ()
-
-
-def create (raw_properties = []):
- """ Creates a new 'PropertySet' instance for the given raw properties,
- or returns an already existing one.
- """
- # FIXME: propagate to callers.
- if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
- x = raw_properties
- else:
- x = [property.create_from_string(ps) for ps in raw_properties]
- x.sort()
- x = unique (x)
-
- # FIXME: can we do better, e.g. by directly computing
- # the hash value of the list?
- key = tuple(x)
-
- if not __cache.has_key (key):
- __cache [key] = PropertySet(x)
-
- return __cache [key]
-
-def create_with_validation (raw_properties):
- """ Creates new 'PropertySet' instances after checking
- that all properties are valid and converting incidental
- properties into gristed form.
- """
- properties = [property.create_from_string(s) for s in raw_properties]
- property.validate(properties)
-
- return create(properties)
-
-def empty ():
- """ Returns PropertySet with empty set of properties.
- """
- return create ()
-
-def create_from_user_input(raw_properties, jamfile_module, location):
- """Creates a property-set from the input given by the user, in the
- context of 'jamfile-module' at 'location'"""
-
- properties = property.create_from_strings(raw_properties, True)
- properties = property.translate_paths(properties, location)
- properties = property.translate_indirect(properties, jamfile_module)
-
- project_id = get_manager().projects().attributeDefault(jamfile_module, 'id', None)
- if not project_id:
- project_id = os.path.abspath(location)
- properties = property.translate_dependencies(properties, project_id, location)
- properties = property.expand_subfeatures_in_conditions(properties)
- return create(properties)
-
-
-def refine_from_user_input(parent_requirements, specification, jamfile_module,
- location):
- """Refines requirements with requirements provided by the user.
- Specially handles "-<property>value" syntax in specification
- to remove given requirements.
- - parent-requirements -- property-set object with requirements
- to refine
- - specification -- string list of requirements provided by the user
- - project-module -- the module to which context indirect features
- will be bound.
- - location -- the path to which path features are relative."""
-
-
- if not specification:
- return parent_requirements
-
-
- add_requirements = []
- remove_requirements = []
-
- for r in specification:
- if r[0] == '-':
- remove_requirements.append(r[1:])
- else:
- add_requirements.append(r)
-
- if remove_requirements:
- # Need to create property set, so that path features
- # and indirect features are translated just like they
- # are in project requirements.
- ps = create_from_user_input(remove_requirements,
- jamfile_module, location)
-
- parent_requirements = create(difference(parent_requirements.all(),
- ps.all()))
- specification = add_requirements
-
- requirements = create_from_user_input(specification,
- jamfile_module, location)
-
- return parent_requirements.refine(requirements)
-
-class PropertySet:
- """ Class for storing a set of properties.
- - there's 1<->1 correspondence between identity and value. No
- two instances of the class are equal. To maintain this property,
- the 'PropertySet.create' rule should be used to create new instances.
- Instances are immutable.
-
- - each property is classified with regard to it's effect on build
- results. Incidental properties have no effect on build results, from
- Boost.Build point of view. Others are either free, or non-free, which we
- call 'base'. Each property belong to exactly one of those categories and
- it's possible to get list of properties in each category.
-
- In addition, it's possible to get list of properties with specific
- attribute.
-
- - several operations, like and refine and as_path are provided. They all use
- caching whenever possible.
- """
- def __init__ (self, properties = []):
-
-
- raw_properties = []
- for p in properties:
- raw_properties.append(p.to_raw())
-
- self.all_ = properties
- self.all_raw_ = raw_properties
- self.all_set_ = set(properties)
-
- self.incidental_ = []
- self.free_ = []
- self.base_ = []
- self.dependency_ = []
- self.non_dependency_ = []
- self.conditional_ = []
- self.non_conditional_ = []
- self.propagated_ = []
- self.link_incompatible = []
-
- # A cache of refined properties.
- self.refined_ = {}
-
- # A cache of property sets created by adding properties to this one.
- self.added_ = {}
-
- # Cache for the default properties.
- self.defaults_ = None
-
- # Cache for the expanded properties.
- self.expanded_ = None
-
- # Cache for the expanded composite properties
- self.composites_ = None
-
- # Cache for property set with expanded subfeatures
- self.subfeatures_ = None
-
- # Cache for the property set containing propagated properties.
- self.propagated_ps_ = None
-
- # A map of features to its values.
- self.feature_map_ = None
-
- # A tuple (target path, is relative to build directory)
- self.target_path_ = None
-
- self.as_path_ = None
-
- # A cache for already evaluated sets.
- self.evaluated_ = {}
-
- for p in raw_properties:
- if not get_grist (p):
- raise BaseException ("Invalid property: '%s'" % p)
-
- att = feature.attributes (get_grist (p))
-
- if 'propagated' in att:
- self.propagated_.append (p)
-
- if 'link_incompatible' in att:
- self.link_incompatible.append (p)
-
- for p in properties:
-
- # A feature can be both incidental and free,
- # in which case we add it to incidental.
- if p.feature().incidental():
- self.incidental_.append(p)
- elif p.feature().free():
- self.free_.append(p)
- else:
- self.base_.append(p)
-
- if p.condition():
- self.conditional_.append(p)
- else:
- self.non_conditional_.append(p)
-
- if p.feature().dependency():
- self.dependency_.append (p)
- else:
- self.non_dependency_.append (p)
-
-
- def all(self):
- return self.all_
-
- def raw (self):
- """ Returns the list of stored properties.
- """
- return self.all_raw_
-
- def __str__(self):
- return ' '.join(str(p) for p in self.all_)
-
- def base (self):
- """ Returns properties that are neither incidental nor free.
- """
- return self.base_
-
- def free (self):
- """ Returns free properties which are not dependency properties.
- """
- return self.free_
-
- def non_free(self):
- return self.base_ + self.incidental_
-
- def dependency (self):
- """ Returns dependency properties.
- """
- return self.dependency_
-
- def non_dependency (self):
- """ Returns properties that are not dependencies.
- """
- return self.non_dependency_
-
- def conditional (self):
- """ Returns conditional properties.
- """
- return self.conditional_
-
- def non_conditional (self):
- """ Returns properties that are not conditional.
- """
- return self.non_conditional_
-
- def incidental (self):
- """ Returns incidental properties.
- """
- return self.incidental_
-
- def refine (self, requirements):
- """ Refines this set's properties using the requirements passed as an argument.
- """
- assert isinstance(requirements, PropertySet)
- if not self.refined_.has_key (requirements):
- r = property.refine(self.all_, requirements.all_)
-
- self.refined_[requirements] = create(r)
-
- return self.refined_[requirements]
-
- def expand (self):
- if not self.expanded_:
- expanded = feature.expand(self.all_)
- self.expanded_ = create(expanded)
- return self.expanded_
-
- def expand_subfeatures(self):
- if not self.subfeatures_:
- self.subfeatures_ = create(feature.expand_subfeatures(self.all_))
- return self.subfeatures_
-
- def evaluate_conditionals(self, context=None):
- if not context:
- context = self
-
- if not self.evaluated_.has_key(context):
- # FIXME: figure out why the call messes up the first parameter
- self.evaluated_[context] = create(
- property.evaluate_conditionals_in_context(self.all(), context))
-
- return self.evaluated_[context]
-
- def propagated (self):
- if not self.propagated_ps_:
- self.propagated_ps_ = create (self.propagated_)
- return self.propagated_ps_
-
- def add_defaults (self):
- # FIXME: this caching is invalidated when new features
- # are declared inside non-root Jamfiles.
- if not self.defaults_:
- expanded = feature.add_defaults(self.all_)
- self.defaults_ = create(expanded)
- return self.defaults_
-
- def as_path (self):
- if not self.as_path_:
-
- def path_order (p1, p2):
-
- i1 = p1.feature().implicit()
- i2 = p2.feature().implicit()
-
- if i1 != i2:
- return i2 - i1
- else:
- return cmp(p1.feature().name(), p2.feature().name())
-
- # trim redundancy
- properties = feature.minimize(self.base_)
-
- # sort according to path_order
- properties.sort (path_order)
-
- components = []
- for p in properties:
- if p.feature().implicit():
- components.append(p.value())
- else:
- components.append(p.feature().name() + "-" + p.value())
-
- self.as_path_ = '/'.join (components)
-
- return self.as_path_
-
- def target_path (self):
- """ Computes the target path that should be used for
- target with these properties.
- Returns a tuple of
- - the computed path
- - if the path is relative to build directory, a value of
- 'true'.
- """
- if not self.target_path_:
- # The <location> feature can be used to explicitly
- # change the location of generated targets
- l = self.get ('<location>')
- if l:
- computed = l[0]
- is_relative = False
-
- else:
- p = self.as_path ()
-
- # Really, an ugly hack. The Boost regression test system requires
- # specific target paths, and it seems that changing it to handle
- # another directory layout is really hard. For that reason,
- # we teach V2 to do the things the regression system requires.
- # The value of '<location-prefix>' is prepended to the path.
- prefix = self.get ('<location-prefix>')
-
- if prefix:
- if len (prefix) > 1:
- raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)
-
- computed = os.path.join(prefix[0], p)
-
- else:
- computed = p
-
- if not computed:
- computed = "."
-
- is_relative = True
-
- self.target_path_ = (computed, is_relative)
-
- return self.target_path_
-
- def add (self, ps):
- """ Creates a new property set containing the properties in this one,
- plus the ones of the property set passed as argument.
- """
- if not self.added_.has_key(ps):
- self.added_[ps] = create(self.all_ + ps.all())
- return self.added_[ps]
-
- def add_raw (self, properties):
- """ Creates a new property set containing the properties in this one,
- plus the ones passed as argument.
- """
- return self.add (create (properties))
-
-
- def get (self, feature):
- """ Returns all values of 'feature'.
- """
- if type(feature) == type([]):
- feature = feature[0]
- if not isinstance(feature, b2.build.feature.Feature):
- feature = b2.build.feature.get(feature)
-
- if not self.feature_map_:
- self.feature_map_ = {}
-
- for v in self.all_:
- if not self.feature_map_.has_key(v.feature()):
- self.feature_map_[v.feature()] = []
- self.feature_map_[v.feature()].append(v.value())
-
- return self.feature_map_.get(feature, [])
-
- @cached
- def get_properties(self, feature):
- """Returns all contained properties associated with 'feature'"""
-
- if not isinstance(feature, b2.build.feature.Feature):
- feature = b2.build.feature.get(feature)
-
- result = []
- for p in self.all_:
- if p.feature() == feature:
- result.append(p)
- return result
-
- def __contains__(self, item):
- return item in self.all_set_
-
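The module removed above is essentially a flyweight cache over property lists: per its docstring, new instances are meant to be obtained through PropertySet.create so that equal property lists share one object, and refine_from_user_input strips "-<feature>value" entries from the parent requirements before refining with the remainder. The standalone Python sketch below (hypothetical helper names, not part of the deleted module) illustrates only that removal-then-refine behaviour on plain "<feature>value" strings.

# Illustrative sketch only -- simplified stand-ins for the removed
# b2.build.property_set helpers; the names here are hypothetical.

def refine(parent, overrides):
    # Later values of the same feature replace the parent's value.
    result = dict(parent)
    result.update(overrides)
    return result

def split_feature(prop):
    # "<toolset>gcc" -> ("toolset", "gcc")
    feature, _, value = prop.lstrip("<").partition(">")
    return feature, value

def refine_from_user_input(parent, specification):
    """Mimics the '-<feature>value' handling of the removed function:
    entries starting with '-' are removed from the parent requirements,
    and the remaining entries refine whatever is left."""
    add, remove = [], []
    for r in specification:
        (remove if r.startswith("-") else add).append(r.lstrip("-"))

    parent = {f: v for f, v in parent.items()
              if ("<%s>%s" % (f, v)) not in remove}
    return refine(parent, dict(split_feature(p) for p in add))

if __name__ == "__main__":
    parent = dict([split_feature("<threading>multi"),
                   split_feature("<variant>debug")])
    print(refine_from_user_input(parent,
          ["-<threading>multi", "<variant>release"]))
    # -> {'variant': 'release'}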
diff --git a/tools/build/v2/build/readme.txt b/tools/build/v2/build/readme.txt
deleted file mode 100644
index c3dddd8d78..0000000000
--- a/tools/build/v2/build/readme.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2001, 2002 Dave Abrahams
-Copyright 2002 Vladimir Prus
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-Development code for new build system. To run unit tests for jam code, execute:
-
- bjam --debug --build-system=test
-
-Comprehensive tests require Python. See ../test/readme.txt
-
-
-
diff --git a/tools/build/v2/build/scanner.jam b/tools/build/v2/build/scanner.jam
deleted file mode 100644
index d6042ea2c7..0000000000
--- a/tools/build/v2/build/scanner.jam
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements scanners: objects that compute implicit dependencies for
-# files, such as includes in C++.
-#
-# Scanner has a regular expression used to find dependencies, some
-# data needed to interpret those dependencies (for example, include
-# paths), and code which actually establishes the needed relationships
-# between actual jam targets.
-#
-# Scanner objects are created by actions, when they try to actualize
-# virtual targets, passed to 'virtual-target.actualize' method and are
-# then associated with actual targets. It is possible to use
-# several scanners for a virtual-target. For example, a single source
-# might be used by two compile actions, with different include paths.
-# In this case, two different actual targets will be created, each
-# having scanner of its own.
-#
-# Typically, scanners are created from target type and action's
-# properties, using the rule 'get' in this module. Directly creating
-# scanners is not recommended, because it might create many equivalent
-# but different instances, and lead to unneeded duplication of
-# actual targets. However, actions can also create scanners in a special
-# way, instead of relying on just target type.
-
-import "class" : new ;
-import property virtual-target property-set ;
-import errors : error ;
-
-# Base scanner class.
-class scanner
-{
- rule __init__ ( )
- {
- }
-
- # Returns a pattern to use for scanning
- rule pattern ( )
- {
- error "method must be overriden" ;
- }
-
- # Establishes the necessary relationships between targets,
- # given the actual target being scanned and a list of
- # pattern matches in that file.
- rule process ( target : matches * )
- {
- error "method must be overriden" ;
- }
-}
-
-# Registers a new scanner class, specifying a set of
-# properties relevant to this scanner. The constructor for that class
-# should have one parameter: a list of properties.
-rule register ( scanner-class : relevant-properties * )
-{
- .registered += $(scanner-class) ;
- .relevant-properties.$(scanner-class) = $(relevant-properties) ;
-}
-
-# Common scanner class, which can be used when there's only one
-# kind of includes (unlike C, where "" and <> includes have different
-# search paths).
-class common-scanner : scanner
-{
- import scanner ;
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
- self.includes = $(includes) ;
- }
-
- rule process ( target : matches * : binding )
- {
- local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
-
- NOCARE $(matches) ;
- INCLUDES $(target) : $(matches) ;
- SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
- ISFILE $(matches) ;
-
- scanner.propagate $(__name__) : $(matches) : $(target) ;
- }
-}
-
-
-# Returns an instance of previously registered scanner,
-# with the specified properties.
-rule get ( scanner-class : property-set )
-{
- if ! $(scanner-class) in $(.registered)
- {
- error "attempt to get unregisted scanner" ;
- }
-
- local r = $(.rv-cache.$(property-set)) ;
- if ! $(r)
- {
- r = [ property-set.create
- [ property.select $(.relevant-properties.$(scanner-class)) :
- [ $(property-set).raw ] ] ] ;
- .rv-cache.$(property-set) = $(r) ;
- }
-
- if ! $(scanner.$(scanner-class).$(r:J=-))
- {
- scanner.$(scanner-class).$(r:J=-) = [ new $(scanner-class) [ $(r).raw ] ] ;
- }
- return $(scanner.$(scanner-class).$(r:J=-)) ;
-}
-
-
-# Installs the specified scanner on actual target 'target'.
-rule install ( scanner : target
- vtarget # virtual target from which 'target' was actualized
-)
-{
- HDRSCAN on $(target) = [ $(scanner).pattern ] ;
- SCANNER on $(target) = $(scanner) ;
- HDRRULE on $(target) = scanner.hdrrule ;
-
- # The scanner reflects differences in properties affecting
- # the binding of 'target', which will be known when processing
- # includes for it, and gives the information needed to
- # interpret quoted includes.
- HDRGRIST on $(target) = $(scanner) ;
-}
-
-# Propagate scanner setting from 'including-target' to 'targets'.
-rule propagate ( scanner : targets * : including-target )
-{
- HDRSCAN on $(targets) = [ on $(including-target) return $(HDRSCAN) ] ;
- SCANNER on $(targets) = $(scanner) ;
- HDRRULE on $(targets) = scanner.hdrrule ;
- HDRGRIST on $(targets) = [ on $(including-target) return $(HDRGRIST) ] ;
-}
-
-
-rule hdrrule ( target : matches * : binding )
-{
- local scanner = [ on $(target) return $(SCANNER) ] ;
- $(scanner).process $(target) : $(matches) : $(binding) ;
-}
-# hdrrule must be available at global scope so that it can be invoked
-# by header scanning
-IMPORT scanner : hdrrule : : scanner.hdrrule ;
-
-
-
-
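The scanner module removed above delegates the actual grepping to the jam engine: HDRSCAN attaches a pattern to a target, and HDRRULE routes each match back to the scanner's process rule, which registers the include as a dependency searched along the scanner's include paths. The Python sketch below is only an illustration of that scanning idea, not how the bjam engine implements it; the regex and the recursive re-scan are assumptions made for the example.

# Illustrative sketch of regex-based include scanning, in the spirit of
# the removed common-scanner class. Not part of Boost.Build.
import os
import re

INCLUDE_RE = re.compile(r'^\s*#\s*include\s*["<]([^">]+)[">]', re.MULTILINE)

def scan(path, include_paths, seen=None):
    """Return the set of headers reachable from 'path', searching first
    next to the including file and then along 'include_paths'."""
    seen = set() if seen is None else seen
    try:
        with open(path) as f:
            text = f.read()
    except OSError:
        return seen
    search = [os.path.dirname(path)] + list(include_paths)
    for name in INCLUDE_RE.findall(text):
        for d in search:
            candidate = os.path.join(d, name)
            if os.path.isfile(candidate) and candidate not in seen:
                seen.add(candidate)
                # Re-scan the included file, much as HDRRULE triggers
                # scanning of newly discovered headers.
                scan(candidate, include_paths, seen)
                break
    return seen

# Example (assuming such files exist):
#   scan("src/main.cpp", ["include"])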
diff --git a/tools/build/v2/build/targets.jam b/tools/build/v2/build/targets.jam
deleted file mode 100644
index 12e4a5edff..0000000000
--- a/tools/build/v2/build/targets.jam
+++ /dev/null
@@ -1,1659 +0,0 @@
-# Copyright Vladimir Prus 2002.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports 'abstract' targets, which are targets explicitly defined in a
-# Jamfile.
-#
-# Abstract targets are represented by classes derived from 'abstract-target'
-# class. The first abstract target is 'project-target', which is created for
-# each Jamfile, and can be obtained by the 'target' rule in the Jamfile's module
-# (see project.jam).
-#
-# Project targets keep a list of 'main-target' instances. A main target is what
-# the user explicitly defines in a Jamfile. It is possible to have several
-# definitions for a main target, for example to have different lists of sources
-# for different platforms. So, main targets keep a list of alternatives.
-#
-# Each alternative is an instance of 'abstract-target'. When a main target
-# subvariant is defined by some rule, that rule will decide what class to use,
-# create an instance of that class and add it to the list of alternatives for
-# the main target.
-#
-# Rules supplied by the build system will use only targets derived from
-# 'basic-target' class, which will provide some default behaviour. There will be
-# different classes derived from it such as 'make-target', created by the 'make'
-# rule, and 'typed-target', created by rules such as 'exe' and 'lib'.
-
-#
-# +------------------------+
-# |abstract-target |
-# +========================+
-# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project-target | | main-target | | basic-target |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
-# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
-# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
-# | | typed-target | | make-target | | stage-target |
-# . +==============+ +=============+ +==============+
-# . | construct | | construct | | construct |
-# +--------------+ +-------------+ +--------------+
-
-import assert ;
-import "class" : new ;
-import errors ;
-import feature ;
-import indirect ;
-import path ;
-import property ;
-import property-set ;
-import sequence ;
-import set ;
-import toolset ;
-import build-request ;
-
-
-# Base class for all abstract targets.
-#
-class abstract-target
-{
- import project ;
- import assert ;
- import "class" ;
- import errors ;
-
- rule __init__ ( name # Name of the target in Jamfile.
- : project-target # The project target to which this one belongs.
- )
- {
- # Note: it might seem that we don't need either name or project at all.
- # However, there are places where we really need it. One example is
- # error messages which should name problematic targets. Another is
- # setting correct paths for sources and generated files.
-
- self.name = $(name) ;
- self.project = $(project-target) ;
- self.location = [ errors.nearest-user-location ] ;
- }
-
- # Returns the name of this target.
- rule name ( )
- {
- return $(self.name) ;
- }
-
- # Returns the project for this target.
- rule project ( )
- {
- return $(self.project) ;
- }
-
- # Return the location where the target was declared.
- rule location ( )
- {
- return $(self.location) ;
- }
-
- # Returns a user-readable name for this target.
- rule full-name ( )
- {
- local location = [ $(self.project).get location ] ;
- return $(location)/$(self.name) ;
- }
-
- # Generates virtual targets for this abstract target using the specified
- # properties, unless a different value of some feature is required by the
- # target.
- # On success, returns:
- # - a property-set with the usage requirements to be applied to dependants
- # - a list of produced virtual targets, which may be empty.
- # If 'property-set' is empty, performs the default build of this target, in
- # a way specific to the derived class.
- #
- rule generate ( property-set )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- rule rename ( new-name )
- {
- self.name = $(new-name) ;
- }
-}
-
-
-if --debug-building in [ modules.peek : ARGV ]
-{
- modules.poke : .debug-building : true ;
-}
-
-
-rule indent ( )
-{
- return $(.indent:J="") ;
-}
-
-
-rule increase-indent ( )
-{
- .indent += " " ;
-}
-
-
-rule decrease-indent ( )
-{
- .indent = $(.indent[2-]) ;
-}
-
-
-# Project target class (derived from 'abstract-target').
-#
-# This class has the following responsibilities:
-# - Maintaining a list of main targets in this project and building them.
-#
-# Main targets are constructed in two stages:
-# - When Jamfile is read, a number of calls to 'add-alternative' is made. At
-# that time, alternatives can also be renamed to account for inline targets.
-# - The first time 'main-target' or 'has-main-target' rule is called, all
-# alternatives are enumerated and main targets are created.
-#
-class project-target : abstract-target
-{
- import project ;
- import targets ;
- import path ;
- import print ;
- import property-set ;
- import set ;
- import sequence ;
- import "class" : new ;
- import errors ;
-
- rule __init__ ( name : project-module parent-project ?
- : requirements * : default-build * )
- {
- abstract-target.__init__ $(name) : $(__name__) ;
-
- self.project-module = $(project-module) ;
- self.location = [ project.attribute $(project-module) location ] ;
- self.requirements = $(requirements) ;
- self.default-build = $(default-build) ;
-
- if $(parent-project)
- {
- inherit $(parent-project) ;
- }
- }
-
- # This is needed only by the 'make' rule. Need to find the way to make
- # 'make' work without this method.
- #
- rule project-module ( )
- {
- return $(self.project-module) ;
- }
-
- rule get ( attribute )
- {
- return [ project.attribute $(self.project-module) $(attribute) ] ;
- }
-
- rule build-dir ( )
- {
- if ! $(self.build-dir)
- {
- self.build-dir = [ get build-dir ] ;
- if ! $(self.build-dir)
- {
- self.build-dir = [ path.join [ $(self.project).get location ]
- bin ] ;
- }
- }
- return $(self.build-dir) ;
- }
-
- # Generates all possible targets contained in this project.
- #
- rule generate ( property-set * )
- {
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "building project" [ name ] " ('$(__name__)') with" [ $(property-set).raw ] ;
- targets.increase-indent ;
- }
-
- local usage-requirements = [ property-set.empty ] ;
- local targets ;
-
- for local t in [ targets-to-build ]
- {
- local g = [ $(t).generate $(property-set) ] ;
- usage-requirements = [ $(usage-requirements).add $(g[1]) ] ;
- targets += $(g[2-]) ;
- }
- targets.decrease-indent ;
- return $(usage-requirements) [ sequence.unique $(targets) ] ;
- }
-
- # Computes and returns a list of abstract-target instances which must be
- # built when this project is built.
- #
- rule targets-to-build ( )
- {
- local result ;
-
- if ! $(self.built-main-targets)
- {
- build-main-targets ;
- }
-
- # Collect all main targets here, except for "explicit" ones.
- for local t in $(self.main-targets)
- {
- if ! [ $(t).name ] in $(self.explicit-targets)
- {
- result += $(t) ;
- }
- }
-
- # Collect all projects referenced via "projects-to-build" attribute.
- local self-location = [ get location ] ;
- for local pn in [ get projects-to-build ]
- {
- result += [ find $(pn)/ ] ;
- }
-
- return $(result) ;
- }
-
- # Add 'target' to the list of targets in this project that should be built
- # only by explicit request.
- #
- rule mark-target-as-explicit ( target-name * )
- {
- # Record the name of the target, not instance, since this rule is called
- # before main target instances are created.
- self.explicit-targets += $(target-name) ;
- }
-
- rule mark-target-as-always ( target-name * )
- {
- # Record the name of the target, not instance, since this rule is called
- # before main target instances are created.
- self.always-targets += $(target-name) ;
- }
-
- # Add new target alternative
- #
- rule add-alternative ( target-instance )
- {
- if $(self.built-main-targets)
- {
- errors.error add-alternative called when main targets are already
- created. : in project [ full-name ] ;
- }
- self.alternatives += $(target-instance) ;
- }
-
- # Returns a 'main-target' class instance corresponding to 'name'.
- #
- rule main-target ( name )
- {
- if ! $(self.built-main-targets)
- {
- build-main-targets ;
- }
- return $(self.main-target.$(name)) ;
- }
-
- # Returns whether a main target with the specified name exists.
- #
- rule has-main-target ( name )
- {
- if ! $(self.built-main-targets)
- {
- build-main-targets ;
- }
-
- if $(self.main-target.$(name))
- {
- return true ;
- }
- }
-
- # Worker function for the find rule not implementing any caching and simply
- # returning nothing in case the target can not be found.
- #
- rule find-really ( id )
- {
- local result ;
- local current-location = [ get location ] ;
-
- local split = [ MATCH (.*)//(.*) : $(id) ] ;
- local project-part = $(split[1]) ;
- local target-part = $(split[2]) ;
-
- local extra-error-message ;
- if $(project-part)
- {
- # There is an explicitly specified project part in id. Looks up the
- # project and passes the request to it.
- local pm = [ project.find $(project-part) : $(current-location) ] ;
- if $(pm)
- {
- project-target = [ project.target $(pm) ] ;
- result = [ $(project-target).find $(target-part) : no-error ] ;
- }
- else
- {
- # TODO: This extra error message will not get displayed most
- # likely due to some buggy refactoring. Refactor the code so the
- # message gets displayed again.
- extra-error-message = error: could not find project
- '$(project-part)' ;
- }
- }
- else
- {
- # Interpret target-name as name of main target. Need to do this
- # before checking for file. Consider the following scenario with a
- # toolset not modifying its executable's names, e.g. gcc on
- # Unix-like platforms:
- #
- # exe test : test.cpp ;
- # install s : test : <location>. ;
- #
- # After the first build we would have a target named 'test' in the
- # Jamfile and a file named 'test' on the disk. We need the target to
- # override the file.
- result = [ main-target $(id) ] ;
-
- # Interpret id as an existing file reference.
- if ! $(result)
- {
- result = [ new file-reference [ path.make $(id) ] :
- $(self.project) ] ;
- if ! [ $(result).exists ]
- {
- result = ;
- }
- }
-
- # Interpret id as project-id.
- if ! $(result)
- {
- local project-module = [ project.find $(id) :
- $(current-location) ] ;
- if $(project-module)
- {
- result = [ project.target $(project-module) ] ;
- }
- }
- }
-
- return $(result) ;
- }
-
- # Find and return the target with the specified id, treated relative to
- # self. Id may specify either a target or a file name with the target taking
- # priority. May report an error or return nothing if the target is not found
- # depending on the 'no-error' parameter.
- #
- rule find ( id : no-error ? )
- {
- local v = $(.id.$(id)) ;
- if ! $(v)
- {
- v = [ find-really $(id) ] ;
- if ! $(v)
- {
- v = none ;
- }
- .id.$(id) = $(v) ;
- }
-
- if $(v) != none
- {
- return $(v) ;
- }
- else
- {
- if ! $(no-error)
- {
- local current-location = [ get location ] ;
- ECHO "error: Unable to find file or target named" ;
- ECHO "error: '$(id)'" ;
- ECHO "error: referred from project at" ;
- ECHO "error: '$(current-location)'" ;
- ECHO $(extra-error-message) ;
- EXIT ;
- }
- }
- }
-
- rule build-main-targets ( )
- {
- self.built-main-targets = true ;
- for local a in $(self.alternatives)
- {
- local name = [ $(a).name ] ;
- local target = $(self.main-target.$(name)) ;
- if ! $(target)
- {
- local t = [ new main-target $(name) : $(self.project) ] ;
- self.main-target.$(name) = $(t) ;
- self.main-targets += $(t) ;
- target = $(self.main-target.$(name)) ;
- }
-
- if $(name) in $(self.always-targets)
- {
- $(a).always ;
- }
-
- $(target).add-alternative $(a) ;
- }
- }
-
- # Accessor, add a constant.
- #
- rule add-constant (
- name # Variable name of the constant.
- : value + # Value of the constant.
- : type ? # Optional type of value.
- )
- {
- switch $(type)
- {
- case path :
- local r ;
- for local v in $(value)
- {
- local l = $(self.location) ;
- if ! $(l)
- {
- # Projects corresponding to config files do not have a
- # 'location' attribute, but do have a source location.
- # It might be more reasonable to make every project have
- # a location and use some other approach to prevent buildable
- # targets in config files, but that's for later.
- l = [ get source-location ] ;
- }
- v = [ path.root [ path.make $(v) ] $(l) ] ;
- # Now make the value absolute path.
- v = [ path.root $(v) [ path.pwd ] ] ;
- # Constants should be in platform-native form.
- v = [ path.native $(v) ] ;
- r += $(v) ;
- }
- value = $(r) ;
- }
- if ! $(name) in $(self.constants)
- {
- self.constants += $(name) ;
- }
- self.constant.$(name) = $(value) ;
- # Inject the constant in the scope of the Jamroot module.
- modules.poke $(self.project-module) : $(name) : $(value) ;
- }
-
- rule inherit ( parent )
- {
- for local c in [ modules.peek $(parent) : self.constants ]
- {
- # No need to pass the type. Path constants were converted to
- # absolute paths already by parent.
- add-constant $(c)
- : [ modules.peek $(parent) : self.constant.$(c) ] ;
- }
-
- # Import rules from parent.
- local this-module = [ project-module ] ;
- local parent-module = [ $(parent).project-module ] ;
- # Do not import rules coming from 'project-rules' as they must be
- # imported localized.
- local user-rules = [ set.difference
- [ RULENAMES $(parent-module) ] :
- [ RULENAMES project-rules ] ] ;
- IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
- EXPORT $(this-module) : $(user-rules) ;
- }
-}
-
-
-# Helper rules to detect cycles in main target references.
-#
-local rule start-building ( main-target-instance )
-{
- if $(main-target-instance) in $(.targets-being-built)
- {
- local names ;
- for local t in $(.targets-being-built) $(main-target-instance)
- {
- names += [ $(t).full-name ] ;
- }
-
- errors.error "Recursion in main target references"
- : "the following target are being built currently:"
- : $(names) ;
- }
- .targets-being-built += $(main-target-instance) ;
-}
-
-
-local rule end-building ( main-target-instance )
-{
- .targets-being-built = $(.targets-being-built[1--2]) ;
-}
-
-
-# A named top-level target in Jamfile.
-#
-class main-target : abstract-target
-{
- import assert ;
- import errors ;
- import feature ;
- import print ;
- import property-set ;
- import sequence ;
- import targets : start-building end-building ;
-
- rule __init__ ( name : project )
- {
- abstract-target.__init__ $(name) : $(project) ;
- }
-
- # Add a new alternative for this target
- rule add-alternative ( target )
- {
- local d = [ $(target).default-build ] ;
- if $(self.alternatives) && ( $(self.default-build) != $(d) )
- {
- errors.error "default build must be identical in all alternatives"
- : "main target is" [ full-name ]
- : "with" [ $(d).raw ]
- : "differing from previous default build" [ $(self.default-build).raw ] ;
- }
- else
- {
- self.default-build = $(d) ;
- }
- self.alternatives += $(target) ;
- }
-
- # Returns the best viable alternative for this property-set. See the
- # documentation for selection rules.
- #
- local rule select-alternatives ( property-set debug ? )
- {
- # When selecting alternatives we have to consider defaults, for example:
- # lib l : l.cpp : <variant>debug ;
- # lib l : l_opt.cpp : <variant>release ;
- # won't work unless we add default value <variant>debug.
- property-set = [ $(property-set).add-defaults ] ;
-
- # The algorithm: we keep the current best viable alternative. When we've
- # got a new best viable alternative, we compare it with the current one.
-
- local best ;
- local best-properties ;
-
- if $(self.alternatives[2-])
- {
- local bad ;
- local worklist = $(self.alternatives) ;
- while $(worklist) && ! $(bad)
- {
- local v = $(worklist[1]) ;
- local properties = [ $(v).match $(property-set) $(debug) ] ;
-
- if $(properties) != no-match
- {
- if ! $(best)
- {
- best = $(v) ;
- best-properties = $(properties) ;
- }
- else
- {
- if $(properties) = $(best-properties)
- {
- bad = true ;
- }
- else if $(properties) in $(best-properties)
- {
- # Do nothing, this alternative is worse
- }
- else if $(best-properties) in $(properties)
- {
- best = $(v) ;
- best-properties = $(properties) ;
- }
- else
- {
- bad = true ;
- }
- }
- }
- worklist = $(worklist[2-]) ;
- }
- if ! $(bad)
- {
- return $(best) ;
- }
- }
- else
- {
- return $(self.alternatives) ;
- }
- }
-
- rule apply-default-build ( property-set )
- {
- return [ targets.apply-default-build $(property-set)
- : $(self.default-build) ] ;
- }
-
- # Select an alternative for this main target, by finding all alternatives
- # whose requirements are satisfied by 'properties' and picking the one with
- # the longest requirements set. Returns the result of calling 'generate' on
- # that alternative.
- #
- rule generate ( property-set )
- {
- start-building $(__name__) ;
-
- # We want composite properties in the build request to act as if all
- # the properties they expand to were explicitly specified.
- property-set = [ $(property-set).expand ] ;
-
- local all-property-sets = [ apply-default-build $(property-set) ] ;
- local usage-requirements = [ property-set.empty ] ;
- local result ;
- for local p in $(all-property-sets)
- {
- local r = [ generate-really $(p) ] ;
- if $(r)
- {
- usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
- result += $(r[2-]) ;
- }
- }
- end-building $(__name__) ;
- return $(usage-requirements) [ sequence.unique $(result) ] ;
- }
-
- # Generates the main target with the given property set and returns a list
- # whose first element is a property-set object containing the usage
- # requirements of the generated target, with the generated virtual targets
- # in the remaining elements. It is possible that no targets are generated.
- #
- local rule generate-really ( property-set )
- {
- local best-alternatives = [ select-alternatives $(property-set) ] ;
- if ! $(best-alternatives)
- {
- ECHO "error: No best alternative for" [ full-name ] ;
- select-alternatives $(property-set) debug ;
- return [ property-set.empty ] ;
- }
- else
- {
- # Now return virtual targets for the only alternative.
- return [ $(best-alternatives).generate $(property-set) ] ;
- }
- }
-
- rule rename ( new-name )
- {
- abstract-target.rename $(new-name) ;
- for local a in $(self.alternatives)
- {
- $(a).rename $(new-name) ;
- }
- }
-}
-
-
-# Abstract target referring to a source file. This is an artificial entity
-# allowing the sources of a target to be represented using a list of abstract
-# target instances.
-#
-class file-reference : abstract-target
-{
- import virtual-target ;
- import property-set ;
- import path ;
-
- rule __init__ ( file : project )
- {
- abstract-target.__init__ $(file) : $(project) ;
- }
-
- rule generate ( properties )
- {
- return [ property-set.empty ] [ virtual-target.from-file $(self.name) :
- [ location ] : $(self.project) ] ;
- }
-
- # Returns true if the referred file really exists.
- rule exists ( )
- {
- location ;
- return $(self.file-path) ;
- }
-
- # Returns the location of target. Needed by 'testing.jam'.
- rule location ( )
- {
- if ! $(self.file-location)
- {
- local source-location = [ $(self.project).get source-location ] ;
- for local src-dir in $(source-location)
- {
- if ! $(self.file-location)
- {
- local location = [ path.root $(self.name) $(src-dir) ] ;
- if [ CHECK_IF_FILE [ path.native $(location) ] ]
- {
- self.file-location = $(src-dir) ;
- self.file-path = $(location) ;
- }
- }
- }
- }
- return $(self.file-location) ;
- }
-}
-
-
-# Given a target-reference, made in the context of 'project', returns the
-# abstract-target instance that is referred to, as well as properties explicitly
-# specified for this reference.
-#
-rule resolve-reference ( target-reference : project )
-{
- # Separate target name from properties override.
- local split = [ MATCH "^([^<]*)(/(<.*))?$" : $(target-reference) ] ;
- local id = $(split[1]) ;
- local sproperties = ;
- if $(split[3])
- {
- sproperties = [ property.make [ feature.split $(split[3]) ] ] ;
- sproperties = [ feature.expand-composites $(sproperties) ] ;
- }
-
- # Find the target.
- local target = [ $(project).find $(id) ] ;
-
- return $(target) [ property-set.create $(sproperties) ] ;
-}
-
-
-# Attempts to generate the target given by target reference, which can refer
-# either to a main target or to a file. Returns a list consisting of
-# - usage requirements
-# - generated virtual targets, if any
-#
-rule generate-from-reference (
- target-reference # Target reference.
- : project # Project where the reference is made.
- : property-set # Properties of the main target that makes the reference.
-)
-{
- local r = [ resolve-reference $(target-reference) : $(project) ] ;
- local target = $(r[1]) ;
- local sproperties = $(r[2]) ;
-
- # Take properties which should be propagated and refine them with
- # source-specific requirements.
- local propagated = [ $(property-set).propagated ] ;
- local rproperties = [ $(propagated).refine $(sproperties) ] ;
- if $(rproperties[1]) = "@error"
- {
- errors.error
- "When building" [ full-name ] " with properties " $(properties) :
- "Invalid properties specified for " $(source) ":"
- $(rproperties[2-]) ;
- }
- return [ $(target).generate $(rproperties) ] ;
-}
-
-rule apply-default-build ( property-set : default-build )
-{
- # 1. First, see what properties from default-build are already present
- # in property-set.
-
- local raw = [ $(property-set).raw ] ;
- local specified-features = $(raw:G) ;
-
- local defaults-to-apply ;
- for local d in [ $(default-build).raw ]
- {
- if ! $(d:G) in $(specified-features)
- {
- defaults-to-apply += $(d) ;
- }
- }
-
- # 2. If there are any defaults to be applied, form a new build request.
- # Pass it through to 'expand-no-defaults' since default-build might
- # contain "release debug" resulting in two property-sets.
- local result ;
- if $(defaults-to-apply)
- {
- properties = [
- build-request.expand-no-defaults
-
- # We have to compress subproperties here to prevent property
- # lists like:
- #
- # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
- #
- # from being expanded into:
- #
- # <toolset-msvc:version>7.1/<threading>multi
- # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
- #
- # due to a cross-product property combination. That may be an
- # indication that build-request.expand-no-defaults is the wrong
- # rule to use here.
- [ feature.compress-subproperties $(raw) ]
- $(defaults-to-apply)
- ] ;
-
- if $(properties)
- {
- for local p in $(properties)
- {
- result += [ property-set.create
- [ feature.expand [ feature.split $(p) ] ] ] ;
- }
- }
- else
- {
- result = [ property-set.empty ] ;
- }
- }
- else
- {
- result = $(property-set) ;
- }
- return $(result) ;
-}
-
-
-# Given a build request and requirements, return properties common to dependency
-# build request and target requirements.
-#
-# TODO: Document exactly what 'common properties' are, whether they should
-# include default property values, whether they should contain any conditional
-# properties or should those be already processed, etc. See whether there are
-# any differences between use cases with empty and non-empty build-request as
-# well as with requirements containing and those not containing any non-free
-# features.
-#
-rule common-properties ( build-request requirements )
-{
- # For optimization, we add free requirements directly, without using a
- # complex algorithm. This gives the complex algorithm a better chance of
- # caching results.
- local free = [ $(requirements).free ] ;
- local non-free = [ property-set.create [ $(requirements).base ]
- [ $(requirements).incidental ] ] ;
-
- local key = .rp.$(build-request)-$(non-free) ;
- if ! $($(key))
- {
- $(key) = [ common-properties2 $(build-request) $(non-free) ] ;
- }
- return [ $($(key)).add-raw $(free) ] ;
-}
-
-
-# Given a 'context' -- a set of already present properties, and 'requirements',
-# decide which extra properties should be applied to 'context'. For conditional
-# requirements, this means evaluating the condition. For indirect conditional
-# requirements, this means calling a rule. Ordinary requirements are always
-# applied.
-#
-# Handles the situation where evaluating one conditional requirement affects
-# the conditions of other conditional requirements, such as:
-# <toolset>gcc:<variant>release <variant>release:<define>RELEASE
-#
-# If 'what' is 'refined' returns context refined with new requirements. If
-# 'what' is 'added' returns just the requirements to be applied.
-#
-rule evaluate-requirements ( requirements : context : what )
-{
- # Apply non-conditional requirements. It is possible that further
- # conditional requirement change a value set by non-conditional
- # requirements. For example:
- #
- # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
- #
- # I am not sure if this should be an error, or not, especially given that
- #
- # <threading>single
- #
- # might come from project's requirements.
-
- local unconditional = [ feature.expand [ $(requirements).non-conditional ] ] ;
-
- local raw = [ $(context).raw ] ;
- raw = [ property.refine $(raw) : $(unconditional) ] ;
-
- # We have collected properties that surely must be present in common
- # properties. We now try to figure out what other properties should be added
- # in order to satisfy rules (4)-(6) from the docs.
-
- local conditionals = [ $(requirements).conditional ] ;
- # The 'count' variable has one element for each conditional feature and for
- # each occurrence of the '<indirect-conditional>' feature. It is used as a loop
- # counter: on each iteration of the loop we remove one element, and the
- # property set should stabilize before we are done. It is assumed that
- # #conditionals iterations should be enough for properties to propagate
- # along conditions in any direction.
- local count = $(conditionals)
- [ $(requirements).get <conditional> ]
- and-once-more ;
-
- local added-requirements ;
-
- local current = $(raw) ;
-
- # It is assumed that ordinary conditional requirements can not add
- # <conditional> properties (a.k.a. indirect conditional properties), and
- # that rules referred to by <conditional> properties can not add new
- # <conditional> properties. So the list of indirect conditionals does not
- # change.
- local indirect = [ $(requirements).get <conditional> ] ;
- indirect = [ MATCH ^@(.*) : $(indirect) ] ;
-
- local ok ;
- while $(count)
- {
- # Evaluate conditionals in context of current properties.
- local e = [ property.evaluate-conditionals-in-context $(conditionals)
- : $(current) ] ;
-
- # Evaluate indirect conditionals.
- for local i in $(indirect)
- {
- e += [ indirect.call $(i) $(current) ] ;
- }
-
- if $(e) = $(added-requirements)
- {
- # If we got the same result, we have found the final properties.
- count = ;
- ok = true ;
- }
- else
- {
- # Oops, conditional evaluation results have changed. Also 'current'
- # contains leftovers from a previous evaluation. Recompute 'current'
- # using initial properties and conditional requirements.
- added-requirements = $(e) ;
- current = [ property.refine $(raw) : [ feature.expand $(e) ] ] ;
- }
- count = $(count[2-]) ;
- }
- if ! $(ok)
- {
- errors.error "Can not evaluate conditional properties " $(conditionals) ;
- }
-
- if $(what) = added
- {
- return [ property-set.create $(unconditional) $(added-requirements) ] ;
- }
- else if $(what) = refined
- {
- return [ property-set.create $(current) ] ;
- }
- else
- {
- errors.error "Invalid value of the 'what' parameter." ;
- }
-}
-
-
-rule common-properties2 ( build-request requirements )
-{
- # This guarantees that default properties are present in the result, unless
- # they are overridden by some requirement. FIXME: There is a possibility that
- # we have added <foo>bar, which is composite and expands to <foo2>bar2, but
- # the default value of <foo2> is not bar2, in which case it is not clear what to
- # do.
- #
- build-request = [ $(build-request).add-defaults ] ;
- # Features added by 'add-default' can be composite and expand to features
- # without default values -- so they are not added yet. It could be
- # clearer/faster to expand only newly added properties but that is not critical.
- build-request = [ $(build-request).expand ] ;
-
- return [ evaluate-requirements $(requirements) : $(build-request) :
- refined ] ;
-}
-
-rule push-target ( target )
-{
- .targets = $(target) $(.targets) ;
-}
-
-rule pop-target ( )
-{
- .targets = $(.targets[2-]) ;
-}
-
-# Return the metatarget that is currently being generated.
-rule current ( )
-{
- return $(.targets[1]) ;
-}
-
-
-# Implements the most standard way of constructing a main target alternative
-# from sources. Allows sources to be either a file or another main target and
-# handles generation of those dependency targets.
-#
-class basic-target : abstract-target
-{
- import build-request ;
- import build-system ;
- import "class" : new ;
- import errors ;
- import feature ;
- import property ;
- import property-set ;
- import sequence ;
- import set ;
- import targets ;
- import virtual-target ;
-
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- abstract-target.__init__ $(name) : $(project) ;
-
- self.sources = $(sources) ;
- if ! $(requirements) {
- requirements = [ property-set.empty ] ;
- }
- self.requirements = $(requirements) ;
- if ! $(default-build)
- {
- default-build = [ property-set.empty ] ;
- }
- self.default-build = $(default-build) ;
- if ! $(usage-requirements)
- {
- usage-requirements = [ property-set.empty ] ;
- }
- self.usage-requirements = $(usage-requirements) ;
-
- if $(sources:G)
- {
- errors.user-error properties found in the 'sources' parameter for
- [ full-name ] ;
- }
- }
-
- rule always ( )
- {
- self.always = 1 ;
- }
-
- # Returns the list of abstract-targets which are used as sources. The extra
- # properties specified for sources are not represented. The only user for
- # this rule at the moment is the "--dump-tests" feature of the test system.
- #
- rule sources ( )
- {
- if ! $(self.source-targets)
- {
- for local s in $(self.sources)
- {
- self.source-targets +=
- [ targets.resolve-reference $(s) : $(self.project) ] ;
- }
- }
- return $(self.source-targets) ;
- }
-
- rule requirements ( )
- {
- return $(self.requirements) ;
- }
-
- rule default-build ( )
- {
- return $(self.default-build) ;
- }
-
- # Returns the alternative condition for this alternative, if the condition
- # is satisfied by 'property-set'.
- #
- rule match ( property-set debug ? )
- {
- # The condition is composed of all base non-conditional properties. It
- # is not clear if we should expand 'self.requirements' or not. For one
- # thing, it would be nice to be able to put
- # <toolset>msvc-6.0
- # in requirements. On the other hand, if we have <variant>release as a
- # condition it does not make sense to require <optimization>full to be
- # in the build request just to select this variant.
- local bcondition = [ $(self.requirements).base ] ;
- local ccondition = [ $(self.requirements).conditional ] ;
- local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
- if $(debug)
- {
- ECHO " next alternative: required properties:" $(condition:E=(empty)) ;
- }
-
- if $(condition) in [ $(property-set).raw ]
- {
- if $(debug)
- {
- ECHO " matched" ;
- }
- return $(condition) ;
- }
- else
- {
- if $(debug)
- {
- ECHO " not matched" ;
- }
- return no-match ;
- }
- }
-
- # Takes a target reference, which might be either a target id or a dependency
- # property, and generates that target using 'property-set' as build request.
- #
- # The results are added to the variable called 'result-var'. Usage
- # requirements are added to the variable called 'usage-requirements-var'.
- #
- rule generate-dependencies ( dependencies * : property-set
- : result-var usage-requirements-var )
- {
- for local dependency in $(dependencies)
- {
- local grist = $(dependency:G) ;
- local id = $(dependency:G=) ;
-
- local result = [ targets.generate-from-reference $(id) :
- $(self.project) : $(property-set) ] ;
-
- $(result-var) += $(result[2-]:G=$(grist)) ;
- $(usage-requirements-var) += [ $(result[1]).raw ] ;
- }
- }
-
- # Determines final build properties, generates sources, and calls
- # 'construct'. This method should not be overridden.
- #
- rule generate ( property-set )
- {
- if [ modules.peek : .debug-building ]
- {
- ECHO ;
- local fn = [ full-name ] ;
- ECHO [ targets.indent ] "Building target '$(fn)'" ;
- targets.increase-indent ;
- ECHO [ targets.indent ] "Build request: " $(property-set) [ $(property-set).raw ] ;
- local cf = [ build-system.command-line-free-features ] ;
- ECHO [ targets.indent ] "Command line free features: " [ $(cf).raw ] ;
- ECHO [ targets.indent ] "Target requirements: " [ $(self.requirements).raw ] ;
- }
- targets.push-target $(__name__) ;
-
- if ! $(self.generated.$(property-set))
- {
- # Apply free features from the command line. If the user said
- # define=FOO
- # they most likely want this define to be set for all compiles.
- property-set = [ $(property-set).refine
- [ build-system.command-line-free-features ] ] ;
- local rproperties = [ targets.common-properties $(property-set)
- $(self.requirements) ] ;
-
- if [ modules.peek : .debug-building ]
- {
- ECHO ;
- ECHO [ targets.indent ] "Common properties: " [ $(rproperties).raw ] ;
- }
-
- if ( $(rproperties[1]) != "@error" ) && ( [ $(rproperties).get
- <build> ] != no )
- {
- local source-targets ;
- local properties = [ $(rproperties).non-dependency ] ;
- local usage-requirements ;
-
- generate-dependencies [ $(rproperties).dependency ] :
- $(rproperties) : properties usage-requirements ;
-
- generate-dependencies $(self.sources) : $(rproperties) :
- source-targets usage-requirements ;
-
- if [ modules.peek : .debug-building ]
- {
- ECHO ;
- ECHO [ targets.indent ] "Usage requirements for"
- $(self.name)": " $(usage-requirements) ;
- }
-
- rproperties = [ property-set.create $(properties)
- $(usage-requirements) ] ;
- usage-requirements = [ property-set.create $(usage-requirements) ] ;
-
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "Build properties: "
- [ $(rproperties).raw ] ;
- }
-
- local extra = [ $(rproperties).get <source> ] ;
- source-targets += $(extra:G=) ;
- # We might get duplicate sources, for example if we link to two
- # libraries having the same <library> usage requirement.
- # Use a stable sort, since for some targets the order is
- # important. E.g. the RUN_PY target needs the python source to come
- # first.
- source-targets = [ sequence.unique $(source-targets) : stable ] ;
-
- local result = [ construct $(self.name) : $(source-targets) :
- $(rproperties) ] ;
-
- if $(result)
- {
- local gur = $(result[1]) ;
- result = $(result[2-]) ;
-
- if $(self.always)
- {
- for local t in $(result)
- {
- $(t).always ;
- }
- }
-
- local s = [ create-subvariant $(result)
- : [ virtual-target.recent-targets ]
- : $(property-set) : $(source-targets)
- : $(rproperties) : $(usage-requirements) ] ;
- virtual-target.clear-recent-targets ;
-
- local ur = [ compute-usage-requirements $(s) ] ;
- ur = [ $(ur).add $(gur) ] ;
- $(s).set-usage-requirements $(ur) ;
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "Usage requirements from"
- $(self.name)": " [ $(ur).raw ] ;
- }
-
- self.generated.$(property-set) = $(ur) $(result) ;
- }
- }
- else
- {
- if $(rproperties[1]) = "@error"
- {
- ECHO [ targets.indent ] "Skipping build of:" [ full-name ]
- "cannot compute common properties" ;
- }
- else if [ $(rproperties).get <build> ] = no
- {
- # If we just see <build>no, we cannot produce any reasonable
- # diagnostics. The code that adds this property is expected
- # to explain why a target is not built, for example using
- # the configure.log-component-configuration function.
- }
- else
- {
- ECHO [ targets.indent ] "Skipping build of: " [ full-name ]
- " unknown reason" ;
- }
-
- # We are here either because there has been an error computing
- # properties or there is <build>no in properties. In the latter
- # case we do not want any diagnostic. In the former case, we
- # need diagnostics. FIXME
-
- # If this target fails to build, add <build>no to properties to
- # cause any parent target to fail to build. Except that it
- # - does not work now, since we check for <build>no only in
- # common properties, but not in properties that came from
- # dependencies
- # - it is not clear if that is a good idea anyway. The alias
- # target, for example, should not fail to build if a
- # dependency fails.
- self.generated.$(property-set) = [ property-set.create <build>no ] ;
- }
- }
- else
- {
- if [ modules.peek : .debug-building ]
- {
- ECHO [ targets.indent ] "Already built" ;
- local ur = $(self.generated.$(property-set)) ;
- ur = $(ur[0]) ;
- targets.increase-indent ;
- ECHO [ targets.indent ] "Usage requirements from"
- $(self.name)": " [ $(ur).raw ] ;
- targets.decrease-indent ;
- }
- }
-
- targets.pop-target ;
- targets.decrease-indent ;
- return $(self.generated.$(property-set)) ;
- }
-
- # Given the set of generated targets, and refined build properties,
- # determines and sets appropriate usage requirements on those targets.
- #
- rule compute-usage-requirements ( subvariant )
- {
- local rproperties = [ $(subvariant).build-properties ] ;
- xusage-requirements = [ targets.evaluate-requirements
- $(self.usage-requirements) : $(rproperties) : added ] ;
-
- # We generate all dependency properties and add them, as well as their
- # usage requirements, to the result.
- local extra ;
- generate-dependencies [ $(xusage-requirements).dependency ] :
- $(rproperties) : extra extra ;
-
- local result = [ property-set.create
- [ $(xusage-requirements).non-dependency ] $(extra) ] ;
-
- # Propagate usage requirements we got from sources, except for the
- # <pch-header> and <pch-file> features.
- #
- # That feature specifies which pch file to use, and should apply only to
- # direct dependents. Consider:
- #
- # pch pch1 : ...
- # lib lib1 : ..... pch1 ;
- # pch pch2 :
- # lib lib2 : pch2 lib1 ;
- #
- # Here, lib2 should not get <pch-header> property from pch1.
- #
- # Essentially, when those two features are in usage requirements, they
- # are propagated only to direct dependents. We might need a more general
- # mechanism, but for now, only those two features are special.
- #
- # TODO - Actually there are more possible candidates like for instance
- # when listing static library X as a source for another static library.
- # Then static library X will be added as a <source> property to the
- # second library's usage requirements but those requirements should last
- # only up to the first executable or shared library that actually links
- # to it.
- local raw = [ $(subvariant).sources-usage-requirements ] ;
- raw = [ $(raw).raw ] ;
- raw = [ property.change $(raw) : <pch-header> ] ;
- raw = [ property.change $(raw) : <pch-file> ] ;
- return [ $(result).add [ property-set.create $(raw) ] ] ;
- }
-
- # Creates new subvariant instances for 'targets'.
- # 'root-targets' - virtual targets to be returned to dependants
- # 'all-targets' - virtual targets created while building this main target
- # 'build-request' - property-set instance with requested build properties
- #
- local rule create-subvariant ( root-targets * : all-targets * :
- build-request : sources * : rproperties : usage-requirements )
- {
- for local e in $(root-targets)
- {
- $(e).root true ;
- }
-
- # Process all virtual targets that will be created if this main target
- # is created.
- local s = [ new subvariant $(__name__) : $(build-request) : $(sources) :
- $(rproperties) : $(usage-requirements) : $(all-targets) ] ;
- for local v in $(all-targets)
- {
- if ! [ $(v).creating-subvariant ]
- {
- $(v).creating-subvariant $(s) ;
- }
- }
- return $(s) ;
- }
-
- # Constructs virtual targets for this abstract target and the dependency
- # graph. Returns a usage-requirements property-set and a list of virtual
- # targets. Should be overridden in derived classes.
- #
- rule construct ( name : source-targets * : properties * )
- {
- errors.error "method should be defined in derived classes" ;
- }
-}
-
-
-class typed-target : basic-target
-{
- import generators ;
-
- rule __init__ ( name : project : type : sources * : requirements * :
- default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
-
- self.type = $(type) ;
- }
-
- rule type ( )
- {
- return $(self.type) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local r = [ generators.construct $(self.project) $(name:S=) : $(self.type)
- : [ property-set.create [ $(property-set).raw ]
- <main-target-type>$(self.type) ]
- : $(source-targets) : true ] ;
- if ! $(r)
- {
- ECHO "warn: Unable to construct" [ full-name ] ;
-
- # Are there any top-level generators for this type/property set.
- if ! [ generators.find-viable-generators $(self.type)
- : $(property-set) ]
- {
- ECHO "error: no generators were found for type '$(self.type)'" ;
- ECHO "error: and the requested properties" ;
- ECHO "error: make sure you've configured the needed tools" ;
- ECHO "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" ;
- ECHO "To debug this problem, try the --debug-generators option." ;
- EXIT ;
- }
- }
- return $(r) ;
- }
-}
-
-
-# Return the list of sources to use, if main target rule is invoked with
-# 'sources'. If there are any objects in 'sources', they are treated as main
-# target instances, and the names of such targets are adjusted to be
-# '<name_of_this_target>__<name_of_source_target>'. Such renaming is disabled if
-# a non-empty value is passed as the 'no-renaming' parameter.
-#
-rule main-target-sources ( sources * : main-target-name : no-renaming ? )
-{
- local result ;
- for local t in $(sources)
- {
- if [ class.is-instance $(t) ]
- {
- local name = [ $(t).name ] ;
- if ! $(no-renaming)
- {
- name = $(main-target-name)__$(name) ;
- $(t).rename $(name) ;
- }
- # Inline targets are not built by default.
- local p = [ $(t).project ] ;
- $(p).mark-target-as-explicit $(name) ;
- result += $(name) ;
- }
- else
- {
- result += $(t) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns the requirements to use when declaring a main target, obtained by
-# translating all specified property paths and refining project requirements
-# with the ones specified for the target.
-#
-rule main-target-requirements (
- specification * # Properties explicitly specified for the main target.
- : project # Project where the main target is to be declared.
-)
-{
- specification += [ toolset.requirements ] ;
-
- local requirements = [ property-set.refine-from-user-input
- [ $(project).get requirements ] : $(specification) :
- [ $(project).project-module ] : [ $(project).get location ] ] ;
- if $(requirements[1]) = "@error"
- {
- errors.error "Conflicting requirements for target:" $(requirements) ;
- }
- return $(requirements) ;
-}
-
-
-# Returns the usage requirements to use when declaring a main target, which are
-# obtained by translating all specified property paths and adding project's
-# usage requirements.
-#
-rule main-target-usage-requirements (
- specification * # Use-properties explicitly specified for a main target.
- : project # Project where the main target is to be declared.
-)
-{
- local project-usage-requirements = [ $(project).get usage-requirements ] ;
-
- # We do not use 'refine-from-user-input' because:
- # - I am not sure if removing parent's usage requirements makes sense
- # - refining usage requirements is not needed, since usage requirements are
- # always free.
- local usage-requirements = [ property-set.create-from-user-input
- $(specification)
- : [ $(project).project-module ] [ $(project).get location ] ] ;
-
- return [ $(project-usage-requirements).add $(usage-requirements) ] ;
-}
-
-
-# Return the default build value to use when declaring a main target, which is
-# obtained by using the specified value if not empty and parent's default build
-# attribute otherwise.
-#
-rule main-target-default-build (
- specification * # Default build explicitly specified for a main target.
- : project # Project where the main target is to be declared.
-)
-{
- local result ;
- if $(specification)
- {
- result = $(specification) ;
- }
- else
- {
- result = [ $(project).get default-build ] ;
- }
- return [ property-set.create-with-validation $(result) ] ;
-}
-
-
-# Registers the specified target as a main target alternative and returns it.
-#
-rule main-target-alternative ( target )
-{
- local ptarget = [ $(target).project ] ;
- $(ptarget).add-alternative $(target) ;
- return $(target) ;
-}
-
- # Creates a new metatarget with the specified properties, using 'klass' as
-# the class. The 'name', 'sources',
-# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
-# the form specified by the user in Jamfile corresponding to 'project'.
-#
-rule create-metatarget ( klass : project : name : sources * : requirements * :
- default-build * : usage-requirements * )
-{
- return [
- targets.main-target-alternative
- [ new $(klass) $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-}
-
-# Creates a typed-target with the specified properties. The 'name', 'sources',
-# 'requirements', 'default-build' and 'usage-requirements' are assumed to be in
-# the form specified by the user in Jamfile corresponding to 'project'.
-#
-rule create-typed-target ( type : project : name : sources * : requirements * :
- default-build * : usage-requirements * )
-{
- return [
- targets.main-target-alternative
- [ new typed-target $(name) : $(project) : $(type)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-}
diff --git a/tools/build/v2/build/targets.py b/tools/build/v2/build/targets.py
deleted file mode 100644
index b8546809a8..0000000000
--- a/tools/build/v2/build/targets.py
+++ /dev/null
@@ -1,1402 +0,0 @@
-# Status: ported.
-# Base revision: 64488
-
-# Copyright Vladimir Prus 2002-2007.
-# Copyright Rene Rivera 2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
-#
-# Abstract targets are represented by classes derived from 'AbstractTarget' class.
-# The first abstract target is 'project_target', which is created for each
-# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
-# (see project.jam).
-#
-# Project targets keep a list of 'MainTarget' instances.
-# A main target is what the user explicitly defines in a Jamfile. It is
-# possible to have several definitions for a main target, for example to have
-# different lists of sources for different platforms. So, main targets
-# keep a list of alternatives.
-#
-# Each alternative is an instance of 'AbstractTarget'. When a main target
-# subvariant is defined by some rule, that rule will decide what class to
-# use, create an instance of that class and add it to the list of alternatives
-# for the main target.
-#
-# Rules supplied by the build system will use only targets derived
-# from 'BasicTarget' class, which will provide some default behaviour.
-# There will be two classes derived from it, 'make-target', created by the
-# 'make' rule, and 'TypedTarget', created by rules such as 'exe' and 'dll'.
-
-#
-# +------------------------+
-# |AbstractTarget |
-# +========================+
-# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project_target | | MainTarget | | BasicTarget |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
-# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
-# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
-# | | TypedTarget | | make-target | | stage-target |
-# . +==============+ +=============+ +==============+
-# . | construct | | construct | | construct |
-# +--------------+ +-------------+ +--------------+
-
-import re
-import os.path
-import sys
-
-from b2.manager import get_manager
-
-from b2.util.utility import *
-import property, project, virtual_target, property_set, feature, generators, toolset
-from virtual_target import Subvariant
-from b2.exceptions import *
-from b2.util.sequence import unique
-from b2.util import path, bjam_signature
-from b2.build.errors import user_error_checkpoint
-
-import b2.build.build_request as build_request
-
-import b2.util.set
-_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
-
-class TargetRegistry:
-
- def __init__ (self):
- # All targets that are currently being built.
- # Only the key is id (target), the value is the actual object.
- self.targets_being_built_ = {}
-
- # Current indent for debugging messages
- self.indent_ = ""
-
- self.debug_building_ = "--debug-building" in bjam.variable("ARGV")
-
- self.targets_ = []
-
- def main_target_alternative (self, target):
- """ Registers the specified target as a main target alternatives.
- Returns 'target'.
- """
- target.project ().add_alternative (target)
- return target
-
- def main_target_sources (self, sources, main_target_name, no_renaming=0):
- """Return the list of sources to use, if main target rule is invoked
- with 'sources'. If there are any objects in 'sources', they are treated
- as main target instances, and the names of such targets are adjusted to
- be '<name_of_this_target>__<name_of_source_target>'. Such renaming
- is disabled if a non-empty value is passed for the 'no-renaming' parameter."""
- result = []
-
- for t in sources:
-
- t = b2.util.jam_to_value_maybe(t)
-
- if isinstance (t, AbstractTarget):
- name = t.name ()
-
- if not no_renaming:
- name = main_target_name + '__' + name
- t.rename (name)
-
- # Inline targets are not built by default.
- p = t.project()
- p.mark_targets_as_explicit([name])
- result.append(name)
-
- else:
- result.append (t)
-
- return result
-
-
- def main_target_requirements(self, specification, project):
- """Returns the requirement to use when declaring a main target,
- which are obtained by
- - translating all specified property paths, and
- - refining project requirements with the one specified for the target
-
- 'specification' are the properties xplicitly specified for a
- main target
- 'project' is the project where the main taret is to be declared."""
-
- specification.extend(toolset.requirements())
-
- requirements = property_set.refine_from_user_input(
- project.get("requirements"), specification,
- project.project_module(), project.get("location"))
-
- return requirements
-
- def main_target_usage_requirements (self, specification, project):
- """ Returns the use requirement to use when declaraing a main target,
- which are obtained by
- - translating all specified property paths, and
- - adding project's usage requirements
- specification: Use-properties explicitly specified for a main target
- project: Project where the main target is to be declared
- """
- project_usage_requirements = project.get ('usage-requirements')
-
- # We do not use 'refine-from-user-input' because:
- # - I am not sure if removing parent's usage requirements makes sense
- # - refining usage requirements is not needed, since usage requirements
- # are always free.
- usage_requirements = property_set.create_from_user_input(
- specification, project.project_module(), project.get("location"))
-
- return project_usage_requirements.add (usage_requirements)
-
- def main_target_default_build (self, specification, project):
- """ Return the default build value to use when declaring a main target,
- which is obtained by using the specified value if not empty and the parent's
- default build attribute otherwise.
- specification: Default build explicitly specified for a main target
- project: Project where the main target is to be declared
- """
- if specification:
- return property_set.create_with_validation(specification)
- else:
- return project.get ('default-build')
-
- def start_building (self, main_target_instance):
- """ Helper rules to detect cycles in main target references.
- """
- if self.targets_being_built_.has_key(id(main_target_instance)):
- names = []
- for t in self.targets_being_built_.values() + [main_target_instance]:
- names.append (t.full_name())
-
- get_manager().errors()("Recursion in main target references\n")
-
- self.targets_being_built_[id(main_target_instance)] = main_target_instance
-
- def end_building (self, main_target_instance):
- assert (self.targets_being_built_.has_key (id (main_target_instance)))
- del self.targets_being_built_ [id (main_target_instance)]
-
- def create_typed_target (self, type, project, name, sources, requirements, default_build, usage_requirements):
- """ Creates a TypedTarget with the specified properties.
- The 'name', 'sources', 'requirements', 'default_build' and
- 'usage_requirements' are assumed to be in the form specified
- by the user in Jamfile corresponding to 'project'.
- """
- return self.main_target_alternative (TypedTarget (name, project, type,
- self.main_target_sources (sources, name),
- self.main_target_requirements (requirements, project),
- self.main_target_default_build (default_build, project),
- self.main_target_usage_requirements (usage_requirements, project)))
-
- def increase_indent(self):
- self.indent_ += " "
-
- def decrease_indent(self):
- self.indent_ = self.indent_[0:-4]
-
- def logging(self):
- return self.debug_building_
-
- def log(self, message):
- if self.debug_building_:
- print self.indent_ + message
-
- def push_target(self, target):
- self.targets_.append(target)
-
- def pop_target(self):
- self.targets_ = self.targets_[:-1]
-
- def current(self):
- return self.targets_[0]
-
-
-class GenerateResult:
-
- def __init__ (self, ur=None, targets=None):
- if not targets:
- targets = []
-
- self.__usage_requirements = ur
- self.__targets = targets
- assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets)
-
- if not self.__usage_requirements:
- self.__usage_requirements = property_set.empty ()
-
- def usage_requirements (self):
- return self.__usage_requirements
-
- def targets (self):
- return self.__targets
-
- def extend (self, other):
- assert (isinstance (other, GenerateResult))
-
- self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ())
- self.__targets.extend (other.targets ())
-
-class AbstractTarget:
- """ Base class for all abstract targets.
- """
- def __init__ (self, name, project, manager = None):
- """ manager: the Manager object
- name: name of the target
- project: the project target to which this one belongs
- manager:the manager object. If none, uses project.manager ()
- """
- assert (isinstance (project, ProjectTarget))
- # Note: it might seem that we don't need either name or project at all.
- # However, there are places where we really need it. One example is error
- # messages which should name problematic targets. Another is setting correct
- # paths for sources and generated files.
-
- # Why allow manager to be specified? Because otherwise project target could not derive
- # from this class.
- if manager:
- self.manager_ = manager
- else:
- self.manager_ = project.manager ()
-
- self.name_ = name
- self.project_ = project
-
- def manager (self):
- return self.manager_
-
- def name (self):
- """ Returns the name of this target.
- """
- return self.name_
-
- def project (self):
- """ Returns the project for this target.
- """
- return self.project_
-
- def location (self):
- """ Return the location where the target was declared.
- """
- return self.location_
-
- def full_name (self):
- """ Returns a user-readable name for this target.
- """
- location = self.project ().get ('location')
- return location + '/' + self.name_
-
- def generate (self, property_set):
- """ Takes a property set. Generates virtual targets for this abstract
- target, using the specified properties, unless a different value of some
- feature is required by the target.
- On success, returns a GenerateResult instance with:
- - a property_set with the usage requirements to be
- applied to dependents
- - a list of produced virtual targets, which may be
- empty.
- If 'property_set' is empty, performs default build of this
- target, in a way specific to derived class.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def rename (self, new_name):
- self.name_ = new_name
-
-class ProjectTarget (AbstractTarget):
- """ Project target class (derived from 'AbstractTarget')
-
- This class has these responsibilities:
- - maintaining a list of main targets in this project and
- building them
-
- Main targets are constructed in two stages:
- - When Jamfile is read, a number of calls to 'add_alternative' is made.
- At that time, alternatives can also be renamed to account for inline
- targets.
- - The first time 'main-target' or 'has-main-target' rule is called,
- all alternatives are enumerated and main targets are created.
- """
- def __init__ (self, manager, name, project_module, parent_project, requirements, default_build):
- AbstractTarget.__init__ (self, name, self, manager)
-
- self.project_module_ = project_module
- self.location_ = manager.projects().attribute (project_module, 'location')
- self.requirements_ = requirements
- self.default_build_ = default_build
-
- self.build_dir_ = None
-
- # A cache of IDs
- self.ids_cache_ = {}
-
- # True if main targets have already been built.
- self.built_main_targets_ = False
-
- # A list of the registered alternatives for this project.
- self.alternatives_ = []
-
- # A map from main target name to the target corresponding
- # to it.
- self.main_target_ = {}
-
- # Targets marked as explicit.
- self.explicit_targets_ = set()
-
- # Targets marked as always
- self.always_targets_ = set()
-
- # The constants defined for this project.
- self.constants_ = {}
-
- # Whether targets for all main target are already created.
- self.built_main_targets_ = 0
-
- if parent_project:
- self.inherit (parent_project)
-
-
- # TODO: This is needed only by the 'make' rule. Need to find the
- # way to make 'make' work without this method.
- def project_module (self):
- return self.project_module_
-
- def get (self, attribute):
- return self.manager().projects().attribute(
- self.project_module_, attribute)
-
- def build_dir (self):
- if not self.build_dir_:
- self.build_dir_ = self.get ('build-dir')
- if not self.build_dir_:
- self.build_dir_ = os.path.join(self.project_.get ('location'), 'bin')
-
- return self.build_dir_
-
- def generate (self, ps):
- """ Generates all possible targets contained in this project.
- """
- self.manager_.targets().log(
- "Building project '%s' with '%s'" % (self.name (), str(ps)))
- self.manager_.targets().increase_indent ()
-
- result = GenerateResult ()
-
- for t in self.targets_to_build ():
- g = t.generate (ps)
- result.extend (g)
-
- self.manager_.targets().decrease_indent ()
- return result
-
- def targets_to_build (self):
- """ Computes and returns a list of AbstractTarget instances which
- must be built when this project is built.
- """
- result = []
-
- if not self.built_main_targets_:
- self.build_main_targets ()
-
- # Collect all main targets here, except for "explicit" ones.
- for n, t in self.main_target_.iteritems ():
- if not t.name () in self.explicit_targets_:
- result.append (t)
-
- # Collect all projects referenced via "projects-to-build" attribute.
- self_location = self.get ('location')
- for pn in self.get ('projects-to-build'):
- result.append (self.find(pn + "/"))
-
- return result
-
- def mark_targets_as_explicit (self, target_names):
- """Add 'target' to the list of targets in this project
- that should be built only by explicit request."""
-
- # Record the name of the target, not the instance, since this
- # rule is called before main target instances are created.
- self.explicit_targets_.update(target_names)
-
- def mark_targets_as_always(self, target_names):
- self.always_targets_.update(target_names)
-
- def add_alternative (self, target_instance):
- """ Add new target alternative.
- """
- if self.built_main_targets_:
- raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())
-
- self.alternatives_.append (target_instance)
-
- def main_target (self, name):
- if not self.built_main_targets_:
- self.build_main_targets()
-
- return self.main_target_[name]
-
- def has_main_target (self, name):
- """Tells if a main target with the specified name exists."""
- if not self.built_main_targets_:
- self.build_main_targets()
-
- return self.main_target_.has_key(name)
-
- def create_main_target (self, name):
- """ Returns a 'MainTarget' class instance corresponding to the 'name'.
- """
- if not self.built_main_targets_:
- self.build_main_targets ()
-
- return self.main_target_.get (name, None)
-
-
- def find_really(self, id):
- """ Find and return the target with the specified id, treated
- relative to self.
- """
- result = None
- current_location = self.get ('location')
-
- __re_split_project_target = re.compile (r'(.*)//(.*)')
- split = __re_split_project_target.match (id)
-
- project_part = None
- target_part = None
-
- if split:
- project_part = split.group (1)
- target_part = split.group (2)
-
- project_registry = self.project_.manager ().projects ()
-
- extra_error_message = ''
- if project_part:
- # There's an explicit project part in the id. Look up the
- # project and pass the request to it.
- pm = project_registry.find (project_part, current_location)
-
- if pm:
- project_target = project_registry.target (pm)
- result = project_target.find (target_part, no_error=1)
-
- else:
- extra_error_message = "error: could not find project '$(project_part)'"
-
- else:
- # Interpret target-name as name of main target
- # Need to do this before checking for file. Consider this:
- #
- # exe test : test.cpp ;
- # install s : test : <location>. ;
- #
- # After first build we'll have target 'test' in Jamfile and file
- # 'test' on the disk. We need target to override the file.
-
- result = None
- if self.has_main_target(id):
- result = self.main_target(id)
-
- if not result:
- result = FileReference (self.manager_, id, self.project_)
- if not result.exists ():
- # File actually does not exist.
- # Reset 'result' so that an error is issued.
- result = None
-
-
- if not result:
- # Interpret id as project-id
- project_module = project_registry.find (id, current_location)
- if project_module:
- result = project_registry.target (project_module)
-
- return result
-
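A standalone sketch of the lookup order the comments above describe, using hypothetical containers in place of the real registries: a main target shadows a same-named file left by a previous build, and a project id is tried last.

def find(id, main_targets, files_on_disk, projects):
    # Main targets win over same-named files; project ids are tried last.
    if id in main_targets:
        return ("main-target", id)
    if id in files_on_disk:
        return ("file", id)
    if id in projects:
        return ("project", id)
    return None

print(find("test", {"test"}, {"test"}, set()))   # ('main-target', 'test')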
- def find (self, id, no_error = False):
- v = self.ids_cache_.get (id, None)
-
- if not v:
- v = self.find_really (id)
- self.ids_cache_ [id] = v
-
- if v or no_error:
- return v
-
- raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location')))
-
-
- def build_main_targets (self):
- self.built_main_targets_ = True
-
- for a in self.alternatives_:
- name = a.name ()
- if not self.main_target_.has_key (name):
- t = MainTarget (name, self.project_)
- self.main_target_ [name] = t
-
- if name in self.always_targets_:
- a.always()
-
- self.main_target_ [name].add_alternative (a)
-
- def add_constant(self, name, value, path=0):
- """Adds a new constant for this project.
-
- The constant will be available for use in Jamfile
- module for this project. If 'path' is true,
- the constant will be interpreted relatively
- to the location of project.
- """
-
- if path:
- l = self.location_
- if not l:
- # Projects corresponding to config files do not have a
- # 'location' attribute, but do have a source location.
- # It might be more reasonable to make every project have
- # a location and use some other approach to prevent buildable
- # targets in config files, but that's for later.
- l = self.get('source-location')
-
- value = os.path.join(l, value)
- # Now make the value absolute path
- value = os.path.join(os.getcwd(), value)
-
- self.constants_[name] = value
- bjam.call("set-variable", self.project_module(), name, value)
-
- def inherit(self, parent_project):
- for c in parent_project.constants_:
- # No need to pass the type. Path constants were converted to
- # absolute paths already by parent.
- self.add_constant(c, parent_project.constants_[c])
-
- # Import rules from parent
- this_module = self.project_module()
- parent_module = parent_project.project_module()
-
- rules = bjam.call("RULENAMES", parent_module)
- if not rules:
- rules = []
- user_rules = [x for x in rules
- if x not in self.manager().projects().project_rules().all_names()]
- if user_rules:
- bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
-
-class MainTarget (AbstractTarget):
- """ A named top-level target in Jamfile.
- """
- def __init__ (self, name, project):
- AbstractTarget.__init__ (self, name, project)
- self.alternatives_ = []
- self.default_build_ = property_set.empty ()
-
- def add_alternative (self, target):
- """ Add a new alternative for this target.
- """
- d = target.default_build ()
-
- if self.alternatives_ and self.default_build_ != d:
- get_manager().errors()("default build must be identical in all alternatives\n"
- "main target is '%s'\n"
- "with '%s'\n"
- "differing from previous default build: '%s'" % (self.full_name (), d.raw (), self.default_build_.raw ()))
-
- else:
- self.default_build_ = d
-
- self.alternatives_.append (target)
-
- def __select_alternatives (self, property_set, debug):
- """ Returns the best viable alternative for this property_set
- See the documentation for selection rules.
- # TODO: shouldn't this be 'alternative' (singular)?
- """
- # When selecting alternatives we have to consider defaults,
- # for example:
- # lib l : l.cpp : <variant>debug ;
- # lib l : l_opt.cpp : <variant>release ;
- # won't work unless we add default value <variant>debug.
- property_set = property_set.add_defaults ()
-
- # The algorithm: we keep the current best viable alternative.
- # When we've got new best viable alternative, we compare it
- # with the current one.
- best = None
- best_properties = None
-
- if len (self.alternatives_) == 0:
- return None
-
- if len (self.alternatives_) == 1:
- return self.alternatives_ [0]
-
- if debug:
- print "Property set for selection:", property_set
-
- for v in self.alternatives_:
- properties = v.match (property_set, debug)
-
- if properties is not None:
- if not best:
- best = v
- best_properties = properties
-
- else:
- if b2.util.set.equal (properties, best_properties):
- return None
-
- elif b2.util.set.contains (properties, best_properties):
- # Do nothing, this alternative is worse
- pass
-
- elif b2.util.set.contains (best_properties, properties):
- best = v
- best_properties = properties
-
- else:
- return None
-
- return best
-
- def apply_default_build (self, property_set):
- return apply_default_build(property_set, self.default_build_)
-
- def generate (self, ps):
- """ Select an alternative for this main target, by finding all alternatives
- which requirements are satisfied by 'properties' and picking the one with
- longest requirements set.
- Returns the result of calling 'generate' on that alternative.
- """
- self.manager_.targets ().start_building (self)
-
- # We want composite properties in the build request to act as if
- # all the properties they expand to are explicitly specified.
- ps = ps.expand ()
-
- all_property_sets = self.apply_default_build (ps)
-
- result = GenerateResult ()
-
- for p in all_property_sets:
- result.extend (self.__generate_really (p))
-
- self.manager_.targets ().end_building (self)
-
- return result
-
- def __generate_really (self, prop_set):
- """ Generates the main target with the given property set
- and returns a list whose first element is a property_set object
- containing the usage requirements of the generated targets, with the
- generated virtual targets in the remaining elements. It's possible
- that no targets are generated.
- """
- best_alternative = self.__select_alternatives (prop_set, debug=0)
-
- if not best_alternative:
- # FIXME: revive.
- # self.__select_alternatives(prop_set, debug=1)
- self.manager_.errors()(
- "No best alternative for '%s'.\n"
- % (self.full_name(),))
-
- result = best_alternative.generate (prop_set)
-
- # Now return virtual targets for the only alternative
- return result
-
- def rename(self, new_name):
- AbstractTarget.rename(self, new_name)
- for a in self.alternatives_:
- a.rename(new_name)
-
-class FileReference (AbstractTarget):
- """ Abstract target which refers to a source file.
- This is an artificial entity; it is useful because it allows sources of
- a target to be represented as a list of abstract target instances.
- """
- def __init__ (self, manager, file, project):
- AbstractTarget.__init__ (self, file, project)
- self.file_location_ = None
-
- def generate (self, properties):
- return GenerateResult (None, [
- self.manager_.virtual_targets ().from_file (
- self.name_, self.location(), self.project_) ])
-
- def exists (self):
- """ Returns true if the referred file really exists.
- """
- if self.location ():
- return True
- else:
- return False
-
- def location (self):
- # Returns the location of target. Needed by 'testing.jam'
- if not self.file_location_:
- source_location = self.project_.get('source-location')
-
- for src_dir in source_location:
- location = os.path.join(src_dir, self.name())
- if os.path.isfile(location):
- self.file_location_ = src_dir
- self.file_path = location
- break
-
- return self.file_location_
-
-def resolve_reference(target_reference, project):
- """ Given a target_reference, made in context of 'project',
- returns the AbstractTarget instance that is referred to, as well
- as properties explicitly specified for this reference.
- """
- # Separate target name from properties override
- split = _re_separate_target_from_properties.match (target_reference)
- if not split:
- raise BaseException ("Invalid reference: '%s'" % target_reference)
-
- id = split.group (1)
-
- sproperties = []
-
- if split.group (3):
- sproperties = property.create_from_strings(feature.split(split.group(3)))
- sproperties = feature.expand_composites(sproperties)
-
- # Find the target
- target = project.find (id)
-
- return (target, property_set.create(sproperties))
-
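A runnable demonstration of the target-reference syntax handled by resolve_reference, using the same regular expression as above; the sample references are made up for illustration.

import re

_re = re.compile(r'^([^<]*)(/(<.*))?$')

for ref in ["/boost/thread//thread", "a//b/<link>static/<threading>multi"]:
    m = _re.match(ref)
    print(repr(m.group(1)), repr(m.group(3)))
# '/boost/thread//thread' None
# 'a//b' '<link>static/<threading>multi'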
-def generate_from_reference(target_reference, project, property_set):
- """ Attempts to generate the target given by target reference, which
- can refer both to a main target or to a file.
- Returns a list consisting of
- - usage requirements
- - generated virtual targets, if any
- target_reference: Target reference
- project: Project where the reference is made
- property_set: Properties of the main target that makes the reference
- """
- target, sproperties = resolve_reference(target_reference, project)
-
- # Take properties which should be propagated and refine them
- # with source-specific requirements.
- propagated = property_set.propagated()
- rproperties = propagated.refine(sproperties)
-
- return target.generate(rproperties)
-
-
-
-class BasicTarget (AbstractTarget):
- """ Implements the most standard way of constructing main target
- alternative from sources. Allows sources to be either file or
- other main target and handles generation of those dependency
- targets.
- """
- def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None):
- AbstractTarget.__init__ (self, name, project)
-
- for s in sources:
- if get_grist (s):
- raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
-
- self.sources_ = sources
-
- if not requirements: requirements = property_set.empty ()
- self.requirements_ = requirements
-
- if not default_build: default_build = property_set.empty ()
- self.default_build_ = default_build
-
- if not usage_requirements: usage_requirements = property_set.empty ()
- self.usage_requirements_ = usage_requirements
-
- # A cache for resolved references
- self.source_targets_ = None
-
- # A cache for generated targets
- self.generated_ = {}
-
- # A cache for build requests
- self.request_cache = {}
-
- # Result of 'capture_user_context' has everything. For example, if this
- # target is declared as a result of loading a Jamfile which was loaded when
- # building target B which was requested from A, then we'll have A, B and
- # the Jamroot location in the context. We only care about the Jamroot
- # location most of the time.
- self.user_context_ = self.manager_.errors().capture_user_context()[-1:]
-
- self.always_ = False
-
- def always(self):
- self.always_ = True
-
- def sources (self):
- """ Returns the list of AbstractTargets which are used as sources.
- The extra properties specified for sources are not represented.
- The only use of this rule at the moment is the '--dump-tests'
- feature of the test system.
- """
- if self.source_targets_ == None:
- self.source_targets_ = []
- for s in self.sources_:
- self.source_targets_.append(resolve_reference(s, self.project_)[0])
-
- return self.source_targets_
-
- def requirements (self):
- return self.requirements_
-
- def default_build (self):
- return self.default_build_
-
- def common_properties (self, build_request, requirements):
- """ Given build request and requirements, return properties
- common to dependency build request and target build
- properties.
- """
- # For optimization, we add free unconditional requirements directly,
- # without using the complex algorithm.
- # This gives the complex algorithm a better chance of caching results.
- # The exact effect of this "optimization" is no longer clear.
- free_unconditional = []
- other = []
- for p in requirements.all():
- if p.feature().free() and not p.condition() and p.feature().name() != 'conditional':
- free_unconditional.append(p)
- else:
- other.append(p)
- other = property_set.create(other)
-
- key = (build_request, other)
- if not self.request_cache.has_key(key):
- self.request_cache[key] = self.__common_properties2 (build_request, other)
-
- return self.request_cache[key].add_raw(free_unconditional)
-
- # Given 'context' -- a set of already present properties, and 'requirements',
- # decide which extra properties should be applied to 'context'.
- # For conditional requirements, this means evaluating condition. For
- # indirect conditional requirements, this means calling a rule. Ordinary
- # requirements are always applied.
- #
- # Handles situation where evaluating one conditional requirements affects
- # condition of another conditional requirements, for example:
- #
- # <toolset>gcc:<variant>release <variant>release:<define>RELEASE
- #
- # If 'what' is 'refined' returns context refined with new requirements.
- # If 'what' is 'added' returns just the requirements that must be applied.
- def evaluate_requirements(self, requirements, context, what):
- # Apply non-conditional requirements.
- # It's possible that further conditional requirements change
- # a value set by non-conditional requirements. For example:
- #
- # exe a : a.cpp : <threading>single <toolset>foo:<threading>multi ;
- #
- # I'm not sure if this should be an error, or not, especially given that
- #
- # <threading>single
- #
- # might come from project's requirements.
- unconditional = feature.expand(requirements.non_conditional())
-
- context = context.refine(property_set.create(unconditional))
-
- # We've collected properties that surely must be present in common
- # properties. We now try to figure out what other properties
- # should be added in order to satisfy rules (4)-(6) from the docs.
-
- conditionals = property_set.create(requirements.conditional())
-
- # It's supposed that #conditionals iterations
- # should be enough for properties to propagate along conditions in any
- # direction.
- max_iterations = len(conditionals.all()) +\
- len(requirements.get("<conditional>")) + 1
-
- added_requirements = []
- current = context
-
- # It's assumed that ordinary conditional requirements can't add
- # <indirect-conditional> properties, and that rules referred
- # by <indirect-conditional> properties can't add new
- # <indirect-conditional> properties. So the list of indirect conditionals
- # does not change.
- indirect = requirements.get("<conditional>")
-
- ok = 0
- for i in range(0, max_iterations):
-
- e = conditionals.evaluate_conditionals(current).all()[:]
-
- # Evaluate indirect conditionals.
- for i in indirect:
- i = b2.util.jam_to_value_maybe(i)
- if callable(i):
- # This is Python callable, yeah.
- e.extend(i(current))
- else:
- # Name of bjam function. Because bjam is unable to handle
- # list of Property, pass list of strings.
- br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
- if br:
- e.extend(property.create_from_strings(br))
-
- if e == added_requirements:
- # If we got the same result, we've found final properties.
- ok = 1
- break
- else:
- # Oops, the results of evaluating the conditionals have changed.
- # Also, 'current' contains leftovers from the previous evaluation.
- # Recompute 'current' using initial properties and conditional
- # requirements.
- added_requirements = e
- current = context.refine(property_set.create(feature.expand(e)))
-
- if not ok:
- self.manager().errors()("Can't evaluate conditional properties "
- + str(conditionals))
-
-
- if what == "added":
- return property_set.create(unconditional + added_requirements)
- elif what == "refined":
- return current
- else:
- self.manager().errors("Invalid value of the 'what' parameter")
-
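A standalone sketch of the fixed-point evaluation described in the comments above, with conditionals modelled as (required property, added property) pairs; this illustrates the idea only and is not the B2 implementation.

def evaluate_conditionals(context, conditionals):
    added = set()
    # Repeat until the set of added properties stops changing, bounded by the
    # number of conditionals plus one, as in the comment above.
    for _ in range(len(conditionals) + 1):
        new = {add for req, add in conditionals if req in context | added}
        if new == added:
            return context | added
        added = new
    raise RuntimeError("conditional properties did not converge")

# <toolset>gcc:<variant>release  <variant>release:<define>RELEASE
conds = [("<toolset>gcc", "<variant>release"),
         ("<variant>release", "<define>RELEASE")]
print(evaluate_conditionals({"<toolset>gcc"}, conds))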
- def __common_properties2(self, build_request, requirements):
- # This guarantees that default properties are present
- # in the result, unless they are overridden by some requirement.
- # TODO: There is possibility that we've added <foo>bar, which is composite
- # and expands to <foo2>bar2, but default value of <foo2> is not bar2,
- # in which case it's not clear what to do.
- #
- build_request = build_request.add_defaults()
- # Features added by 'add_defaults' can be composite and expand
- # to features without default values -- so they are not added yet.
- # It could be clearer/faster to expand only newly added properties
- # but that's not critical.
- build_request = build_request.expand()
-
- return self.evaluate_requirements(requirements, build_request,
- "refined")
-
- def match (self, property_set, debug):
- """ Returns the alternative condition for this alternative, if
- the condition is satisfied by 'property_set'.
- """
- # The condition is composed of all base non-conditional properties.
- # It's not clear if we should expand 'self.requirements_' or not.
- # For one thing, it would be nice to be able to put
- # <toolset>msvc-6.0
- # in requirements.
- # On the other hand, if we have <variant>release in condition it
- # does not make sense to require <optimization>full to be in
- # build request just to select this variant.
- bcondition = self.requirements_.base ()
- ccondition = self.requirements_.conditional ()
- condition = b2.util.set.difference (bcondition, ccondition)
-
- if debug:
- print " next alternative: required properties:", [str(p) for p in condition]
-
- if b2.util.set.contains (condition, property_set.all()):
-
- if debug:
- print " matched"
-
- return condition
-
- else:
- return None
-
-
- def generate_dependency_targets (self, target_ids, property_set):
- targets = []
- usage_requirements = []
- for id in target_ids:
-
- result = generate_from_reference(id, self.project_, property_set)
- targets += result.targets()
- usage_requirements += result.usage_requirements().all()
-
- return (targets, usage_requirements)
-
- def generate_dependency_properties(self, properties, ps):
- """ Takes a target reference, which might be either target id
- or a dependency property, and generates that target using
- 'property_set' as build request.
-
- Returns a tuple (result, usage_requirements).
- """
- result_properties = []
- usage_requirements = []
- for p in properties:
-
- result = generate_from_reference(p.value(), self.project_, ps)
-
- for t in result.targets():
- result_properties.append(property.Property(p.feature(), t))
-
- usage_requirements += result.usage_requirements().all()
-
- return (result_properties, usage_requirements)
-
-
-
-
- @user_error_checkpoint
- def generate (self, ps):
- """ Determines final build properties, generates sources,
- and calls 'construct'. This method should not be
- overridden.
- """
- self.manager_.errors().push_user_context(
- "Generating target " + self.full_name(), self.user_context_)
-
- if self.manager().targets().logging():
- self.manager().targets().log(
- "Building target '%s'" % self.name_)
- self.manager().targets().increase_indent ()
- self.manager().targets().log(
- "Build request: '%s'" % str (ps.raw ()))
- cf = self.manager().command_line_free_features()
- self.manager().targets().log(
- "Command line free features: '%s'" % str (cf.raw ()))
- self.manager().targets().log(
- "Target requirements: %s'" % str (self.requirements().raw ()))
-
- self.manager().targets().push_target(self)
-
- if not self.generated_.has_key(ps):
-
- # Apply free features form the command line. If user
- # said
- # define=FOO
- # he most likely want this define to be set for all compiles.
- ps = ps.refine(self.manager().command_line_free_features())
- rproperties = self.common_properties (ps, self.requirements_)
-
- self.manager().targets().log(
- "Common properties are '%s'" % str (rproperties))
-
- if rproperties.get("<build>") != ["no"]:
-
- result = GenerateResult ()
-
- properties = rproperties.non_dependency ()
-
- (p, u) = self.generate_dependency_properties (rproperties.dependency (), rproperties)
- properties += p
- assert all(isinstance(p, property.Property) for p in properties)
- usage_requirements = u
-
- (source_targets, u) = self.generate_dependency_targets (self.sources_, rproperties)
- usage_requirements += u
-
- self.manager_.targets().log(
- "Usage requirements for '%s' are '%s'" % (self.name_, usage_requirements))
-
- # FIXME:
-
- rproperties = property_set.create(properties + usage_requirements)
- usage_requirements = property_set.create (usage_requirements)
-
- self.manager_.targets().log(
- "Build properties: '%s'" % str(rproperties))
-
- source_targets += rproperties.get('<source>')
-
- # We might get duplicate sources, for example if
- # we link to two libraries which have the same <library> in
- # usage requirements.
- # Use stable sort, since for some targets the order is
- # important. E.g. the RUN_PY target needs the Python source to come
- # first.
- source_targets = unique(source_targets, stable=True)
-
- # FIXME: figure why this call messes up source_targets in-place
- result = self.construct (self.name_, source_targets[:], rproperties)
-
- if result:
- assert len(result) == 2
- gur = result [0]
- result = result [1]
-
- if self.always_:
- for t in result:
- t.always()
-
- s = self.create_subvariant (
- result,
- self.manager().virtual_targets().recent_targets(), ps,
- source_targets, rproperties, usage_requirements)
- self.manager().virtual_targets().clear_recent_targets()
-
- ur = self.compute_usage_requirements (s)
- ur = ur.add (gur)
- s.set_usage_requirements (ur)
-
- self.manager_.targets().log (
- "Usage requirements from '%s' are '%s'" %
- (self.name(), str(rproperties)))
-
- self.generated_[ps] = GenerateResult (ur, result)
- else:
- self.generated_[ps] = GenerateResult (property_set.empty(), [])
- else:
- # If we just see <build>no, we cannot produce any reasonable
- # diagnostics. The code that adds this property is expected
- # to explain why a target is not built, for example using
- # the configure.log-component-configuration function.
-
- # If this target fails to build, add <build>no to properties
- # to cause any parent target to fail to build. Except that it
- # - does not work now, since we check for <build>no only in
- # common properties, but not in properties that came from
- # dependencies
- # - it's not clear if that's a good idea anyway. The alias
- # target, for example, should not fail to build if a dependency
- # fails.
- self.generated_[ps] = GenerateResult(
- property_set.create(["<build>no"]), [])
- else:
- self.manager().targets().log ("Already built")
-
- self.manager().targets().pop_target()
- self.manager().targets().decrease_indent()
-
- return self.generated_[ps]
-
- def compute_usage_requirements (self, subvariant):
- """ Given the set of generated targets, and refined build
- properties, determines and sets appropriate usage requirements
- on those targets.
- """
- rproperties = subvariant.build_properties ()
- xusage_requirements =self.evaluate_requirements(
- self.usage_requirements_, rproperties, "added")
-
- # We generate all dependency properties and add them,
- # as well as their usage requirements, to result.
- (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
- extra = r1 + r2
-
- result = property_set.create (xusage_requirements.non_dependency () + extra)
-
- # Propagate usage requirements we've got from sources, except
- # for the <pch-header> and <pch-file> features.
- #
- # That feature specifies which pch file to use, and should apply
- # only to direct dependents. Consider:
- #
- # pch pch1 : ...
- # lib lib1 : ..... pch1 ;
- # pch pch2 :
- # lib lib2 : pch2 lib1 ;
- #
- # Here, lib2 should not get <pch-header> property from pch1.
- #
- # Essentially, when those two features are in usage requirements,
- # they are propagated only to direct dependents. We might need
- # a more general mechanism, but for now, only those two
- # features are special.
- raw = subvariant.sources_usage_requirements().raw()
- raw = property.change(raw, "<pch-header>", None);
- raw = property.change(raw, "<pch-file>", None);
- result = result.add(property_set.create(raw))
-
- return result
-
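A minimal sketch of the filtering described in the comments above; the helper is hypothetical and only shows that <pch-header>/<pch-file> are dropped before propagating the sources' usage requirements.

def propagate_source_usage(raw):
    # Drop pch-related properties; everything else is propagated.
    return [p for p in raw if not p.startswith(("<pch-header>", "<pch-file>"))]

print(propagate_source_usage(["<define>FOO", "<pch-header>pch.hpp"]))
# ['<define>FOO']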
- def create_subvariant (self, root_targets, all_targets,
- build_request, sources,
- rproperties, usage_requirements):
- """Creates a new subvariant-dg instances for 'targets'
- - 'root-targets' the virtual targets will be returned to dependents
- - 'all-targets' all virtual
- targets created while building this main target
- - 'build-request' is property-set instance with
- requested build properties"""
-
- for e in root_targets:
- e.root (True)
-
- s = Subvariant (self, build_request, sources,
- rproperties, usage_requirements, all_targets)
-
- for v in all_targets:
- if not v.creating_subvariant():
- v.creating_subvariant(s)
-
- return s
-
- def construct (self, name, source_targets, properties):
- """ Constructs the virtual targets for this abstract targets and
- the dependecy graph. Returns a tuple consisting of the properties and the list of virtual targets.
- Should be overrided in derived classes.
- """
- raise BaseException ("method should be defined in derived classes")
-
-
-class TypedTarget (BasicTarget):
- import generators
-
- def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
- BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
- self.type_ = type
-
- def __jam_repr__(self):
- return b2.util.value_to_jam(self)
-
- def type (self):
- return self.type_
-
- def construct (self, name, source_targets, prop_set):
-
- r = generators.construct (self.project_, os.path.splitext(name)[0],
- self.type_,
- prop_set.add_raw(['<main-target-type>' + self.type_]),
- source_targets, True)
-
- if not r:
- print "warning: Unable to construct '%s'" % self.full_name ()
-
- # Are there any top-level generators for this type/property set?
- if not generators.find_viable_generators (self.type_, prop_set):
- print "error: no generators were found for type '" + self.type_ + "'"
- print "error: and the requested properties"
- print "error: make sure you've configured the needed tools"
- print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
-
- print "To debug this problem, try the --debug-generators option."
- sys.exit(1)
-
- return r
-
-def apply_default_build(property_set, default_build):
- # 1. First, see what properties from default_build
- # are already present in property_set.
-
- specified_features = set(p.feature() for p in property_set.all())
-
- defaults_to_apply = []
- for d in default_build.all():
- if not d.feature() in specified_features:
- defaults_to_apply.append(d)
-
- # 2. If there are any defaults to be applied, form the new
- # build request. Pass it through 'expand-no-defaults', since
- # default_build might contain "release debug", which will
- # result in two property_sets.
- result = []
- if defaults_to_apply:
-
- # We have to compress subproperties here to prevent
- # property lists like:
- #
- # <toolset>msvc <toolset-msvc:version>7.1 <threading>multi
- #
- # from being expanded into:
- #
- # <toolset-msvc:version>7.1/<threading>multi
- # <toolset>msvc/<toolset-msvc:version>7.1/<threading>multi
- #
- # due to cross-product property combination. That may
- # be an indication that
- # build_request.expand-no-defaults is the wrong rule
- # to use here.
- compressed = feature.compress_subproperties(property_set.all())
-
- result = build_request.expand_no_defaults(
- b2.build.property_set.create([p]) for p in (compressed + defaults_to_apply))
-
- else:
- result.append (property_set)
-
- return result
-
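A simplified, runnable sketch of the default-build rule implemented above: a default is applied only when the build request does not already mention that feature. Feature-name parsing here is deliberately naive and the helper is illustrative only.

def apply_defaults(request, default_build):
    specified = {p.split(">")[0] + ">" for p in request}      # e.g. "<variant>"
    return request + [d for d in default_build
                      if d.split(">")[0] + ">" not in specified]

print(apply_defaults(["<variant>release"], ["<variant>debug", "<link>shared"]))
# ['<variant>release', '<link>shared']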
-
-def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- TypedTarget(name, project, type,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-
-def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- klass(name, project,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-def metatarget_function_for_class(class_):
-
- @bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"], ["usage_requirements", "*"]))
- def create_metatarget(name, sources, requirements = [], default_build = None, usage_requirements = []):
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- class_(name, project,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
- return create_metatarget
diff --git a/tools/build/v2/build/toolset.jam b/tools/build/v2/build/toolset.jam
deleted file mode 100644
index d19be3c0b3..0000000000
--- a/tools/build/v2/build/toolset.jam
+++ /dev/null
@@ -1,575 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for toolset definition.
-
-import errors ;
-import feature ;
-import generators ;
-import numbers ;
-import path ;
-import property ;
-import regex ;
-import sequence ;
-import set ;
-import property-set ;
-
-
-.flag-no = 1 ;
-
-.ignore-requirements = ;
-
-# This is used only for testing, to make sure we do not get random extra
-# elements in paths.
-if --ignore-toolset-requirements in [ modules.peek : ARGV ]
-{
- .ignore-requirements = 1 ;
-}
-
-
- # Initializes an additional toolset-like module. First loads the 'toolset-module'
-# and then calls its 'init' rule with trailing arguments.
-#
-rule using ( toolset-module : * )
-{
- import $(toolset-module) ;
- $(toolset-module).init $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-}
-
-
- # Expands subfeatures in each property set, e.g. '<toolset>gcc-3.2' will be
-# converted to '<toolset>gcc/<toolset-version>3.2'.
-#
-local rule normalize-condition ( property-sets * )
-{
- local result ;
- for local p in $(property-sets)
- {
- local split = [ feature.split $(p) ] ;
- local expanded = [ feature.expand-subfeatures $(split) ] ;
- result += $(expanded:J=/) ;
- }
- return $(result) ;
-}
-
-
-# Specifies if the 'flags' rule should check that the invoking module is the
-# same as the module we are setting the flag for. 'v' can be either 'checked' or
-# 'unchecked'. Subsequent call to 'pop-checking-for-flags-module' will restore
-# the setting that was in effect before calling this rule.
-#
-rule push-checking-for-flags-module ( v )
-{
- .flags-module-checking = $(v) $(.flags-module-checking) ;
-}
-
-rule pop-checking-for-flags-module ( )
-{
- .flags-module-checking = $(.flags-module-checking[2-]) ;
-}
-
-
-# Specifies the flags (variables) that must be set on targets under certain
-# conditions, described by arguments.
-#
-rule flags (
- rule-or-module # If contains a dot, should be a rule name. The flags will
- # be applied when that rule is used to set up build
- # actions.
- #
- # If does not contain dot, should be a module name. The
- # flag will be applied for all rules in that module. If
- # module for rule is different from the calling module, an
- # error is issued.
-
- variable-name # Variable that should be set on target.
- condition * : # A condition when this flag should be applied. Should be a
- # set of property sets. If one of those property sets is
- # contained in the build properties, the flag will be used.
- # Implied values are not allowed: "<toolset>gcc" should be
- # used, not just "gcc". Subfeatures, like in
- # "<toolset>gcc-3.2" are allowed. If left empty, the flag
- # will be used unconditionally.
- #
- # Property sets may use value-less properties ('<a>' vs.
- # '<a>value') to match absent properties. This allows separately
- # matching:
- #
- # <architecture>/<address-model>64
- # <architecture>ia64/<address-model>
- #
- # Where both features are optional. Without this syntax
- # we would be forced to define "default" values.
-
- values * : # The value to add to variable. If <feature> is specified,
- # then the value of 'feature' will be added.
- unchecked ? # If value 'unchecked' is passed, will not test that flags
- # are set for the calling module.
- : hack-hack ? # For
- # flags rule OPTIONS <cxx-abi> : -model ansi
- # Treat <cxx-abi> as condition
- # FIXME: ugly hack.
-)
-{
- local caller = [ CALLER_MODULE ] ;
- if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
- && [ MATCH "(Jamfile<.*)" : $(caller) ]
- {
- # Unqualified rule name, used inside Jamfile. Most likely used with
- # 'make' or 'notfile' rules. This prevents setting flags on the entire
- # Jamfile module (it would be treated as a rule), but who cares?
- # Probably, 'flags' rule should be split into 'flags' and
- # 'flags-on-module'.
- rule-or-module = $(caller).$(rule-or-module) ;
- }
- else
- {
- local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
- if $(unchecked) != unchecked
- && $(.flags-module-checking[1]) != unchecked
- && $(module_) != $(caller)
- {
- errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
- }
- }
-
- if $(condition) && ! $(condition:G=) && ! $(hack-hack)
- {
- # We have condition in the form '<feature>', that is, without value.
- # That is an older syntax:
- # flags gcc.link RPATH <dll-path> ;
- # for compatibility, convert it to
- # flags gcc.link RPATH : <dll-path> ;
- values = $(condition) ;
- condition = ;
- }
-
- if $(condition)
- {
- property.validate-property-sets $(condition) ;
- condition = [ normalize-condition $(condition) ] ;
- }
-
- add-flag $(rule-or-module) : $(variable-name) : $(condition) : $(values) ;
-}
-
-
-# Adds a new flag setting with the specified values. Does no checking.
-#
-local rule add-flag ( rule-or-module : variable-name : condition * : values * )
-{
- .$(rule-or-module).flags += $(.flag-no) ;
-
- # Store all flags for a module.
- local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
- .module-flags.$(module_) += $(.flag-no) ;
- # Store flag-no -> rule-or-module mapping.
- .rule-or-module.$(.flag-no) = $(rule-or-module) ;
-
- .$(rule-or-module).variable.$(.flag-no) += $(variable-name) ;
- .$(rule-or-module).values.$(.flag-no) += $(values) ;
- .$(rule-or-module).condition.$(.flag-no) += $(condition) ;
-
- .flag-no = [ numbers.increment $(.flag-no) ] ;
-}
-
-
-# Returns the first element of 'property-sets' which is a subset of
-# 'properties' or an empty list if no such element exists.
-#
-rule find-property-subset ( property-sets * : properties * )
-{
- # Cut property values off.
- local prop-keys = $(properties:G) ;
-
- local result ;
- for local s in $(property-sets)
- {
- if ! $(result)
- {
- # Handle value-less properties like '<architecture>' (compare with
- # '<architecture>x86').
-
- local set = [ feature.split $(s) ] ;
-
- # Find the set of features that
- # - have no property specified in required property set
- # - are omitted in the build property set.
- local default-props ;
- for local i in $(set)
- {
- # If $(i) is a value-less property it should match default value
- # of an optional property. See the first line in the example
- # below:
- #
- # property set properties result
- # <a> <b>foo <b>foo match
- # <a> <b>foo <a>foo <b>foo no match
- # <a>foo <b>foo <b>foo no match
- # <a>foo <b>foo <a>foo <b>foo match
- if ! ( $(i:G=) || ( $(i:G) in $(prop-keys) ) )
- {
- default-props += $(i) ;
- }
- }
-
- if $(set) in $(properties) $(default-props)
- {
- result = $(s) ;
- }
- }
- }
- return $(result) ;
-}
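A standalone sketch of the matching rule shown in the table above; the helper is illustrative (not the Jam implementation) and treats a value-less property such as '<a>' as "feature <a> must be absent from the build properties".

def matches(required, properties):
    present = {p.split(">")[0] + ">" for p in properties}
    for r in required:
        feature, _, value = r.partition(">")
        if value:                        # '<b>foo' must appear literally
            if r not in properties:
                return False
        elif feature + ">" in present:   # '<a>' matches only if <a> is absent
            return False
    return True

assert matches(["<a>", "<b>foo"], ["<b>foo"])                  # match
assert not matches(["<a>", "<b>foo"], ["<a>foo", "<b>foo"])    # no match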
-
-
-# Returns a value to be added to some flag for some target based on the flag's
-# value definition and the given target's property set.
-#
-rule handle-flag-value ( value * : properties * )
-{
- local result ;
- if $(value:G)
- {
- local matches = [ property.select $(value) : $(properties) ] ;
- for local p in $(matches)
- {
- local att = [ feature.attributes $(p:G) ] ;
- if dependency in $(att)
- {
- # The value of a dependency feature is a target and needs to be
- # actualized.
- result += [ $(p:G=).actualize ] ;
- }
- else if path in $(att) || free in $(att)
- {
- local values ;
- # Treat features with && in the value specially -- each
- # &&-separated element is considered a separate value. This is
- # needed to handle searched libraries or include paths, which
- # may need to be in a specific order.
- if ! [ MATCH (&&) : $(p:G=) ]
- {
- values = $(p:G=) ;
- }
- else
- {
- values = [ regex.split $(p:G=) "&&" ] ;
- }
- if path in $(att)
- {
- result += [ sequence.transform path.native : $(values) ] ;
- }
- else
- {
- result += $(values) ;
- }
- }
- else
- {
- result += $(p:G=) ;
- }
- }
- }
- else
- {
- result += $(value) ;
- }
- return $(result) ;
-}
-
-
-# Given a rule name and a property set, returns a list of interleaved variables
-# names and values which must be set on targets for that rule/property-set
-# combination.
-#
-rule set-target-variables-aux ( rule-or-module : property-set )
-{
- local result ;
- properties = [ $(property-set).raw ] ;
- for local f in $(.$(rule-or-module).flags)
- {
- local variable = $(.$(rule-or-module).variable.$(f)) ;
- local condition = $(.$(rule-or-module).condition.$(f)) ;
- local values = $(.$(rule-or-module).values.$(f)) ;
-
- if ! $(condition) ||
- [ find-property-subset $(condition) : $(properties) ]
- {
- local processed ;
- for local v in $(values)
- {
- # The value might be <feature-name> so needs special treatment.
- processed += [ handle-flag-value $(v) : $(properties) ] ;
- }
- for local r in $(processed)
- {
- result += $(variable) $(r) ;
- }
- }
- }
-
- # Strip away last dot separated part and recurse.
- local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
- if $(next)
- {
- result += [ set-target-variables-aux $(next[1]) : $(property-set) ] ;
- }
- return $(result) ;
-}
-
-rule relevant-features ( rule-or-module )
-{
- local result ;
- if ! $(.relevant-features.$(rule-or-module))
- {
- for local f in $(.$(rule-or-module).flags)
- {
- local condition = $(.$(rule-or-module).condition.$(f)) ;
- local values = $(.$(rule-or-module).values.$(f)) ;
-
- for local c in $(condition)
- {
- for local p in [ feature.split $(c) ]
- {
- if $(p:G)
- {
- result += $(p:G) ;
- }
- else
- {
- local temp = [ feature.expand-subfeatures $(p) ] ;
- result += $(temp:G) ;
- }
- }
- }
-
- for local v in $(values)
- {
- if $(v:G)
- {
- result += $(v:G) ;
- }
- }
- }
-
- # Strip away last dot separated part and recurse.
- local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
- if $(next)
- {
- result += [ relevant-features $(next[1]) ] ;
- }
- result = [ sequence.unique $(result) ] ;
- if $(result[1]) = ""
- {
- result = $(result) ;
- }
- .relevant-features.$(rule-or-module) = $(result) ;
- return $(result) ;
- }
- else
- {
- return $(.relevant-features.$(rule-or-module)) ;
- }
-}
-
-rule filter-property-set ( rule-or-module : property-set )
-{
- if ! $(.filtered.property-set.$(rule-or-module).$(property-set))
- {
- local relevant = [ relevant-features $(rule-or-module) ] ;
- local result ;
- for local p in [ $(property-set).raw ]
- {
- if $(p:G) in $(relevant)
- {
- result += $(p) ;
- }
- }
- .filtered.property-set.$(rule-or-module).$(property-set) = [ property-set.create $(result) ] ;
- }
- return $(.filtered.property-set.$(rule-or-module).$(property-set)) ;
-}
-
-rule set-target-variables ( rule-or-module targets + : property-set )
-{
- property-set = [ filter-property-set $(rule-or-module) : $(property-set) ] ;
- local key = $(rule-or-module).$(property-set) ;
- local settings = $(.stv.$(key)) ;
- if ! $(settings)
- {
- settings = [ set-target-variables-aux $(rule-or-module) :
- $(property-set) ] ;
-
- if ! $(settings)
- {
- settings = none ;
- }
- .stv.$(key) = $(settings) ;
- }
-
- if $(settings) != none
- {
- local var-name = ;
- for local name-or-value in $(settings)
- {
- if $(var-name)
- {
- $(var-name) on $(targets) += $(name-or-value) ;
- var-name = ;
- }
- else
- {
- var-name = $(name-or-value) ;
- }
- }
- }
-}
-
-
-# Make toolset 'toolset', defined in a module of the same name, inherit from
-# 'base'.
-# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
-# Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and <toolset>
-# property in requires is adjusted too.
-# 3. All flags are inherited.
-# 4. All rules are imported.
-#
-rule inherit ( toolset : base )
-{
- import $(base) ;
- inherit-generators $(toolset) : $(base) ;
- inherit-flags $(toolset) : $(base) ;
- inherit-rules $(toolset) : $(base) ;
-}
-
-
-rule inherit-generators ( toolset properties * : base : generators-to-ignore * )
-{
- properties ?= <toolset>$(toolset) ;
- local base-generators = [ generators.generators-for-toolset $(base) ] ;
- for local g in $(base-generators)
- {
- local id = [ $(g).id ] ;
-
- if ! $(id) in $(generators-to-ignore)
- {
- # Some generator names have multiple periods in their name, so
- # $(id:B=$(toolset)) does not generate the right new-id name. E.g.
- # if id = gcc.compile.c++ then $(id:B=darwin) = darwin.c++, which is
- # not what we want. Manually parse the base and suffix. If there is
- # a better way to do this, I would love to see it. See also the
- # register() rule in the generators module.
- local base = $(id) ;
- local suffix = "" ;
- while $(base:S)
- {
- suffix = $(base:S)$(suffix) ;
- base = $(base:B) ;
- }
- local new-id = $(toolset)$(suffix) ;
-
- generators.register [ $(g).clone $(new-id) : $(properties) ] ;
- }
- }
-}
-
-
-# Brings all flag definitions from the 'base' toolset into the 'toolset'
-# toolset. Flag definitions whose conditions make use of properties in
-# 'prohibited-properties' are ignored. Note that these are properties, not
-# features: <debug-symbols>on and <debug-symbols>off are distinct properties of
-# the same feature, so prohibiting one of them does not prohibit the other.
-#
-# The flag conditions are not altered at all, so if a condition includes a name,
-# or version of a base toolset, it will not ever match the inheriting toolset.
-# When such flag settings must be inherited, define a rule in base toolset
-# module and call it as needed.
-#
-rule inherit-flags ( toolset : base : prohibited-properties * : prohibited-vars * )
-{
- for local f in $(.module-flags.$(base))
- {
- local rule-or-module = $(.rule-or-module.$(f)) ;
- if ( [ set.difference
- $(.$(rule-or-module).condition.$(f)) :
- $(prohibited-properties) ]
- || ! $(.$(rule-or-module).condition.$(f))
- ) && ( ! $(.$(rule-or-module).variable.$(f)) in $(prohibited-vars) )
- {
- local rule_ = [ MATCH "[^.]*\.(.*)" : $(rule-or-module) ] ;
- local new-rule-or-module ;
- if $(rule_)
- {
- new-rule-or-module = $(toolset).$(rule_) ;
- }
- else
- {
- new-rule-or-module = $(toolset) ;
- }
-
- add-flag
- $(new-rule-or-module)
- : $(.$(rule-or-module).variable.$(f))
- : $(.$(rule-or-module).condition.$(f))
- : $(.$(rule-or-module).values.$(f)) ;
- }
- }
-}
-
-
-rule inherit-rules ( toolset : base : localize ? )
-{
- # It appears that "action" creates a local rule.
- local base-generators = [ generators.generators-for-toolset $(base) ] ;
- local rules ;
- for local g in $(base-generators)
- {
- rules += [ MATCH "[^.]*\.(.*)" : [ $(g).rule-name ] ] ;
- }
- rules = [ sequence.unique $(rules) ] ;
- IMPORT $(base) : $(rules) : $(toolset) : $(rules) : $(localize) ;
- IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
-}
-
-
-# Return the list of global 'toolset requirements'. Those requirements will be
-# automatically added to the requirements of any main target.
-#
-rule requirements ( )
-{
- return $(.requirements) ;
-}
-
-
-# Adds elements to the list of global 'toolset requirements'. The requirements
-# will be automatically added to the requirements for all main targets, as if
-# they were specified literally. For best results, all requirements added should
-# be conditional or indirect conditional.
-#
-rule add-requirements ( requirements * )
-{
- if ! $(.ignore-requirements)
- {
- .requirements += $(requirements) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- local p = <b>0 <c>1 <d>2 <e>3 <f>4 ;
- assert.result <c>1/<d>2/<e>3 : find-property-subset <c>1/<d>2/<e>3 <a>0/<b>0/<c>1 <d>2/<e>5 <a>9 : $(p) ;
- assert.result : find-property-subset <a>0/<b>0/<c>9/<d>9/<e>5 <a>9 : $(p) ;
-
- local p-set = <a>/<b> <a>0/<b> <a>/<b>1 <a>0/<b>1 ;
- assert.result <a>/<b> : find-property-subset $(p-set) : ;
- assert.result <a>0/<b> : find-property-subset $(p-set) : <a>0 <c>2 ;
- assert.result <a>/<b>1 : find-property-subset $(p-set) : <b>1 <c>2 ;
- assert.result <a>0/<b>1 : find-property-subset $(p-set) : <a>0 <b>1 ;
-}
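
For orientation, the condition matching described in the find-property-subset comments above (a value-less condition property such as '<a>' matches only when the feature is absent from the build properties, while '<a>foo' requires that exact property to be present) behaves like the following standalone sketch. This is plain Python with made-up names for illustration, not Boost.Build code.

def find_property_subset(conditions, properties):
    # Gristed feature names present in the build properties, e.g. '<b>'.
    present = {p.split('>')[0] + '>' for p in properties}
    for condition in conditions:
        satisfied = True
        for prop in condition.split('/'):
            grist, _, value = prop.partition('>')
            grist = grist + '>'
            if value:
                # '<a>foo': the exact property must be present.
                satisfied = satisfied and prop in properties
            else:
                # '<a>': the feature must be absent entirely.
                satisfied = satisfied and grist not in present
        if satisfied:
            return condition
    return None

# Mirrors the table in the comments and the __test__ cases above.
assert find_property_subset(['<a>/<b>foo'], ['<b>foo']) == '<a>/<b>foo'
assert find_property_subset(['<a>/<b>foo'], ['<a>foo', '<b>foo']) is None
assert find_property_subset(['<a>/<b>'], []) == '<a>/<b>'
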
diff --git a/tools/build/v2/build/toolset.py b/tools/build/v2/build/toolset.py
deleted file mode 100644
index 3665ab872e..0000000000
--- a/tools/build/v2/build/toolset.py
+++ /dev/null
@@ -1,399 +0,0 @@
-# Status: being ported by Vladimir Prus
-# Base revision: 40958
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-""" Support for toolset definition.
-"""
-
-import feature, property, generators, property_set
-import b2.util.set
-from b2.util import cached, qualify_jam_action
-from b2.util.utility import *
-from b2.util import bjam_signature
-from b2.manager import get_manager
-
-__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
-__re_two_ampersands = re.compile ('(&&)')
-__re_first_segment = re.compile ('([^.]*).*')
-__re_first_group = re.compile (r'[^.]*\.(.*)')
-
-# A single toolset flag: a mechanism to set a value. Specifies that when
-# certain properties are in the build property set, certain values should be
-# appended to some variable.
-#
-# A flag applies to a specific action in a specific module. The list of all
-# flags for a module is stored, and each flag further records the name of the
-# rule it applies to.
-class Flag:
-
- def __init__(self, variable_name, values, condition, rule = None):
- self.variable_name = variable_name
- self.values = values
- self.condition = condition
- self.rule = rule
-
- def __str__(self):
- return("Flag(" + str(self.variable_name) + ", " + str(self.values) +\
- ", " + str(self.condition) + ", " + str(self.rule) + ")")
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __module_flags, __flags, __stv
-
- # Mapping from module name to a list of all flags that apply
- # to either that module directly, or to any rule in that module.
- # Each element of the list is a Flag instance.
- # So, for a module named xxx this might contain flags for 'xxx',
- # for 'xxx.compile', for 'xxx.compile.c++', etc.
- __module_flags = {}
-
- # Mapping from specific rule or module name to a list of Flag instances
- # that apply to that name.
- # Say, it might contain flags for 'xxx.compile.c++'. If there are
- # entries for module name 'xxx', they are flags for 'xxx' itself,
- # not including any rules in that module.
- __flags = {}
-
- # A cache for variable settings. The key is generated from the rule name and the properties.
- __stv = {}
-
-reset ()
-
-# FIXME: --ignore-toolset-requirements
-def using(toolset_module, *args):
- loaded_toolset_module= get_manager().projects().load_module(toolset_module, [os.getcwd()]);
- loaded_toolset_module.init(*args)
-
-# FIXME push-checking-for-flags-module ....
-# FIXME: investigate existing uses of 'hack-hack' parameter
-# in jam code.
-
-@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
- ["values", "*"]))
-def flags(rule_or_module, variable_name, condition, values = []):
- """ Specifies the flags (variables) that must be set on targets under certain
- conditions, described by arguments.
- rule_or_module: If it contains a dot, it should be a rule name.
- The flags will be applied when that rule is
- used to set up build actions.
-
- If it does not contain a dot, it should be a module
- name. The flags will be applied for all rules in
- that module.
- If the rule's module is different from the calling
- module, an error is issued.
-
- variable_name: Variable that should be set on target
-
- condition: A condition under which this flag should be applied.
- Should be a set of property sets. If one of
- those property sets is contained in the build
- properties, the flag will be used.
- Implied values are not allowed:
- "<toolset>gcc" should be used, not just
- "gcc". Subfeatures, like in "<toolset>gcc-3.2",
- are allowed. If left empty, the flag will
- always be used.
-
- Property sets may use value-less properties
- ('<a>' vs. '<a>value') to match absent
- properties. This allows matching separately
-
- <architecture>/<address-model>64
- <architecture>ia64/<address-model>
-
- where both features are optional. Without this
- syntax we would be forced to define a "default" value.
-
- values: The value to add to the variable. If <feature>
- is specified, then the value of that feature
- will be added.
- """
- caller = bjam.caller()
- if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
- # Unqualified rule name, used inside Jamfile. Most likely used with
- # 'make' or 'notfile' rules. This prevents setting flags on the entire
- # Jamfile module (this will be considered as rule), but who cares?
- # Probably, 'flags' rule should be split into 'flags' and
- # 'flags-on-module'.
- rule_or_module = qualify_jam_action(rule_or_module, caller)
- else:
- # FIXME: revive checking that we don't set flags for a different
- # module unintentionally
- pass
-
- if condition and not replace_grist (condition, ''):
- # We have condition in the form '<feature>', that is, without
- # value. That's a previous syntax:
- #
- # flags gcc.link RPATH <dll-path> ;
- # for compatibility, convert it to
- # flags gcc.link RPATH : <dll-path> ;
- values = [ condition ]
- condition = None
-
- if condition:
- transformed = []
- for c in condition:
- # FIXME: 'split' might be a too raw tool here.
- pl = [property.create_from_string(s,False,True) for s in c.split('/')]
- pl = feature.expand_subfeatures(pl);
- transformed.append(property_set.create(pl))
- condition = transformed
-
- property.validate_property_sets(condition)
-
- __add_flag (rule_or_module, variable_name, condition, values)
-
-def set_target_variables (manager, rule_or_module, targets, ps):
- """
- """
- settings = __set_target_variables_aux(manager, rule_or_module, ps)
-
- if settings:
- for s in settings:
- for target in targets:
- manager.engine ().set_target_variable (target, s [0], s[1], True)
-
-def find_satisfied_condition(conditions, ps):
- """Returns the first element of 'property-sets' which is a subset of
- 'properties', or an empty list if no such element exists."""
-
- features = set(p.feature() for p in ps.all())
-
- for condition in conditions:
-
- found_all = True
- for i in condition.all():
-
- found = False
- if i.value():
- found = i.value() in ps.get(i.feature())
- else:
- # Handle value-less properties like '<architecture>' (compare with
- # '<architecture>x86').
- # If $(i) is a value-less property it should match default
- # value of an optional property. See the first line in the
- # example below:
- #
- # property set properties result
- # <a> <b>foo <b>foo match
- # <a> <b>foo <a>foo <b>foo no match
- # <a>foo <b>foo <b>foo no match
- # <a>foo <b>foo <a>foo <b>foo match
- found = not i.feature() in features
-
- found_all = found_all and found
-
- if found_all:
- return condition
-
- return None
-
-
-def register (toolset):
- """ Registers a new toolset.
- """
- feature.extend('toolset', [toolset])
-
-def inherit_generators (toolset, properties, base, generators_to_ignore = []):
- if not properties:
- properties = [replace_grist (toolset, '<toolset>')]
-
- base_generators = generators.generators_for_toolset(base)
-
- for g in base_generators:
- id = g.id()
-
- if not id in generators_to_ignore:
- # Some generator names have multiple periods in their name, so
- # $(id:B=$(toolset)) doesn't generate the right new_id name.
- # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
- # which is not what we want. Manually parse the base and suffix
- # (if there's a better way to do this, I'd love to see it.)
- # See also register in module generators.
- (base, suffix) = split_action_id(id)
-
- new_id = toolset + '.' + suffix
-
- generators.register(g.clone(new_id, properties))
-
-def inherit_flags(toolset, base, prohibited_properties = []):
- """Brings all flag definitions from the 'base' toolset into the 'toolset'
- toolset. Flag definitions whose conditions make use of properties in
- 'prohibited-properties' are ignored. Note that these are properties, not
- features: <debug-symbols>on and <debug-symbols>off are distinct properties of
- the same feature, so blocking one of them does not block the other.
-
- The flag conditions are not altered at all, so if a condition includes a name,
- or version of a base toolset, it won't ever match the inheriting toolset. When
- such flag settings must be inherited, define a rule in base toolset module and
- call it as needed."""
- for f in __module_flags.get(base, []):
-
- if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
- match = __re_first_group.match(f.rule)
- rule_ = None
- if match:
- rule_ = match.group(1)
-
- new_rule_or_module = ''
-
- if rule_:
- new_rule_or_module = toolset + '.' + rule_
- else:
- new_rule_or_module = toolset
-
- __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
-
-def inherit_rules (toolset, base):
- pass
- # FIXME: do something about this.
-# base_generators = generators.generators_for_toolset (base)
-
-# import action
-
-# ids = []
-# for g in base_generators:
-# (old_toolset, id) = split_action_id (g.id ())
-# ids.append (id) ;
-
-# new_actions = []
-
-# engine = get_manager().engine()
- # FIXME: do this!
-# for action in engine.action.values():
-# pass
-# (old_toolset, id) = split_action_id(action.action_name)
-#
-# if old_toolset == base:
-# new_actions.append ((id, value [0], value [1]))
-#
-# for a in new_actions:
-# action.register (toolset + '.' + a [0], a [1], a [2])
-
- # TODO: how to deal with this?
-# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ;
-# # Import the rules to the global scope
-# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
-# }
-#
-
-######################################################################################
-# Private functions
-
-@cached
-def __set_target_variables_aux (manager, rule_or_module, ps):
- """ Given a rule name and a property set, returns a list of tuples of
- variable names and values, which must be set on targets for that
- rule/properties combination.
- """
- result = []
-
- for f in __flags.get(rule_or_module, []):
-
- if not f.condition or find_satisfied_condition (f.condition, ps):
- processed = []
- for v in f.values:
- # The value might be <feature-name> so needs special
- # treatment.
- processed += __handle_flag_value (manager, v, ps)
-
- for r in processed:
- result.append ((f.variable_name, r))
-
- # strip away last dot separated part and recurse.
- next = __re_split_last_segment.match(rule_or_module)
-
- if next:
- result.extend(__set_target_variables_aux(
- manager, next.group(1), ps))
-
- return result
-
-def __handle_flag_value (manager, value, ps):
- result = []
-
- if get_grist (value):
- f = feature.get(value)
- values = ps.get(f)
-
- for value in values:
-
- if f.dependency():
- # the value of a dependency feature is a target
- # and must be actualized
- result.append(value.actualize())
-
- elif f.path() or f.free():
-
- # Treat features with && in the value
- # specially -- each &&-separated element is considered a
- # separate value. This is needed to handle searched
- # libraries, which must be in a specific order.
- if not __re_two_ampersands.search(value):
- result.append(value)
-
- else:
- result.extend(value.split ('&&'))
- else:
- result.append (value)  # values returned by ps.get() carry no grist
- else:
- result.append (value)
-
- return result
-
-def __add_flag (rule_or_module, variable_name, condition, values):
- """ Adds a new flag setting with the specified values.
- Does no checking.
- """
- f = Flag(variable_name, values, condition, rule_or_module)
-
- # Grab the name of the module
- m = __re_first_segment.match (rule_or_module)
- assert m
- module = m.group(1)
-
- __module_flags.setdefault(module, []).append(f)
- __flags.setdefault(rule_or_module, []).append(f)
-
-__requirements = []
-
-def requirements():
- """Return the list of global 'toolset requirements'.
- Those requirements will be automatically added to the requirements of any main target."""
- return __requirements
-
-def add_requirements(requirements):
- """Adds elements to the list of global 'toolset requirements'. The requirements
- will be automatically added to the requirements for all main targets, as if
- they were specified literally. For best results, all requirements added should
- be conditional or indirect conditional."""
-
- #if ! $(.ignore-requirements)
- #{
- __requirements.extend(requirements)
- #}
-
-# Make toolset 'toolset', defined in a module of the same name,
-# inherit from 'base'
-# 1. The 'init' rule from 'base' is imported into 'toolset' with full
-# name. Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and
-# <toolset> property in requires is adjusted too
-# 3. All flags are inherited
-# 4. All rules are imported.
-def inherit(toolset, base):
- get_manager().projects().load_module(base, []);
-
- inherit_generators(toolset, [], base)
- inherit_flags(toolset, base)
- inherit_rules(toolset, base)
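
The id-renaming trick that both inherit-generators and inherit_generators describe in their comments (keep everything after the first name segment, because generator ids like gcc.compile.c++ contain several dots) can be illustrated with a tiny standalone sketch. The function name below is invented for illustration and is not the b2 API.

def rename_generator_id(generator_id, new_toolset):
    # 'gcc.compile.c++' -> base 'gcc', remainder 'compile.c++'.
    base, dot, remainder = generator_id.partition('.')
    return new_toolset + dot + remainder

assert rename_generator_id('gcc.compile.c++', 'darwin') == 'darwin.compile.c++'
assert rename_generator_id('gcc.archive', 'darwin') == 'darwin.archive'
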
diff --git a/tools/build/v2/build/type.jam b/tools/build/v2/build/type.jam
deleted file mode 100644
index 1a7a578234..0000000000
--- a/tools/build/v2/build/type.jam
+++ /dev/null
@@ -1,425 +0,0 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Deals with target type declaration and defines target class which supports
-# typed targets.
-
-import "class" : new ;
-import errors ;
-import feature ;
-import generators : * ;
-import project ;
-import property ;
-import scanner ;
-import os ;
-
-# The following import would create a circular dependency:
-# project -> project-root -> builtin -> type -> targets -> project
-# import targets ;
-
-# The feature is optional so it would never get added implicitly. It is used
-# only for internal purposes and in all cases we want to use it explicitly.
-feature.feature target-type : : composite optional ;
-
-feature.feature main-target-type : : optional incidental ;
-feature.feature base-target-type : : composite optional free ;
-
-
-# Registers a target type, possible derived from a 'base-type'. Providing a list
-# of 'suffixes' here is a shortcut for separately calling the register-suffixes
-# rule with the given suffixes and the set-generated-target-suffix rule with the
-# first given suffix.
-#
-rule register ( type : suffixes * : base-type ? )
-{
- # Type names cannot contain hyphens, because when used as feature-values
- # they would be interpreted as composite features which need to be
- # decomposed.
- switch $(type)
- {
- case *-* : errors.error "type name \"$(type)\" contains a hyphen" ;
- }
-
- if $(type) in $(.types)
- {
- errors.error "Type $(type) is already registered." ;
- }
- else
- {
- .types += $(type) ;
- .base.$(type) = $(base-type) ;
- .derived.$(base-type) += $(type) ;
-
- if $(suffixes)-is-not-empty
- {
- # Specify mapping from suffixes to type.
- register-suffixes $(suffixes) : $(type) ;
- # By default, generated targets of 'type' will use the first of
- # 'suffixes'. This may be overridden.
- set-generated-target-suffix $(type) : : $(suffixes[1]) ;
- }
-
- feature.extend target-type : $(type) ;
- feature.extend main-target-type : $(type) ;
- feature.extend base-target-type : $(type) ;
-
- feature.compose <target-type>$(type) : $(base-type:G=<base-target-type>) ;
- feature.compose <base-target-type>$(type) : <base-target-type>$(base-type) ;
-
- # We used to declare the main target rule only when a 'main' parameter
- # has been specified. However, it is hard to decide that a type will
- # *never* need a main target rule and so from time to time we needed to
- # make yet another type 'main'. So now a main target rule is defined for
- # each type.
- main-rule-name = [ type-to-rule-name $(type) ] ;
- .main-target-type.$(main-rule-name) = $(type) ;
- IMPORT $(__name__) : main-target-rule : : $(main-rule-name) ;
-
- # Adding a new derived type affects generator selection so we need to
- # make the generator selection module update any of its cached
- # information related to a new derived type being defined.
- generators.update-cached-information-with-a-new-type $(type) ;
- }
-}
-
-
-# Given a type, returns the name of the main target rule which creates targets
-# of that type.
-#
-rule type-to-rule-name ( type )
-{
- # Lowercase everything. Convert underscores to dashes.
- import regex ;
- local n = [ regex.split $(type:L) "_" ] ;
- return $(n:J=-) ;
-}
-
-
-# Given a main target rule name, returns the type for which it creates targets.
-#
-rule type-from-rule-name ( rule-name )
-{
- return $(.main-target-type.$(rule-name)) ;
-}
-
-
-# Specifies that files with suffix from 'suffixes' be recognized as targets of
-# type 'type'. Issues an error if a different type is already specified for any
-# of the suffixes.
-#
-rule register-suffixes ( suffixes + : type )
-{
- for local s in $(suffixes)
- {
- if ! $(.type.$(s))
- {
- .type.$(s) = $(type) ;
- }
- else if $(.type.$(s)) != $(type)
- {
- errors.error Attempting to specify multiple types for suffix
- \"$(s)\" : "Old type $(.type.$(s)), New type $(type)" ;
- }
- }
-}
-
-
-# Returns true iff type has been registered.
-#
-rule registered ( type )
-{
- if $(type) in $(.types)
- {
- return true ;
- }
-}
-
-
-# Issues an error if 'type' is unknown.
-#
-rule validate ( type )
-{
- if ! [ registered $(type) ]
- {
- errors.error "Unknown target type $(type)" ;
- }
-}
-
-
-# Sets a scanner class that will be used for this 'type'.
-#
-rule set-scanner ( type : scanner )
-{
- validate $(type) ;
- .scanner.$(type) = $(scanner) ;
-}
-
-
-# Returns a scanner instance appropriate to 'type' and 'properties'.
-#
-rule get-scanner ( type : property-set )
-{
- if $(.scanner.$(type))
- {
- return [ scanner.get $(.scanner.$(type)) : $(property-set) ] ;
- }
-}
-
-
-# Returns a base type for the given type or nothing in case the given type is
-# not derived.
-#
-rule base ( type )
-{
- return $(.base.$(type)) ;
-}
-
-
-# Returns the given type and all of its base types in order of their distance
-# from type.
-#
-rule all-bases ( type )
-{
- local result = $(type) ;
- while $(type)
- {
- type = [ base $(type) ] ;
- result += $(type) ;
- }
- return $(result) ;
-}
-
-
-# Returns the given type and all of its derived types in order of their distance
-# from type.
-#
-rule all-derived ( type )
-{
- local result = $(type) ;
- for local d in $(.derived.$(type))
- {
- result += [ all-derived $(d) ] ;
- }
- return $(result) ;
-}
-
-
-# Returns true if 'type' is equal to 'base' or has 'base' as its direct or
-# indirect base.
-#
-rule is-derived ( type base )
-{
- if $(base) in [ all-bases $(type) ]
- {
- return true ;
- }
-}
-
-# Returns true if 'type' is either derived from or is equal to 'base'.
-#
-# TODO: It might be that is-derived and is-subtype were meant to be different
-# rules - one returning true for type = base and one not, but as currently
-# implemented they are actually the same. Clean this up.
-#
-rule is-subtype ( type base )
-{
- return [ is-derived $(type) $(base) ] ;
-}
-
-
-# Store suffixes for generated targets.
-.suffixes = [ new property-map ] ;
-
-# Store prefixes for generated targets (e.g. "lib" for library).
-.prefixes = [ new property-map ] ;
-
-
-# Sets a file suffix to be used when generating a target of 'type' with the
-# specified properties. Can be called with no properties if no suffix has
-# already been specified for the 'type'. The 'suffix' parameter can be an empty
-# string ("") to indicate that no suffix should be used.
-#
-# Note that this does not cause files with 'suffix' to be automatically
-# recognized as being of 'type'. Two different types can use the same suffix for
-# their generated files but only one type can be auto-detected for a file with
-# that suffix. Users should explicitly specify which one using the
-# register-suffixes rule.
-#
-rule set-generated-target-suffix ( type : properties * : suffix )
-{
- set-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
-}
-
-
-# Change the suffix previously registered for this type/properties combination.
-# If suffix is not yet specified, sets it.
-#
-rule change-generated-target-suffix ( type : properties * : suffix )
-{
- change-generated-target-ps suffix : $(type) : $(properties) : $(suffix) ;
-}
-
-
-# Returns the suffix used when generating a file of 'type' with the given
-# properties.
-#
-rule generated-target-suffix ( type : property-set )
-{
- return [ generated-target-ps suffix : $(type) : $(property-set) ] ;
-}
-
-
-# Sets a target prefix that should be used when generating targets of 'type'
-# with the specified properties. Can be called with empty properties if no
-# prefix for 'type' has been specified yet.
-#
-# The 'prefix' parameter can be empty string ("") to indicate that no prefix
-# should be used.
-#
-# Usage example: library names use the "lib" prefix on unix.
-#
-rule set-generated-target-prefix ( type : properties * : prefix )
-{
- set-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
-}
-
-
-# Change the prefix previously registered for this type/properties combination.
-# If prefix is not yet specified, sets it.
-#
-rule change-generated-target-prefix ( type : properties * : prefix )
-{
- change-generated-target-ps prefix : $(type) : $(properties) : $(prefix) ;
-}
-
-
-rule generated-target-prefix ( type : property-set )
-{
- return [ generated-target-ps prefix : $(type) : $(property-set) ] ;
-}
-
-
-# Common rules for prefix/suffix provisioning follow.
-
-local rule set-generated-target-ps ( ps : type : properties * : psval )
-{
- properties = <target-type>$(type) $(properties) ;
- $(.$(ps)es).insert $(properties) : $(psval) ;
-}
-
-
-local rule change-generated-target-ps ( ps : type : properties * : psval )
-{
- properties = <target-type>$(type) $(properties) ;
- local prev = [ $(.$(ps)es).find-replace $(properties) : $(psval) ] ;
- if ! $(prev)
- {
- set-generated-target-ps $(ps) : $(type) : $(properties) : $(psval) ;
- }
-}
-
-
-# Returns either prefix or suffix (as indicated by 'ps') that should be used
-# when generating a target of 'type' with the specified properties. Parameter
-# 'ps' can be either "prefix" or "suffix". If no prefix/suffix is specified for
-# 'type', returns prefix/suffix for base type, if any.
-#
-local rule generated-target-ps-real ( ps : type : properties * )
-{
- local result ;
- local found ;
- while $(type) && ! $(found)
- {
- result = [ $(.$(ps)es).find <target-type>$(type) $(properties) ] ;
- # If the prefix/suffix is explicitly set to an empty string, we consider
- # prefix/suffix to be found. If we were not to compare with "", there
- # would be no way to specify an empty prefix/suffix.
- if $(result)-is-not-empty
- {
- found = true ;
- }
- type = $(.base.$(type)) ;
- }
- if $(result) = ""
- {
- result = ;
- }
- return $(result) ;
-}
-
-
-local rule generated-target-ps ( ps : type : property-set )
-{
- local key = .$(ps).$(type).$(property-set) ;
- local v = $($(key)) ;
- if ! $(v)
- {
- v = [ generated-target-ps-real $(ps) : $(type) : [ $(property-set).raw ]
- ] ;
- if ! $(v)
- {
- v = none ;
- }
- $(key) = $(v) ;
- }
-
- if $(v) != none
- {
- return $(v) ;
- }
-}
-
-
-# Returns the file type given its name. If there are several dots in the
-# filename, each suffix is tried. E.g. for the name "file.so.1.2" the suffixes
-# "2", "1", and "so" will be tried.
-#
-rule type ( filename )
-{
- if [ os.name ] in NT CYGWIN
- {
- filename = $(filename:L) ;
- }
- local type ;
- while ! $(type) && $(filename:S)
- {
- local suffix = $(filename:S) ;
- type = $(.type$(suffix)) ;
- filename = $(filename:S=) ;
- }
- return $(type) ;
-}
-
-
-# Rule used to construct all main targets. Note that this rule gets imported
-# into the global namespace under different alias names and the exact target
-# type to construct is selected based on the alias used to actually invoke this
-# rule.
-#
-rule main-target-rule ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- # First discover the required target type based on the exact alias used to
- # invoke this rule.
- local bt = [ BACKTRACE 1 ] ;
- local rulename = $(bt[4]) ;
- local target-type = [ type-from-rule-name $(rulename) ] ;
-
- # This is a circular module dependency and so must be imported here.
- import targets ;
-
- return [ targets.create-typed-target $(target-type) : [ project.current ] :
- $(name) : $(sources) : $(requirements) : $(default-build) :
- $(usage-requirements) ] ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- # TODO: Add tests for all the is-derived, is-base & related type relation
- # checking rules.
-}
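
As a rough illustration of the 'type' rule above (try each trailing suffix of the file name until a registered one is found), here is a minimal standalone sketch; the suffix table is hypothetical and stands in for what register-suffixes would have recorded.

# Hypothetical stand-in for the registrations kept by register-suffixes.
SUFFIX_TO_TYPE = {'cpp': 'CPP', 'so': 'SHARED_LIB', 'a': 'STATIC_LIB'}

def type_of(filename):
    # For 'libfoo.so.1.2' the suffixes '2', '1' and 'so' are tried in turn.
    while '.' in filename:
        filename, _, suffix = filename.rpartition('.')
        if suffix in SUFFIX_TO_TYPE:
            return SUFFIX_TO_TYPE[suffix]
    return None

assert type_of('libfoo.so.1.2') == 'SHARED_LIB'
assert type_of('hello.cpp') == 'CPP'
assert type_of('README') is None
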
diff --git a/tools/build/v2/build/version.jam b/tools/build/v2/build/version.jam
deleted file mode 100644
index 512c9ee31a..0000000000
--- a/tools/build/v2/build/version.jam
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-import numbers ;
-
-major = "2011" ;
-minor = "12" ;
-
-rule boost-build ( )
-{
- return "$(major).$(minor)-svn" ;
-}
-
-rule print ( )
-{
- if [ verify-engine-version ]
- {
- ECHO "Boost.Build" [ boost-build ] ;
- }
-}
-
-rule verify-engine-version ( )
-{
- local v = [ modules.peek : JAM_VERSION ] ;
-
- if $(v[1]) != $(major) || $(v[2]) != $(minor)
- {
- local argv = [ modules.peek : ARGV ] ;
- local e = $(argv[1]) ;
- local l = [ modules.binding version ] ;
- l = $(l:D) ;
- l = $(l:D) ;
- ECHO "warning: mismatched versions of Boost.Build engine and core" ;
- ECHO "warning: Boost.Build engine ($(e)) is $(v:J=.)" ;
- ECHO "warning: Boost.Build core (at $(l)) is" [ boost-build ] ;
- }
- else
- {
- return true ;
- }
-}
-
-
-
-# Utility rule for testing whether all elements in a sequence are equal to 0.
-#
-local rule is-all-zeroes ( sequence * )
-{
- local result = "true" ;
- for local e in $(sequence)
- {
- if $(e) != "0"
- {
- result = "" ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns "true" if the first version is less than the second one.
-#
-rule version-less ( lhs + : rhs + )
-{
- numbers.check $(lhs) ;
- numbers.check $(rhs) ;
-
- local done ;
- local result ;
-
- while ! $(done) && $(lhs) && $(rhs)
- {
- if [ numbers.less $(lhs[1]) $(rhs[1]) ]
- {
- done = "true" ;
- result = "true" ;
- }
- else if [ numbers.less $(rhs[1]) $(lhs[1]) ]
- {
- done = "true" ;
- }
- else
- {
- lhs = $(lhs[2-]) ;
- rhs = $(rhs[2-]) ;
- }
- }
- if ( ! $(done) && ! $(lhs) && ! [ is-all-zeroes $(rhs) ] )
- {
- result = "true" ;
- }
-
- return $(result) ;
-}
-
-
-# Returns "true" if the current JAM version version is at least the given
-# version.
-#
-rule check-jam-version ( version + )
-{
- local version-tag = $(version:J=.) ;
- if ! $(version-tag)
- {
- errors.error Invalid version specifier: : $(version:E="(undefined)") ;
- }
-
- if ! $(.jam-version-check.$(version-tag))-is-not-empty
- {
- local jam-version = [ modules.peek : JAM_VERSION ] ;
- if ! $(jam-version)
- {
- errors.error "Unable to deduce Boost Jam version. Your Boost Jam"
- "installation is most likely terribly outdated." ;
- }
- .jam-version-check.$(version-tag) = "true" ;
- if [ version-less [ modules.peek : JAM_VERSION ] : $(version) ]
- {
- .jam-version-check.$(version-tag) = "" ;
- }
- }
- return $(.jam-version-check.$(version-tag)) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- local jam-version = [ modules.peek : JAM_VERSION ] ;
- local future-version = $(jam-version) ;
- future-version += "1" ;
-
- assert.true check-jam-version $(jam-version) ;
- assert.false check-jam-version $(future-version) ;
-
- assert.true version-less 0 : 1 ;
- assert.false version-less 0 : 0 ;
- assert.true version-less 1 : 2 ;
- assert.false version-less 1 : 1 ;
- assert.false version-less 2 : 1 ;
- assert.true version-less 3 1 20 : 3 4 10 ;
- assert.false version-less 3 1 10 : 3 1 10 ;
- assert.false version-less 3 4 10 : 3 1 20 ;
- assert.true version-less 3 1 20 5 1 : 3 4 10 ;
- assert.false version-less 3 1 10 5 1 : 3 1 10 ;
- assert.false version-less 3 4 10 5 1 : 3 1 20 ;
- assert.true version-less 3 1 20 : 3 4 10 5 1 ;
- assert.true version-less 3 1 10 : 3 1 10 5 1 ;
- assert.false version-less 3 4 10 : 3 1 20 5 1 ;
- assert.false version-less 3 1 10 : 3 1 10 0 0 ;
- assert.false version-less 3 1 10 0 0 : 3 1 10 ;
- assert.false version-less 3 1 10 0 : 3 1 10 0 0 ;
- assert.false version-less 3 1 10 0 : 03 1 10 0 0 ;
- assert.false version-less 03 1 10 0 : 3 1 10 0 0 ;
-
- # TODO: Add tests for invalid input data being sent to version-less.
-}
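
The component-wise comparison implemented by version-less above (compare numerically, treat a missing tail as zeros) behaves like this short standalone sketch, which reproduces a few of the __test__ cases; it is an illustrative approximation, not the Jam rule itself.

def version_less(lhs, rhs):
    # Pad the shorter version with zeros, then compare element-wise.
    length = max(len(lhs), len(rhs))
    lhs = list(lhs) + [0] * (length - len(lhs))
    rhs = list(rhs) + [0] * (length - len(rhs))
    return lhs < rhs

assert version_less([3, 1, 20], [3, 4, 10])
assert not version_less([3, 4, 10], [3, 1, 20])
assert version_less([3, 1, 10], [3, 1, 10, 5, 1])
assert not version_less([3, 1, 10], [3, 1, 10, 0, 0])
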
diff --git a/tools/build/v2/build/virtual-target.jam b/tools/build/v2/build/virtual-target.jam
deleted file mode 100644
index 2e8446bcc8..0000000000
--- a/tools/build/v2/build/virtual-target.jam
+++ /dev/null
@@ -1,1317 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Implements virtual targets, which correspond to actual files created during a
-# build, but are not yet targets in Jam sense. They are needed, for example,
-# when searching for possible transformation sequences, when it is not yet known
-# whether a particular target should be created at all.
-
-import "class" : new ;
-import errors ;
-import path ;
-import sequence ;
-import set ;
-import type ;
-import utility ;
-
-
-# +--------------------------+
-# | virtual-target |
-# +==========================+
-# | actualize |
-# +--------------------------+
-# | actualize-action() = 0 |
-# | actualize-location() = 0 |
-# +----------------+---------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# +---------------------+ +-------+--------------+
-# | action | | abstract-file-target |
-# +=====================| * +======================+
-# | action-name | +--+ action |
-# | properties | | +----------------------+
-# +---------------------+--+ | actualize-action() |
-# | actualize() |0..1 +-----------+----------+
-# | path() | |
-# | adjust-properties() | sources |
-# | actualize-sources() | targets |
-# +------+--------------+ ^
-# | / \
-# ^ +-+-+
-# / \ |
-# +-+-+ +-------------+-------------+
-# | | |
-# | +------+---------------+ +--------+-------------+
-# | | file-target | | searched-lib-target |
-# | +======================+ +======================+
-# | | actualize-location() | | actualize-location() |
-# | +----------------------+ +----------------------+
-# |
-# +-+------------------------------+
-# | |
-# +----+----------------+ +---------+-----------+
-# | compile-action | | link-action |
-# +=====================+ +=====================+
-# | adjust-properties() | | adjust-properties() |
-# +---------------------+ | actualize-sources() |
-# +---------------------+
-#
-# The 'compile-action' and 'link-action' classes are not defined here but in
-# builtin.jam modules. They are shown in the diagram to give the big picture.
-
-
-# Models a potential target. It can be converted into a Jam target and used in
-# building, if needed. However, it can be also dropped, which allows us to
-# search for different transformations and select only one.
-#
-class virtual-target
-{
- import scanner ;
- import sequence ;
- import utility ;
- import virtual-target ;
-
- rule __init__ (
- name # Target/project name.
- : project # Project to which this target belongs.
- )
- {
- self.name = $(name) ;
- self.project = $(project) ;
- self.dependencies = ;
- }
-
- # Name of this target.
- #
- rule name ( )
- {
- return $(self.name) ;
- }
-
- # Project of this target.
- #
- rule project ( )
- {
- return $(self.project) ;
- }
-
- # Adds additional 'virtual-target' instances this one depends on.
- #
- rule depends ( d + )
- {
- self.dependencies = [ sequence.merge $(self.dependencies) :
- [ sequence.insertion-sort $(d) ] ] ;
- }
-
- rule dependencies ( )
- {
- return $(self.dependencies) ;
- }
-
- rule always ( )
- {
- .always = 1 ;
- }
-
- # Generates all the actual targets and sets up build actions for this
- # target.
- #
- # If 'scanner' is specified, creates an additional target with the same
- # location as the actual target, which will depend on the actual target and
- # be associated with a 'scanner'. That additional target is returned. See
- # the docs (#dependency_scanning) for rationale. Target must correspond to a
- # file if 'scanner' is specified.
- #
- # If scanner is not specified then the actual target is returned.
- #
- rule actualize ( scanner ? )
- {
- local actual-name = [ actualize-no-scanner ] ;
-
- if $(.always)
- {
- ALWAYS $(actual-name) ;
- }
-
- if ! $(scanner)
- {
- return $(actual-name) ;
- }
- else
- {
- # Add the scanner instance to the grist for name.
- local g = [ sequence.join
- [ utility.ungrist $(actual-name:G) ] $(scanner) : - ] ;
- local name = $(actual-name:G=$(g)) ;
-
- if ! $(self.made.$(name))
- {
- self.made.$(name) = true ;
-
- DEPENDS $(name) : $(actual-name) ;
-
- actualize-location $(name) ;
-
- scanner.install $(scanner) : $(name) $(__name__) ;
- }
- return $(name) ;
- }
- }
-
-# private: (overridables)
-
- # Sets up build actions for 'target'. Should call appropriate rules and set
- # target variables.
- #
- rule actualize-action ( target )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- # Sets up variables on 'target' which specify its location.
- #
- rule actualize-location ( target )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- # If the target is a generated one, returns the path where it will be
- # generated. Otherwise, returns an empty list.
- #
- rule path ( )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
- # Returns the actual target name to be used in case when no scanner is
- # involved.
- #
- rule actual-name ( )
- {
- errors.error "method should be defined in derived classes" ;
- }
-
-# implementation
- rule actualize-no-scanner ( )
- {
- # In fact, we just need to merge virtual-target with
- # abstract-file-target as the latter is the only class derived from the
- # former. But that has been left for later.
-
- errors.error "method should be defined in derived classes" ;
- }
-}
-
-
-# Target corresponding to a file. The exact mapping for file is not yet
-# specified in this class. (TODO: Actually, the class name could be better...)
-#
-# May be a source file (when no action is specified) or a derived file
-# (otherwise).
-#
-# The target's grist is a concatenation of its project's location, action
-# properties (for derived targets) and, optionally, value identifying the main
-# target.
-#
-class abstract-file-target : virtual-target
-{
- import project ;
- import regex ;
- import sequence ;
- import path ;
- import type ;
- import property-set ;
- import indirect ;
-
- rule __init__ (
- name # Target's name.
- exact ? # If non-empty, the name is exactly the name the created file
- # should have. Otherwise, the '__init__' method will add a
- # suffix obtained from 'type' by calling
- # 'type.generated-target-suffix'.
- : type ? # Target's type.
- : project
- : action ?
- )
- {
- virtual-target.__init__ $(name) : $(project) ;
-
- self.type = $(type) ;
- self.action = $(action) ;
- if $(action)
- {
- $(action).add-targets $(__name__) ;
-
- if $(self.type) && ! $(exact)
- {
- _adjust-name $(name) ;
- }
- }
- }
-
- rule type ( )
- {
- return $(self.type) ;
- }
-
- # Sets the path. When generating target name, it will override any path
- # computation from properties.
- #
- rule set-path ( path )
- {
- self.path = [ path.native $(path) ] ;
- }
-
- # Returns the currently set action.
- #
- rule action ( )
- {
- return $(self.action) ;
- }
-
- # Sets/gets the 'root' flag. Target is root if it directly corresponds to
- # some variant of a main target.
- #
- rule root ( set ? )
- {
- if $(set)
- {
- self.root = true ;
- }
- return $(self.root) ;
- }
-
- # Gets or sets the subvariant which created this target. The subvariant is
- # set when the target is brought into existence and is never changed after
- # that. In particular, if a target is shared between subvariants, only the
- # first one is stored.
- #
- rule creating-subvariant ( s ? # If specified, specifies the value to set,
- # which should be a 'subvariant' class
- # instance.
- )
- {
- if $(s) && ! $(self.creating-subvariant)
- {
- self.creating-subvariant = $(s) ;
- }
- return $(self.creating-subvariant) ;
- }
-
- rule actualize-action ( target )
- {
- if $(self.action)
- {
- $(self.action).actualize ;
- }
- }
-
- # Return a human-readable representation of this target. If this target has
- # an action, that is:
- #
- # { <action-name>-<self.name>.<self.type> <action-sources>... }
- #
- # otherwise, it is:
- #
- # { <self.name>.<self.type> }
- #
- rule str ( )
- {
- local action = [ action ] ;
- local name-dot-type = [ sequence.join $(self.name) "." $(self.type) ] ;
-
- if $(action)
- {
- local sources = [ $(action).sources ] ;
- local action-name = [ $(action).action-name ] ;
-
- local ss ;
- for local s in $(sources)
- {
- ss += [ $(s).str ] ;
- }
-
- return "{" $(action-name)-$(name-dot-type) $(ss) "}" ;
- }
- else
- {
- return "{" $(name-dot-type) "}" ;
- }
- }
-
- rule less ( a )
- {
- if [ str ] < [ $(a).str ]
- {
- return true ;
- }
- }
-
- rule equal ( a )
- {
- if [ str ] = [ $(a).str ]
- {
- return true ;
- }
- }
-
-# private:
- rule actual-name ( )
- {
- if ! $(self.actual-name)
- {
- local grist = [ grist ] ;
- local basename = [ path.native $(self.name) ] ;
- self.actual-name = <$(grist)>$(basename) ;
- }
- return $(self.actual-name) ;
- }
-
- # Helper to 'actual-name', above. Computes a unique prefix used to
- # distinguish this target from other targets with the same name creating
- # different files.
- #
- rule grist ( )
- {
- # Depending on target, there may be different approaches to generating
- # unique prefixes. We generate prefixes in the form:
- # <one letter approach code> <the actual prefix>
- local path = [ path ] ;
- if $(path)
- {
- # The target will be generated to a known path. Just use the path
- # for identification, since path is as unique as it can get.
- return p$(path) ;
- }
- else
- {
- # File is either source, which will be searched for, or is not a
- # file at all. Use the location of project for distinguishing.
- local project-location = [ $(self.project).get location ] ;
- local location-grist = [ sequence.join [ regex.split
- $(project-location) "/" ] : "!" ] ;
-
- if $(self.action)
- {
- local ps = [ $(self.action).properties ] ;
- local property-grist = [ $(ps).as-path ] ;
- # 'property-grist' can be empty when 'ps' is an empty property
- # set.
- if $(property-grist)
- {
- location-grist = $(location-grist)/$(property-grist) ;
- }
- }
-
- return l$(location-grist) ;
- }
- }
-
- # Given the target name specified in constructor, returns the name which
- # should be really used, by looking at the <tag> properties. Tag properties
- # need to be specified as <tag>@rule-name. This makes Boost Build call the
- # specified rule with the target name, type and properties to get the new
- # name. If no <tag> property is specified or the rule specified by <tag>
- # returns nothing, returns the result of calling
- # virtual-target.add-prefix-and-suffix.
- #
- rule _adjust-name ( specified-name )
- {
- local ps ;
- if $(self.action)
- {
- ps = [ $(self.action).properties ] ;
- }
- else
- {
- ps = [ property-set.empty ] ;
- }
-
- # We add ourselves to the properties so that any tag rule can get more
- # direct information about the target than just that available through
- # the properties. This is useful in implementing name changes based on
- # the sources of the target. For example to make unique names of object
- # files based on the source file. --grafik
- ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
-
- local tag = [ $(ps).get <tag> ] ;
-
- if $(tag)
- {
- local rule-name = [ MATCH ^@(.*) : $(tag) ] ;
- if $(rule-name)
- {
- if $(tag[2])
- {
- errors.error "<tag>@rulename is present but is not the only"
- "<tag> feature" ;
- }
-
- self.name = [ indirect.call $(rule-name) $(specified-name)
- : $(self.type) : $(ps) ] ;
- }
- else
- {
- errors.error
- "The value of the <tag> feature must be '@rule-name'" ;
- }
- }
-
- # If there is no tag or the tag rule returned nothing.
- if ! $(tag) || ! $(self.name)
- {
- self.name = [ virtual-target.add-prefix-and-suffix $(specified-name)
- : $(self.type) : $(ps) ] ;
- }
- }
-
- rule actualize-no-scanner ( )
- {
- local name = [ actual-name ] ;
-
- # Do anything only on the first invocation.
- if ! $(self.made.$(name))
- {
- self.made.$(name) = true ;
-
- if $(self.action)
- {
- # For non-derived target, we do not care if there are several
- # virtual targets that refer to the same name. One case when
- # this is unavoidable is when the file name is main.cpp and two
- # targets have types CPP (for compiling) and MOCCABLE_CPP (for
- # conversion to H via Qt tools).
- virtual-target.register-actual-name $(name) : $(__name__) ;
- }
-
- for local i in $(self.dependencies)
- {
- DEPENDS $(name) : [ $(i).actualize ] ;
- }
-
- actualize-location $(name) ;
- actualize-action $(name) ;
- }
- return $(name) ;
- }
-}
-
-
-# Adds the prefix and suffix appropriate to the 'type'/'property-set'
-# combination to the specified name and returns the result.
-#
-rule add-prefix-and-suffix ( specified-name : type ? : property-set )
-{
- local suffix = [ type.generated-target-suffix $(type) : $(property-set) ] ;
-
- # Handle suffixes for which no leading dot is desired. Those are specified
- # by enclosing them in <...>. Needed by python so it can create "_d.so"
- # extensions, for example.
- if $(suffix:G)
- {
- suffix = [ utility.ungrist $(suffix) ] ;
- }
- else
- {
- suffix = .$(suffix) ;
- }
-
- local prefix = [ type.generated-target-prefix $(type) : $(property-set) ] ;
-
- if [ MATCH ^($(prefix)) : $(specified-name) ]
- {
- prefix = ;
- }
- return $(prefix:E="")$(specified-name)$(suffix:E="") ;
-}
-
-
-# File targets with explicitly known location.
-#
-# The file path is determined as
-# * Value passed to the 'set-path' method, if any.
-# * For derived files, project's build dir, joined with components that
-# describe action properties. If free properties are not equal to the
-# project's reference properties an element with the name of the main
-# target is added.
-# * For source files, project's source dir.
-#
-# The file suffix is determined as:
-# * The value passed to the 'suffix' method, if any.
-# * The suffix corresponding to the target's type.
-#
-class file-target : abstract-file-target
-{
- import "class" : new ;
- import common ;
- import errors ;
-
- rule __init__ (
- name exact ?
- : type ? # Optional type for this target.
- : project
- : action ?
- : path ?
- )
- {
- abstract-file-target.__init__ $(name) $(exact) : $(type) : $(project) :
- $(action) ;
-
- self.path = $(path) ;
- }
-
- rule clone-with-different-type ( new-type )
- {
- return [ new file-target $(self.name) exact : $(new-type) :
- $(self.project) : $(self.action) : $(self.path) ] ;
- }
-
- rule actualize-location ( target )
- {
- if $(self.action)
- {
- # This is a derived file.
- local path = [ path ] ;
- LOCATE on $(target) = $(path) ;
-
- # Make sure the path exists.
- DEPENDS $(target) : $(path) ;
- common.MkDir $(path) ;
-
- # It is possible that the target name includes a directory too, for
- # example when installing headers. Create that directory.
- if $(target:D)
- {
- local d = $(target:D) ;
- d = $(d:R=$(path)) ;
- DEPENDS $(target) : $(d) ;
- common.MkDir $(d) ;
- }
-
- # For a real file target, we create a fake target depending on the
- # real target. This allows us to run
- #
- # bjam hello.o
- #
- # without trying to guess the name of the real target. Note that the
- # target has no directory name and uses a special <e> grist.
- #
- # First, that means that "bjam hello.o" will build all known hello.o
- # targets. Second, the <e> grist makes sure this target will not be
- # confused with other targets, for example, if we have subdir 'test'
- # with target 'test' in it that includes a 'test.o' file, then the
- # target for the directory will be just 'test', the target for test.o
- # will be <ptest/bin/gcc/debug>test.o, and the target we create below
- # will be <e>test.o.
- DEPENDS $(target:G=e) : $(target) ;
- # Allow bjam <path-to-file>/<file> to work. This will not catch all
- # possible ways to refer to the path (relative/absolute, extra ".",
- # various "..", but should help in obvious cases.
- DEPENDS $(target:G=e:R=$(path)) : $(target) ;
- }
- else
- {
- SEARCH on $(target) = [ path.native $(self.path) ] ;
- }
- }
-
- # Returns the directory for this target.
- #
- rule path ( )
- {
- if ! $(self.path)
- {
- if $(self.action)
- {
- local p = [ $(self.action).properties ] ;
- local path,relative-to-build-dir = [ $(p).target-path ] ;
- local path = $(path,relative-to-build-dir[1]) ;
- local relative-to-build-dir = $(path,relative-to-build-dir[2]) ;
-
- if $(relative-to-build-dir)
- {
- path = [ path.join [ $(self.project).build-dir ] $(path) ] ;
- }
-
- self.path = [ path.native $(path) ] ;
- }
- }
- return $(self.path) ;
- }
-}
-
-
-class notfile-target : abstract-file-target
-{
- rule __init__ ( name : project : action ? )
- {
- abstract-file-target.__init__ $(name) : : $(project) : $(action) ;
- }
-
- # Returns nothing to indicate that the target's path is not known.
- #
- rule path ( )
- {
- return ;
- }
-
- rule actualize-location ( target )
- {
- NOTFILE $(target) ;
- ALWAYS $(target) ;
- # TEMPORARY $(target) ;
- NOUPDATE $(target) ;
- }
-}
-
-
-# Class representing an action. Both 'targets' and 'sources' should list
-# instances of 'virtual-target'. Action name should name a rule with this
-# prototype:
-# rule action-name ( targets + : sources * : properties * )
-# Targets and sources are passed as actual Jam targets. The rule may not
-# establish additional dependency relationships.
-#
-class action
-{
- import "class" ;
- import errors ;
- import type ;
- import toolset ;
- import property-set ;
- import indirect ;
- import path ;
- import set : difference ;
-
- rule __init__ ( sources * : action-name + : property-set ? )
- {
- self.sources = $(sources) ;
-
- self.action-name = [ indirect.make-qualified $(action-name) ] ;
-
- if ! $(property-set)
- {
- property-set = [ property-set.empty ] ;
- }
-
- if ! [ class.is-instance $(property-set) ]
- {
- errors.error "Property set instance required" ;
- }
-
- self.properties = $(property-set) ;
- }
-
- rule add-targets ( targets * )
- {
- self.targets += $(targets) ;
- }
-
- rule replace-targets ( old-targets * : new-targets * )
- {
- self.targets = [ set.difference $(self.targets) : $(old-targets) ] ;
- self.targets += $(new-targets) ;
- }
-
- rule targets ( )
- {
- return $(self.targets) ;
- }
-
- rule sources ( )
- {
- return $(self.sources) ;
- }
-
- rule action-name ( )
- {
- return $(self.action-name) ;
- }
-
- rule properties ( )
- {
- return $(self.properties) ;
- }
-
- # Generates actual build instructions.
- #
- rule actualize ( )
- {
- if ! $(self.actualized)
- {
- self.actualized = true ;
-
- local ps = [ properties ] ;
- local properties = [ adjust-properties $(ps) ] ;
-
- local actual-targets ;
- for local i in [ targets ]
- {
- actual-targets += [ $(i).actualize ] ;
- }
-
- actualize-sources [ sources ] : $(properties) ;
-
- DEPENDS $(actual-targets) : $(self.actual-sources)
- $(self.dependency-only-sources) ;
-
- # This works around a bug with -j and actions that
- # produce multiple targets, where:
- # - dependency on the first output is found, and
- # the action is started
- # - dependency on the second output is found, and
- # bjam noticed that command is already running
- # - instead of waiting for the command, dependents
- # of the second targets are immediately updated.
- if $(actual-targets[2])
- {
- INCLUDES $(actual-targets) : $(actual-targets) ;
- }
-
- # The action name can include an additional argument to the rule, which
- # should not be passed to 'set-target-variables'.
- toolset.set-target-variables
- [ indirect.get-rule $(self.action-name[1]) ] $(actual-targets)
- : $(properties) ;
-
- # Reflect ourselves in a variable for the target. This allows
- # looking up additional info for the action given the raw target.
- # For example to debug or output action information from action
- # rules.
- .action on $(actual-targets) = $(__name__) ;
-
- indirect.call $(self.action-name) $(actual-targets)
- : $(self.actual-sources) : [ $(properties).raw ] ;
-
- # Since we set up the creating action here, we set up the action for
- # cleaning up as well.
- common.Clean clean-all : $(actual-targets) ;
- }
- }
-
- # Helper for 'actualize-sources'. For each passed source, actualizes it with
- # the appropriate scanner. Returns the actualized virtual targets.
- #
- rule actualize-source-type ( sources * : property-set )
- {
- local result = ;
- for local i in $(sources)
- {
- local scanner ;
- if [ $(i).type ]
- {
- scanner = [ type.get-scanner [ $(i).type ] : $(property-set) ] ;
- }
- result += [ $(i).actualize $(scanner) ] ;
- }
- return $(result) ;
- }
-
- # Creates actual Jam targets for sources. Initializes the following member
- # variables:
- # 'self.actual-sources' -- sources passed to the updating action.
- # 'self.dependency-only-sources' -- sources marked as dependencies but
- # not used otherwise.
- #
- # New values will be *appended* to the variables. They may be non-empty if
- # caller wants it.
- #
- rule actualize-sources ( sources * : property-set )
- {
- local dependencies = [ $(self.properties).get <dependency> ] ;
-
- self.dependency-only-sources +=
- [ actualize-source-type $(dependencies) : $(property-set) ] ;
- self.actual-sources +=
- [ actualize-source-type $(sources) : $(property-set) ] ;
-
- # This is used to help bjam find dependencies in generated headers and
- # other main targets, e.g. in:
- #
- # make a.h : ....... ;
- # exe hello : hello.cpp : <implicit-dependency>a.h ;
- #
- # For bjam to find the dependency the generated target must be
- # actualized (i.e. have its Jam target constructed). In the above case,
- # if we are building just hello ("bjam hello"), 'a.h' will not be
- # actualized unless we do it here.
- local implicit = [ $(self.properties).get <implicit-dependency> ] ;
- for local i in $(implicit)
- {
- $(i:G=).actualize ;
- }
- }
-
- # Determines real properties when trying to build with 'properties'. This is
- # the last chance to fix properties, for example to adjust includes to get
- # generated headers correctly. Default implementation simply returns its
- # argument.
- #
- rule adjust-properties ( property-set )
- {
- return $(property-set) ;
- }
-}
-
-
-# Action class which does nothing --- it produces the targets with specific
-# properties out of nowhere. It is needed to distinguish virtual targets with
-# different properties that are known to exist and have no actions which create
-# them.
-#
-class null-action : action
-{
- rule __init__ ( property-set ? )
- {
- action.__init__ : .no-action : $(property-set) ;
- }
-
- rule actualize ( )
- {
- if ! $(self.actualized)
- {
- self.actualized = true ;
- for local i in [ targets ]
- {
- $(i).actualize ;
- }
- }
- }
-}
-
-
-# Class which acts exactly like 'action', except that its sources are not
-# scanned for dependencies.
-#
-class non-scanning-action : action
-{
- rule __init__ ( sources * : action-name + : property-set ? )
- {
- action.__init__ $(sources) : $(action-name) : $(property-set) ;
- }
-
- rule actualize-source-type ( sources * : property-set )
- {
- local result ;
- for local i in $(sources)
- {
- result += [ $(i).actualize ] ;
- }
- return $(result) ;
- }
-}
-
-
-# Creates a virtual target with an appropriate name and type from 'file'. If a
-# target with that name in that project already exists, returns that already
-# created target.
-#
-# FIXME: a more correct way would be to compute the path to the file, based on
-# name and source location for the project, and use that path to determine if
-# the target has already been created. This logic should be shared with how we
-# usually find targets identified by a specific target id. It should also be
-# updated to work correctly when the file is specified using both relative and
-# absolute paths.
-#
-# TODO: passing a project with all virtual targets is starting to be annoying.
-#
-rule from-file ( file : file-loc : project )
-{
- import type ; # Had to do this here to break a circular dependency.
-
- # Check whether we already created a target corresponding to this file.
- local path = [ path.root [ path.root $(file) $(file-loc) ] [ path.pwd ] ] ;
-
- if $(.files.$(path))
- {
- return $(.files.$(path)) ;
- }
- else
- {
- local name = [ path.make $(file) ] ;
- local type = [ type.type $(file) ] ;
- local result ;
-
- result = [ new file-target $(file) : $(type) : $(project) : :
- $(file-loc) ] ;
-
- .files.$(path) = $(result) ;
- return $(result) ;
- }
-}
-
-
-# Registers a new virtual target. Checks if there is already a registered target
-# with the same name, type, project and subvariant properties as well as the
-# same sources and an equal action. If such a target is found, it is returned
-# and 'target' is not registered. Otherwise, 'target' is registered and
-# returned.
-#
-rule register ( target )
-{
- local signature = [ sequence.join
- [ $(target).path ] [ $(target).name ] : - ] ;
-
- local result ;
- for local t in $(.cache.$(signature))
- {
- local a1 = [ $(t).action ] ;
- local a2 = [ $(target).action ] ;
-
- if ! $(result)
- {
- if ! $(a1) && ! $(a2)
- {
- result = $(t) ;
- }
- else
- {
- if $(a1) && $(a2) &&
- ( [ $(a1).action-name ] = [ $(a2).action-name ] ) &&
- ( [ $(a1).sources ] = [ $(a2).sources ] )
- {
- local ps1 = [ $(a1).properties ] ;
- local ps2 = [ $(a2).properties ] ;
- local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference
- [ $(ps1).dependency ] : [ $(ps1).incidental ] ] ;
- local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference
- [ $(ps2).dependency ] : [ $(ps2).incidental ] ] ;
- if $(p1) = $(p2)
- {
- result = $(t) ;
- }
- }
- }
- }
- }
-
- if ! $(result)
- {
- .cache.$(signature) += $(target) ;
- result = $(target) ;
- }
-
- .recent-targets += $(result) ;
- .all-targets += $(result) ;
-
- return $(result) ;
-}
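
As a rough illustration of the dedup performed by the 'register' rule above, here is a minimal Python sketch (the Act/Tgt stand-ins and their field names are hypothetical, for illustration only): two targets collapse into a single cache entry when their path/name signature matches and their creating actions agree on rule name, sources and properties.

from dataclasses import dataclass
from typing import Optional, Tuple

@dataclass(frozen=True)
class Act:                          # stand-in for an action
    rule_name: str
    sources: Tuple[str, ...]
    properties: Tuple[str, ...]

@dataclass
class Tgt:                          # stand-in for a virtual target
    path: str
    name: str
    action: Optional[Act] = None

_cache = {}

def register(t):
    sig = (t.path, t.name)                           # path-name signature
    for existing in _cache.get(sig, []):
        a1, a2 = existing.action, t.action
        if a1 is None and a2 is None:
            return existing                          # two plain source targets: reuse
        if a1 and a2 and (a1.rule_name, a1.sources, a1.properties) == \
                         (a2.rule_name, a2.sources, a2.properties):
            return existing                          # equivalent creating action: reuse
    _cache.setdefault(sig, []).append(t)
    return t
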
-
-
-# Each target returned by 'register' is added to the .recent-targets list,
-# returned by this function. This allows us to find all virtual targets created
-# when building a specific main target, even those constructed only as
-# intermediate targets.
-#
-rule recent-targets ( )
-{
- return $(.recent-targets) ;
-}
-
-
-rule clear-recent-targets ( )
-{
- .recent-targets = ;
-}
-
-
-# Returns all virtual targets ever created.
-#
-rule all-targets ( )
-{
- return $(.all-targets) ;
-}
-
-
-# Returns all targets from 'targets' with types equal to 'type' or derived from
-# it.
-#
-rule select-by-type ( type : targets * )
-{
- local result ;
- for local t in $(targets)
- {
- if [ type.is-subtype [ $(t).type ] $(type) ]
- {
- result += $(t) ;
- }
- }
- return $(result) ;
-}
-
-
-rule register-actual-name ( actual-name : virtual-target )
-{
- if $(.actual.$(actual-name))
- {
- local cs1 = [ $(.actual.$(actual-name)).creating-subvariant ] ;
- local cs2 = [ $(virtual-target).creating-subvariant ] ;
- local cmt1 = [ $(cs1).main-target ] ;
- local cmt2 = [ $(cs2).main-target ] ;
-
- local action1 = [ $(.actual.$(actual-name)).action ] ;
- local action2 = [ $(virtual-target).action ] ;
- local properties-added ;
- local properties-removed ;
- if $(action1) && $(action2)
- {
- local p1 = [ $(action1).properties ] ;
- p1 = [ $(p1).raw ] ;
- local p2 = [ $(action2).properties ] ;
- p2 = [ $(p2).raw ] ;
- properties-removed = [ set.difference $(p1) : $(p2) ] ;
- properties-removed ?= "none" ;
- properties-added = [ set.difference $(p2) : $(p1) ] ;
- properties-added ?= "none" ;
- }
- errors.error "Duplicate name of actual target:" $(actual-name)
- : "previous virtual target" [ $(.actual.$(actual-name)).str ]
- : "created from" [ $(cmt1).full-name ]
- : "another virtual target" [ $(virtual-target).str ]
- : "created from" [ $(cmt2).full-name ]
- : "added properties:" $(properties-added)
- : "removed properties:" $(properties-removed) ;
- }
- else
- {
- .actual.$(actual-name) = $(virtual-target) ;
- }
-}
-
-
-# Traverses the dependency graph of 'target' and returns all targets that will be
-# created before this one is created. If the root of some dependency graph is
-# found during traversal, it is either included or not, depending on the
-# 'include-roots' value. In either case traversal stops at root targets, i.e.
-# root target sources are not traversed.
-#
-rule traverse ( target : include-roots ? : include-sources ? )
-{
- local result ;
- if [ $(target).action ]
- {
- local action = [ $(target).action ] ;
- # This includes the 'target' as well.
- result += [ $(action).targets ] ;
-
- for local t in [ $(action).sources ]
- {
- if ! [ $(t).root ]
- {
- result += [ traverse $(t) : $(include-roots) : $(include-sources) ] ;
- }
- else if $(include-roots)
- {
- result += $(t) ;
- }
- }
- }
- else if $(include-sources)
- {
- result = $(target) ;
- }
- return $(result) ;
-}
-
-
-# Takes an 'action' instance and creates a new instance of it and all targets
-# produced by the action. The rule-name and properties are set to
-# 'new-rule-name' and 'new-properties', if those are specified. Returns the
-# cloned action.
-#
-rule clone-action ( action : new-project : new-action-name ? : new-properties ? )
-{
- if ! $(new-action-name)
- {
- new-action-name = [ $(action).action-name ] ;
- }
- if ! $(new-properties)
- {
- new-properties = [ $(action).properties ] ;
- }
-
- local action-class = [ modules.peek $(action) : __class__ ] ;
- local cloned-action = [ class.new $(action-class)
- [ $(action).sources ] : $(new-action-name) : $(new-properties) ] ;
-
- local cloned-targets ;
- for local target in [ $(action).targets ]
- {
- local n = [ $(target).name ] ;
- # Do not modify produced target names.
- local cloned-target = [ class.new file-target $(n) exact :
- [ $(target).type ] : $(new-project) : $(cloned-action) ] ;
- local d = [ $(target).dependencies ] ;
- if $(d)
- {
- $(cloned-target).depends $(d) ;
- }
- $(cloned-target).root [ $(target).root ] ;
- $(cloned-target).creating-subvariant [ $(target).creating-subvariant ] ;
-
- cloned-targets += $(cloned-target) ;
- }
-
- return $(cloned-action) ;
-}
-
-
-class subvariant
-{
- import sequence ;
- import type ;
-
- rule __init__ ( main-target # The instance of main-target class.
- : property-set # Properties requested for this target.
- : sources *
- : build-properties # Actually used properties.
- : sources-usage-requirements # Properties propagated from sources.
- : created-targets * ) # Top-level created targets.
- {
- self.main-target = $(main-target) ;
- self.properties = $(property-set) ;
- self.sources = $(sources) ;
- self.build-properties = $(build-properties) ;
- self.sources-usage-requirements = $(sources-usage-requirements) ;
- self.created-targets = $(created-targets) ;
-
- # Pre-compose a list of other dependency graphs this one depends on.
- local deps = [ $(build-properties).get <implicit-dependency> ] ;
- for local d in $(deps)
- {
- self.other-dg += [ $(d:G=).creating-subvariant ] ;
- }
-
- self.other-dg = [ sequence.unique $(self.other-dg) ] ;
- }
-
- rule main-target ( )
- {
- return $(self.main-target) ;
- }
-
- rule created-targets ( )
- {
- return $(self.created-targets) ;
- }
-
- rule requested-properties ( )
- {
- return $(self.properties) ;
- }
-
- rule build-properties ( )
- {
- return $(self.build-properties) ;
- }
-
- rule sources-usage-requirements ( )
- {
- return $(self.sources-usage-requirements) ;
- }
-
- rule set-usage-requirements ( usage-requirements )
- {
- self.usage-requirements = $(usage-requirements) ;
- }
-
- rule usage-requirements ( )
- {
- return $(self.usage-requirements) ;
- }
-
- # Returns all targets referenced by this subvariant, either directly or
- # indirectly, and either as sources, or as dependency properties. Targets
- # referred to using the dependency property are returned as properties, not
- # targets.
- #
- rule all-referenced-targets ( theset )
- {
- # Find directly referenced targets.
- local deps = [ $(self.build-properties).dependency ] ;
- local all-targets = $(self.sources) $(deps) ;
-
- # Find other subvariants.
- local r ;
- for local t in $(all-targets)
- {
- if ! [ $(theset).contains $(t) ]
- {
- $(theset).add $(t) ;
- r += [ $(t:G=).creating-subvariant ] ;
- }
- }
- r = [ sequence.unique $(r) ] ;
- for local s in $(r)
- {
- if $(s) != $(__name__)
- {
- $(s).all-referenced-targets $(theset) ;
- }
- }
- }
-
- # Returns the properties specifying implicit include paths to generated
- # headers. This traverses all targets in this subvariant and subvariants
-    # referred to by <implicit-dependency> properties. For all targets of type
- # 'target-type' (or for all targets, if 'target-type' is not specified), the
- # result will contain <$(feature)>path-to-that-target.
- #
- rule implicit-includes ( feature : target-type ? )
- {
- local key = ii$(feature)-$(target-type:E="") ;
- if ! $($(key))-is-not-empty
- {
- local target-paths = [ all-target-directories $(target-type) ] ;
- target-paths = [ sequence.unique $(target-paths) ] ;
- local result = $(target-paths:G=$(feature)) ;
- if ! $(result)
- {
- result = "" ;
- }
- $(key) = $(result) ;
- }
- if $($(key)) = ""
- {
- return ;
- }
- else
- {
- return $($(key)) ;
- }
- }
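
For clarity, a hedged Python sketch of the value shape produced by 'implicit-includes' above: one <feature>path property per unique directory of the generated targets (the feature name and paths below are made up).

def implicit_includes(feature, target_directories):
    seen, result = set(), []
    for d in target_directories:
        if d not in seen:                   # equivalent of sequence.unique
            seen.add(d)
            result.append("<%s>%s" % (feature, d))
    return result

# implicit_includes("include", ["bin/gcc/debug", "bin/gcc/debug"])
#   -> ["<include>bin/gcc/debug"]
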
-
- rule all-target-directories ( target-type ? )
- {
- if ! $(self.target-directories)
- {
- compute-target-directories $(target-type) ;
- }
- return $(self.target-directories) ;
- }
-
- rule compute-target-directories ( target-type ? )
- {
- local result ;
- for local t in $(self.created-targets)
- {
- # Skip targets of the wrong type.
- if ! $(target-type) ||
- [ type.is-derived [ $(t).type ] $(target-type) ]
- {
- result = [ sequence.merge $(result) : [ $(t).path ] ] ;
- }
- }
- for local d in $(self.other-dg)
- {
- result += [ $(d).all-target-directories $(target-type) ] ;
- }
- self.target-directories = $(result) ;
- }
-}
diff --git a/tools/build/v2/build/virtual_target.py b/tools/build/v2/build/virtual_target.py
deleted file mode 100644
index 51dff0374b..0000000000
--- a/tools/build/v2/build/virtual_target.py
+++ /dev/null
@@ -1,1118 +0,0 @@
-# Status: ported.
-# Base revision: 64488.
-#
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# Implements virtual targets, which correspond to actual files created during
-# build, but are not yet targets in the Jam sense. They are needed, for example,
-# when searching for possible transformation sequences, when it is not known
-# whether a particular target should be created at all.
-#
-#
-# +--------------------------+
-# | VirtualTarget |
-# +==========================+
-# | actualize |
-# +--------------------------+
-# | actualize_action() = 0 |
-# | actualize_location() = 0 |
-# +----------------+---------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# +---------------------+ +-------+--------------+
-# | Action | | AbstractFileTarget |
-# +=====================| * +======================+
-# | action_name | +--+ action |
-# | properties | | +----------------------+
-# +---------------------+--+ | actualize_action() |
-# | actualize() |0..1 +-----------+----------+
-# | path() | |
-# | adjust_properties() | sources |
-# | actualize_sources() | targets |
-# +------+--------------+ ^
-# | / \
-# ^ +-+-+
-# / \ |
-# +-+-+ +-------------+-------------+
-# | | |
-# | +------+---------------+ +--------+-------------+
-# | | FileTarget | | SearchedLibTarget |
-# | +======================+ +======================+
-# | | actualize-location() | | actualize-location() |
-# | +----------------------+ +----------------------+
-# |
-# +-+------------------------------+
-# | |
-# +----+----------------+ +---------+-----------+
-# | CompileAction | | LinkAction |
-# +=====================+ +=====================+
-# | adjust_properties() | | adjust_properties() |
-# +---------------------+ | actualize_sources() |
-# +---------------------+
-#
-# The 'CompileAction' and 'LinkAction' classes are not defined here but in the
-# builtin.jam module. They are shown in the diagram to give the big picture.
-
-import bjam
-
-import re
-import os.path
-import string
-import types
-
-from b2.util import path, utility, set
-from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
-from b2.util.sequence import unique
-from b2.tools import common
-from b2.exceptions import *
-import b2.build.type
-import b2.build.property_set as property_set
-
-import b2.build.property as property
-
-from b2.manager import get_manager
-from b2.util import bjam_signature
-
-__re_starts_with_at = re.compile ('^@(.*)')
-
-class VirtualTargetRegistry:
- def __init__ (self, manager):
- self.manager_ = manager
-
- # A cache for FileTargets
- self.files_ = {}
-
- # A cache for targets.
- self.cache_ = {}
-
- # A map of actual names to virtual targets.
-        # Used to make sure we do not associate the same
-        # actual target with two virtual targets.
- self.actual_ = {}
-
- self.recent_targets_ = []
-
-        # All targets ever registered
- self.all_targets_ = []
-
- self.next_id_ = 0
-
- def register (self, target):
-        """ Registers a new virtual target. Checks if there is already a registered target with the same
-            name, type, project and subvariant properties, and also with the same sources
-            and an equal action. If such a target is found it is returned and 'target' is not registered.
-            Otherwise, 'target' is registered and returned.
- """
- if target.path():
- signature = target.path() + "-" + target.name()
- else:
- signature = "-" + target.name()
-
- result = None
- if not self.cache_.has_key (signature):
- self.cache_ [signature] = []
-
- for t in self.cache_ [signature]:
- a1 = t.action ()
- a2 = target.action ()
-
- # TODO: why are we checking for not result?
- if not result:
- if not a1 and not a2:
- result = t
- else:
- if a1 and a2 and a1.action_name () == a2.action_name () and a1.sources () == a2.sources ():
- ps1 = a1.properties ()
- ps2 = a2.properties ()
- p1 = ps1.base () + ps1.free () +\
- b2.util.set.difference(ps1.dependency(), ps1.incidental())
- p2 = ps2.base () + ps2.free () +\
- b2.util.set.difference(ps2.dependency(), ps2.incidental())
- if p1 == p2:
- result = t
-
- if not result:
- self.cache_ [signature].append (target)
- result = target
-
- # TODO: Don't append if we found pre-existing target?
- self.recent_targets_.append(result)
- self.all_targets_.append(result)
-
- return result
-
- def from_file (self, file, file_location, project):
-        """ Creates a virtual target with an appropriate name and type from 'file'.
-            If a target with that name in that project was already created, returns that
-            already created target.
-            TODO: a more correct way would be to compute the path to the file, based on the name
-            and source location of the project, and use that path to determine whether the target
-            was already created.
-            TODO: passing a project with all virtual targets is starting to be annoying.
- """
- # Check if we've created a target corresponding to this file.
- path = os.path.join(os.getcwd(), file_location, file)
- path = os.path.normpath(path)
-
- if self.files_.has_key (path):
- return self.files_ [path]
-
- file_type = b2.build.type.type (file)
-
- result = FileTarget (file, file_type, project,
- None, file_location)
- self.files_ [path] = result
-
- return result
-
- def recent_targets(self):
-        """Each target returned by 'register' is added to a list of
-        'recent-targets', returned by this function. This allows us
-        to find all targets created when building a given main
-        target, even those constructed only as intermediate targets."""
-
- return self.recent_targets_
-
- def clear_recent_targets(self):
- self.recent_targets_ = []
-
- def all_targets(self):
- # Returns all virtual targets ever created
- return self.all_targets_
-
- # Returns all targets from 'targets' with types
- # equal to 'type' or derived from it.
- def select_by_type(self, type, targets):
-        return [t for t in targets if b2.build.type.is_subtype(t.type(), type)]
-
- def register_actual_name (self, actual_name, virtual_target):
- if self.actual_.has_key (actual_name):
- cs1 = self.actual_ [actual_name].creating_subvariant ()
- cs2 = virtual_target.creating_subvariant ()
- cmt1 = cs1.main_target ()
- cmt2 = cs2.main_target ()
-
- action1 = self.actual_ [actual_name].action ()
- action2 = virtual_target.action ()
-
- properties_added = []
- properties_removed = []
- if action1 and action2:
- p1 = action1.properties ()
- p1 = p1.raw ()
- p2 = action2.properties ()
- p2 = p2.raw ()
-
- properties_removed = set.difference (p1, p2)
- if not properties_removed: properties_removed = "none"
-
- properties_added = set.difference (p2, p1)
- if not properties_added: properties_added = "none"
-
- # FIXME: Revive printing of real location.
- get_manager().errors()(
- "Duplicate name of actual target: '%s'\n"
- "previous virtual target '%s'\n"
- "created from '%s'\n"
- "another virtual target '%s'\n"
- "created from '%s'\n"
- "added properties: '%s'\n"
- "removed properties: '%s'\n"
- % (actual_name,
- self.actual_ [actual_name], "loc", #cmt1.location (),
- virtual_target,
- "loc", #cmt2.location (),
- properties_added, properties_removed))
-
- else:
- self.actual_ [actual_name] = virtual_target
-
-
- def add_suffix (self, specified_name, file_type, prop_set):
- """ Appends the suffix appropriate to 'type/property_set' combination
- to the specified name and returns the result.
- """
- suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
-
- if suffix:
- return specified_name + '.' + suffix
-
- else:
- return specified_name
-
-class VirtualTarget:
-    """ Potential target. It can be converted into a jam target and used in
-        building, if needed. However, it can also be dropped, which allows
-        searching for different transformations and selecting only one.
- name: name of this target.
- project: project to which this target belongs.
- """
- def __init__ (self, name, project):
- self.name_ = name
- self.project_ = project
- self.dependencies_ = []
- self.always_ = False
-
-        # Caches whether dependencies for scanners have already been set.
- self.made_ = {}
-
- def manager(self):
- return self.project_.manager()
-
- def virtual_targets(self):
- return self.manager().virtual_targets()
-
- def name (self):
- """ Name of this target.
- """
- return self.name_
-
- def project (self):
- """ Project of this target.
- """
- return self.project_
-
- def depends (self, d):
- """ Adds additional instances of 'VirtualTarget' that this
- one depends on.
- """
-        self.dependencies_ = sorted(unique(self.dependencies_ + d))
-
- def dependencies (self):
- return self.dependencies_
-
- def always(self):
- self.always_ = True
-
- def actualize (self, scanner = None):
- """ Generates all the actual targets and sets up build actions for
- this target.
-
- If 'scanner' is specified, creates an additional target
- with the same location as actual target, which will depend on the
- actual target and be associated with 'scanner'. That additional
- target is returned. See the docs (#dependency_scanning) for rationale.
- Target must correspond to a file if 'scanner' is specified.
-
- If scanner is not specified, then actual target is returned.
- """
- actual_name = self.actualize_no_scanner ()
-
- if self.always_:
- bjam.call("ALWAYS", actual_name)
-
- if not scanner:
- return actual_name
-
- else:
- # Add the scanner instance to the grist for name.
- g = '-'.join ([ungrist(get_grist(actual_name)), str(id(scanner))])
-
- name = replace_grist (actual_name, '<' + g + '>')
-
- if not self.made_.has_key (name):
- self.made_ [name] = True
-
- self.project_.manager ().engine ().add_dependency (name, actual_name)
-
- self.actualize_location (name)
-
- self.project_.manager ().scanners ().install (scanner, name, str (self))
-
- return name
-
-# private: (overridables)
-
- def actualize_action (self, target):
- """ Sets up build actions for 'target'. Should call appropriate rules
- and set target variables.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def actualize_location (self, target):
- """ Sets up variables on 'target' which specify its location.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def path (self):
-        """ If the target is a generated one, returns the path where it will be
-            generated. Otherwise, returns an empty list.
- """
- raise BaseException ("method should be defined in derived classes")
-
- def actual_name (self):
-        """ Returns the actual target name that should be used
-            (for the case where no scanner is involved).
- """
- raise BaseException ("method should be defined in derived classes")
-
-
-class AbstractFileTarget (VirtualTarget):
-    """ Target which corresponds to a file. The exact mapping for the file
-        is not yet specified in this class. (TODO: Actually, the class name
-        could be better...)
-
-        May be a source file (when no action is specified) or a
-        derived file (otherwise).
-
-        The target's grist is a concatenation of the project's location,
-        the action's properties (for derived files) and, optionally, a
-        value identifying the main target.
-
-        exact:  If set, the name is exactly the name the
-                created file should have. Otherwise, the '__init__'
-                method will add a suffix obtained from 'type' by
-                calling 'type.generated-target-suffix'.
-
- type: optional type of this target.
- """
- def __init__ (self, name, type, project, action = None, exact=False):
- VirtualTarget.__init__ (self, name, project)
-
- self.type_ = type
-
- self.action_ = action
- self.exact_ = exact
-
- if action:
- action.add_targets ([self])
-
-        if self.type_ and not exact:
- self.__adjust_name (name)
-
-
- self.actual_name_ = None
- self.path_ = None
- self.intermediate_ = False
- self.creating_subvariant_ = None
-
- # True if this is a root target.
- self.root_ = False
-
- def type (self):
- return self.type_
-
- def set_path (self, path):
- """ Sets the path. When generating target name, it will override any path
- computation from properties.
- """
- self.path_ = path
-
- def action (self):
- """ Returns the action.
- """
- return self.action_
-
- def root (self, set = None):
-        """ Sets/gets the 'root' flag. A target is a root if it directly corresponds to some
-            variant of a main target.
- """
- if set:
- self.root_ = True
- return self.root_
-
- def creating_subvariant (self, s = None):
-        """ Gets or sets the subvariant which created this target. The subvariant
-        is set when the target is brought into existence and is never changed
-        after that. In particular, if a target is shared between subvariants, only
-        the first one is stored.
-        s:  If specified, the value to set,
-            which should be an instance of the 'Subvariant' class.
- """
- if s and not self.creating_subvariant ():
- if self.creating_subvariant ():
- raise BaseException ("Attempt to change 'dg'")
-
- else:
- self.creating_subvariant_ = s
-
- return self.creating_subvariant_
-
- def actualize_action (self, target):
- if self.action_:
- self.action_.actualize ()
-
- # Return a human-readable representation of this target
- #
- # If this target has an action, that's:
- #
- # { <action-name>-<self.name>.<self.type> <action-sources>... }
- #
- # otherwise, it's:
- #
- # { <self.name>.<self.type> }
- #
- def str(self):
- a = self.action()
-
- name_dot_type = self.name_ + "." + self.type_
-
- if a:
- action_name = a.action_name()
- ss = [ s.str() for s in a.sources()]
-
- return "{ %s-%s %s}" % (action_name, name_dot_type, str(ss))
- else:
- return "{ " + name_dot_type + " }"
-
-# private:
-
- def actual_name (self):
- if not self.actual_name_:
- self.actual_name_ = '<' + self.grist() + '>' + self.name_
-
- return self.actual_name_
-
- def grist (self):
-        """Helper to 'actual_name', above. Computes a unique prefix used to distinguish
-        this target from other targets with the same name which create a different
-        file.
- """
- # Depending on target, there may be different approaches to generating
- # unique prefixes. We'll generate prefixes in the form
- # <one letter approach code> <the actual prefix>
- path = self.path ()
-
- if path:
- # The target will be generated to a known path. Just use the path
- # for identification, since path is as unique as it can get.
- return 'p' + path
-
- else:
-            # The file is either a source, which will be searched for, or not a file
-            # at all. Use the project's location to distinguish it.
- project_location = self.project_.get ('location')
- path_components = b2.util.path.split(project_location)
- location_grist = '!'.join (path_components)
-
- if self.action_:
- ps = self.action_.properties ()
- property_grist = ps.as_path ()
- # 'property_grist' can be empty when 'ps' is an empty
- # property set.
- if property_grist:
- location_grist = location_grist + '/' + property_grist
-
- return 'l' + location_grist
-
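
As a rough worked example of the two grist forms computed by grist() above (all paths and the property path are hypothetical):

def make_grist(path, project_location, property_path=""):
    if path:
        return 'p' + path                              # target with a known output path
    grist = '!'.join(project_location.split('/'))      # location-based grist
    if property_path:
        grist += '/' + property_path
    return 'l' + grist

# make_grist("bin/gcc/debug", "libs/foo")        -> 'pbin/gcc/debug'
# make_grist("", "libs/foo", "gcc-4.9/debug")    -> 'llibs!foo/gcc-4.9/debug'
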
- def __adjust_name(self, specified_name):
-        """Given the target name specified in the constructor, returns the
-        name which should really be used, by looking at the <tag> properties.
-        The tag properties come in two flavours:
-          - <tag>value,
-          - <tag>@rule-name
-        In the first case, the value is just added to the name.
-        In the second case, the specified rule is called with the specified name,
-        target type and properties and should return the new name.
-        If no <tag> property is specified, or the rule specified by
-        <tag> returns nothing, returns the result of calling
-        virtual-target.add-suffix."""
-
- if self.action_:
- ps = self.action_.properties()
- else:
- ps = property_set.empty()
-
- # FIXME: I'm not sure how this is used, need to check with
- # Rene to figure out how to implement
- #~ We add ourselves to the properties so that any tag rule can get
- #~ more direct information about the target than just that available
- #~ through the properties. This is useful in implementing
- #~ name changes based on the sources of the target. For example to
- #~ make unique names of object files based on the source file.
- #~ --grafik
- #ps = property_set.create(ps.raw() + ["<target>%s" % "XXXX"])
- #ps = [ property-set.create [ $(ps).raw ] <target>$(__name__) ] ;
-
- tag = ps.get("<tag>")
-
- if tag:
-
- if len(tag) > 1:
- get_manager().errors()(
- """<tag>@rulename is present but is not the only <tag> feature""")
-
- tag = tag[0]
- if callable(tag):
- self.name_ = tag(specified_name, self.type_, ps)
- else:
-                if not tag[0] == '@':
-                    get_manager().errors()("""The value of the <tag> feature must be '@rule-name'""")
-
- exported_ps = b2.util.value_to_jam(ps, methods=True)
- self.name_ = b2.util.call_jam_function(
- tag[1:], specified_name, self.type_, exported_ps)
- if self.name_:
- self.name_ = self.name_[0]
-
- # If there's no tag or the tag rule returned nothing.
- if not tag or not self.name_:
- self.name_ = add_prefix_and_suffix(specified_name, self.type_, ps)
-
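
A hedged sketch of the callable shape __adjust_name expects for a <tag> value (the rule name, the SHARED_LIB check and the <variant> accessor below are assumptions, not part of the original code): it receives the specified name, the target type and the property set, and returns the adjusted name, or None to fall back to add_prefix_and_suffix.

def versioned_tag(specified_name, target_type, prop_set):
    # Only rename shared libraries; returning None keeps the default naming.
    if target_type != "SHARED_LIB":
        return None
    variant = prop_set.get("<variant>")      # assumed accessor, returns a list of values
    suffix = "-d" if "debug" in variant else ""
    return specified_name + suffix
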
- def actualize_no_scanner(self):
- name = self.actual_name()
-
- # Do anything only on the first invocation
- if not self.made_:
- self.made_[name] = True
-
- if self.action_:
-                # For a non-derived target, we do not care if there
-                # are several virtual targets that refer to the same name.
-                # One case when this is unavoidable is when the file name is
-                # main.cpp and two targets have types CPP (for compiling)
-                # and MOCCABLE_CPP (for conversion to H via Qt tools).
- self.virtual_targets().register_actual_name(name, self)
-
- for i in self.dependencies_:
-                self.manager().engine().add_dependency(name, i.actualize())
-
- self.actualize_location(name)
- self.actualize_action(name)
-
- return name
-
-@bjam_signature((["specified_name"], ["type"], ["property_set"]))
-def add_prefix_and_suffix(specified_name, type, property_set):
- """Appends the suffix appropriate to 'type/property-set' combination
- to the specified name and returns the result."""
-
- property_set = b2.util.jam_to_value_maybe(property_set)
-
- suffix = ""
- if type:
- suffix = b2.build.type.generated_target_suffix(type, property_set)
-
- # Handle suffixes for which no leading dot is desired. Those are
- # specified by enclosing them in <...>. Needed by python so it
- # can create "_d.so" extensions, for example.
- if get_grist(suffix):
- suffix = ungrist(suffix)
- elif suffix:
- suffix = "." + suffix
-
- prefix = ""
- if type:
- prefix = b2.build.type.generated_target_prefix(type, property_set)
-
- if specified_name.startswith(prefix):
- prefix = ""
-
- if not prefix:
- prefix = ""
- if not suffix:
- suffix = ""
- return prefix + specified_name + suffix
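
A small, hedged illustration of the grist-based suffix handling described in the comments above: a suffix wrapped in <...> is appended without the leading dot, while a plain suffix gets one.

def apply_suffix(name, suffix):
    if suffix.startswith('<') and suffix.endswith('>'):
        return name + suffix[1:-1]       # gristed suffix: no leading dot
    if suffix:
        return name + '.' + suffix       # plain suffix: add the dot
    return name

# apply_suffix("hello", "o")        -> "hello.o"
# apply_suffix("hello", "<_d.so>")  -> "hello_d.so"
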
-
-
-class FileTarget (AbstractFileTarget):
- """ File target with explicitly known location.
-
- The file path is determined as
-            - the value passed to the 'set_path' method, if any
-            - for derived files, the project's build dir, joined with components
-              that describe the action's properties. If the free properties
-              are not equal to the project's reference properties,
-              an element with the name of the main target is added.
-            - for source files, the project's source dir
-
-        The file suffix is
-            - the value passed to the 'suffix' method, if any, or
-            - the suffix which corresponds to the target's type.
- """
- def __init__ (self, name, type, project, action = None, path=None, exact=False):
- AbstractFileTarget.__init__ (self, name, type, project, action, exact)
-
- self.path_ = path
-
- def __str__(self):
- if self.type_:
- return self.name_ + "." + self.type_
- else:
- return self.name_
-
- def clone_with_different_type(self, new_type):
- return FileTarget(self.name_, new_type, self.project_,
- self.action_, self.path_, exact=True)
-
- def actualize_location (self, target):
- engine = self.project_.manager_.engine ()
-
- if self.action_:
- # This is a derived file.
- path = self.path ()
- engine.set_target_variable (target, 'LOCATE', path)
-
- # Make sure the path exists.
- engine.add_dependency (target, path)
- common.mkdir(engine, path)
-
- # It's possible that the target name includes a directory
- # too, for example when installing headers. Create that
- # directory.
- d = os.path.dirname(get_value(target))
- if d:
- d = os.path.join(path, d)
- engine.add_dependency(target, d)
- common.mkdir(engine, d)
-
-            # For a real file target, we create a fake target that
-            # depends on the real target. This allows running
-            #
-            #    bjam hello.o
-            #
-            # without trying to guess the name of the real target.
-            # Note that the target has no directory name, and a special
-            # grist <e>.
-            #
-            # First, that means that "bjam hello.o" will build all
-            # known hello.o targets.
-            # Second, the <e> grist makes sure this target will not be confused
-            # with other targets. For example, if we have a subdir 'test'
-            # with a target 'test' in it that includes a 'test.o' file,
-            # then the target for the directory will be just 'test', the target
-            # for test.o will be <ptest/bin/gcc/debug>test.o, and the target
-            # we create below will be <e>test.o.
- engine.add_dependency("<e>%s" % get_value(target), target)
-
-            # Allow bjam <path-to-file>/<file> to work. This will not catch all
-            # possible ways to refer to the path (relative/absolute, extra ".",
-            # various ".."), but should help in obvious cases.
- engine.add_dependency("<e>%s" % (os.path.join(path, get_value(target))), target)
-
- else:
- # This is a source file.
- engine.set_target_variable (target, 'SEARCH', self.project_.get ('source-location'))
-
-
- def path (self):
- """ Returns the directory for this target.
- """
- if not self.path_:
- if self.action_:
- p = self.action_.properties ()
- (target_path, relative_to_build_dir) = p.target_path ()
-
- if relative_to_build_dir:
- # Indicates that the path is relative to
- # build dir.
- target_path = os.path.join (self.project_.build_dir (), target_path)
-
- # Store the computed path, so that it's not recomputed
- # any more
- self.path_ = target_path
-
- return self.path_
-
-
-class NotFileTarget(AbstractFileTarget):
-
- def __init__(self, name, project, action):
- AbstractFileTarget.__init__(self, name, None, project, action)
-
- def path(self):
- """Returns nothing, to indicate that target path is not known."""
- return None
-
- def actualize_location(self, target):
- bjam.call("NOTFILE", target)
- bjam.call("ALWAYS", target)
- bjam.call("NOUPDATE", target)
-
-
-class Action:
- """ Class which represents an action.
- Both 'targets' and 'sources' should list instances of 'VirtualTarget'.
-        The action name should name a rule with this prototype:
-            rule action_name ( targets + : sources * : properties * )
-        Targets and sources are passed as actual jam targets. The rule may
-        not establish dependency relationships, but should do everything else.
- """
- def __init__ (self, manager, sources, action_name, prop_set):
- assert(isinstance(prop_set, property_set.PropertySet))
- assert type(sources) == types.ListType
- self.sources_ = sources
- self.action_name_ = action_name
- if not prop_set:
- prop_set = property_set.empty()
- self.properties_ = prop_set
- if not all(isinstance(v, VirtualTarget) for v in prop_set.get('implicit-dependency')):
- import pdb
- pdb.set_trace()
-
- self.manager_ = manager
- self.engine_ = self.manager_.engine ()
- self.targets_ = []
-
- # Indicates whether this has been actualized or not.
- self.actualized_ = False
-
- self.dependency_only_sources_ = []
- self.actual_sources_ = []
-
-
- def add_targets (self, targets):
- self.targets_ += targets
-
-
-    def replace_targets (self, old_targets, new_targets):
-        self.targets_ = [t for t in self.targets_ if t not in old_targets] + new_targets
-
- def targets (self):
- return self.targets_
-
- def sources (self):
- return self.sources_
-
- def action_name (self):
- return self.action_name_
-
- def properties (self):
- return self.properties_
-
- def actualize (self):
- """ Generates actual build instructions.
- """
- if self.actualized_:
- return
-
- self.actualized_ = True
-
- ps = self.properties ()
- properties = self.adjust_properties (ps)
-
-
- actual_targets = []
-
- for i in self.targets ():
- actual_targets.append (i.actualize ())
-
- self.actualize_sources (self.sources (), properties)
-
- self.engine_.add_dependency (actual_targets, self.actual_sources_ + self.dependency_only_sources_)
-
-        # This works around a bug with -j and actions that
-        # produce multiple targets, where:
-        #   - dependency on the first output is found, and
-        #     the action is started
-        #   - dependency on the second output is found, and
-        #     bjam notices that the command is already running
-        #   - instead of waiting for the command, dependents
-        #     of the second target are immediately updated.
- if len(actual_targets) > 1:
- bjam.call("INCLUDES", actual_targets, actual_targets)
-
- # FIXME: check the comment below. Was self.action_name_ [1]
- # Action name can include additional argument to rule, which should not
- # be passed to 'set-target-variables'
- # FIXME: breaking circular dependency
- import toolset
- toolset.set_target_variables (self.manager_, self.action_name_, actual_targets, properties)
-
- engine = self.manager_.engine ()
-
-        # FIXME: this is supposed to help the --out-xml option, but we do not
-        # implement that now, and anyway, we should handle it in Python,
-        # not by putting variables on bjam-level targets.
- bjam.call("set-target-variable", actual_targets, ".action", repr(self))
-
- self.manager_.engine ().set_update_action (self.action_name_, actual_targets, self.actual_sources_,
- properties)
-
-        # Since we set up the creating action here, we also set up
-        # the action for cleaning up.
- self.manager_.engine ().set_update_action ('common.Clean', 'clean-all',
- actual_targets)
-
- return actual_targets
-
- def actualize_source_type (self, sources, prop_set):
- """ Helper for 'actualize_sources'.
- For each passed source, actualizes it with the appropriate scanner.
- Returns the actualized virtual targets.
- """
- result = []
- for i in sources:
- scanner = None
-
-# FIXME: what's this?
-# if isinstance (i, str):
-# i = self.manager_.get_object (i)
-
- if i.type ():
- scanner = b2.build.type.get_scanner (i.type (), prop_set)
-
- r = i.actualize (scanner)
- result.append (r)
-
- return result
-
- def actualize_sources (self, sources, prop_set):
- """ Creates actual jam targets for sources. Initializes two member
- variables:
-            'self.actual_sources_' -- sources which are passed to the updating action
-            'self.dependency_only_sources_' -- sources which are made dependencies but
-            are not used otherwise.
-
-            New values will be *appended* to the variables. They may be non-empty
-            if the caller wants it.
- """
- dependencies = self.properties_.get ('<dependency>')
-
- self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
- self.actual_sources_ += self.actualize_source_type (sources, prop_set)
-
- # This is used to help bjam find dependencies in generated headers
-        # and other main targets.
- # Say:
- #
- # make a.h : ....... ;
- # exe hello : hello.cpp : <implicit-dependency>a.h ;
- #
- # However, for bjam to find the dependency the generated target must
- # be actualized (i.e. have the jam target). In the above case,
- # if we're building just hello ("bjam hello"), 'a.h' won't be
- # actualized unless we do it here.
- implicit = self.properties_.get("<implicit-dependency>")
-
- for i in implicit:
- i.actualize()
-
- def adjust_properties (self, prop_set):
-        """ Determines real properties when trying to build with 'properties'.
-            This is the last chance to fix properties, for example to adjust includes
-            to get generated headers correctly. The default implementation returns
-            its argument.
- """
- return prop_set
-
-
-class NullAction (Action):
- """ Action class which does nothing --- it produces the targets with
- specific properties out of nowhere. It's needed to distinguish virtual
- targets with different properties that are known to exist, and have no
- actions which create them.
- """
- def __init__ (self, manager, prop_set):
- Action.__init__ (self, manager, [], None, prop_set)
-
- def actualize (self):
- if not self.actualized_:
- self.actualized_ = True
-
- for i in self.targets ():
- i.actualize ()
-
-class NonScanningAction(Action):
- """Class which acts exactly like 'action', except that the sources
- are not scanned for dependencies."""
-
- def __init__(self, sources, action_name, property_set):
- #FIXME: should the manager parameter of Action.__init__
- #be removed? -- Steven Watanabe
- Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
-
- def actualize_source_type(self, sources, property_set):
-
- result = []
- for s in sources:
- result.append(s.actualize())
- return result
-
-def traverse (target, include_roots = False, include_sources = False):
-    """ Traverses the dependency graph of 'target' and returns all targets that will
-        be created before this one is created. If the root of some dependency graph is
-        found during traversal, it is either included or not, depending on the
-        value of 'include_roots'. In either case, the sources of the root are not traversed.
- """
- result = []
-
- if target.action ():
- action = target.action ()
-
- # This includes 'target' as well
- result += action.targets ()
-
- for t in action.sources ():
-
- # FIXME:
- # TODO: see comment in Manager.register_object ()
- #if not isinstance (t, VirtualTarget):
- # t = target.project_.manager_.get_object (t)
-
- if not t.root ():
- result += traverse (t, include_roots, include_sources)
-
- elif include_roots:
- result.append (t)
-
- elif include_sources:
- result.append (target)
-
- return result
-
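
A simplified, hypothetical model of the traversal above, over a plain dict graph (target names are made up): each node maps to an (is_root, sources) pair for targets that have an action, or to None for plain sources.

graph = {
    "hello.exe": (True,  ["hello.obj"]),
    "hello.obj": (False, ["hello.cpp"]),
    "hello.cpp": None,
}

def traverse_demo(node, include_roots=False, include_sources=False):
    result = []
    if graph[node] is not None:
        result.append(node)                          # the action's own target
        for src in graph[node][1]:
            if graph[src] is None or not graph[src][0]:
                result += traverse_demo(src, include_roots, include_sources)
            elif include_roots:
                result.append(src)                   # stop at roots, optionally keep them
    elif include_sources:
        result.append(node)
    return result

# traverse_demo("hello.exe")                        -> ["hello.exe", "hello.obj"]
# traverse_demo("hello.exe", include_sources=True)  -> ["hello.exe", "hello.obj", "hello.cpp"]
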
-def clone_action (action, new_project, new_action_name, new_properties):
-    """Takes an 'action' instance and creates a new instance of it
-    and of all produced targets. The rule name and properties are set
-    to 'new_action_name' and 'new_properties', if those are specified.
-    Returns the cloned action."""
-
- if not new_action_name:
- new_action_name = action.action_name()
-
- if not new_properties:
- new_properties = action.properties()
-
- cloned_action = action.__class__(action.manager_, action.sources(), new_action_name,
- new_properties)
-
- cloned_targets = []
- for target in action.targets():
-
- n = target.name()
-        # Do not modify the names of the produced targets.
- cloned_target = FileTarget(n, target.type(), new_project,
- cloned_action, exact=True)
-
- d = target.dependencies()
- if d:
- cloned_target.depends(d)
- cloned_target.root(target.root())
- cloned_target.creating_subvariant(target.creating_subvariant())
-
- cloned_targets.append(cloned_target)
-
- return cloned_action
-
-class Subvariant:
-
- def __init__ (self, main_target, prop_set, sources, build_properties, sources_usage_requirements, created_targets):
- """
- main_target: The instance of MainTarget class
- prop_set: Properties requested for this target
- sources:
- build_properties: Actually used properties
- sources_usage_requirements: Properties propagated from sources
- created_targets: Top-level created targets
- """
- self.main_target_ = main_target
- self.properties_ = prop_set
- self.sources_ = sources
- self.build_properties_ = build_properties
- self.sources_usage_requirements_ = sources_usage_requirements
- self.created_targets_ = created_targets
-
- self.usage_requirements_ = None
-
- # Pre-compose the list of other dependency graphs, on which this one
- # depends
- deps = build_properties.get('<implicit-dependency>')
-
- self.other_dg_ = []
- for d in deps:
- self.other_dg_.append(d.creating_subvariant ())
-
- self.other_dg_ = unique (self.other_dg_)
-
- self.implicit_includes_cache_ = {}
- self.target_directories_ = None
-
- def main_target (self):
- return self.main_target_
-
- def created_targets (self):
- return self.created_targets_
-
- def requested_properties (self):
- return self.properties_
-
- def build_properties (self):
- return self.build_properties_
-
- def sources_usage_requirements (self):
- return self.sources_usage_requirements_
-
- def set_usage_requirements (self, usage_requirements):
- self.usage_requirements_ = usage_requirements
-
- def usage_requirements (self):
- return self.usage_requirements_
-
- def all_referenced_targets(self, result):
- """Returns all targets referenced by this subvariant,
- either directly or indirectly, and either as sources,
-        or as dependency properties. Targets referred to via a
-        dependency property are returned as properties, not targets."""
-
- # Find directly referenced targets.
- deps = self.build_properties().dependency()
- all_targets = self.sources_ + deps
-
- # Find other subvariants.
- r = []
- for e in all_targets:
- if not e in result:
- result.add(e)
- if isinstance(e, property.Property):
- t = e.value()
- else:
- t = e
-
- # FIXME: how can this be?
- cs = t.creating_subvariant()
- if cs:
- r.append(cs)
- r = unique(r)
- for s in r:
- if s != self:
- s.all_referenced_targets(result)
-
-
- def implicit_includes (self, feature, target_type):
-        """ Returns the properties which specify implicit include paths to
-            generated headers. This traverses all targets in this subvariant,
-            and subvariants referred to by <implicit-dependency> properties.
-            For all targets which are of type 'target_type' (or for all targets,
-            if 'target_type' is not specified), the result will contain
-            <$(feature)>path-to-that-target.
- """
-
- if not target_type:
- key = feature
- else:
- key = feature + "-" + target_type
-
-
- result = self.implicit_includes_cache_.get(key)
- if not result:
- target_paths = self.all_target_directories(target_type)
- target_paths = unique(target_paths)
- result = ["<%s>%s" % (feature, p) for p in target_paths]
- self.implicit_includes_cache_[key] = result
-
- return result
-
- def all_target_directories(self, target_type = None):
- # TODO: does not appear to use target_type in deciding
- # if we've computed this already.
- if not self.target_directories_:
- self.target_directories_ = self.compute_target_directories(target_type)
- return self.target_directories_
-
- def compute_target_directories(self, target_type=None):
- result = []
- for t in self.created_targets():
- if not target_type or b2.build.type.is_derived(t.type(), target_type):
- result.append(t.path())
-
- for d in self.other_dg_:
- result.extend(d.all_target_directories(target_type))
-
- result = unique(result)
- return result
diff --git a/tools/build/v2/build_system.py b/tools/build/v2/build_system.py
deleted file mode 100644
index cc1a61b2fb..0000000000
--- a/tools/build/v2/build_system.py
+++ /dev/null
@@ -1,881 +0,0 @@
-# Status: mostly ported. Missing is --out-xml support, 'configure' integration
-# and some FIXME.
-# Base revision: 64351
-
-# Copyright 2003, 2005 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2004, 2005, 2006, 2007 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-
-from b2.build.engine import Engine
-from b2.manager import Manager
-from b2.util.path import glob
-from b2.build import feature, property_set
-import b2.build.virtual_target
-from b2.build.targets import ProjectTarget
-from b2.util.sequence import unique
-import b2.build.build_request
-from b2.build.errors import ExceptionWithUserContext
-import b2.tools.common
-from b2.build.toolset import using
-
-import b2.build.project as project
-import b2.build.virtual_target as virtual_target
-import b2.build.build_request as build_request
-
-import b2.util.regex
-
-from b2.manager import get_manager
-from b2.util import cached
-from b2.util import option
-
-
-import bjam
-
-import os
-import sys
-import re
-
-################################################################################
-#
-# Module global data.
-#
-################################################################################
-
-# Flag indicating we should display additional debugging information related to
-# locating and loading Boost Build configuration files.
-debug_config = False
-
-# Legacy option doing too many things, some of which are not even documented.
-# Should be phased out.
-# * Disables loading site and user configuration files.
-# * Disables auto-configuration for toolsets specified explicitly on the
-# command-line.
-# * Causes --toolset command-line options to be ignored.
-# * Prevents the default toolset from being used even if no toolset has been
-# configured at all.
-legacy_ignore_config = False
-
-# The cleaning is tricky. Say, if the user says 'bjam --clean foo' where 'foo' is a
-# directory, then we want to clean targets which are in 'foo' as well as those
-# in any child Jamfiles under 'foo', but not in any unrelated Jamfiles. To
-# achieve this we collect a list of projects under which cleaning is allowed.
-project_targets = []
-
-# Virtual targets obtained when building main targets referenced on the command
-# line. When running 'bjam --clean main_target' we want to clean only files
-# belonging to that main target so we need to record which targets are produced
-# for it.
-results_of_main_targets = []
-
-# Was an XML dump requested?
-out_xml = False
-
-# Default toolset & version to be used in case no other toolset has been used
-# explicitly by either the loaded configuration files, the loaded project build
-# scripts or an explicit toolset request on the command line. If not specified,
-# an arbitrary default will be used based on the current host OS. This value,
-# while not strictly necessary, has been added to allow testing Boost-Build's
-# default toolset usage functionality.
-default_toolset = None
-default_toolset_version = None
-
-################################################################################
-#
-# Public rules.
-#
-################################################################################
-
-# Returns the property set with the free features from the currently processed
-# build request.
-#
-def command_line_free_features():
- return command_line_free_features
-
-# Sets the default toolset & version to be used in case no other toolset has
-# been used explicitly by either the loaded configuration files, the loaded
-# project build scripts or an explicit toolset request on the command line. For
-# more detailed information see the comment related to used global variables.
-#
-def set_default_toolset(toolset, version=None):
-    global default_toolset, default_toolset_version
-    default_toolset = toolset
-    default_toolset_version = version
-
-
-pre_build_hook = []
-
-def add_pre_build_hook(callable):
- pre_build_hook.append(callable)
-
-post_build_hook = None
-
-def set_post_build_hook(callable):
-    global post_build_hook
-    post_build_hook = callable
-
-################################################################################
-#
-# Local rules.
-#
-################################################################################
-
-# Returns actual Jam targets to be used for executing a clean request.
-#
-def actual_clean_targets(targets):
-
- # Construct a list of projects explicitly detected as targets on this build
- # system run. These are the projects under which cleaning is allowed.
- for t in targets:
- if isinstance(t, b2.build.targets.ProjectTarget):
- project_targets.append(t.project_module())
-
- # Construct a list of targets explicitly detected on this build system run
- # as a result of building main targets.
- targets_to_clean = set()
- for t in results_of_main_targets:
- # Do not include roots or sources.
- targets_to_clean.update(virtual_target.traverse(t))
-
- to_clean = []
- for t in get_manager().virtual_targets().all_targets():
-
- # Remove only derived targets.
- if t.action():
- p = t.project()
- if t in targets_to_clean or should_clean_project(p.project_module()):
- to_clean.append(t)
-
- return [t.actualize() for t in to_clean]
-
-_target_id_split = re.compile("(.*)//(.*)")
-
-# Given a target id, try to find and return the corresponding target. This is
-# only invoked when there is no Jamfile in ".". This code somewhat duplicates
-# code in project-target.find but we can not reuse that code without a
-# project-targets instance.
-#
-def find_target(target_id):
-
- projects = get_manager().projects()
- m = _target_id_split.match(target_id)
- if m:
- pm = projects.find(m.group(1), ".")
- else:
- pm = projects.find(target_id, ".")
-
- if pm:
- result = projects.target(pm)
-
- if m:
- result = result.find(m.group(2))
-
- return result
-
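
As a worked example of the id splitting used by find_target above (the ids themselves are hypothetical): 'project//target' ids split on the '//' separator, while plain ids fail to match and are treated as project paths.

import re
_split = re.compile("(.*)//(.*)")

m = _split.match("libs/regex/build//boost_regex")
assert m.group(1) == "libs/regex/build"
assert m.group(2) == "boost_regex"
assert _split.match("libs/regex/build") is None
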
-def initialize_config_module(module_name, location=None):
-
- get_manager().projects().initialize(module_name, location)
-
-# Helper rule used to load configuration files. Loads the first configuration
-# file with the given 'filename' found in 'paths' into the module named 'module_name'.
-# Not finding the requested file may or may not be treated as an error, depending
-# on the 'must_find' parameter. Returns a normalized path to the loaded
-# configuration file or nothing if no file was loaded.
-#
-def load_config(module_name, filename, paths, must_find=False):
-
- if debug_config:
-        print "notice: Searching '%s' for '%s' configuration file '%s'." \
- % (paths, module_name, filename)
-
- where = None
- for path in paths:
- t = os.path.join(path, filename)
- if os.path.exists(t):
- where = t
- break
-
- if where:
- where = os.path.realpath(where)
-
- if debug_config:
- print "notice: Loading '%s' configuration file '%s' from '%s'." \
- % (module_name, filename, where)
-
-        # Set the source location so that path constants in config files
-        # with relative paths work. This matters most
-        # for project-config.jam, but may be useful in other
-        # config files as well.
-        attributes = get_manager().projects().attributes(module_name)
- attributes.set('source-location', os.path.dirname(where), True)
- get_manager().projects().load_standalone(module_name, where)
-
- else:
-        msg = "Configuration file '%s' not found in '%s'." % (filename, paths)
- if must_find:
- get_manager().errors()(msg)
-
- elif debug_config:
- print msg
-
- return where
-
-# Loads all the configuration files used by Boost Build in the following order:
-#
-# -- test-config --
-# Loaded only if specified on the command-line using the --test-config
-# command-line parameter. It is ok for this file not to exist even if specified.
-# If this configuration file is loaded, regular site and user configuration
-# files will not be. If a relative path is specified, file is searched for in
-# the current folder.
-#
-# -- site-config --
-# Always named site-config.jam. Will only be found if located on the system
-# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
-# path, in that order. Not loaded in case the test-config configuration file is
-# loaded or either the --ignore-site-config or the --ignore-config command-line
-# option is specified.
-#
-# -- user-config --
-# Named user-config.jam by default or may be named explicitly using the
-# --user-config command-line option or the BOOST_BUILD_USER_CONFIG environment
-# variable. If named explicitly the file is looked for from the current working
-# directory and if the default one is used then it is searched for in the
-# user's home directory and the Boost Build path, in that order. Not loaded in
-# case either the test-config configuration file is loaded, --ignore-config
-# command-line option is specified or an empty file name is explicitly
-# specified. If the file name has been given explicitly then the file must
-# exist.
-#
-# Test configurations have been added primarily for use by Boost Build's
-# internal unit testing system but may be used freely in other places as well.
-#
-def load_configuration_files():
-
- # Flag indicating that site configuration should not be loaded.
- ignore_site_config = "--ignore-site-config" in sys.argv
-
- if legacy_ignore_config and debug_config:
- print "notice: Regular site and user configuration files will be ignored"
- print "notice: due to the --ignore-config command-line option."
-
- initialize_config_module("test-config")
- test_config = None
- for a in sys.argv:
- m = re.match("--test-config=(.*)$", a)
- if m:
- test_config = b2.util.unquote(m.group(1))
- break
-
- if test_config:
- where = load_config("test-config", os.path.basename(test_config), [os.path.dirname(test_config)])
- if where:
- if debug_config and not legacy_ignore_config:
- print "notice: Regular site and user configuration files will"
- print "notice: be ignored due to the test configuration being loaded."
-
- user_path = [os.path.expanduser("~")] + bjam.variable("BOOST_BUILD_PATH")
- site_path = ["/etc"] + user_path
- if os.name in ["nt"]:
- site_path = [os.getenv("SystemRoot")] + user_path
-
- if ignore_site_config and not legacy_ignore_config:
- print "notice: Site configuration files will be ignored due to the"
- print "notice: --ignore-site-config command-line option."
-
- initialize_config_module("site-config")
- if not test_config and not ignore_site_config and not legacy_ignore_config:
- load_config('site-config', 'site-config.jam', site_path)
-
- initialize_config_module('user-config')
- if not test_config and not legacy_ignore_config:
-
- # Here, user_config is None if nothing was explicitly specified,
- # and '' if the user explicitly does not want to load any user
- # config.
- user_config = None
- for a in sys.argv:
- m = re.match("--user-config=(.*)$", a)
- if m:
- user_config = m.group(1)
- break
-
- if user_config is None:
- user_config = os.getenv("BOOST_BUILD_USER_CONFIG")
-
- # Special handling for the case when the OS does not strip the quotes
- # around the file name, as is the case when using Cygwin bash.
- user_config = b2.util.unquote(user_config)
- explicitly_requested = user_config
-
- if user_config is None:
- user_config = "user-config.jam"
-
- if user_config:
- if explicitly_requested:
-
- user_config = os.path.abspath(user_config)
-
- if debug_config:
- print "notice: Loading explicitly specified user configuration file:"
- print " " + user_config
-
- load_config('user-config', os.path.basename(user_config), [os.path.dirname(user_config)], True)
- else:
- load_config('user-config', os.path.basename(user_config), user_path)
- else:
- if debug_config:
- print "notice: User configuration file loading explicitly disabled."
-
- # We look for project-config.jam from "." upward.
- # I am not sure this is 100% the right decision; we might as well check for
- # it only alongside the Jamroot file. However:
- #
- # - We need to load project-root.jam before Jamroot
- # - We probably would need to load project-root.jam even if there's no
- # Jamroot - e.g. to implement automake-style out-of-tree builds.
- if os.path.exists("project-config.jam"):
- file = ["project-config.jam"]
- else:
- file = b2.util.path.glob_in_parents(".", ["project-config.jam"])
-
- if file:
- initialize_config_module('project-config', os.path.dirname(file[0]))
- load_config('project-config', "project-config.jam", [os.path.dirname(file[0])], True)
-
-
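The configuration-file precedence documented above load_configuration_files and implemented by it can be summarised in a minimal sketch. This is an illustration only, not part of build_system.py, and it assumes the relevant command-line flags have already been parsed into plain values:

    def config_files_to_load(ignore_config, ignore_site_config,
                             test_config, user_config):
        # --ignore-config suppresses every regular configuration file.
        if ignore_config:
            return []
        # An explicit test configuration replaces site and user configuration.
        if test_config:
            return [("test-config", test_config)]
        files = []
        if not ignore_site_config:
            files.append(("site-config", "site-config.jam"))
        # An explicitly empty user config name disables user configuration.
        if user_config != "":
            files.append(("user-config", user_config or "user-config.jam"))
        # project-config.jam is handled separately: it is searched for from
        # "." upward regardless of the flags above.
        return files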
-# Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
-# toolset=xx,yy,...zz in the command line. May return additional properties to
-# be processed as if they had been specified by the user.
-#
-def process_explicit_toolset_requests():
-
- extra_properties = []
-
- option_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^--toolset=(.*)$")
- for e in option.split(',')]
- feature_toolsets = [e for option in b2.util.regex.transform(sys.argv, "^toolset=(.*)$")
- for e in option.split(',')]
-
- for t in option_toolsets + feature_toolsets:
-
- # Parse toolset-version/properties.
- (toolset_version, toolset, version) = re.match("(([^-/]+)-?([^/]+)?)/?.*", t).groups()
-
- if debug_config:
- print "notice: [cmdline-cfg] Detected command-line request for '%s': toolset= %s version=%s" \
- % (toolset_version, toolset, version)
-
- # If the toolset is not known, configure it now.
- known = False
- if toolset in feature.values("toolset"):
- known = True
-
- if known and version and not feature.is_subvalue("toolset", toolset, "version", version):
- known = False
- # TODO: we should do 'using $(toolset)' in case no version has been
- # specified and there are no versions defined for the given toolset to
- # allow the toolset to configure its default version. For this we need
- # to know how to detect whether a given toolset has any versions
- # defined. An alternative would be to do this whenever version is not
- # specified but that would require that toolsets correctly handle the
- # case when their default version is configured multiple times which
- # should be checked for all existing toolsets first.
-
- if not known:
-
- if debug_config:
- print "notice: [cmdline-cfg] toolset '%s' not previously configured; attempting to auto-configure now" % toolset_version
- if version is not None:
- using(toolset, version)
- else:
- using(toolset)
-
- else:
-
- if debug_config:
-
- print "notice: [cmdline-cfg] toolset '%s' already configured" % toolset_version
-
- # Make sure we get an appropriate property into the build request in
- # case the toolset has been specified using the "--toolset=..."
- # command-line option form.
- if not t in sys.argv and not t in feature_toolsets:
-
- if debug_config:
- print "notice: [cmdline-cfg] adding toolset=%s) to the build request." % t ;
- extra_properties += "toolset=%s" % t
-
- return extra_properties
-
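The toolset-version parsing above relies on a single regular expression. A standalone check of how it splits a request (the value 'gcc-4.9' is only an example):

    import re

    # Same pattern as in process_explicit_toolset_requests: splits
    # "<toolset>[-<version>][/properties]" into its components.
    toolset_version, toolset, version = re.match(
        "(([^-/]+)-?([^/]+)?)/?.*", "gcc-4.9").groups()
    assert (toolset_version, toolset, version) == ("gcc-4.9", "gcc", "4.9")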
-
-
-# Returns 'true' if the given 'project' is equal to or is a (possibly indirect)
-# child of any of the projects requested to be cleaned in this build system run.
-# Returns 'false' otherwise. Expects the .project-targets list to have already
-# been constructed.
-#
-@cached
-def should_clean_project(project):
-
- if project in project_targets:
- return True
- else:
-
- parent = get_manager().projects().attribute(project, "parent-module")
- if parent and parent != "user-config":
- return should_clean_project(parent)
- else:
- return False
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-def main():
-
- sys.argv = bjam.variable("ARGV")
-
- # FIXME: document this option.
- if "--profiling" in sys.argv:
- import cProfile
- r = cProfile.runctx('main_real()', globals(), locals(), "stones.prof")
-
- import pstats
- stats = pstats.Stats("stones.prof")
- stats.strip_dirs()
- stats.sort_stats('time', 'calls')
- stats.print_callers(20)
- return r
- else:
- try:
- return main_real()
- except ExceptionWithUserContext, e:
- e.report()
-
-def main_real():
-
- global debug_config, legacy_ignore_config, out_xml
-
- debug_config = "--debug-configuration" in sys.argv
- legacy_ignore_config = "--ignore_config" in sys.argv
- out_xml = any(re.match("^--out-xml=(.*)$", a) for a in sys.argv)
-
- engine = Engine()
-
- global_build_dir = option.get("build-dir")
- manager = Manager(engine, global_build_dir)
-
- import b2.build.configure as configure
-
- if "--version" in sys.argv:
-
- version.report()
- return
-
- # This module defines types and generator and what not,
- # and depends on manager's existence
- import b2.tools.builtin
-
- b2.tools.common.init(manager)
-
- load_configuration_files()
-
- extra_properties = []
- # Note that this causes --toolset options to be ignored if --ignore-config
- # is specified.
- if not legacy_ignore_config:
- extra_properties = process_explicit_toolset_requests()
-
- # We always load the project in "." so that 'use-project' directives have
- # any chance of being seen. Otherwise, we would not be able to refer to
- # subprojects using target ids.
- current_project = None
- projects = get_manager().projects()
- if projects.find(".", "."):
- current_project = projects.target(projects.load("."))
-
- # If there are no toolsets currently configured, make the build run using
- # the default toolset.
- if not legacy_ignore_config and not feature.values("toolset"):
-
- dt = default_toolset
- dtv = None
- if default_toolset:
- dtv = default_toolset_version
- else:
- dt = "gcc"
- if os.name == 'nt':
- dt = "msvc"
- # FIXME:
- #else if [ os.name ] = MACOSX
- #{
- # default-toolset = darwin ;
- #}
-
- print "warning: No toolsets are configured."
- print "warning: Configuring default toolset '%s'." % dt
- print "warning: If the default is wrong, your build may not work correctly."
- print "warning: Use the \"toolset=xxxxx\" option to override our guess."
- print "warning: For more configuration options, please consult"
- print "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
-
- using(dt, dtv)
-
- # Parse command line for targets and properties. Note that this requires
- # that all project files already be loaded.
- (target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
-
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if properties:
- expanded = build_request.expand_no_defaults(properties)
- else:
- expanded = [property_set.empty()]
-
- # Check that we actually found something to build.
- if not current_project and not target_ids:
- get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
- # FIXME:
- # EXIT
-
- # Flags indicating that this build system run has been started in order to
- # clean existing targets instead of creating new ones. Note that these are
- # not the final flag values, as they may get changed later on due to some
- # special targets being specified on the command line.
- clean = "--clean" in sys.argv
- cleanall = "--clean-all" in sys.argv
-
- # List of explicitly requested files to build. Any target references read
- # from the command line that are not recognized as one of the targets
- # defined in the loaded Jamfiles will be interpreted as explicitly
- # requested files to build. If any such files are explicitly requested then
- # only those files and the targets they depend on will be built, and they
- # will be searched for among targets that would have been built had there
- # been no explicitly requested files.
- explicitly_requested_files = []
-
- # List of Boost Build meta-targets, virtual-targets and actual Jam targets
- # constructed in this build system run.
- targets = []
- virtual_targets = []
- actual_targets = []
-
- explicitly_requested_files = []
-
- # Process each target specified on the command-line and convert it into an
- # internal Boost Build target object. Detect the special 'clean' target. If
- # no main Boost Build targets were explicitly requested, use the current
- # project as the target.
- for id in target_ids:
- if id == "clean":
- clean = 1
- else:
- t = None
- if current_project:
- t = current_project.find(id, no_error=1)
- else:
- t = find_target(id)
-
- if not t:
- print "notice: could not find main target '%s'" % id
- print "notice: assuming it's a name of file to create " ;
- explicitly_requested_files.append(id)
- else:
- targets.append(t)
-
- if not targets:
- targets = [projects.target(projects.module_name("."))]
-
- # FIXME: put this BACK.
-
- ## if [ option.get dump-generators : : true ]
- ## {
- ## generators.dump ;
- ## }
-
-
- # We wish to put config.log in the build directory corresponding
- # to Jamroot, so that its location does not differ depending on the
- # directory from which we build. The amount of indirection necessary
- # here is scary.
- first_project = targets[0].project()
- first_project_root_location = first_project.get('project-root')
- first_project_root_module = manager.projects().load(first_project_root_location)
- first_project_root = manager.projects().target(first_project_root_module)
- first_build_build_dir = first_project_root.build_dir()
- configure.set_log_file(os.path.join(first_build_build_dir, "config.log"))
-
- virtual_targets = []
-
- global results_of_main_targets
-
- # Now that we have a set of targets to build and a set of property sets to
- # build the targets with, we can start the main build process by using each
- # property set to generate virtual targets from all of our listed targets
- # and any of their dependants.
- for p in expanded:
- manager.set_command_line_free_features(property_set.create(p.free()))
-
- for t in targets:
- try:
- g = t.generate(p)
- if not isinstance(t, ProjectTarget):
- results_of_main_targets.extend(g.targets())
- virtual_targets.extend(g.targets())
- except ExceptionWithUserContext, e:
- e.report()
- except Exception:
- raise
-
- # Convert collected virtual targets into actual raw Jam targets.
- for t in virtual_targets:
- actual_targets.append(t.actualize())
-
-
- # FIXME: restore
-## # If XML data output has been requested prepare additional rules and targets
-## # so we can hook into Jam to collect build data while its building and have
-## # it trigger the final XML report generation after all the planned targets
-## # have been built.
-## if $(.out-xml)
-## {
-## # Get a qualified virtual target name.
-## rule full-target-name ( target )
-## {
-## local name = [ $(target).name ] ;
-## local project = [ $(target).project ] ;
-## local project-path = [ $(project).get location ] ;
-## return $(project-path)//$(name) ;
-## }
-
-## # Generate an XML file containing build statistics for each constituent.
-## #
-## rule out-xml ( xml-file : constituents * )
-## {
-## # Prepare valid XML header and footer with some basic info.
-## local nl = "
-## " ;
-## local jam = [ version.jam ] ;
-## local os = [ modules.peek : OS OSPLAT JAMUNAME ] "" ;
-## local timestamp = [ modules.peek : JAMDATE ] ;
-## local cwd = [ PWD ] ;
-## local command = $(.sys.argv) ;
-## local bb-version = [ version.boost-build ] ;
-## .header on $(xml-file) =
-## "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
-## "$(nl)<build format=\"1.0\" version=\"$(bb-version)\">"
-## "$(nl) <jam version=\"$(jam:J=.)\" />"
-## "$(nl) <os name=\"$(os[1])\" platform=\"$(os[2])\"><![CDATA[$(os[3-]:J= )]]></os>"
-## "$(nl) <timestamp><![CDATA[$(timestamp)]]></timestamp>"
-## "$(nl) <directory><![CDATA[$(cwd)]]></directory>"
-## "$(nl) <command><![CDATA[\"$(command:J=\" \")\"]]></command>"
-## ;
-## .footer on $(xml-file) =
-## "$(nl)</build>" ;
-
-## # Generate the target dependency graph.
-## .contents on $(xml-file) +=
-## "$(nl) <targets>" ;
-## for local t in [ virtual-target.all-targets ]
-## {
-## local action = [ $(t).action ] ;
-## if $(action)
-## # If a target has no action, it has no dependencies.
-## {
-## local name = [ full-target-name $(t) ] ;
-## local sources = [ $(action).sources ] ;
-## local dependencies ;
-## for local s in $(sources)
-## {
-## dependencies += [ full-target-name $(s) ] ;
-## }
-
-## local path = [ $(t).path ] ;
-## local jam-target = [ $(t).actual-name ] ;
-
-## .contents on $(xml-file) +=
-## "$(nl) <target>"
-## "$(nl) <name><![CDATA[$(name)]]></name>"
-## "$(nl) <dependencies>"
-## "$(nl) <dependency><![CDATA[$(dependencies)]]></dependency>"
-## "$(nl) </dependencies>"
-## "$(nl) <path><![CDATA[$(path)]]></path>"
-## "$(nl) <jam-target><![CDATA[$(jam-target)]]></jam-target>"
-## "$(nl) </target>"
-## ;
-## }
-## }
-## .contents on $(xml-file) +=
-## "$(nl) </targets>" ;
-
-## # Build $(xml-file) after $(constituents). Do so even if a
-## # constituent action fails and regenerate the xml on every bjam run.
-## INCLUDES $(xml-file) : $(constituents) ;
-## ALWAYS $(xml-file) ;
-## __ACTION_RULE__ on $(xml-file) = build-system.out-xml.generate-action ;
-## out-xml.generate $(xml-file) ;
-## }
-
-## # The actual build actions are here; if we did this work in the actions
-## # clause we would have to form a valid command line containing the
-## # result of @(...) below (the name of the XML file).
-## #
-## rule out-xml.generate-action ( args * : xml-file
-## : command status start end user system : output ? )
-## {
-## local contents =
-## [ on $(xml-file) return $(.header) $(.contents) $(.footer) ] ;
-## local f = @($(xml-file):E=$(contents)) ;
-## }
-
-## # Nothing to do here; the *real* actions happen in
-## # out-xml.generate-action.
-## actions quietly out-xml.generate { }
-
-## # Define the out-xml file target, which depends on all the targets so
-## # that it runs the collection after the targets have run.
-## out-xml $(.out-xml) : $(actual-targets) ;
-
-## # Set up a global __ACTION_RULE__ that records all the available
-## # statistics about each actual target in a variable "on" the --out-xml
-## # target.
-## #
-## rule out-xml.collect ( xml-file : target : command status start end user
-## system : output ? )
-## {
-## local nl = "
-## " ;
-## # Open the action with some basic info.
-## .contents on $(xml-file) +=
-## "$(nl) <action status=\"$(status)\" start=\"$(start)\" end=\"$(end)\" user=\"$(user)\" system=\"$(system)\">" ;
-
-## # If we have an action object we can print out more detailed info.
-## local action = [ on $(target) return $(.action) ] ;
-## if $(action)
-## {
-## local action-name = [ $(action).action-name ] ;
-## local action-sources = [ $(action).sources ] ;
-## local action-props = [ $(action).properties ] ;
-
-## # The qualified name of the action which we created the target.
-## .contents on $(xml-file) +=
-## "$(nl) <name><![CDATA[$(action-name)]]></name>" ;
-
-## # The sources that made up the target.
-## .contents on $(xml-file) +=
-## "$(nl) <sources>" ;
-## for local source in $(action-sources)
-## {
-## local source-actual = [ $(source).actual-name ] ;
-## .contents on $(xml-file) +=
-## "$(nl) <source><![CDATA[$(source-actual)]]></source>" ;
-## }
-## .contents on $(xml-file) +=
-## "$(nl) </sources>" ;
-
-## # The properties that define the conditions under which the
-## # target was built.
-## .contents on $(xml-file) +=
-## "$(nl) <properties>" ;
-## for local prop in [ $(action-props).raw ]
-## {
-## local prop-name = [ MATCH ^<(.*)>$ : $(prop:G) ] ;
-## .contents on $(xml-file) +=
-## "$(nl) <property name=\"$(prop-name)\"><![CDATA[$(prop:G=)]]></property>" ;
-## }
-## .contents on $(xml-file) +=
-## "$(nl) </properties>" ;
-## }
-
-## local locate = [ on $(target) return $(LOCATE) ] ;
-## locate ?= "" ;
-## .contents on $(xml-file) +=
-## "$(nl) <jam-target><![CDATA[$(target)]]></jam-target>"
-## "$(nl) <path><![CDATA[$(target:G=:R=$(locate))]]></path>"
-## "$(nl) <command><![CDATA[$(command)]]></command>"
-## "$(nl) <output><![CDATA[$(output)]]></output>" ;
-## .contents on $(xml-file) +=
-## "$(nl) </action>" ;
-## }
-
-## # When no __ACTION_RULE__ is set "on" a target, the search falls back to
-## # the global module.
-## module
-## {
-## __ACTION_RULE__ = build-system.out-xml.collect
-## [ modules.peek build-system : .out-xml ] ;
-## }
-
-## IMPORT
-## build-system :
-## out-xml.collect
-## out-xml.generate-action
-## : :
-## build-system.out-xml.collect
-## build-system.out-xml.generate-action
-## ;
-## }
-
- j = option.get("jobs")
- if j:
- bjam.call("set-variable", PARALLELISM, j)
-
- k = option.get("keep-going", "true", "true")
- if k in ["on", "yes", "true"]:
- bjam.call("set-variable", "KEEP_GOING", "1")
- elif k in ["off", "no", "false"]:
- bjam.call("set-variable", "KEEP_GOING", "0")
- else:
- print "error: Invalid value for the --keep-going option"
- sys.exit()
-
- # The 'all' pseudo target is not strictly needed except in the case when we
- # use it below, but people often assume they always have this target
- # available and do not declare it themselves before use, which may cause
- # build failures with an error message about not being able to build the
- # 'all' target.
- bjam.call("NOTFILE", "all")
-
- # And now that all the actual raw Jam targets and all the dependencies
- # between them have been prepared, all that is left is to tell Jam to
- # update those targets.
- if explicitly_requested_files:
- # Note that this case cannot be joined with the regular one, when only
- # exact Boost Build targets are requested, as here we do not build those
- # requested targets but only use them to construct the dependency tree
- # needed to build the explicitly requested files.
- # FIXME: add $(.out-xml)
- bjam.call("UPDATE", ["<e>%s" % x for x in explicitly_requested_files])
- elif cleanall:
- bjam.call("UPDATE", "clean-all")
- elif clean:
- manager.engine().set_update_action("common.Clean", "clean",
- actual_clean_targets(targets))
- bjam.call("UPDATE", "clean")
- else:
- # FIXME:
- #configure.print-configure-checks-summary ;
-
- if pre_build_hook:
- for h in pre_build_hook:
- h()
-
- bjam.call("DEPENDS", "all", actual_targets)
- ok = bjam.call("UPDATE_NOW", "all") # FIXME: add out-xml
- if post_build_hook:
- post_build_hook(ok)
- # Prevent automatic update of the 'all' target, now that
- # we have explicitly updated what we wanted.
- bjam.call("UPDATE")
-
- if manager.errors().count() == 0:
- return ["ok"]
- else:
- return []
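The expansion step in main_real above (build_request.expand_no_defaults) turns comma-separated command-line values such as toolset=gcc,msvc variant=debug,release into one property set per legal combination, and each combination drives a separate build run. A rough, hypothetical illustration of the idea, not the actual build_request API:

    import itertools

    # Two requested features with two values each expand to four property sets.
    requested = {"toolset": ["gcc", "msvc"], "variant": ["debug", "release"]}
    expanded = [dict(zip(requested.keys(), combo))
                for combo in itertools.product(*requested.values())]
    assert len(expanded) == 4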
diff --git a/tools/build/v2/contrib/boost.jam b/tools/build/v2/contrib/boost.jam
deleted file mode 100644
index 388ac4d157..0000000000
--- a/tools/build/v2/contrib/boost.jam
+++ /dev/null
@@ -1,321 +0,0 @@
-# $Id: boost.jam 63913 2010-07-12 07:37:43Z vladimir_prus $
-# Copyright 2008 Roland Schwarz
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Boost library support module.
-#
-# This module allows using the Boost library from Boost.Build projects.
-# The location of a boost source tree or the path to a pre-built
-# version of the library can be configured from either site-config.jam
-# or user-config.jam. If no location is configured the module looks for
-# a BOOST_ROOT environment variable, which should point to a boost source
-# tree. As a last resort it tries to use pre-built libraries from the standard
-# search path of the compiler.
-#
-# If the location of a source tree is known, the module can be configured
-# from the *-config.jam files:
-#
-# using boost : 1.35 : <root>/path-to-boost-root ;
-#
-# If the location of a pre-built version is known:
-#
-# using boost : 1.34
-# : <include>/usr/local/include/boost_1_34
-# <library>/usr/local/lib
-# ;
-#
-# It is legal to configure more than one boost library version in the config
-# files. The version identifier is used to disambiguate between them.
-# The first configured version becomes the default.
-#
-# To use a boost library you need to put a 'use' statement into your
-# Jamfile:
-#
-# import boost ;
-#
-# boost.use-project 1.35 ;
-#
-# If you don't care about a specific version you can just omit the version
-# part, in which case the default is picked up:
-#
-# boost.use-project ;
-#
-# The library can be referenced with the project identifier '/boost'. To
-# reference the program_options library you would specify:
-#
-# exe myexe : mysrc.cpp : <library>/boost//program_options ;
-#
-# Note that the requirements are automatically transformed into suitable
-# tags to find the correct pre-built library.
-#
-
-import modules ;
-import errors ;
-import project ;
-import string ;
-import toolset ;
-import property-set ;
-import regex ;
-import common ;
-import option ;
-import numbers ;
-
-.boost.auto_config = [ property-set.create <layout>system ] ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# Configuration of the boost library to use.
-#
-# This can either be a boost source tree or
-# pre-built libraries. The 'version' parameter must be a valid boost
-# version number, e.g. 1.35, if specifying a pre-built version with
-# versioned layout. It may be a symbolic name, e.g. 'trunk' if specifying
-# a source tree. The options are specified as named parameters (like
-# properties). The following parameters are available:
-#
-# <root>/path-to-boost-root: Specify a source tree.
-#
-# <include>/path-to-include: The include directory to search.
-#
-# <library>/path-to-library: The library directory to search.
-#
-# <layout>system or <layout>versioned.
-#
-# <build-id>my_build_id: The custom build id to use.
-#
-rule init
-(
- version # Version identifier.
- : options * # Set the option properties.
-)
-{
- if $(.boost.$(version)) {
- errors.user-error
- "Boost " $(version) "already configured." ;
- }
- else {
- if $(.debug-configuration) {
- if ! $(.boost_default) {
- echo notice: configuring default boost library $(version) ;
- }
- echo notice: configuring boost library $(version) ;
- }
- .boost_default ?= $(version) ; # the first configured is default
- .boost.$(version) = [ property-set.create $(options) ] ;
- }
-}
-
-# Use a certain version of the library.
-#
-# The use-project rule causes the module to define a boost project of
-# searchable pre-built boost libraries, or references a source tree
-# of the boost library. If the 'version' parameter is omitted either
-# the configured default (first in config files) is used or an auto
-# configuration will be attempted.
-#
-rule use-project
-(
- version ? # The version of the library to use.
-)
-{
- project.push-current [ project.current ] ;
- version ?= $(.boost_default) ;
- version ?= auto_config ;
-
- if $(.initialized) {
- if $(.initialized) != $(version) {
- errors.user-error
- "Attempt to use" $(__name__) "with different parameters" ;
- }
- }
- else {
- if $(.boost.$(version)) {
- local opt = $(.boost.$(version)) ;
- local root = [ $(opt).get <root> ] ;
- local inc = [ $(opt).get <include> ] ;
- local lib = [ $(opt).get <library> ] ;
-
- if $(.debug-configuration) {
- echo notice: using boost library $(version) [ $(opt).raw ] ;
- }
-
- .layout = [ $(opt).get <layout> ] ;
- .layout ?= versioned ;
- .build_id = [ $(opt).get <build-id> ] ;
- .version_tag = [ regex.replace $(version) "[*\\/:.\"\' ]" "_" ] ;
- .initialized = $(version) ;
-
- if ( $(root) && $(inc) )
- || ( $(root) && $(lib) )
- || ( $(lib) && ! $(inc) )
- || ( ! $(lib) && $(inc) ) {
- errors.user-error
- "Ambiguous parameters,"
- "use either <root> or <inlude> with <library>." ;
- }
- else if ! $(root) && ! $(inc) {
- root = [ modules.peek : BOOST_ROOT ] ;
- }
-
- local prj = [ project.current ] ;
- local mod = [ $(prj).project-module ] ;
-
- if $(root) {
- modules.call-in $(mod) : use-project boost : $(root) ;
- }
- else {
- project.initialize $(__name__) ;
- # It is possible to override the setup of the searched
- # libraries per version. The (unlikely) tag 0.0.1 is
- # meant as an example template only.
- switch $(version) {
- case 0.0.1 : boost_0_0_1 $(inc) $(lib) ;
- case * : boost_std $(inc) $(lib) ;
- }
- }
- }
- else {
- errors.user-error
- "Reference to unconfigured boost version." ;
- }
- }
- project.pop-current ;
-}
-
-rule boost_std ( inc ? lib ? )
-{
-# The default definitions for pre-built libraries.
-
- project boost
- : usage-requirements <include>$(inc) <define>BOOST_ALL_NO_LIB
- : requirements <tag>@tag_std <search>$(lib)
- ;
-
- alias headers ;
- lib date_time : : : :
- <link>shared:<define>BOOST_DATE_TIME_DYN_LINK ;
- lib filesystem : : : :
- <link>shared:<define>BOOST_FILE_SYSTEM_DYN_LINK ;
- lib graph : : : :
- <link>shared:<define>BOOST_GRAPH_DYN_LINK ;
- lib graph_parallel : : : :
- <link>shared:<define>BOOST_GRAPH_DYN_LINK ;
- lib iostreams : : : :
- <link>shared:<define>BOOST_IOSTREAMS_DYN_LINK ;
- lib math_tr1 : : : :
- <link>shared:<define>BOOST_MATH_TR1_DYN_LINK ;
- lib math_tr1f : : : :
- <link>shared:<define>BOOST_MATH_TR1_DYN_LINK ;
- lib math_tr1l : : : :
- <link>shared:<define>BOOST_MATH_TR1_DYN_LINK ;
- lib math_c99 : : : :
- <link>shared:<define>BOOST_MATH_TR1_DYN_LINK ;
- lib math_c99f : : : :
- <link>shared:<define>BOOST_MATH_TR1_DYN_LINK ;
- lib math_c99l : : : :
- <link>shared:<define>BOOST_MATH_TR1_DYN_LINK ;
- lib mpi : : : :
- <link>shared:<define>BOOST_MPI_DYN_LINK ;
- lib program_options : : : :
- <link>shared:<define>BOOST_PROGRAM_OPTIONS_DYN_LINK ;
- lib python : : : :
- <link>shared:<define>BOOST_PYTHON_DYN_LINK ;
- lib random : : : :
- <link>shared:<define>BOOST_RANDOM_DYN_LINK ;
- lib regex : : : :
- <link>shared:<define>BOOST_REGEX_DYN_LINK ;
- lib serialization : : : :
- <link>shared:<define>BOOST_SERIALIZATION_DYN_LINK ;
- lib wserialization : : : :
- <link>shared:<define>BOOST_SERIALIZATION_DYN_LINK ;
- lib signals : : : :
- <link>shared:<define>BOOST_SIGNALS_DYN_LINK ;
- lib system : : : :
- <link>shared:<define>BOOST_SYSTEM_DYN_LINK ;
- lib unit_test_framework : : : :
- <link>shared:<define>BOOST_TEST_DYN_LINK ;
- lib prg_exec_monitor : : : :
- <link>shared:<define>BOOST_TEST_DYN_LINK ;
- lib test_exec_monitor : : : :
- <link>shared:<define>BOOST_TEST_DYN_LINK ;
- lib thread : : : :
- <link>shared:<define>BOOST_THREAD_DYN_DLL ;
- lib wave : : : :
- <link>shared:<define>BOOST_WAVE_DYN_LINK ;
-}
-
-rule boost_0_0_1 ( inc ? lib ? )
-{
- echo "You are trying to use an example placeholder for boost libs." ;
- # Copy this template to another place (in the file boost.jam)
- # and define a project and libraries modelled after the
- # boost_std rule. Please note that it is also possible to have
- # a per-version tagging rule in case they are different between
- # versions.
-}
-
-rule tag_std ( name : type ? : property-set )
-{
- name = boost_$(name) ;
- if ( [ $(property-set).get <link> ] in static ) &&
- ( [ $(property-set).get <target-os> ] in windows )
- {
- name = lib$(name) ;
- }
-
- local result ;
- if $(.layout) = system
- {
- local version = [ MATCH ^([0-9]+)_([0-9]+) : $(.version_tag) ] ;
- if $(version[1]) = "1" && [ numbers.less $(version[2]) 39 ]
- {
- result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
- }
- else
- {
- result = [ tag_system $(name) : $(type) : $(property-set) ] ;
- }
- }
- else if $(.layout) = tagged
- {
- result = [ tag_tagged $(name) : $(type) : $(property-set) ] ;
- }
- else if $(.layout) = versioned
- {
- result = [ tag_versioned $(name) : $(type) : $(property-set) ] ;
- }
- else
- {
- errors.error "Missing layout" ;
- }
-
- return $(result) ;
-}
-
-rule tag_system ( name : type ? : property-set )
-{
- return [ common.format-name
- <base>
- -$(.build_id)
- : $(name) : $(type) : $(property-set) ] ;
-}
-
-rule tag_tagged ( name : type ? : property-set )
-{
- return [ common.format-name
- <base> <threading> <runtime>
- -$(.build_id)
- : $(name) : $(type) : $(property-set) ] ;
-}
-
-rule tag_versioned ( name : type ? : property-set )
-{
- return [ common.format-name
- <base> <toolset> <threading> <runtime> -$(.version_tag)
- -$(.build_id)
- : $(name) : $(type) : $(property-set) ] ;
-}
diff --git a/tools/build/v2/debian/boost-build.docs b/tools/build/v2/debian/boost-build.docs
deleted file mode 100644
index 70af14954e..0000000000
--- a/tools/build/v2/debian/boost-build.docs
+++ /dev/null
@@ -1,4 +0,0 @@
-boost_build_v2.html
-index_v2.html
-boost.png
-doc \ No newline at end of file
diff --git a/tools/build/v2/debian/boost-build.examples b/tools/build/v2/debian/boost-build.examples
deleted file mode 100644
index e5a7b8d3bf..0000000000
--- a/tools/build/v2/debian/boost-build.examples
+++ /dev/null
@@ -1 +0,0 @@
-example/* \ No newline at end of file
diff --git a/tools/build/v2/debian/changelog b/tools/build/v2/debian/changelog
deleted file mode 100644
index bfc1f139c4..0000000000
--- a/tools/build/v2/debian/changelog
+++ /dev/null
@@ -1,6 +0,0 @@
-boost-build (2.0.m10-1) unstable; urgency=low
-
- * Initial Release.
-
- -- Vladimir Prus <ghost@cs.msu.su> Wed, 14 Aug 2002 14:08:00 +0400
-
diff --git a/tools/build/v2/debian/conffiles b/tools/build/v2/debian/conffiles
deleted file mode 100644
index 291d688edd..0000000000
--- a/tools/build/v2/debian/conffiles
+++ /dev/null
@@ -1 +0,0 @@
-/etc/site-config.jam
diff --git a/tools/build/v2/debian/control b/tools/build/v2/debian/control
deleted file mode 100644
index 7d1733e783..0000000000
--- a/tools/build/v2/debian/control
+++ /dev/null
@@ -1,13 +0,0 @@
-Source: boost-build
-Section: devel
-Priority: optional
-Maintainer: Vladimir Prus <ghost@cs.msu.su>
-Build-Depends: debhelper (>> 3.0.0), docbook-to-man, bison
-Standards-Version: 3.5.2
-
-Package: boost-build
-Architecture: all
-Depends: ${shlibs:Depends}, bjam (>> 3.1.9-1)
-Description: Build system
- Boost.Build is a build system with a simple and high-level language.
- It supports build variants, and several different compilers and tools.
diff --git a/tools/build/v2/debian/excludes b/tools/build/v2/debian/excludes
deleted file mode 100644
index 59882930dd..0000000000
--- a/tools/build/v2/debian/excludes
+++ /dev/null
@@ -1,14 +0,0 @@
-boost.css
-boost_build_v2.html
-index_v2.html
-boost.png
-generators_prototype.py
-hacking.txt
-release_procedure.txt
-site-config.jam
-roll.sh
-debian
-doc
-example
-test
-CVS \ No newline at end of file
diff --git a/tools/build/v2/debian/rules b/tools/build/v2/debian/rules
deleted file mode 100755
index 96ccc418b6..0000000000
--- a/tools/build/v2/debian/rules
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/make -f
-# Sample debian/rules that uses debhelper.
-# This file is public domain software, originally written by Joey Hess.
-
-# Uncomment this to turn on verbose mode.
-#export DH_VERBOSE=1
-
-build:
-clean:
-binary-arch:
-
-binary-indep:
- dh_testdir
- dh_testroot
-
- dh_clean -k
- dh_installdirs usr/share/boost-build etc
-
- # Add here commands to install the package into debian/<packagename>
- (tar --exclude-from debian/excludes -cpf - * ) | (cd `pwd`/debian/tmp/usr/share/boost-build && tar xpf - )
- chmod a-x -R `pwd`/debian/tmp/usr/share/boost-build
-
- dh_installchangelogs
- dh_installdocs -XCVS
- mv `pwd`/debian/tmp/usr/share/doc/boost-build/index_v2.html `pwd`/debian/tmp/usr/share/doc/boost-build/index.html
-
- (tar --exclude make --exclude CVS -cpf - example/* ) | ( cd `pwd`/debian/tmp/usr/share/doc/boost-build && tar xpf - )
-
- sed 's/# using gcc ;/using gcc ;/' user-config.jam > `pwd`/debian/tmp/etc/site-config.jam
-
-# dh_install
-# dh_installmenu
-# dh_installdebconf
-# dh_installlogrotate
-# dh_installemacsen
-# dh_installcatalogs
-# dh_installpam
-# dh_installmime
-# dh_installinit
-# dh_installcron
-# dh_installinfo
-# dh_undocumented
- dh_installman
- dh_link
- dh_compress
- dh_fixperms
-# dh_perl
-# dh_python
-# dh_makeshlibs
- dh_installdeb
- dh_gencontrol
- dh_md5sums
- dh_builddeb
-
-binary: binary-indep binary-arch
-.PHONY: build clean binary-indep binary-arch binary install
diff --git a/tools/build/v2/doc/bjam.qbk b/tools/build/v2/doc/bjam.qbk
deleted file mode 100644
index 95207332dc..0000000000
--- a/tools/build/v2/doc/bjam.qbk
+++ /dev/null
@@ -1,1696 +0,0 @@
-[article Boost.Jam
- [quickbook 1.3]
- [version: 3.1.19]
- [authors [Rivera, Rene], [Abrahams, David], [Prus, Vladimir]]
- [copyright 2003 2004 2005 2006 2007 Rene Rivera, David Abrahams, Vladimir Prus]
- [category tool-build]
- [id jam]
- [dirname jam]
- [purpose
- Jam is a make(1) replacement that makes building simple things simple
- and building complicated things manageable.
- ]
- [license
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or copy at
- [@http://www.boost.org/LICENSE_1_0.txt])
- ]
-]
-
-[/ QuickBook Document version 1.3 ]
-
-[/ Shortcuts ]
-
-[def :version: 3.1.19]
-
-[/ Images ]
-
-[def :NOTE: [$images/note.png]]
-[def :ALERT: [$images/caution.png]]
-[def :DETAIL: [$images/note.png]]
-[def :TIP: [$images/tip.png]]
-
-[/ Links ]
-
-[def :Boost: [@http://www.boost.org Boost]]
-[def :Perforce_Jam: [@http://www.perforce.com/jam/jam.html Perforce Jam]]
-
-[/ Templates ]
-
-[template literal[text]'''<literallayout><literal>'''[text]'''</literal></literallayout>''']
-[template list[items]'''<itemizedlist>'''[items]'''</itemizedlist>''']
-[template orderedlist[items]'''<orderedlist>'''[items]'''</orderedlist>''']
-[template li[text]'''<listitem>'''[text]'''</listitem>''']
-[template lines[items]'''<simplelist type='vert' columns='1'>'''[items]'''</simplelist>''']
-[template line[text]'''<member>'''[text]'''</member>''']
-
-[section:building Building BJam]
-
-Installing =BJam= after building it is simply a matter of copying the
-generated executables someplace in your =PATH=. For building the executables
-there is a set of =build= bootstrap scripts to accommodate particular
-environments. The scripts take one optional argument, the name of the toolset
-to build with. When the toolset is not given, an attempt is made to detect an
-available toolset and use that. The build scripts accept these arguments:
-
-[pre
-/build/ \[/toolset/\]
-]
-
-Running the scripts without arguments will give you the best chance of success. On Windows platforms, from a command console, do:
-
-[pre
-cd /jam source location/
-.\\build.bat
-]
-
-On Unix type platforms do:
-
-[pre
-cd /jam source location/
-sh ./build.sh
-]
-
-For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/jam/src=.
-
-If the scripts fail to detect an appropriate toolset to build with, your particular toolset may not be auto-detectable. In that case, you can specify the toolset as the first argument; this assumes that the toolset is readily available in the =PATH=.
-
-[note
-The toolset used to build Boost.Jam is independent of the toolsets used for Boost.Build. Only one version of Boost.Jam is needed to use Boost.Build.
-]
-
-The supported toolsets, and whether they are auto-detected, are:
-
-[table Supported Toolsets
-
-[[Script] [Platform] [Toolset] [Detection and Notes]]
-
-[ [=build.bat=] [Windows NT, 2000, and XP]
- [[lines
- [line [@http://www.codegear.com/downloads/free/cppbuilder =borland=]]
- [line [@http://www.borland.com/ Borland] C++Builder (BCC 5.5)]
- ]]
- [[list
- [li Common install location: "=C:\Borland\BCC55="]
- [li =BCC32.EXE= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.comeaucomputing.com/ =como=]]
- [line Comeau Computing C/C++]
- ]]
- []
-]
-
-[ [] []
- [[lines
- [line [@http://gcc.gnu.org/ =gcc=]]
- [line GNU GCC]
- ]]
- []
-]
-
-[ [] []
- [[lines
- [line [@http://gcc.gnu.org/ =gcc-nocygwin=]]
- [line GNU GCC]
- ]]
- []
-]
-
-[ [] []
- [[lines
- [line [@http://www.intel.com/software/products/compilers/c60 =intel-win32=]]
- [line Intel C++ Compiler for Windows]
- ]]
- [[list
- [li =ICL.EXE= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.metrowerks.com/ =metrowerks=]]
- [line MetroWerks CodeWarrior C/C++ 7.x, 8.x, 9.x]
- ]]
- [[list
- [li =CWFolder= variable configured]
- [li =MWCC.EXE= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.mingw.org/ =mingw=]]
- [line GNU [@http://gcc.gnu.org/ GCC] as the [@http://www.mingw.org/ MinGW] configuration]
- ]]
- [[list
- [li Common install location: "=C:\MinGW="]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://msdn.microsoft.com/visualc/ =msvc=]]
- [line Microsoft Visual C++ 6.x]
- ]]
- [[list
- [li =VCVARS32.BAT= already configured]
- [li =%MSVCDir%= is present in environment]
- [li Common install locations: "=%ProgramFiles%\Microsoft Visual Studio=", "=%ProgramFiles%\Microsoft Visual C++="]
- [li =CL.EXE= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://msdn.microsoft.com/visualc/ =vc7=]]
- [line Microsoft Visual C++ 7.x]
- ]]
- [[list
- [li =VCVARS32.BAT= or =VSVARS32.BAT= already configured]
- [li =%VS71COMNTOOLS%= is present in environment]
- [li =%VCINSTALLDIR%= is present in environment]
- [li Common install locations: "=%ProgramFiles%\Microsoft Visual Studio .NET=", "=%ProgramFiles%\Microsoft Visual Studio .NET 2003="]
- [li =CL.EXE= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://msdn.microsoft.com/visualc/ =vc8= and =vc9=]]
- [line Microsoft Visual C++ 8.x and 9.x]
- ]]
- [Detection:
- [list
- [li =VCVARSALL.BAT= already configured]
- [li =%VS90COMNTOOLS%= is present in environment]
- [li Common install location: "=%ProgramFiles%\Microsoft Visual Studio 9="]
- [li =%VS80COMNTOOLS%= is present in environment]
- [li Common install location: "=%ProgramFiles%\Microsoft Visual Studio 8="]
- [li =CL.EXE= in =PATH=]
- ]
-
- Notes:
- [list
- [li If =VCVARSALL.BAT= is called to set up the toolset, it is passed all the extra arguments, see below for what those arguments are. This can be used to build, for example, a Win64 specific version of =bjam=. Consult the VisualStudio documentation for what the possible argument values to the =VCVARSALL.BAT= are.]
- ]
- ]
-]
-
-[ [=build.sh=] [Unix, Linux, Cygwin, etc.]
- [[lines
- [line [@http://www.hp.com/go/c++ =acc=]]
- [line HP-UX aCC]
- ]]
- [[list
- [li =aCC= in =PATH=]
- [li =uname= is "HP-UX"]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.comeaucomputing.com/ =como=]]
- [line Comeau Computing C/C++]
- ]]
- [[list
- [li como in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://gcc.gnu.org/ =gcc=]]
- [line GNU GCC]
- ]]
- [[list
- [li gcc in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.intel.com/software/products/compilers/c60l/ =intel-linux=]]
- [line Intel C++ for Linux]
- ]]
- [[list
- [li =icc= in =PATH=]
- [li Common install locations: "=/opt/intel/cc/9.0=", "=/opt/intel_cc_80=", "=/opt/intel/compiler70=", "=/opt/intel/compiler60=", "=/opt/intel/compiler50="]
- ]]
-]
-
-[ [] []
- [[lines
- [line =kcc=]
- [line Intel KAI C++]
- ]]
- [[list
- [li =KCC= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.codegear.com/downloads/free/cppbuilder =kylix=]]
- [line [@http://www.borland.com/ Borland] C++Builder]
- ]]
- [[list
- [li bc++ in PATH]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.sgi.com/developers/devtools/languages/mipspro.html =mipspro=]]
- [line SGI MIPSpro C]
- ]]
- [[list
- [li =uname= is "=IRIX=" or "=IRIX64="]
- ]]
-]
-
-[ [] []
- [[lines
- [line =sunpro=]
- [line Sun Workshop 6 C++]
- ]]
- [[list
- [li Standard install location: "=/opt/SUNWspro="]
- ]]
-]
-
-[ [] []
- [[lines
- [line =qcc=]
- [line [@http://www.qnx.com/ QNX Neutrino]]
- ]]
- [[list
- [li =uname= is "=QNX=" and =qcc= in =PATH=]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.tru64unix.compaq.com/cplus/ =true64cxx=]]
- [line Compaq C++ Compiler for Tru64 UNIX]
- ]]
- [[list
- [li =uname= is "=OSF1="]
- ]]
-]
-
-[ [] []
- [[lines
- [line [@http://www.ibm.com/software/awdtools/vacpp/ =vacpp=]]
- [line IBM VisualAge C++]
- ]]
- [[list
- [li =xlc= in =PATH=]
- ]]
-]
-
-[ [] [MacOS X]
- [[lines
- [line [@http://developer.apple.com/tools/compilers.html =darwin=]]
- [line Apple MacOS X GCC]
- ]]
- [[list
- [li =uname= is "=Darwin="]
- ]]
-]
-
-[ [] [Windows NT, 2000, and XP]
- [[lines
- [line [@http://www.mingw.org/ =mingw=]]
- [line GNU [@http://gcc.gnu.org/ GCC] as the [@http://www.mingw.org/ MinGW] configuration with the MSYS shell]
- ]]
- [[list
- [li Common install location: "=/mingw="]
- ]]
-]
-
-]
-
-The built executables are placed in a subdirectory specific to your platform. For example, on Linux running on an Intel x86-compatible chip, the executables are placed in "=bin.linuxx86=". The =bjam[.exe]= executable can be used to invoke Boost.Build.
-
-The build scripts support additional invocation arguments for use by developers of Boost.Jam and for additional setup of the toolset. The extra arguments come after the toolset:
-
-* Arguments not in the form of an option, before option arguments, are used for extra setup to toolset configuration scripts.
-* Arguments of the form "=--option=", which are passed to the =build.jam= build script.
-* Arguments not in the form of an option, after the options, which are targets for the =build.jam= script.
-
-[pre
-/build/ \[/toolset/\] \[/setup/\*\] \[--/option/+ /target/\*\]
-]
-
-The arguments immediately after the toolset are passed directly to the setup script of the toolset, if available and if it needs to be invoked. This allows one to configure the toolset as needed to do non-default builds of =bjam=, for example to build a Win64 version with =vc8=. See the toolset descriptions above for when particular toolsets support this.
-
-The arguments starting with the "=--option=" forms are passed to the =build.jam= script and are used to further customize what gets built. Options and targets supported by the =build.jam= script:
-
-[variablelist
- [[[literal ---]]
- [Empty option when one wants to only specify a target.]]
- [[[literal --release]]
- [The default, builds the optimized executable.]]
- [[[literal --debug]]
- [Builds debugging versions of the executable. When built they are placed in their own directory "=bin./platform/.debug=".]]
- [[[literal --grammar]]
- [Normally the Jam language grammar parsing files are not regenerated. This forces building of the grammar, although it may not force the regeneration of the grammar parser. If the parser is out of date it will be regenerated and subsequently built.]]
- [[[literal --with-python=/path/]]
- [Enables Python integration, given a path to the Python libraries.]]
- [[[literal --gc]]
- [Enables use of the Boehm Garbage Collector. The build will look for the Boehm-GC source in a "boehm_gc" subdirectory from the =bjam= sources.]]
- [[[literal --duma]]
- [Enables use of the DUMA (Detect Unintended Memory Access) debugging memory allocator. The build expects to find the DUMA source files in a "duma" subdirectory from the =bjam= sources.]]
- [[[literal --toolset-root=/path/]]
- [Indicates where the toolset used to build is located. This option is passed in by the bootstrap (=build.bat= or =build.sh=) script.]]
- [[[literal --show-locate-target]]
- [For information, prints out where it will put the built executable.]]
- [[[literal --noassert]]
- [Disable debug assertions, even if building the debug version of the executable.]]
- [[[literal dist]]
- [Generate packages (compressed archives) as appropriate for distribution in the platform, if possible.]]
- [[[literal clean]]
- [Remove all the built executables and objects.]]
-]
-
-[endsect]
-
-[section:language Language]
-
-=BJam= has an interpreted, procedural language. Statements in =bjam= are rule (procedure) definitions, rule invocations, flow-of-control structures, variable assignments, and sundry language support.
-
-[section:lexical Lexical Features]
-
-=BJam= treats its input files as whitespace-separated tokens, with two exceptions: double quotes (") can enclose whitespace to embed it into a token, and everything between the matching curly braces ({}) in the definition of a rule action is treated as a single string. A backslash (\\) can escape a double quote, or any single whitespace character.
-
-=BJam= requires whitespace (blanks, tabs, or newlines) to surround all tokens, including the colon (:) and semicolon (;) tokens.
-
-=BJam= keywords (as mentioned in this document) are reserved and generally
-must be quoted with double quotes (") to be used as arbitrary tokens, such as
-variable or target names.
-
-Comments start with the [^#] character and extend until the end of line.
-
-[endsect]
-
-[section:target Targets]
-
-The essential =bjam= data entity is a target. Build targets are files to be updated. Source targets are the files used in updating built targets. Built targets and source targets are collectively referred to as file targets, and frequently built targets are source targets for other built targets. Pseudotargets are symbols which represent dependencies on other targets, but which are not themselves associated with any real file.
-
-A file target's identifier is generally the file's name, which can be absolutely rooted, relative to the directory of =bjam='s invocation, or simply local (no directory). Most often it is the last case, and the actual file path is bound using the =$(SEARCH)= and =$(LOCATE)= special variables. See [link jam.language.variables.builtins.search SEARCH and LOCATE Variables] below. A local filename is optionally qualified with grist, a string value used to assure uniqueness. A file target with an identifier of the form /file(member)/ is a library member (usually an =ar=(1) archive on Unix).
-
-[section Binding Detection]
-
-Whenever a target is bound to a location in the filesystem, Boost Jam will look for a variable called =BINDRULE= (first "on" the target being bound, then in the global module). If non-empty, =$(BINDRULE[1])= names a rule which is called with the name of the target and the path it is being bound to. The signature of the rule named by =$(BINDRULE[1])= should match the following:
-
-[pre
-rule /bind-rule/ ( /target/ : /path/ )
-]
-
-This facility is useful for correct header file scanning, since many compilers will search for `#include` files first in the directory containing the file doing the `#include` directive. =$(BINDRULE)= can be used to make a record of that directory.
-
-[endsect]
-
-[endsect]
-
-[section:rules Rules]
-
-The basic =bjam= language entity is called a rule. A rule is defined in two parts: the procedure and the actions. The procedure is a body of jam statements to be run when the rule is invoked; the actions are the OS shell commands to execute when updating the built targets of the rule.
-
-Rules can return values, which can be expanded into a list with "[ /rule/ /args/ ... ]". A rule's value is the value of its last statement, though only the following statements have values: 'if' (value of the leg chosen), 'switch' (value of the case chosen), set (value of the resulting variable), and 'return' (value of its arguments). Note that 'return' doesn't actually cause a return, i.e., it is a no-op unless it is the last statement of the last block executed within the rule body.
-
-The =bjam= statements for defining and invoking rules are as follows:
-
-Define a rule's procedure, replacing any previous definition.
-
-[pre
-rule /rulename/ { /statements/ }
-]
-
-Define a rule's updating actions, replacing any previous definition.
-
-[pre
-actions \[ /modifiers/ \] /rulename/ { /commands/ }
-]
-
-Invoke a rule.
-
-[pre
-/rulename/ /field1/ : /field2/ : /.../ : /fieldN/ ;
-]
-
-Invoke a rule under the influence of a target's specific variables.
-
-[pre
-on /target/ /rulename/ /field1/ : /field2/ : /.../ : /fieldN/ ;
-]
-
-Used as an argument, expands to the return value of the rule invoked.
-
-[pre
-\[ /rulename/ /field1/ : /field2/ : /.../ : /fieldN/ \]
-\[ on /target/ /rulename/ /field1/ : /field2/ : /.../ : /fieldN/ \]
-]
-
-A rule is invoked with values in /field1/ through /fieldN/. They may be referenced in the procedure's statements as [^$(1)] through [^$(['N])] (9 max), and the first two only may be referenced in the action's /commands/ as [^$(1)] and [^$(2)]. [^$(<)] and [^$(>)] are synonymous with [^$(1)] and [^$(2)].
-
-Rules fall into two categories: updating rules (with actions), and pure procedure rules (without actions). Updating rules treat arguments [^$(1)] and [^$(2)] as built targets and sources, respectively, while pure procedure rules can take arbitrary arguments.
-
-When an updating rule is invoked, its updating actions are added to those associated with its built targets ([^$(1)]) before the rule's procedure is run. Later, to build the targets in the updating phase, /commands/ are passed to the OS command shell, with [^$(1)] and [^$(2)] replaced by bound versions of the target names. See Binding above.
-
-Rule invocation may be indirected through a variable:
-
-[pre
-$(/var/) /field1/ : /field2/ : /.../ : /fieldN/ ;
-
-on /target/ $(/var/) /field1/ : /field2/ : /.../ : /fieldN/ ;
-
-\[ $(/var/) /field1/ : /field2/ : /.../ : /fieldN/ \]
-\[ on /target/ $(/var/) /field1/ : /field2/ : /.../ : /fieldN/ \]
-]
-
-The variable's value names the rule (or rules) to be invoked. A rule is
-invoked for each element in the list of [^$(/var/)]'s values. The fields
-[^/field1/ : /field2/ : /.../] are passed as arguments for each
-invokation. For the [ ... ] forms, the return value is the concatenation of
-the return values for all of the invocations.
-
-[section Action Modifiers]
-
-The following action modifiers are understood:
-
-[variablelist
-
-[[[^actions bind /vars/]]
- [[^$(/vars/)] will be replaced with bound values.]]
-
-[[[^actions existing]]
- [[^$(>)] includes only source targets currently existing.]]
-
-[[[^actions ignore]]
- [The return status of the commands is ignored.]]
-
-[[[^actions piecemeal]]
- [commands are repeatedly invoked with a subset of [^$(>)] small enough to fit in the command buffer on this OS.]]
-
-[[[^actions quietly]]
- [The action is not echoed to the standard output.]]
-
-[[[^actions together]]
- [The [^$(>)] from multiple invocations of the same action on the same built target are glommed together.]]
-
-[[[^actions updated]]
- [[^$(>)] includes only source targets themselves marked for updating.]]
-
-]
-
-[endsect]
-
-[section Argument lists]
-
-You can describe the arguments accepted by a rule, and refer to them by name within the rule. For example, the following prints "I'm sorry, Dave" to the console:
-
-[pre
-rule report ( pronoun index ? : state : names + )
-{
- local he.suffix she.suffix it.suffix = s ;
- local I.suffix = m ;
- local they.suffix you.suffix = re ;
- ECHO $(pronoun)'$($(pronoun).suffix) $(state), $(names\[$(index)\]) ;
-}
-report I 2 : sorry : Joe Dave Pete ;
-]
-
-Each name in a list of formal arguments (separated by "=:=" in the rule declaration) is bound to a single element of the corresponding actual argument unless followed by one of these modifiers:
-
-[table
-[[Symbol] [Semantics of preceding symbol]]
-[[=?=] [optional]]
-[[=*=] [Bind to zero or more unbound elements of the actual argument. When =*= appears where an argument name is expected, any number of additional arguments are accepted. This feature can be used to implement "varargs" rules.]]
-[[=+=] [Bind to one or more unbound elements of the actual argument.]]
-]
-
-The actual and formal arguments are checked for inconsistencies, which cause Jam to exit with an error code:
-
-[pre
-### argument error
-# rule report ( pronoun index ? : state : names + )
-# called with: ( I 2 foo : sorry : Joe Dave Pete )
-# extra argument foo
-### argument error
-# rule report ( pronoun index ? : state : names + )
-# called with: ( I 2 : sorry )
-# missing argument names
-]
-
-If you omit the list of formal arguments, all checking is bypassed as in "classic" Jam. Argument lists drastically improve the reliability and readability of your rules, however, and are *strongly recommended* for any new Jam code you write.
-
-[endsect]
-
-[section:builtins Built-in Rules]
-
-=BJam= has a growing set of built-in rules, all of which are pure procedure rules without updating actions. They are in three groups: the first builds the dependency graph; the second modifies it; and the third are just utility rules.
-
-[section Dependency Building]
-
-[section =DEPENDS= ]
-
-[pre
-rule DEPENDS ( /targets1/ * : /targets2/ * )
-]
-
-Builds a direct dependency: makes each of /targets1/ depend on each of /targets2/. Generally, /targets1/ will be rebuilt if /targets2/ are themselves rebuilt or are newer than /targets1/.
-
-[endsect]
-
-[section =INCLUDES= ]
-
-[pre
-rule INCLUDES ( /targets1/ * : /targets2/ * )
-]
-
-Builds a sibling dependency: makes any target that depends on any of /targets1/ also depend on each of /targets2/. This reflects the dependencies that arise when one source file includes another: the object built from the source file depends on both the original and the included source file, but the two source files don't depend on each other. For example:
-
-[pre
-DEPENDS foo.o : foo.c ;
-INCLUDES foo.c : foo.h ;
-]
-
-"=foo.o=" depends on "=foo.c=" and "=foo.h=" in this example.
-
-[endsect]
-
-[endsect]
-
-[section Modifying Binding]
-
-The six rules =ALWAYS=, =LEAVES=, =NOCARE=, =NOTFILE=, =NOUPDATE=, and =TEMPORARY= modify the dependency graph so that =bjam= treats the targets differently during its target binding phase. See Binding above. Normally, =bjam= updates a target if it is missing, if its filesystem modification time is older than any of its dependencies (recursively), or if any of its dependencies are being updated. This basic behavior can be changed by invoking the following rules:
-
-[section =ALWAYS= ]
-
-[pre
-rule ALWAYS ( /targets/ * )
-]
-
-Causes /targets/ to be rebuilt regardless of whether they are up-to-date (they must still be in the dependency graph). This is used for the clean and uninstall targets, as they have no dependencies and would otherwise appear never to need building. It is best applied to targets that are also =NOTFILE= targets, but it can also be used to force a real file to be updated as well.
-
-[endsect]
-
-[section =LEAVES= ]
-
-[pre
-rule LEAVES ( /targets/ * )
-]
-
-Makes each of /targets/ depend only on its leaf sources, and not on any intermediate targets. This makes it immune to its dependencies being updated, as the "leaf" dependencies are those without their own dependencies and without updating actions. This allows a target to be updated only if original source files change.
-
-[endsect]
-
-[section =NOCARE= ]
-
-[pre
-rule NOCARE ( /targets/ * )
-]
-
-Causes =bjam= to ignore /targets/ that neither can be found nor have updating actions to build them. Normally for such targets =bjam= issues a warning and then skips other targets that depend on these missing targets. The =HdrRule= in =Jambase= uses =NOCARE= on the header file names found during header file scanning, to let =bjam= know that the included files may not exist. For example, if an `#include` is within an `#ifdef`, the included file may not actually be around.
-
-[warning For targets with build actions: if their build actions exit with a nonzero return code, dependent targets will still be built.]
-
-[endsect]
-
-[section =NOTFILE= ]
-
-[pre
-rule NOTFILE ( /targets/ * )
-]
-
-Marks /targets/ as pseudotargets and not real files. No timestamp is checked, and so the actions on such a target are only executed if the target's dependencies are updated, or if the target is also marked with =ALWAYS=. The default =bjam= target "=all=" is a pseudotarget. In =Jambase=, =NOTFILE= is used to define several additional convenience pseudotargets.
-
-[endsect]
-
-[section =NOUPDATE= ]
-
-[pre
-rule NOUPDATE ( /targets/ * )
-]
-
-Causes the timestamps on /targets/ to be ignored. This has two effects: first, once the target has been created it will never be updated; second, manually updating the target will not cause other targets to be updated. In =Jambase=, for example, this rule is applied to directories by the =MkDir= rule, because =MkDir= only cares that the target directory exists, not when it was last updated.
-
-[endsect]
-
-[section =TEMPORARY= ]
-
-[pre
-rule TEMPORARY ( /targets/ * )
-]
-
-Marks /targets/ as temporary, allowing them to be removed after other targets that depend upon them have been updated. If a =TEMPORARY= target is missing, =bjam= uses the timestamp of the target's parent. =Jambase= uses =TEMPORARY= to mark object files that are archived in a library after they are built, so that they can be deleted after they are archived.
-
-[endsect]
-
-[section =FAIL_EXPECTED= ]
-
-[pre
-rule FAIL_EXPECTED ( /targets/ * )
-]
-
-For handling targets whose build actions are expected to fail (e.g. when testing
-that assertions or compile-time type checking work properly), Boost Jam supplies
-the =FAIL_EXPECTED= rule in the same style as =NOCARE=, et al. During target
-updating, the return code of the build actions for arguments to =FAIL_EXPECTED=
-is inverted: if it fails, building of dependent targets continues as though it
-succeeded. If it succeeds, dependent targets are skipped.
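-
-A rough sketch of how a compile-time failure test might be marked (the names are hypothetical; the compile action itself is assumed to be set up elsewhere):
-
-[pre
-# The compilation of must-not-compile.o is expected to fail; its exit
-# status is inverted when deciding whether to build dependent targets.
-FAIL_EXPECTED must-not-compile.o ;
-DEPENDS compile-fail-test : must-not-compile.o ;
-]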
-
-[endsect]
-
-[section =RMOLD= ]
-
-[pre
-rule RMOLD ( /targets/ * )
-]
-
-=BJam= removes any target files that may exist on disk when the rule used to build those targets fails. However, targets whose dependencies fail to build are not removed by default. The =RMOLD= rule causes its arguments to be removed if any of their dependencies fail to build.
-
-[endsect]
-
-[section =ISFILE= ]
-
-[pre
-rule ISFILE ( /targets/ * )
-]
-
-=ISFILE= marks targets as required to be files. This changes the way =bjam= searches for the target such that it ignores matches for file system items that are not files, such as directories. This makes it possible to avoid `#include "exception"` matching if one happens to have a directory named =exception= in the header search path.
-
-[warning This is currently not fully implemented.]
-
-[endsect]
-
-[endsect]
-
-[section Utility]
-
-The two rules =ECHO= and =EXIT= are utility rules, used only in =bjam='s parsing phase.
-
-[section =ECHO= ]
-
-[pre
-rule ECHO ( /args/ * )
-]
-
-Blurts out the message /args/ to stdout.
-
-[endsect]
-
-[section =EXIT= ]
-
-[pre
-rule EXIT ( /message/ * : /result-value/ ? )
-]
-
-Blurts out the /message/ to stdout and then exits with a failure status if no /result-value/ is given, otherwise it exits with the given /result-value/.
-
-"=Echo=", "=echo=", "=Exit=", and "=exit=" are accepted as aliases for =ECHO= and =EXIT=, since it is hard to tell that these are built-in rules and not part of the language, like "=include=".
-
-[endsect]
-
-[section =GLOB= ]
-
-The =GLOB= rule does filename globbing.
-
-[pre
-rule GLOB ( /directories/ * : /patterns/ * : /downcase-opt/ ? )
-]
-
-=GLOB= uses the same wildcards as the patterns in the switch statement. It is invoked by being used as an argument to a rule invocation inside of "=[ ]=". For example: "[^FILES = \[ GLOB dir1 dir2 : *.c *.h \]]" sets =FILES= to the list of C source and header files in =dir1= and =dir2=. The resulting filenames are the full pathnames, including the directory, but the pattern is applied only to the file name without the directory.
-
-If /downcase-opt/ is supplied, filenames are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT and Cygwin, filenames are always downcased before matching.
-
-[endsect]
-
-[section =MATCH= ]
-
-The =MATCH= rule does pattern matching.
-
-[pre
-rule MATCH ( /regexps/ + : /list/ * )
-]
-
-Matches the =egrep=(1) style regular expressions /regexps/ against the strings in /list/. The result is the concatenation of matching =()= subexpressions for each string in /list/, and for each regular expression in /regexps/. Only useful within the "=[ ]=" construct, to change the result into a list.
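-
-For illustration, a small sketch that pulls the numeric components out of a version string:
-
-[pre
-local parts = \[ MATCH "(\[0-9\]+)\[.\](\[0-9\]+)" : 1.57.0 \] ;
-ECHO $(parts) ;   # prints "1 57"
-]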
-
-[endsect]
-
-[section =BACKTRACE= ]
-
-[pre
-rule BACKTRACE ( )
-]
-
-Returns a list of quadruples: /filename/ /line/ /module/ /rulename/..., describing each shallower level of the call stack. This rule can be used to generate useful diagnostic messages from Jam rules.
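-
-A minimal sketch of a diagnostic helper built on top of =BACKTRACE=:
-
-[pre
-rule dump-stack ( )
-{
-    # Each stack frame contributes a filename, line, module and rule name.
-    local frames = \[ BACKTRACE \] ;
-    ECHO call stack: $(frames) ;
-}
-]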
-
-[endsect]
-
-[section =UPDATE= ]
-
-[pre
-rule UPDATE ( /targets/ * )
-]
-
-Classic jam treats any non-option element of the command line as the name of a target to be updated. This prevented more sophisticated handling of the command line. That behavior is now enabled again, but with additional changes to the =UPDATE= rule that allow the list of targets to update to be changed flexibly. The =UPDATE= rule has two effects:
-
-# It clears the list of targets to update, and
-# Causes the specified targets to be updated.
-
-If no target was specified with the =UPDATE= rule, no targets will be updated. To support changing the update list in more useful ways, the rule also returns the targets previously in the update list. This makes it possible to add targets like so:
-
-[pre
-local previous-updates = \[ UPDATE \] ;
-UPDATE $(previous-updates) a-new-target ;
-]
-
-[endsect]
-
-[section =W32_GETREG= ]
-
-[pre
-rule W32_GETREG ( /path/ : /data/ ? )
-]
-
-Defined only for the win32 platform. It reads the Windows registry. '/path/' is the location of the information, and '/data/' is the name of the value to retrieve. If '/data/' is omitted, the default value of '/path/' will be returned. The '/path/' value must conform to the MS key path format and must be prefixed with one of the predefined root keys. As usual,
-
-* '=HKLM=' is equivalent to '=HKEY_LOCAL_MACHINE='.
-* '=HKCU=' is equivalent to '=HKEY_CURRENT_USER='.
-* '=HKCR=' is equivalent to '=HKEY_CLASSES_ROOT='.
-
-Other predefined root keys are not supported.
-
-Currently supported data types: '=REG_DWORD=', '=REG_SZ=', '=REG_EXPAND_SZ=', '=REG_MULTI_SZ='. Data of type '=REG_DWORD=' is turned into a string, '=REG_MULTI_SZ=' into a list of strings, and for '=REG_EXPAND_SZ=' data any environment variables in it are replaced with their defined values. Data of type '=REG_SZ=' and other unsupported types is put into a string without modification. If the value of the data cannot be retrieved, an empty list is returned. For example,
-
-[pre
-local PSDK-location =
- \[ W32_GETREG HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\MicrosoftSDK\\\\Directories : "Install Dir" \] ;
-]
-
-[endsect]
-
-[section =W32_GETREGNAMES= ]
-
-[pre
-rule W32_GETREGNAMES ( /path/ : /result-type/ )
-]
-
-Defined only for the win32 platform. It reads the Windows registry. '/path/' is the location of the information, and '/result-type/' is either '=subkeys=' or '=values='. For more information on the '/path/' format and constraints, please see =W32_GETREG=.
-
-Depending on '/result-type/', the rule returns one of the following:
-
-[variablelist
- [[=subkeys=] [Names of all direct subkeys of '/path/'.]]
- [[=values=] [Names of values contained in registry key given by '/path/'. The "default" value of the key appears in the returned list only if its value has been set in the registry.]]
-]
-
-If '/result-type/' is not recognized, or the requested data cannot be retrieved, the rule returns an empty list.
-Example:
-
-[pre
-local key = "HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\App Paths" ;
-local subkeys = \[ W32_GETREGNAMES "$(key)" : subkeys \] ;
-for local subkey in $(subkeys)
-{
- local values = \[ W32_GETREGNAMES "$(key)\\\\$(subkey)" : values \] ;
- for local value in $(values)
- {
- local data = \[ W32_GETREG "$(key)\\\\$(subkey)" : "$(value)" \] ;
- ECHO "Registry path: " $(key)\\\\$(subkey) ":" $(value) "=" $(data) ;
- }
-}
-]
-
-[endsect]
-
-[section =SHELL= ]
-
-[pre
-rule SHELL ( /command/ : * )
-]
-
-=SHELL= executes /command/, and then returns the standard output of /command/. =SHELL= only works on platforms with a =popen()= function in the C library. On platforms without a working =popen()= function, =SHELL= is implemented as a no-op. =SHELL= works on Unix, MacOS X, and most Windows compilers. =SHELL= is a no-op with Metrowerks compilers under Windows. A variable set of options is accepted as additional arguments:
-
-[variablelist
- [[=exit-status=] [In addition to the output the result status of the executed command is returned as a second element of the result.]]
- [[=no-output=] [Don't capture the output of the command. Instead an empty ("") string value is returned in place of the output.]]
-]
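-
-For example, a sketch that captures both the output and the exit status of a command (meaningful only on platforms where =SHELL= is not a no-op):
-
-[pre
-local result = \[ SHELL "uname -s" : exit-status \] ;
-ECHO output is $(result\[1\]) ;
-ECHO status is $(result\[2\]) ;
-]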
-
-Because the Perforce Jambase defines a =SHELL= rule which hides the
-built-in rule, =COMMAND= can be used as an alias for =SHELL= in such a case.
-
-[endsect]
-
-[section =MD5= ]
-
-[pre
-rule MD5 ( /string/ )
-]
-
-=MD5= computes the MD5 hash of the string passed as a parameter and returns it.
-
-[endsect]
-
-[section =SPLIT_BY_CHARACTERS= ]
-
-[pre
-rule SPLIT_BY_CHARACTERS ( /string/ : /delimiters/ )
-]
-
-=SPLIT_BY_CHARACTERS= splits the specified /string/ on any delimiter character
-present in /delimiters/ and returns the resulting list.
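-
-For example, splitting a path-like string on either of two delimiter characters:
-
-[pre
-local parts = \[ SPLIT_BY_CHARACTERS "a:b;c" : ":;" \] ;
-ECHO $(parts) ;   # prints "a b c"
-]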
-
-[endsect]
-
-[section =PRECIOUS= ]
-
-[pre
-rule PRECIOUS ( /targets/ * )
-]
-
-The =PRECIOUS= rule specifies that each of the targets passed as the arguments
-should not be removed even if the command updating that target fails.
-
-[endsect]
-
-[section =PAD= ]
-
-[pre
-rule PAD ( /string/ : /width/ )
-]
-
-If /string/ is shorter than /width/ characters, pads it with whitespace
-characters on the right, and returns the result. Otherwise, returns
-/string/ unmodified.
-
-[endsect]
-
-[section =FILE_OPEN= ]
-
-[pre
-rule FILE_OPEN ( /filename/ : /mode/ )
-]
-
-The =FILE_OPEN= rule opens the specified file and returns a file
-descriptor. The /mode/ parameter can be either "w" or "r". Note
-that at present, only the =UPDATE_NOW= rule can use the resulting
-file descriptor number.
-
-[endsect]
-
-[section =UPDATE_NOW= ]
-
-[pre
-rule UPDATE_NOW ( /targets/ * : /log/ ? : /ignore-minus-n/ ? )
-]
-
-The =UPDATE_NOW= rule causes the specified targets to be updated immediately.
-If the update was successful, a non-empty string is returned. The /log/ parameter,
-if present, specifies a descriptor of a file to which all output from building
-is redirected. If the /ignore-minus-n/ parameter is specified, the targets
-are updated even if the =-n= parameter is specified on the command line.
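-
-For illustration, a rough sketch combining =FILE_OPEN= and =UPDATE_NOW= (the target and file names are hypothetical):
-
-[pre
-local log = \[ FILE_OPEN build.log : "w" \] ;
-local ok = \[ UPDATE_NOW my-target : $(log) \] ;
-if ! $(ok)
-{
-    EXIT "my-target failed to build" : 1 ;
-}
-]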
-
-[endsect]
-
-[endsect]
-
-[endsect]
-
-[endsect]
-
-[section Flow-of-Control]
-
-=BJam= has several simple flow-of-control statements:
-
-[pre
-for /var/ in /list/ { /statements/ }
-]
-
-Executes /statements/ for each element in /list/, setting the variable /var/ to the element value.
-
-[pre
-if /cond/ { /statements/ }
-\[ else { /statements/ } \]
-]
-
-Does the obvious; the =else= clause is optional. /cond/ is built of:
-
-[variablelist
-
-[[[^['a]]]
- [true if any ['a] element is a non-zero-length string]]
-
-[[[^['a] = ['b]]]
- [list ['a] matches list ['b] string-for-string]]
-
-[[[^['a] != ['b]]]
- [list ['a] does not match list ['b]]]
-
-[[[^['a] < ['b]]]
- [['a\[i\]] string is less than ['b\[i\]] string, where ['i] is first mismatched element in lists ['a] and ['b]]]
-
-[[[^['a] <= ['b]]]
- [every ['a] string is less than or equal to its ['b] counterpart]]
-
-[[[^['a] > ['b]]]
- [['a\[i\]] string is greater than ['b\[i\]] string, where ['i] is first mismatched element]]
-
-[[[^['a] >= ['b]]]
- [every ['a] string is greater than or equal to its ['b] counterpart]]
-
-[[[^['a] in ['b]]]
- [true if all elements of ['a] can be found in ['b], or if ['a] has no elements]]
-
-[[[^! ['cond]]]
- [condition not true]]
-
-[[[^['cond] && ['cond]]]
- [conjunction]]
-
-[[[^['cond] || ['cond]]]
- [disjunction]]
-
-[[[^( ['cond] )]]
- [precedence grouping]]
-
-]
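-
-For illustration, a small sketch combining several of the condition forms above (the variable values are hypothetical):
-
-[pre
-if $(OS) = NT || $(OS) in CYGWIN MINGW
-{
-    suffix = .exe ;
-}
-else
-{
-    suffix = "" ;
-}
-]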
-
-[pre
-include /file/ ;
-]
-
-Causes =bjam= to read the named /file/. The /file/ is bound like a regular target (see Binding above) but unlike a regular target the include /file/ cannot be built.
-
-The include /file/ is inserted into the input stream during the parsing phase. The primary input file and all the included file(s) are treated as a single file; that is, jam infers no scope boundaries from included files.
-
-[pre
-local /vars/ \[ = /values/ \] ;
-]
-
-Creates new /vars/ inside the enclosing ={}= block, obscuring any previous values they might have. The previous values for /vars/ are restored when the current block ends. Any rule called or file included will see the local and not the previous value (this is sometimes called Dynamic Scoping). The local statement may appear anywhere, even outside of a block (in which case the previous value is restored when the input ends). The /vars/ are initialized to /values/ if present, or left uninitialized otherwise.
-
-[pre
-return /values/ ;
-]
-
-Within a rule body, the return statement sets the return value for an invocation of the rule. It does *not* cause the rule to return; a rule's value is actually the value of the last statement executed, so a return should be the last statement executed before the rule "naturally" returns.
-
-[pre
-switch /value/
-{
- case /pattern1/ : /statements/ ;
- case /pattern2/ : /statements/ ;
- ...
-}
-]
-
-The switch statement executes zero or one of the enclosed /statements/, depending on which, if any, is the first case whose /pattern/ matches /value/. The /pattern/ values are not variable-expanded. The pattern values may include the following wildcards:
-
-[variablelist
-
-[[[^?]]
- [match any single character]]
-
-[[[^*]]
- [match zero or more characters]]
-
-[[[^\[/chars/\]]]
- [match any single character in /chars/]]
-
-[[[^\[\^/chars/\]]]
- [match any single character not in /chars/]]
-
-[[[^\\/x/]]
- [match /x/ (escapes the other wildcards)]]
-
-]
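-
-For example, a sketch that dispatches on a file name suffix ([^$(file)] is assumed to hold a single file name):
-
-[pre
-switch $(file)
-{
-    case *.c   : ECHO C source ;
-    case *.cpp : ECHO C++ source ;
-    case *.h   : ECHO header ;
-    case *     : ECHO something else ;
-}
-]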
-
-[pre
-while /cond/ { /statements/ }
-]
-
-Repeatedly execute /statements/ while /cond/ remains true upon entry. (See the description of /cond/ expression syntax under if, above).
-
-[endsect]
-
-[section Variables]
-
-=BJam= variables are lists of zero or more elements, with each element being a string value. An undefined variable is indistinguishable from a variable with an empty list; however, a defined variable may have one or more elements which are null strings. All variables are referenced as [^$(/variable/)].
-
-Variables are either global or target-specific. In the latter case, the variable takes on the given value only during the updating of the specific target.
-
-A variable is defined with:
-
-[pre
-/variable/ = /elements/ ;
-/variable/ += /elements/ ;
-/variable/ on /targets/ = /elements/ ;
-/variable/ on /targets/ += /elements/ ;
-/variable/ default = /elements/ ;
-/variable/ ?= /elements/ ;
-]
-
-The first two forms set /variable/ globally. The third and fourth forms set a target-specific variable. The [^\=] operator replaces any previous elements of /variable/ with /elements/; the [^+=] operator adds /elements/ to /variable/'s list of elements. The final two forms are synonymous: they set /variable/ globally, but only if it was previously unset.
-
-Variables referenced in updating commands will be replaced with their values; target-specific values take precedence over global values. Variables passed as arguments (=$(1)= and =$(2)=) to actions are replaced with their bound values; the "=bind=" modifier can be used on actions to cause other variables to be replaced with bound values. See Action Modifiers above.
-
-=BJam= variables are not re-exported to the environment of the shell that executes the updating actions, but the updating actions can reference =bjam= variables with [^$(/variable/)].
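-
-For illustration, a sketch of the usual pattern in which a rule records a target-specific value that the corresponding updating action then expands (the rule and variable names are hypothetical):
-
-[pre
-rule Compile ( target : source )
-{
-    DEPENDS $(target) : $(source) ;
-    CCFLAGS on $(target) = -O2 -Wall ;
-}
-actions Compile
-{
-    cc -c -o $(<) $(CCFLAGS) $(>)
-}
-Compile hello.o : hello.c ;
-]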
-
-[section:expansion Variable Expansion]
-
-During parsing, =bjam= performs variable expansion on each token that is not a keyword or rule name. Such tokens with embedded variable references are replaced with zero or more tokens. Variable references are of the form [^$(/v/)] or [^$(/vm/)], where ['v] is the variable name, and ['m] are optional modifiers.
-
-Variable expansion in a rule's actions is similar to variable expansion in statements, except that the action string is tokenized at whitespace regardless of quoting.
-
-The result of a token after variable expansion is the /product/ of the components of the token, where each component is a literal substring or a list substituting a variable reference. For example:
-
-[pre
-$(X) -> a b c
-t$(X) -> ta tb tc
-$(X)z -> az bz cz
-$(X)-$(X) -> a-a a-b a-c b-a b-b b-c c-a c-b c-c
-]
-
-The variable name and modifiers can themselves contain a variable reference, and this partakes of the product as well:
-
-[pre
-$(X) -> a b c
-$(Y) -> 1 2
-$(Z) -> X Y
-$($(Z)) -> a b c 1 2
-]
-
-Because of this product expansion, if any variable reference in a token is undefined, the result of the expansion is an empty list. If any variable element is a null string, the result propagates the non-null elements:
-
-[pre
-$(X) -> a ""
-$(Y) -> "" 1
-$(Z) ->
--$(X)$(Y)- -> -a- -a1- -- -1-
--$(X)$(Z)- ->
-]
-
-A variable element's string value can be parsed into grist and filename-related components. Modifiers to a variable are used to select elements, select components, and replace components. The modifiers are:
-
-[variablelist
-
-[[[^\[['n]\]]] [Select element number ['n] (starting at 1). If the variable
- contains fewer than ['n] elements, the result is a zero-element list. ['n]
- can be negative in which case the element number ['n] from the last leftward
- is returned.]]
-
-[[[^\[['n]-['m]\]]]
- [Select elements number ['n] through ['m]. ['n] and ['m] can be negative in which case they refer to elements counting from the last leftward.]]
-
-[[[^\[['n]-\]]]
- [Select elements number ['n] through the last. ['n] can be negative in which case it refers to the element counting from the last leftward.]]
-
-[[[^:B]]
- [Select filename base.]]
-
-[[[^:S]]
- [Select (last) filename suffix.]]
-
-[[[^:M]]
- [Select archive member name.]]
-
-[[[^:D]]
- [Select directory path.]]
-
-[[[^:P]]
- [Select parent directory.]]
-
-[[[^:G]]
- [Select grist.]]
-
-[[[^:U]]
- [Replace lowercase characters with uppercase.]]
-
-[[[^:L]]
- [Replace uppercase characters with lowercase.]]
-
-[[[^:T]]
- [Converts all back-slashes ("\\") to forward slashes ("/"). For example
-``
- x = "C:\\Program Files\\Borland" ; ECHO $(x:T) ;
-``
-prints [^"C:/Program Files/Borland"]
-]]
-
-[[[^:W]]
- [When invoking Windows-based tools from [@http://www.cygwin.com/ Cygwin]
- it can be important to pass them true windows-style paths. The =:W=
- modifier, *under Cygwin only*, turns a cygwin path into a Win32 path using
- the [@http://www.cygwin.com/cygwin-api/func-cygwin-conv-to-win32-path.html
- =cygwin_conv_to_win32_path=] function. On other platforms, the string is
- unchanged. For example
-``
- x = "/cygdrive/c/Program Files/Borland" ; ECHO $(x:W) ;
-``
-prints [^"C:\\Program Files\\Borland"] on Cygwin
-]]
-
-[[[^:['chars]]]
- [Select the components listed in ['chars].]]
-
-[[[^:G=['grist]]]
- [Replace grist with ['grist].]]
-
-[[[^:D=['path]]]
- [Replace directory with ['path].]]
-
-[[[^:B=['base]]]
- [Replace the base part of file name with ['base].]]
-
-[[[^:S=['suf]]]
- [Replace the suffix of file name with ['suf].]]
-
-[[[^:M=['mem]]]
- [Replace the archive member name with ['mem].]]
-
-[[[^:R=['root]]]
- [Prepend ['root] to the whole file name, if not already rooted.]]
-
-[[[^:E=['value]]]
- [Assign ['value] to the variable if it is unset.]]
-
-[[[^:J=['joinval]]]
-  [Concatenate list elements into a single element, separated by ['joinval].]]
-
-]
-
-On VMS, [^$(var:P)] is the parent directory of [^$(var:D)].
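-
-For illustration, a few of the modifiers applied to a single path-like value (the values shown in the comments are the expected results):
-
-[pre
-x = src/lib/foo.cpp ;
-ECHO $(x:D) ;      # prints "src/lib"
-ECHO $(x:B) ;      # prints "foo"
-ECHO $(x:S) ;      # prints ".cpp"
-ECHO $(x:S=.o) ;   # prints "src/lib/foo.o"
-ECHO $(x:D=bin) ;  # prints "bin/foo.cpp"
-]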
-
-[endsect]
-
-[section Local For Loop Variables]
-
-Boost Jam allows you to declare a local for loop control variable right in the loop:
-
-[pre
-x = 1 2 3 ;
-y = 4 5 6 ;
-for *local* y in $(x)
-{
- ECHO $(y) ; # prints "1", "2", or "3"
-}
-ECHO $(y) ; # prints "4 5 6"
-]
-
-[endsect]
-
-[section:atfile Generated File Expansion]
-
-During expansion of expressions =bjam= also looks for subexpressions of the form
-=@(filename:E=filecontents)= and replaces the expression with =filename= after
-creating the given file with the contents set to =filecontents=. This is useful
-for creating compiler response files, and other "internal" files. The expansion
-works both during parsing and action execution. Hence it is possible to create
-files during any of the three build phases.
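-
-A rough sketch of the typical use (the tool name and its options are placeholders, not a real command line):
-
-[pre
-# The source list is written to a response file named after the target;
-# only that file's name appears on the command line.
-actions archive
-{
-    mytool --output $(<) --filelist @($(<:S=.rsp):E=$(>))
-}
-]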
-
-[endsect]
-
-[section:builtins Built-in Variables]
-
-This section discusses variables that have special meaning to =bjam=. All of
-these must be defined or used in the global module -- using those variables
-inside a named module will not have the desired effect.
-See [link jam.language.modules Modules].
-
-[section:search SEARCH and LOCATE]
-
-These two variables control the binding of file target names to locations in
-the file system. Generally, =$(SEARCH)= is used to find existing sources
-while =$(LOCATE)= is used to fix the location for built targets.
-
-Rooted (absolute path) file targets are bound as is. Unrooted file target names are also normally bound as is, and thus relative to the current directory, but the settings of =$(LOCATE)= and =$(SEARCH)= alter this:
-
-* If =$(LOCATE)= is set then the target is bound relative to the first directory in =$(LOCATE)=. Only the first element is used for binding.
-* If =$(SEARCH)= is set then the target is bound to the first directory in =$(SEARCH)= where the target file already exists.
-* If the =$(SEARCH)= search fails, the target is bound relative to the current directory anyhow.
-
-Both =$(SEARCH)= and =$(LOCATE)= should be set target-specific and not globally. If they were set globally, =bjam= would use the same paths for all file binding, which is not likely to produce sane results. When writing your own rules, especially ones not built upon those in Jambase, you may need to set =$(SEARCH)= or =$(LOCATE)= directly. Almost all of the rules defined in Jambase set =$(SEARCH)= and =$(LOCATE)= to sensible values for sources they are looking for and targets they create, respectively.
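-
-For illustration, a minimal sketch of target-specific settings (the directory names are hypothetical):
-
-[pre
-# Look for the source in two possible directories; put the object in bin.
-SEARCH on foo.c = src src/legacy ;
-LOCATE on foo.o = bin ;
-DEPENDS foo.o : foo.c ;
-]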
-
-[endsect]
-
-[section:hdrscan HDRSCAN and HDRRULE]
-
-These two variables control header file scanning. =$(HDRSCAN)= is an
-=egrep(1)= pattern, with ()'s surrounding the file name, used to find file
-inclusion statements in source files. =Jambase= uses =$(HDRPATTERN)= as the
-pattern for =$(HDRSCAN)=. =$(HDRRULE)= is the name of a rule to invoke with
-the results of the scan: the scanned file is the target, the found files are
-the sources. This is the only place where =bjam= invokes a rule through a
-variable setting.
-
-Both =$(HDRSCAN)= and =$(HDRRULE)= must be set for header file scanning to take place, and they should be set target-specific and not globally. If they were set globally, all files, including executables and libraries, would be scanned for header file include statements.
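-
-A sketch of the =Jambase= idiom (=$(HDRPATTERN)= is assumed to hold a suitable include-matching regular expression):
-
-[pre
-rule HdrRule ( source : headers * )
-{
-    INCLUDES $(source) : $(headers) ;
-    NOCARE $(headers) ;
-}
-HDRSCAN on foo.c = $(HDRPATTERN) ;
-HDRRULE on foo.c = HdrRule ;
-]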
-
-The scanning for header file inclusions is not exact, but it is at least dynamic, so there is no need to run something like =makedepend(GNU)= to create a static dependency file. The scanning mechanism errs on the side of inclusion (i.e., it is more likely to return filenames that are not actually used by the compiler than to miss include files) because it can't tell if `#include` lines are inside `#ifdefs` or other conditional logic. In =Jambase=, =HdrRule= applies the =NOCARE= rule to each header file found during scanning, so that if a scanned header file is not present but its absence does not cause the compilation to fail, =bjam= won't care.
-
-Also, scanning with regular expressions only works where the included file name is literally in the source file. It can't handle languages that allow including files using variable names (as the =Jam= language itself does).
-
-[endsect]
-
-[section Semaphores]
-
-It is sometimes desirable to disallow parallel execution of some actions. For example:
-
-* Old versions of yacc use files with fixed names. So, running two yacc actions is dangerous.
-* One might want to perform parallel compiling, but not do parallel linking, because linking is i/o bound and only gets slower.
-
-Craig McPeeters has extended Perforce Jam to solve such problems, and that extension was integrated into Boost.Jam.
-
-Any target can be assigned a /semaphore/, by setting a variable called =SEMAPHORE= on that target. The value of the variable is the semaphore name. It must be different from names of any declared target, but is arbitrary otherwise.
-
-The semantics of semaphores is that within a group of targets which share the same semaphore, only one can be updated at a time, regardless of the "=-j=" option.
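-
-For example, a sketch that serializes all linking while still allowing compilation to run in parallel (the target names are hypothetical):
-
-[pre
-SEMAPHORE on app1.exe app2.exe = linking-semaphore ;
-]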
-
-[endsect]
-
-[section Platform Identifier]
-
-A number of Jam built-in variables can be used to identify runtime platform:
-
-[variablelist
-[[=OS=] [OS identifier string]]
-[[=OSPLAT=] [Underlying architecture, when applicable]]
-[[=MAC=] [true on MAC platform]]
-[[=NT=] [true on NT platform]]
-[[=OS2=] [true on OS2 platform]]
-[[=UNIX=] [true on Unix platforms]]
-[[=VMS=] [true on VMS platform]]
-]
-
-[endsect]
-
-[section Jam Version]
-
-[variablelist
-[[=JAMDATE=] [Time and date at =bjam= start-up as an ISO-8601 UTC value.]]
-[[=JAMUNAME=] [Output of the uname(1) command (Unix only)]]
-[[=JAMVERSION=] [=bjam= version, currently ":version:"]]
-[[=JAM_VERSION=] [A predefined global variable with two elements that indicates the version number of Boost Jam. Boost Jam versions start at "=03=" "=00=". Earlier versions of =Jam= do not automatically define =JAM_VERSION=.]]
-]
-
-[endsect]
-
-[section JAMSHELL]
-
-When =bjam= executes a rule's action block, it forks and execs a shell, passing the action block as an argument to the shell. The invocation of the shell can be controlled by =$(JAMSHELL)=. The default on Unix is, for example:
-
-[pre
-JAMSHELL = /bin/sh -c % ;
-]
-
-The =%= is replaced with the text of the action block.
-
-=BJam= does not directly support building in parallel across multiple hosts, since that is heavily dependent on the local environment. To build in parallel across multiple hosts, you need to write your own shell that provides access to the multiple hosts. You then reset =$(JAMSHELL)= to reference it.
-
-Just as =bjam= expands a =%= to be the text of the rule's action block, it expands a =!= to be the multi-process slot number. The slot number varies between 1 and the number of concurrent jobs permitted by the =-j= flag given on the command line. Armed with this, it is possible to write a multiple host shell. For example:
-
-[pre
-#!/bin/sh
-
-# This sample JAMSHELL uses the SunOS on(1) command to execute a
-# command string with an identical environment on another host.
-
-# Set JAMSHELL = jamshell ! %
-#
-# where jamshell is the name of this shell file.
-#
-# This version handles up to -j6; after that they get executed
-# locally.
-
-case $1 in
-1|4) on winken sh -c "$2";;
-2|5) on blinken sh -c "$2";;
-3|6) on nod sh -c "$2";;
-*) eval "$2";;
-esac
-]
-
-[endsect]
-
-[section:actionrule =__TIMING_RULE__= and =__ACTION_RULE__=]
-
-The =__TIMING_RULE__= and =__ACTION_RULE__= can be set to the name of a rule
-for =bjam= to call *after* an action completes for a target. They both give
-diagnostic information about the action that completed. For =__TIMING_RULE__=
-the rule is called as:
-
- rule timing-rule ( args * : target : start end user system )
-
-And =__ACTION_RULE__= is called as:
-
- rule action-rule ( args * : target : command status start end user system : output ? )
-
-The arguments for both are:
-
-[variablelist
- [[[^args]]
- [Any values following the rule name in the =__TIMING_RULE__= or =__ACTION_RULE__=
- are passed along here.]]
- [[[^target]]
- [The =bjam= target that was built.]]
- [[[^command]]
- [The text of the executed command in the action body.]]
- [[[^status]]
- [The integer result of the executed command.]]
-  [[[^start]]
-  [The starting timestamp of the executed command as an ISO-8601 UTC value.]]
-  [[[^end]]
-  [The completion timestamp of the executed command as an ISO-8601 UTC value.]]
- [[[^user]]
- [The number of user CPU seconds the executed command spent as a floating
- point value.]]
- [[[^system]]
- [The number of system CPU seconds the executed command spent as a floating
- point value.]]
- [[[^output]]
- [The output of the command as a single string. The content of the output
- reflects the use of the =-pX= option.]]
-]
-
-[note
- If both variables are set for a target both are called, first =__TIMING_RULE__=
- then =__ACTION_RULE__=. ]
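-
-For illustration, a rough sketch of an action logger (the rule and target names are hypothetical; whether to set the variable globally or on specific targets is up to the build setup):
-
-[pre
-rule log-action ( args * : target : command status start end user system : output ? )
-{
-    ECHO $(target) finished with status $(status) in $(user)s user time ;
-}
-__ACTION_RULE__ on my-target = log-action ;
-]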
-
-[endsect]
-
-[endsect]
-
-[endsect]
-
-[section Modules]
-
-Boost Jam introduces support for modules, which provide some rudimentary namespace protection for rules and variables. A new keyword, "=module=", was also introduced. The features described in this section are primitives, meaning that they are meant to provide the operations needed to write Jam rules which provide a more elegant module interface.
-
-[section Declaration]
-
-[pre
-module /expression/ { ... }
-]
-
-Code within the [^{ ... }] executes within the module named by evaluating /expression/. Rule definitions can be found in the module's own namespace, and in the namespace of the global module as /module-name/./rule-name/, so within a module, other rules in that module may always be invoked without qualification:
-
-[pre
-*module my_module*
-*{*
- rule salute ( x ) { ECHO $(x), world ; }
- rule greet ( ) { salute hello ; }
- greet ;
-*}*
-*my_module.salute* goodbye ;
-]
-
-When an invoked rule is not found in the current module's namespace, it is looked up in the namespace of the global module, so qualified calls work across modules:
-
-[pre
-module your_module
-{
- rule bedtime ( ) { *my_module.salute* goodnight ; }
-}
-]
-
-[endsect]
-
-[section Variable Scope]
-
-Each module has its own set of dynamically nested variable scopes. When execution passes from module A to module B, all the variable bindings from A become unavailable, and are replaced by the bindings that belong to B. This applies equally to local and global variables:
-
-[pre
-module A
-{
- x = 1 ;
- rule f ( )
- {
- local y = 999 ; # becomes visible again when B.f calls A.g
- B.f ;
- }
- rule g ( )
- {
- ECHO $(y) ; # prints "999"
- }
-}
-module B
-{
- y = 2 ;
- rule f ( )
- {
- ECHO $(y) ; # always prints "2"
- A.g ;
- }
-}
-]
-
-The only way to access another module's variables is by entering that module:
-
-[pre
-rule peek ( module-name ? : variables + )
-{
- module $(module-name)
- {
- return $($(>)) ;
- }
-}
-]
-
-Note that because existing variable bindings change whenever a new module scope is entered, argument bindings become unavailable. That explains the use of "=$(>)=" in the peek rule above.
-
-[endsect]
-
-[section Local Rules]
-
-[pre
-local rule /rulename/...
-]
-
-The rule is declared locally to the current module. It is not entered in the global module with qualification, and its name will not appear in the result of:
-
-[pre
-\[ RULENAMES /module-name/ \]
-]
-
-[endsect]
-
-[section The =RULENAMES= Rule]
-
-[pre
-rule RULENAMES ( /module/ ? )
-]
-
-Returns a list of the names of all non-local rules in the given module. If /module/ is omitted, the names of all non-local rules in the global module are returned.
-
-[endsect]
-
-[section The =VARNAMES= Rule]
-
-[pre
-rule VARNAMES ( /module/ ? )
-]
-
-Returns a list of the names of all variable bindings in the given module. If /module/ is omitted, the names of all variable bindings in the global module are returned.
-
-[note This includes any local variables in rules from the call stack which have not returned at the time of the =VARNAMES= invocation.]
-
-[endsect]
-
-[section The =IMPORT= Rule]
-
-=IMPORT= allows rule name aliasing across modules:
-
-[pre
-rule IMPORT ( /source_module/ ? : /source_rules/ *
- : /target_module/ ? : /target_rules/ * )
-]
-
-The =IMPORT= rule copies rules from the /source_module/ into the /target_module/ as local rules. If either /source_module/ or /target_module/ is not supplied, it refers to the global module. /source_rules/ specifies which rules from the /source_module/ to import; /target_rules/ specifies the names to give those rules in /target_module/. If /source_rules/ contains a name which doesn't correspond to a rule in /source_module/, or if it contains a different number of items than /target_rules/, an error is issued. For example,
-
-[pre
-# import m1.rule1 into m2 as local rule m1-rule1.
-IMPORT m1 : rule1 : m2 : m1-rule1 ;
-# import all non-local rules from m1 into m2
-IMPORT m1 : \[ RULENAMES m1 \] : m2 : \[ RULENAMES m1 \] ;
-]
-
-[endsect]
-
-[section The =EXPORT= Rule]
-
-=EXPORT= allows rule name aliasing across modules:
-
-[pre
-rule EXPORT ( /module/ ? : /rules/ * )
-]
-
-The =EXPORT= rule marks /rules/ from /module/ as non-local (and thus exportable). If an element of /rules/ does not name a rule in /module/, an error is issued. For example,
-
-[pre
-module X {
- local rule r { ECHO X.r ; }
-}
-IMPORT X : r : : r ; # error - r is local in X
-EXPORT X : r ;
-IMPORT X : r : : r ; # OK.
-]
-
-[endsect]
-
-[section The =CALLER_MODULE= Rule]
-
-[pre
-rule CALLER_MODULE ( /levels/ ? )
-]
-
-=CALLER_MODULE= returns the name of the module scope enclosing the call to its caller (if levels is supplied, it is interpreted as an integer number of additional levels of call stack to traverse to locate the module). If the scope belongs to the global module, or if no such module exists, returns the empty list. For example, the following prints "{Y} {X}":
-
-[pre
-module X {
- rule get-caller { return \[ CALLER_MODULE \] ; }
- rule get-caller's-caller { return \[ CALLER_MODULE 1 \] ; }
- rule call-Y { return Y.call-X2 ; }
-}
-module Y {
- rule call-X { return X.get-caller ; }
- rule call-X2 { return X.get-caller's-caller ; }
-}
-callers = \[ X.get-caller \] \[ Y.call-X \] \[ X.call-Y \] ;
-ECHO {$(callers)} ;
-]
-
-[endsect]
-
-[section The =DELETE_MODULE= Rule]
-
-[pre
-rule DELETE_MODULE ( /module/ ? )
-]
-
-=DELETE_MODULE= removes all of the variable bindings and otherwise-unreferenced rules from the given module (or the global module, if no module is supplied), and returns their memory to the system.
-
-[note Though it won't affect rules that are currently executing until they complete, =DELETE_MODULE= should be used with extreme care because it will immediately wipe out all other rules and all variables (including locals) in that module. Because of the way dynamic binding works, variables which are shadowed by locals will not be destroyed, so the results can be really unpredictable.]
-
-[endsect]
-
-[endsect]
-
-[endsect]
-
-[section Miscellaneous]
-
-[section Diagnostics]
-
-In addition to generic error messages, =bjam= may emit one of the following:
-
-[pre warning: unknown rule X]
-
-A rule was invoked that has not been defined with an "=actions=" or "=rule=" statement.
-
-[pre using N temp target(s)]
-
-Targets marked as being temporary (but nonetheless present) have been found.
-
-[pre updating N target(s)]
-
-Targets are out-of-date and will be updated.
-
-[pre can't find N target(s)]
-
-Source files can't be found and there are no actions to create them.
-
-[pre can't make N target(s)]
-
-Due to sources not being found, other targets cannot be made.
-
-[pre warning: X depends on itself]
-
-A target depends on itself either directly or through its sources.
-
-[pre don't know how to make X]
-
-A target is not present and no actions have been defined to create it.
-
-[pre X skipped for lack of Y]
-
-A source failed to build, and thus a target cannot be built.
-
-[pre warning: using independent target X]
-
-A target that is not a dependency of any other target is being referenced with =$(<)= or =$(>)=.
-
-[pre X removed]
-
-=BJam= removed a partially built target after being interrupted.
-
-[endsect]
-
-[section Bugs, Limitations]
-
-For parallel building to be successful, the dependencies among files must be properly spelled out, as targets tend to get built in a quickest-first ordering. Also, beware of un-parallelizable commands that drop fixed-named files into the current directory, like =yacc(1)= does.
-
-A poorly set =$(JAMSHELL)= is likely to result in silent failure.
-
-[endsect]
-
-[section Fundamentals]
-
-This section is derived from the official Jam documentation and from experience using it and reading the Jambase rules. We repeat the information here mostly because it is essential to understanding and using Jam, but is not consolidated in a single place. Some of it is missing from the official documentation altogether. We hope it will be useful to anyone wishing to become familiar with Jam and the Boost build system.
-
-* Jam "=rules=" are actually simple procedural entities. Think of them as functions. Arguments are separated by colons.
-
-* A Jam *target* is an abstract entity identified by an arbitrary string. The built-in =DEPENDS= rule creates a link in the dependency graph between the named targets.
-
-* Note that the original Jam documentation for the built-in =INCLUDES= rule is incorrect: [^INCLUDES ['targets1] : ['targets2]] causes everything that depends on a member of /targets1/ to depend on all members of /targets2/. It does this in an odd way, by tacking /targets2/ onto a special tail section in the dependency list of everything in /targets1/. It seems to be OK to create circular dependencies this way; in fact, it appears to be the "right thing to do" when a single build action produces both /targets1/ and /targets2/.
-
-* When a rule is invoked, if there are =actions= declared with the same name as the rule, the actions are added to the updating actions for the target identified by the rule's first argument. It is actually possible to invoke an undeclared rule if corresponding actions are declared: the rule is treated as empty.
-
-* Targets (other than =NOTFILE= targets) are associated with paths in the file system through a process called binding. Binding is a process of searching for a file with the same name as the target (sans grist), based on the settings of the target-specific =SEARCH= and =LOCATE= variables.
-
-* In addition to local and global variables, jam allows you to set a variable =on= a target. Target-specific variable values usually cannot be read, and take effect only in the following contexts:
-
- * In updating actions, variable values are first looked up =on= the target named by the first argument (the target being updated). Because Jam builds its entire dependency tree before executing actions, Jam rules make target-specific variable settings as a way of supplying parameters to the corresponding actions.
- * Binding is controlled /entirely/ by the target-specific setting of the =SEARCH= and =LOCATE= variables, as described here.
- * In the special rule used for header file scanning, variable values are first looked up =on= the target named by the rule's first argument (the source file being scanned).
-
-* The "bound value" of a variable is the path associated with the target named by the variable. In build actions, the first two arguments are automatically replaced with their bound values. Target-specific variables can be selectively replaced by their bound values using the =bind= action modifier.
-
-* Note that the term "binding" as used in the Jam documentation indicates a phase of processing that includes three sub-phases: /binding/ (yes!), update determination, and header file scanning. The repetition of the term "binding" can lead to some confusion. In particular, the Modifying Binding section in the Jam documentation should probably be titled "Modifying Update Determination".
-
-* "Grist" is just a string prefix of the form </characters/>. It is used in Jam to create unique target names based on simpler names. For example, the file name "=test.exe=" may be used by targets in separate subprojects, or for the debug and release variants of the "same" abstract target. Each distinct target bound to a file called "test.exe" has its own unique grist prefix. The Boost build system also takes full advantage of Jam's ability to divide strings on grist boundaries, sometimes concatenating multiple gristed elements at the beginning of a string. Grist is used instead of identifying targets with absolute paths for two reasons:
-
- # The location of targets cannot always be derived solely from what the user puts in a Jamfile, but sometimes depends also on the binding process. Some mechanism to distinctly identify targets with the same name is still needed.
- # Grist allows us to use a uniform abstract identifier for each built target, regardless of target file location (as allowed by setting ALL_LOCATE_TARGET).
-
-* When grist is extracted from a name with $(var:G), the result includes the leading and trailing angle brackets. When grist is added to a name with $(var:G=expr), existing grist is first stripped. Then, if expr is non-empty, leading <s and trailing >s are added if necessary to form an expression of the form <expr2>; <expr2> is then prepended.
-
-* When Jam is invoked it imports all environment variable settings into corresponding Jam variables, followed by all command-line (-s...) variable settings. Variables whose name ends in PATH, Path, or path are split into string lists on OS-specific path-list separator boundaries (e.g. ":" for UNIX and ";" for Windows). All other variables are split on space (" ") boundaries. Boost Jam modifies that behavior by allowing variables to be quoted.
-
-* A variable whose value is an empty list or which consists entirely of empty
- strings has a negative logical value. Thus, for example, code like the
- following allows a sensible non-empty default which can easily be overridden
- by the user:
- ``
-MESSAGE ?\= starting jam... ;
-if $(MESSAGE) { ECHO The message is: $(MESSAGE) ; }
-``
- If the user wants a specific message, he invokes jam with [^"-sMESSAGE\=message text"]. If he wants no message, he invokes jam with [^-sMESSAGE\=] and nothing at all is printed.
-
-* The parsing of command line options in Jam can be rather unintuitive with regard to how other Unix programs accept options. There are two variants accepted as valid for an option:
-
- # =-xvalue=, and
- # =-x value=.
-
-[endsect]
-
-[endsect]
-
-
-[section History]
-[include history.qbk]
-[endsect]
diff --git a/tools/build/v2/doc/jamfile.jam b/tools/build/v2/doc/jamfile.jam
deleted file mode 100644
index 9bfbd06411..0000000000
--- a/tools/build/v2/doc/jamfile.jam
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2004,2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import quickbook
- ;
-
-project tools/build/v2/doc
- ;
-
-boostbook userman : src/standalone.xml
- : <xsl:param>toc.section.depth=1
- <xsl:param>doc.standalone=true
- <xsl:param>nav.layout=none
- <implicit-dependency>jam_docs
- <dependency>jam_docs
- <xsl:param>boost.root=../../../../..
- <xsl:param>boost.defaults=Boost
- ;
-
-xml jam_docs : bjam.qbk ;
-
-if ! $(BOOST_ROOT)
-{
- BOOST_ROOT = [ modules.peek : BOOST_ROOT ] ;
-}
diff --git a/tools/build/v2/doc/src/architecture.xml b/tools/build/v2/doc/src/architecture.xml
deleted file mode 100644
index 2ee0edce41..0000000000
--- a/tools/build/v2/doc/src/architecture.xml
+++ /dev/null
@@ -1,636 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
- <appendix id="bbv2.arch">
- <title>Boost.Build v2 architecture</title>
-
- <sidebar>
-    <para>This document is a work in progress. Don't expect much from it
-    yet.</para>
- </sidebar>
-
- <section id="bbv2.arch.overview">
- <title>Overview</title>
-
- <para>The Boost.Build code is structured in four different components:
- "kernel", "util", "build" and "tools". The first two are relatively
- uninteresting, so we'll focus on the remaining pair. The "build" component
- provides classes necessary to declare targets, determine which properties
- should be used for their building, and for creating the dependency
- graph. The "tools" component provides user-visible functionality. It
- mostly allows to declare specific kind of main targets, and declare
- avaiable tools, which are then used when creating the dependency graph.
- </para>
-
- </section>
-
- <section id="bbv2.arch.build">
- <title>The build layer</title>
-
- <para>The build layer has just four main parts -- metatargets (abstract targets),
- virtual targets, generators and properties.
- <itemizedlist>
- <listitem><para>Metatargets (see the "targets.jam" module) represent
- all the user-defined entities which can be built. The "meta" prefix
-      signifies that they don't really correspond to files -- depending on the
-      build request, they can produce different sets of
-      files. Metatargets are created when Jamfiles are loaded. Each
-      metatarget has a <code>generate</code> method which is given a
- property set and produces virtual targets for the passed properties.
- </para></listitem>
- <listitem><para>Virtual targets (see the "virtual-targets.jam"
- module) correspond to the atomic things which can be updated --
- most typically files.
- </para></listitem>
- <listitem><para>Properties are just (name, value) pairs, specified
- by the user and describing how the targets should be
- built. Properties are stored using the <code>property-set</code> class.
- </para></listitem>
- <listitem><para>Generators are the objects which encapsulate tools
- -- they can take a list of source virtual targets and produce new
- virtual targets from them.
- </para></listitem>
- </itemizedlist>
- </para>
-
-    <para>The build process includes these steps:
- <orderedlist>
- <listitem><para>Top-level code calls the <code>generate</code>
- method of a metatarget with some properties. </para></listitem>
-
-
- <listitem><para>The metatarget combines the requested properties
- with requirements and passes the result, together with the list
- of sources, to the <code>generators.construct</code>
- function</para></listitem>
-
-
- <listitem><para>A generator appropriate for the build properties is
- selected and its <code>run</code> method is
- called. The method returns a list of virtual targets
- </para></listitem>
-
- <listitem><para>The targets are returned to the top level code. They
- are converted into bjam targets (via
- <code>virtual-target.actualize</code>) and passed to bjam for building.
- </para></listitem>
- </orderedlist>
- </para>
-
- <section id="bbv2.arch.metatargets">
- <title>Metatargets</title>
-
- <para>There are several classes derived from "abstract-target". The
- "main-target" class represents top-level main target, the "project-target"
- acts like container for all main targets, and "basic-target" class is a
- base class for all further target types.
- </para>
-
- <para>Since each main target can have several alternatives, all top-level
- target objects are just containers, referring to "real" main target
-    classes. The type of that container is "main-target". For example, given:
-<programlisting>
-alias a ;
-lib a : a.cpp : &lt;toolset&gt;gcc ;
-</programlisting>
-    we would have one top-level instance of "main-target-class", which will
-    contain one instance of "alias-target-class" and one instance of
-    "lib-target-class". The "generate" method of "main-target" decides
-    which of the alternatives should be used, and calls "generate" on the
- corresponding instance.
- </para>
-
-    <para>Each alternative is an instance of a class derived from
-    "basic-target". The "basic-target.generate" method does several things that
-    should always be done:
- <itemizedlist>
- <listitem>
- <para>Determines what properties should be used for building the
- target. This includes looking at requested properties, requirements,
- and usage requirements of all sources.</para>
- </listitem>
- <listitem>
- <para>Builds all sources</para>
- </listitem>
- <listitem>
-          <para>Computes the usage requirements which should be passed back.</para>
- </listitem>
- </itemizedlist>
-    For the real work of constructing virtual targets, a new method
- "construct" is called.
- </para>
-
- <para>The "construct" method can be implemented in any way by classes
- derived from "basic-target", but one specific derived class plays the
- central role -- "typed-target". That class holds the desired type of file
-    to be produced, and calls the generators module to do the job.
- </para>
-
- <para>This means that a specific metatarget subclass may avoid using
- generators at all. However, this is deprecated and we're trying to
-    eliminate all such subclasses at the moment.
- </para>
-
- <para>Note that the <filename>build/targets.jam</filename> file contains
-    a UML diagram which might help.</para>
-
- </section>
-
- <section id="bbv2.arch.virtual">
- <title>Virtual targets</title>
-
- <para>Virtual targets correspond to the atomic things which can be
- updated. Each virtual target can be assigned an updating action --
- instance of the <code>action</code> class. The action class, in
- turn, contains a list of source targets, properties, and a name of
- bjam action block which should be executed.
- </para>
-
- <para>We try hard to never create equal instances of the
-    <code>virtual-target</code> class. All code which creates virtual
-    targets passes them through the <code>virtual-target.register</code>
-    function, which detects whether a target with the same name, sources, and
-    properties was already created. In that case, the existing target is returned.
- </para>
-
- <para>When all virtual targets are produced, they are
- "actualized". This means that the real file names are computed, and
- the commands that should be run are generated. This is done by the
- <code>virtual-target.actualize</code> method and the
-    <code>action.actualize</code> methods. The first is conceptually
-    simple, while the second needs additional explanation. The commands
-    in bjam are generated in a two-stage process. First, a rule with the
-    appropriate name (for example
-    "gcc.compile") is called and is given the names of targets. The rule
-    sets some variables, like "OPTIONS". After that, the command string
-    is taken, and variables are substituted, so uses of OPTIONS inside the
-    command string become the real compile options.
- </para>
-
- <para>Boost.Build added a third stage to simplify things. It's now
- possible to automatically convert properties to appropriate assignments to
- variables. For example, &lt;debug-symbols&gt;on would add "-g" to the
-    OPTIONS variable, without requiring this logic to be added manually to
- gcc.compile. This functionality is part of the "toolset" module.
- </para>
-
- <para>Note that the <filename>build/virtual-targets.jam</filename> file
-    contains a UML diagram which might help.</para>
- </section>
-
- <section id="bbv2.arch.properties">
- <para>Above, we noted that metatargets are built with a set of
- properties. That set is represented with the
- <code>property-set</code> class. An important point is that handling
- of property sets can get very expensive. For that reason, we make
- sure that for each set of (name, value) pairs only one
- <code>property-set</code> instance is created. The
-    <code>property-set</code> class uses extensive caching for all operations,
-    so most work is avoided. <code>property-set.create</code> is the
- factory function which should be used to create instances of the
- <code>property-set</code> class.
- </para>
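-    <para>For illustration only (a rough sketch, not code taken from the
-    sources), a property set could be obtained and examined like this:</para>
-<programlisting>
-local ps = [ property-set.create &lt;toolset&gt;gcc &lt;variant&gt;debug ] ;
-ECHO [ $(ps).raw ] ;  # e.g. "&lt;toolset&gt;gcc &lt;variant&gt;debug"
-</programlisting>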
- </section>
-
-
- </section>
-
- <section id="bbv2.arch.tools">
- <title>The tools layer</title>
-
- <para>Write me!</para>
-
- </section>
-
- <section id="bbv2.arch.targets">
- <title>Targets</title>
-
- <para>NOTE: THIS SECTION IS NOT EXPECTED TO BE READ!
- There are two user-visible kinds of targets in Boost.Build.
- First are "abstract" &#x2014; they correspond to things declared
- by user, for example, projects and executable files. The primary
- thing about abstract target is that it's possible to request them
- to be build with a particular values of some properties. Each
- combination of properties may possible yield different set of
- real file, so abstract target do not have a direct correspondence
- with files.</para>
-
-    <para>File targets, on the contrary, are associated with concrete
-    files. Dependency graphs for abstract targets with specific
-    properties are constructed from file targets. The user has no way to
-    create file targets; however, they can specify rules that detect
-    the file type of sources, and also rules for transforming between
-    file targets of different types. That information is used in
-    constructing the dependency graph, as described in the "next section".
-    [ link? ] <emphasis role="bold">Note:</emphasis> File targets are not
-    the same as targets in the Jam sense; the latter are created from
-    file targets at the latest possible moment. <emphasis role="bold">Note:</emphasis> "File
-    target" is a proposed name for what we call virtual targets. It
-    is more understandable by users, but has one problem: virtual
- targets can potentially be "phony", and not correspond to any
- file.</para>
-
- <section id="bbv2.arch.depends">
- <title>Dependency scanning</title>
-
- <para>Dependency scanning is the process of finding implicit
- dependencies, like "#include" statements in C++. The requirements
-      for the right dependency scanning mechanism are:</para>
-
- <itemizedlist>
- <listitem>
- <simpara>
- Support for different scanning algorithms. C++ and XML have
- quite different syntax for includes and rules for looking up
- included files.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Ability to scan the same file several times. For example,
-            a single C++ file can be compiled with different include
- paths.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Proper detection of dependencies on generated files.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Proper detection of dependencies from generated file.
- </simpara>
- </listitem>
- </itemizedlist>
-
- <section>
- <title>Support for different scanning algorithms</title>
-
-        <para>Different scanning algorithms are encapsulated by objects
-        called "scanners". Please see the documentation for the "scanner"
-        module for more details.</para>
-
- </section>
-
- <section>
- <title>Ability to scan the same file several times</title>
-
- <para>As said above, it's possible to compile a C++ file twice, with
- different include paths. Therefore, include dependencies for
- those compilations can be different. The problem is that bjam
- does not allow several scans of the same target.</para>
-
-        <para>The solution in Boost.Build is straightforward. When a virtual
- target is converted to bjam target (via
- <literal>virtual-target.actualize</literal> method), we specify the scanner
- object to be used. The actualize method will create different
- bjam targets for different scanners.</para>
-
-        <para>All targets with a specific scanner are made dependent on the target
-        without a scanner, which is always created. This is done in
-        case the target is updated. The updating action will be
-        associated with the target without a scanner, but if sources for that
-        action are touched, all targets &#x2014; with a scanner and without &#x2014;
-        should be considered outdated.</para>
-
- <para>For example, assume that "a.cpp" is compiled by two compilers
-        with different include paths. It's also copied into some install
- location. In turn, it's produced from "a.verbatim". The
- dependency graph will look like:</para>
-
-<programlisting>
-a.o (&lt;toolset&gt;gcc) &lt;--(compile)-- a.cpp (scanner1) ----+
-a.o (&lt;toolset&gt;msvc) &lt;--(compile)-- a.cpp (scanner2) ----|
-a.cpp (installed copy) &lt;--(copy) ----------------------- a.cpp (no scanner)
- ^
- |
- a.verbose --------------------------------+
-</programlisting>
-
- </section>
- <section>
- <title>Proper detection of dependencies on generated files.</title>
-
- <para>This requirement breaks down to the following ones.</para>
-
- <orderedlist>
- <listitem>
- <simpara>
-            If, when compiling "a.cpp", there's an include of "a.h", the
-            "dir" directory is in the include path, and a target called "a.h"
- will be generated to "dir", then bjam should discover the
- include, and create "a.h" before compiling "a.cpp".
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            Since Boost.Build almost always generates targets to a
-            "bin" directory, it should be supported as well. I.e. in the
-            scenario above, a Jamfile in "dir" might create a main target,
-            which generates "a.h". The file will be generated to the "dir/bin"
- directory, but we still have to recognize the dependency.
- </simpara>
- </listitem>
- </orderedlist>
-
-        <para>The first requirement means that, when determining what "a.h"
-        refers to when found in "a.cpp", we have to iterate over all
-        directories in the include path, checking for each one:</para>
-
- <orderedlist>
- <listitem>
- <simpara>
-            If there is a file "a.h" in that directory, or
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            If there is a target called "a.h" which will be generated
-            into that directory.
- </simpara>
- </listitem>
- </orderedlist>
-
-        <para>Classic Jam has built-in facilities for point (1) above, but
-        that is not enough. It is hard to implement the right semantics
-        without builtin support. For example, we could try to check if
-        there is a target called "a.h" somewhere in the dependency graph, and
-        add a dependency on it. The problem is that without a search through the
-        include path, the semantics may be incorrect. For example, an action
-        might generate some "dummy" header for systems
-        which do not have the native one. Naturally, we do not want to
-        depend on that generated header on platforms where the native one is
-        used.</para>
-
- <para>There are two design choices for builtin support. Suppose we
- have files a.cpp and b.cpp, and each one includes header.h,
- generated by some action. Dependency graph created by classic jam
- would look like:</para>
-
-<programlisting>
-a.cpp -----&gt; &lt;scanner1&gt;header.h [search path: d1, d2, d3]
-
-
- &lt;d2&gt;header.h --------&gt; header.y
- [generated in d2]
-
-b.cpp -----&gt; &lt;scanner2&gt;header.h [ search path: d1, d2, d4]
-</programlisting>
-
- <para>
-In this case, Jam thinks all header.h targets are
-unrelated. The right dependency graph might be:
-
-<programlisting>
-a.cpp ----
- \
- \
- &gt;----&gt; &lt;d2&gt;header.h --------&gt; header.y
- / [generated in d2]
- /
-b.cpp ----
-</programlisting>
-
-or
-
-<programlisting>
-a.cpp -----&gt; &lt;scanner1&gt;header.h [search path: d1, d2, d3]
- |
- (includes)
- V
- &lt;d2&gt;header.h --------&gt; header.y
- [generated in d2]
- ^
- (includes)
- |
-b.cpp -----&gt; &lt;scanner2&gt;header.h [ search path: d1, d2, d4]
-</programlisting>
- </para>
-
- <para>
-The first alternative was used for some time. The problem,
-however, is: what include paths should be used when scanning
-header.h? The second alternative was suggested by Matt Armstrong.
-It has a similar effect: all targets which depend on
-&lt;scanner1&gt;header.h will also depend on &lt;d2&gt;header.h.
-But now we have two different targets with two different scanners,
-and those targets can be scanned independently. The problem of the
-first alternative is avoided, so the second alternative is
-implemented now.
- </para>
-
-        <para>The second sub-requirement is that targets generated into the "bin"
-        directory are handled as well. Boost.Build implements a
-        semi-automatic approach. When compiling C++ files, the process
-        is:</para>
-
- <orderedlist>
- <listitem>
- <simpara>
-            The main target to which the compiled file belongs is found.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            All other main targets that the found one depends on are
-            found. Those include main targets which are used as sources, or are
-            present as values of "dependency" features.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            All directories where files belonging to those main targets
-            will be generated are added to the include path.
- </simpara>
- </listitem>
- </orderedlist>
-
- <para>After this is done, dependencies are found by the approach
- explained previously.</para>
-
-        <para>Note that if a target uses generated headers from another main
-        target, that main target should be explicitly specified as a
-        dependency property. It would be better to lift this requirement,
-        but it does not seem very problematic in practice.</para>
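-        <para>For instance, a hedged illustration (the project path and the
-        target names here are hypothetical) of declaring such a dependency
-        property in a Jamfile:</para>
-
-<programlisting>
-# "gen-headers" is a main target in project /mylib that produces headers
-# included by app.cpp, so we list it as a dependency property.
-exe app : app.cpp : &lt;dependency&gt;/mylib//gen-headers ;
-</programlisting>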
-
-        <para>For target types other than C++, the adding of include paths must
-        be implemented separately.</para>
-
- </section>
- <section>
- <title>Proper detection of dependencies from generated files</title>
-
-        <para>Suppose the file "a.cpp" includes "a.h" and both are generated by
-        some action. Note that classic jam has two stages. In the first stage
-        the dependency graph is built and the actions which should be run
-        are determined. In the second stage the actions are executed.
-        Initially, neither file exists, so the include is not found. As
-        a result, jam might attempt to compile a.cpp before creating
-        a.h, and the compilation will fail.</para>
-
-        <para>The solution in Boost.Jam is to perform additional dependency
-        scans after targets are updated. This breaks the separation between
-        build stages in jam &#x2014; which some people consider a good
-        thing &#x2014; but I am not aware of any better solution.</para>
-
-      <para>In order to understand the rest of this section, you should
-      first read some details about jam dependency scanning, available
- <ulink url=
- "http://public.perforce.com:8080/@md=d&amp;cd=//public/jam/src/&amp;ra=s&amp;c=kVu@//2614?ac=10">
- at this link</ulink>.</para>
-
- <para>Whenever a target is updated, Boost.Jam rescans it for
- includes. Consider this graph, created before any actions are
- run.</para>
-
-<programlisting>
-A -------&gt; C ----&gt; C.pro
- /
-B --/ C-includes ---&gt; D
-</programlisting>
-
- <para>
-Both A and B have a dependency on C and on C-includes (the latter
-dependency is not shown). Say that during building we first tried to create
-A, then tried to create C and successfully created it.
- </para>
-
-      <para>In that case, the set of includes in C might well have
-      changed. We do not bother to detect precisely which includes were
-      added or removed. Instead we create another internal node,
-      C-includes-2. Then we determine what actions should be run to
-      update the target. In fact, this means that we perform the logic of the
-      first stage while already executing the second stage.</para>
-
-      <para>After the actions for C-includes-2 are determined, we add
-      C-includes-2 to the list of A's dependencies, and stage 2 proceeds
-      as usual. Unfortunately, we cannot do the same with target B,
-      since until B is visited, the C target does not know that B depends on
-      it. So, we add a flag to C which tells that it was rescanned. When
-      visiting the B target, the flag is noticed and C-includes-2 is
-      added to the list of B's dependencies.</para>
-
- <para>Note also that internal nodes are sometimes updated too.
- Consider this dependency graph:</para>
-
-<programlisting>
-a.o ---&gt; a.cpp
- a.cpp-includes --&gt; a.h (scanned)
- a.h-includes ------&gt; a.h (generated)
- |
- |
- a.pro &lt;-------------------------------------------+
-</programlisting>
-
-        <para>Here, our handling of generated headers comes into play. Say
-        that a.h exists but is out of date with respect to "a.pro"; then
-        "a.h (generated)" and "a.h-includes" will be marked for
-        updating, but "a.h (scanned)" will not be marked. We have to rescan
-        the "a.h" file after it is created, but since "a.h (generated)" has no
-        scanner associated with it, it is only possible to rescan "a.h"
-        after the "a.h-includes" target has been updated.</para>
-
-        <para>The above considerations lead to the decision to rescan a
-        target whenever it is updated, no matter whether the target is
-        internal or not.</para>
-
- <warning>
- <para>
-          The remainder of this document is not intended to be read at
-          all. This will be rearranged in the future.
- </para>
- </warning>
-
- <section>
- <title>File targets</title>
-
- <para>
-          As described above, file targets correspond
-          to files that Boost.Build manages. Users may be concerned about
-          file targets in three ways: when declaring file target types,
-          when declaring transformations between types, and when
-          determining where a file target will be placed. File targets can
-          also be connected with actions that determine how the target is
-          created. Both file targets and actions are implemented in the
-          <literal>virtual-target</literal> module.
- </para>
-
- <section>
- <title>Types</title>
-
-            <para>A file target can be given a type, which determines
-            what transformations can be applied to the file. The
-            <literal>type.register</literal> rule declares new types. A file type can
-            also be assigned a scanner, which is used to find implicit
-            dependencies. See "dependency scanning" [ link? ] below.</para>
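-            <para>For illustration, a minimal sketch (the type name and the
-            extensions are made up) of registering a new file type:</para>
-
-<programlisting>
-import type ;
-# Declare a hypothetical MARKDOWN type, recognized by two extensions.
-type.register MARKDOWN : md markdown ;
-</programlisting>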
- </section>
- </section>
-
- <section>
- <title>Target paths</title>
-
-        <para>To distinguish targets built with different properties, they
-        are put in different directories. The rules for determining target
-        paths are given below:</para>
-
- <orderedlist>
- <listitem>
- <simpara>
-            All targets are placed under the directory corresponding to the
-            project where they are defined.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            Each non-free, non-incidental property causes an additional
-            element to be added to the target path. That element has the
-            form <literal>&lt;feature-name&gt;-&lt;feature-value&gt;</literal> for
-            ordinary features and <literal>&lt;feature-value&gt;</literal> for
-            implicit ones. [Note about composite features].
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            If the set of free, non-incidental properties is different
-            from the set of free, non-incidental properties of the project
-            in which the main target that uses the target is defined, a
-            part of the form <literal>main_target-&lt;name&gt;</literal> is added to
-            the target path. <emphasis role="bold">Note:</emphasis> It would be nice to
-            track free features completely as well, but this appears to be complex and
-            not strictly needed.
- </simpara>
- </listitem>
- </orderedlist>
-
- <para>For example, we might have these paths:</para>
-
-<programlisting>
-debug/optimization-off
-debug/main-target-a
-</programlisting>
-
- </section>
- </section>
- </section>
- </section>
- </appendix>
-
-<!--
- Local Variables:
- mode: xml
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
diff --git a/tools/build/v2/doc/src/extending.xml b/tools/build/v2/doc/src/extending.xml
deleted file mode 100644
index 43ef0bbb37..0000000000
--- a/tools/build/v2/doc/src/extending.xml
+++ /dev/null
@@ -1,1216 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
- <chapter id="bbv2.extender">
- <title>Extender Manual</title>
-
- <section id="bbv2.extender.intro">
- <title>Introduction</title>
-
- <para>
-      This section explains how to extend Boost.Build to accommodate your
-      local requirements&mdash;primarily to add support for non-standard
-      tools you have. Before we start, be sure you have read and understood
-      the concept of a metatarget, <xref linkend="bbv2.overview.concepts"/>,
-      which is critical to understanding the remaining material.
- </para>
-
- <para>
- The current version of Boost.Build has three levels of targets, listed
- below.
- </para>
-
- <variablelist>
-
- <varlistentry>
- <term>metatarget</term>
- <listitem>
- <para>
- Object that is created from declarations in Jamfiles. May
- be called with a set of properties to produce concrete
- targets.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term>concrete target</term>
- <listitem>
- <para>
- Object that corresponds to a file or an action.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term>jam target</term>
- <listitem>
- <para>
-          Low-level concrete target that is specific to the Boost.Jam build
-          engine. Essentially a string&mdash;most often the name of a file.
- </para>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- <para>
- In most cases, you will only have to deal with concrete targets and
- the process that creates concrete targets from
-      metatargets. Extending the metatarget level is rarely required. The jam
-      targets are typically only used inside command line patterns.
- </para>
-
- <warning>
- <para>All of the Boost.Jam target-related builtin functions, like
-      <code>DEPENDS</code> or <code>ALWAYS</code>, operate on jam
- targets. Applying them to metatargets or concrete targets has no
- effect.</para>
- </warning>
-
- <section id="bbv2.extender.overview.metatargets">
- <title>Metatargets</title>
-
-      <para>A metatarget is an object that records information specified
-      in a Jamfile, such as the metatarget kind, name, sources and properties,
-      and can be called with specific properties to generate concrete
-      targets. At the code level it is represented by an instance of a
-      class derived from <link linkend="bbv2.reference.class.abstract-target">abstract-target</link>.
-      <footnote><para>This name is historic, and will eventually be changed to
-      <code>metatarget</code>.</para></footnote>
- </para>
-
- <para>The <link linkend="bbv2.reference.class.abstract-target.generate">generate</link>
- method takes the build properties
- (as an instance of the <link linkend="bbv2.reference.class.property-set">
- property-set</link> class) and returns
- a list containing:</para>
- <itemizedlist>
-        <listitem><para>As the front element&mdash;the usage requirements from this invocation
-        (an instance of <link linkend="bbv2.reference.class.property-set">
-        property-set</link>)</para></listitem>
-        <listitem><para>As subsequent elements&mdash;the created concrete targets
-        (instances of the <classname>virtual-target</classname> class)</para></listitem>
- </itemizedlist>
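-      <para>For illustration, here is a hedged sketch of calling
-      <code>generate</code> on a metatarget; the variable names and the
-      concrete properties are made up, and <code>$(mt)</code> is assumed to
-      already hold a metatarget instance:</para>
-
-<programlisting>
-import property-set ;
-
-local ps = [ property-set.create &lt;toolset&gt;gcc &lt;variant&gt;release ] ;
-local result = [ $(mt).generate $(ps) ] ;
-# Front element: usage requirements; the rest: created virtual targets.
-local usage-requirements = $(result[1]) ;
-local virtual-targets = $(result[2-]) ;
-</programlisting>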
-
-      <para>It is possible to look up a metatarget by target id using the
-      <code>targets.resolve-reference</code> function, and the
-      <code>targets.generate-from-reference</code> function can both
-      look up and generate a metatarget.</para>
-
- <para>The <link linkend="bbv2.reference.class.abstract-target">abstract-target</link>
- class has three immediate derived classes:</para>
- <itemizedlist>
-
- <listitem><para><link linkend="bbv2.reference.class.project-target">project-target</link> that
- corresponds to a project and is not intended for further
- subclassing. The <link linkend="bbv2.reference.class.project-target.generate">
- generate</link> method of this
- class builds all targets in the project that are not marked as
- explicit.</para></listitem>
-
- <listitem><para><link linkend="bbv2.reference.class.main-target">main-target</link>
- corresponds to a target in a project
- and contains one or more target alternatives. This class also should not be
- subclassed. The <link linkend="bbv2.reference.class.main-target.generate">generate</link>
- method of this class selects an alternative to build, and calls the
- <link linkend="bbv2.reference.class.basic-target.generate">generate</link>
- method of that alternative.</para></listitem>
-
- <listitem><para><link linkend="bbv2.reference.class.basic-target">basic-target</link>
-        corresponds to a specific target alternative. This is a base class
- with a number of derived classes. The
- <link linkend="bbv2.reference.class.basic-target.generate">generate</link> method
- processes the target requirements and requested build properties to
- determine final properties for the target, builds all sources, and
- finally calls the abstract
- <link linkend="bbv2.reference.class.basic-target.construct">construct</link>
- method with the list of source virtual targets, and the final properties.
- </para></listitem>
-
- </itemizedlist>
-
-      <para>The instances of the <link linkend="bbv2.reference.class.project-target">project-target</link> and
-      <link linkend="bbv2.reference.class.main-target">main-target</link> classes are created
-      implicitly&mdash;when loading a new Jamfile, or when a new target
-      alternative with an as-yet unknown name is created. The instances of the
-      classes derived from <link linkend="bbv2.reference.class.basic-target">basic-target</link>
-      are typically created when a Jamfile calls a <firstterm>metatarget rule</firstterm>,
-      such as <code>exe</code>.
- </para>
-
-      <para>It is permissible to create a custom class derived from
-      <link linkend="bbv2.reference.class.basic-target">basic-target</link> and a new metatarget rule
-      that creates instances of such a target. However, in the majority
-      of cases, a specific subclass of <link linkend="bbv2.reference.class.basic-target">basic-target</link>&mdash;
-      <link linkend="bbv2.reference.class.typed-target">typed-target</link>&mdash;is used. That class is associated
-      with a <firstterm>type</firstterm> and relies on <firstterm>generators</firstterm>
-      to construct concrete targets of that type. This process will be explained below.
-      When a new type is declared, a new metatarget rule is automatically defined.
-      That rule creates a new instance of typed-target, associated with that type.
-      </para>
-
- </section>
-
- <section id="bbv2.extender.overview.targets">
- <title>Concrete targets</title>
-
-      <para>Concrete targets are represented by instances of classes derived
-      from <classname>virtual-target</classname>. The most commonly used
-      subclass is <classname>file-target</classname>. A file target is associated
-      with an action that creates it&mdash;an instance of the <classname>action</classname>
-      class. The action, in turn, holds a list of source targets. It also holds the
-      <link linkend="bbv2.reference.class.property-set">property-set</link>
-      instance with the build properties that should be used for the action.</para>
-
-      <para>Here's an example of creating a target from another target, <code>source</code>:</para>
-<programlisting>
-local a = [ new action $(source) : common.copy : $(property-set) ] ;
-local t = [ new file-target $(name) : CPP : $(project) : $(a) ] ;
-</programlisting>
-      <para>The first line creates an instance of the <classname>action</classname> class.
-      The first parameter is the list of sources. The second parameter is the name of
-      a jam-level <link linkend="bbv2.overview.jam_language.actions">action</link>.
-      The third parameter is the property-set applying to this action. The second line
-      creates a target. We specify a name, a type, and a project. We also pass the
-      action object created earlier. If the action creates several targets, we can repeat
-      the second line several times.</para>
-
-      <para>In some cases, code that creates concrete targets may be invoked more than
-      once with the same properties. Returning two different instances of <classname>file-target</classname>
-      that correspond to the same file will clearly result in problems. Therefore, whenever
-      returning targets you should pass them through the <code>virtual-target.register</code>
-      function, which will replace targets with previously created identical ones as
-      necessary.<footnote><para>This create-then-register pattern is caused by limitations
-      of the Boost.Jam language. The Python port is likely to never create duplicate targets.</para></footnote>
-      Here are a couple of examples:
-<programlisting>
-return [ virtual-target.register $(t) ] ;
-return [ sequence.transform virtual-target.register : $(targets) ] ;
-</programlisting>
- </para>
-
- </section>
-
- <section id="bbv2.extender.overview.generators">
- <title>Generators</title>
-
- <para>In theory, every kind of metatarget in Boost.Build (like <code>exe</code>,
- <code>lib</code> or <code>obj</code>) could be implemented
-      by writing a new metatarget class that, independently of the other code, figures out
- what files to produce and what commands to use. However, that would be rather inflexible.
- For example, adding support for a new compiler would require editing several metatargets.
- </para>
-
-      <para>In practice, most files have specific types, and most tools
-      consume and produce files of a specific type. To take advantage of this
-      fact, Boost.Build defines the concepts of target types and
-      <indexterm><primary>generators</primary></indexterm>
-      <firstterm>generators</firstterm>, and has a special metatarget class
-      <link linkend="bbv2.reference.class.typed-target">typed-target</link>. A target type is merely an
-      identifier. It is associated with a set of file extensions that
-      correspond to that type. A generator is an abstraction of a tool. It advertises
-      the types it produces and, if called with a set of input targets, tries to construct
-      output targets of the advertised types. Finally,
-      <link linkend="bbv2.reference.class.typed-target">typed-target</link>
-      is associated with a specific target type, and relies on the generator (or generators)
-      for that type.
- </para>
-
- <para>A generator is an instance of a class derived from <classname>generator</classname>.
- The <classname>generator</classname> class itself is suitable for common cases.
- You can define derived classes for custom scenarios.</para>
-
- <!--
- <para>Given a set of generators, the fundamental operation is to
- construct a target of a given type, with given properties, from a
- set of targets. That operation is performed by rule
- <literal>generators.construct</literal> and the used algorithm is described
- below.</para>
-
- <section>
- <title>Selecting and ranking viable generators</title>
-
- <para>Each generator, in addition to target types that it can
-        produce, has attributes that affect its applicability in a
-        particular situation. Those attributes are:</para>
-
- <orderedlist>
- <listitem>
- <simpara>
- Required properties, which are properties absolutely
- necessary for the generator to work. For example, generator
- encapsulating the gcc compiler would have &lt;toolset&gt;gcc as
- required property.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-            Optional properties, which increase the generator's
-            suitability for a particular build.
- </simpara>
- </listitem>
- </orderedlist>
-
- <para>
- Generator's required and optional properties may not include
- either free or incidental properties. (Allowing this would
- greatly complicate caching targets).
- </para>
-
- <para>When trying to construct a target, the first step is to select
- all possible generators for the requested target type, which
- required properties are a subset of requested properties.
- Generators that were already selected up the call stack are
- excluded. In addition, if any composing generators were selected
- up the call stack, all other composing generators are ignored
- (TODO: define composing generators). The found generators
- are assigned a rank, which is the number of optional properties
- present in requested properties. Finally, generators with highest
-        rank are selected for further processing.</para>
-
- </section>
- <section>
- <title>Running generators</title>
-
- <para>When generators are selected, each is run to produce a list of
- created targets. This list might include targets that are not of
- requested types, because generators create the same targets as
- some tool, and tool's behaviour is fixed. (Note: should specify
- that in some cases we actually want extra targets). If generator
- fails, it returns an empty list. Generator is free to call
- 'construct' again, to convert sources to the types it can handle.
- It also can pass modified properties to 'construct'. However, a
- generator is not allowed to modify any propagated properties,
- otherwise when actually consuming properties we might discover
- that the set of propagated properties is different from what was
- used for building sources.</para>
-
- <para>For all targets that are not of requested types, we try to
- convert them to requested type, using a second call to
- <literal>construct</literal>. This is done in order to support
- transformation sequences where single source file expands to
- several later. See <ulink url=
- "http://groups.yahoo.com/group/jamboost/message/1667">this
- message</ulink> for details.</para>
-
- </section>
-
- -->
-
- <!-- FIXME: review the below content. Maybe, some of it is
- still useful.
- <section>
- <title>Property adjustment</title>
-
- <para>Because target location is determined by the build system, it
- is sometimes necessary to adjust properties, in order to not
- break actions. For example, if there's an action that generates
- a header, say "a_parser.h", and a source file "a.cpp" which
- includes that file, we must make everything work as if a_parser.h
- is generated in the same directory where it would be generated
- without any subvariants.</para>
-
- <para>Correct property adjustment can be done only after all targets
- are created, so the approach taken is:</para>
-
- <orderedlist>
- <listitem>
- <para>
- When dependency graph is constructed, each action can be
- assigned a rule for property adjustment.
- </para>
- </listitem>
-
- <listitem>
- <para>
- When virtual target is actualized, that rule is run and
- return the final set of properties. At this stage it can use
- information of all created virtual targets.
- </para>
- </listitem>
- </orderedlist>
-
- <para>In case of quoted includes, no adjustment can give 100% correct
- results. If target dirs are not changed by build system, quoted
- includes are searched in "." and then in include path, while angle
- includes are searched only in include path. When target dirs are
- changed, we'd want to make quoted includes to be search in "." then in
- additional dirs and then in the include path and make angle includes
- be searched in include path, probably with additional paths added at
- some position. Unless, include path already has "." as the first
- element, this is not possible. So, either generated headers should not
- be included with quotes, or first element of include path should be
- ".", which essentially erases the difference between quoted and angle
- includes. <emphasis role="bold">Note:</emphasis> the only way to get
- "." as include path into compiler command line is via verbatim
- compiler option. In all other case, Boost.Build will convert "." into
- directory where it occurs.</para>
-
- </section>
-
- -->
-
- </section>
-
- </section>
-
- <section id="bbv2.extender.example">
- <title>Example: 1-to-1 generator</title>
-
-    <para>Say you're writing an application that generates C++ code. If
-    you have ever done this, you know that it is not pleasant. Embedding large
-    portions of C++ code in string literals is very awkward. A much
-    better solution is:</para>
-
- <orderedlist>
- <listitem>
- <simpara>
- Write the template of the code to be generated, leaving
- placeholders at the points that will change
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Access the template in your application and replace
- placeholders with appropriate text.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>Write the result.</simpara>
- </listitem>
- </orderedlist>
-
- <para>It's quite easy to achieve. You write special verbatim files that are
- just C++, except that the very first line of the file contains the name of a
- variable that should be generated. A simple tool is created that takes a
- verbatim file and creates a cpp file with a single <code>char*</code> variable
- whose name is taken from the first line of the verbatim file and whose value
- is the file's properly quoted content.</para>
-
- <para>Let's see what Boost.Build can do.</para>
-
- <para>First off, Boost.Build has no idea about "verbatim files". So, you must
- register a new target type. The following code does it:</para>
-
-<programlisting>
-import type ;
-type.register VERBATIM : verbatim ;
-</programlisting>
-
- <para>The first parameter to <link linkend="bbv2.reference.modules.type.register">type.register</link> gives
- the name of the declared type. By convention, it's uppercase. The second
- parameter is the suffix for files of this type. So, if Boost.Build sees
- <filename>code.verbatim</filename> in a list of sources, it knows that it's of
- type <code>VERBATIM</code>.</para>
-
- <para>Next, you tell Boost.Build that the verbatim files can be
- transformed into C++ files in one build step. A
- <firstterm>generator</firstterm> is a template for a build step that
- transforms targets of one type (or set of types) into another. Our
- generator will be called <code>verbatim.inline-file</code>; it
- transforms <code>VERBATIM</code> files into <code>CPP</code> files:
-
-<programlisting>
-import generators ;
-generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
-</programlisting>
- </para>
-
- <para>Lastly, you have to inform Boost.Build about the shell
- commands used to make that transformation. That's done with an
- <code>actions</code> declaration.
-
-<programlisting>
-actions inline-file
-{
- "./inline-file.py" $(&lt;) $(&gt;)
-}
-</programlisting>
-
-<!-- You need to explain all the parameters to an "actions" and
- describe the accompanying rule declaration: the user has no clue
- what $(<) and $(>) are, and doesn't know about the third
- parameter that gets passed to the rule. -->
-
-<!-- We use verbatim.inline-file in one place and just inline-file in
- another. Is this confusing for user?
- -->
-</para>
-
- <para>
- Now, we're ready to tie it all together. Put all the code above in file
- <filename>verbatim.jam</filename>, add <code>import verbatim ;</code> to
- <filename>Jamroot.jam</filename>, and it's possible to write the following
- in your Jamfile:
- </para>
-
-<programlisting>
-exe codegen : codegen.cpp class_template.verbatim usage.verbatim ;
-</programlisting>
-
- <para>
- The listed verbatim files will be automatically converted into C++ source
- files, compiled and then linked to the codegen executable.
- </para>
-
- <para>
- In subsequent sections, we will extend this example, and review all the
- mechanisms in detail. The complete code is available in the
- <filename>example/customization</filename> directory.
- </para>
- </section>
-
- <section id="bbv2.extending.targets">
- <title>Target types</title>
- <para>The first thing we did in the <link
-    linkend="bbv2.extender.intro">introduction</link> was to declare a
- new target type:
-<programlisting>
-import type ;
-type.register VERBATIM : verbatim ;
-</programlisting>
- The type is the most important property of a target. Boost.Build can
- automatically generate necessary build actions only because you
- specify the desired type (using the different main target rules), and
- because Boost.Build can guess the type of sources from their
- extensions.
- </para>
-
- <para>The first two parameters for the <code>type.register</code> rule
-    are the name of the new type and the list of extensions associated with
- it. A file with an extension from the list will have the given target
- type. In the case where a target of the declared type is generated
- from other sources, the first specified extension will be used.
- </para>
-
-    <para>Sometimes you want to change the suffix used for generated targets
-    depending on build properties, such as the toolset. For example, some compilers
-    use the extension <literal>elf</literal> for executable files. You can use the
-    <code>type.set-generated-target-suffix</code> rule:
-<programlisting>
-type.set-generated-target-suffix EXE : &lt;toolset&gt;elf : elf ;
-</programlisting>
- </para>
-
- <para>A new target type can be inherited from an existing one.
-<programlisting>
-type.register PLUGIN : : SHARED_LIB ;
-</programlisting>
-    The above code defines a new type derived from
-    <code>SHARED_LIB</code>. Initially, the new type inherits all the
-    properties of the base type, in particular generators and the suffix.
-    Typically, you'll change the new type in some way. For example, using
-    <code>type.set-generated-target-suffix</code> you can set the suffix for
-    the new type. Or you can write a special generator for the new type; for
-    example, it can generate additional metainformation for the plugin.
-    Either way, the <code>PLUGIN</code> type can be used wherever
-    <code>SHARED_LIB</code> can. For example, you can directly link plugins
-    to an application.
- </para>
-
- <para>A type can be defined as "main", in which case Boost.Build will
- automatically declare a main target rule for building targets of that
- type. More details can be found <link
- linkend="bbv2.extending.rules.main-type">later</link>.
- </para>
-
- <section id="bbv2.extending.scanners">
- <title>Scanners</title>
- <para>
- Sometimes, a file can refer to other files via some include system. To
- make Boost.Build track dependencies between included files, you need
- to provide a scanner. The primary limitation is that only one scanner
- can be assigned to a target type.
- </para>
-
- <para>First, we need to declare a new class for the scanner:
-<programlisting>
-class verbatim-scanner : common-scanner
-{
- rule pattern ( )
- {
- return "//###include[ ]*\"([^\"]*)\"" ;
- }
-}
-</programlisting>
- All the complex logic is in the <code>common-scanner</code>
- class, and you only need to override the method that returns
- the regular expression to be used for scanning. The
- parentheses in the regular expression indicate which part
- of the string is the name of the included file. Only the
- first parenthesized group in the regular expression will be
- recognized; if you can't express everything you want that
- way, you can return multiple regular expressions, each of
- which contains a parenthesized group to be matched.
- </para>
-
- <para>After that, we need to register our scanner class:
-<programlisting>
-scanner.register verbatim-scanner : include ;
-</programlisting>
- The value of the second parameter, in this case
- <code>include</code>, specifies the properties that contain the list
- of paths that should be searched for the included files.
- </para>
-
- <para>Finally, we assign the new scanner to the <code>VERBATIM</code>
- target type:
-<programlisting>
-type.set-scanner VERBATIM : verbatim-scanner ;
-</programlisting>
- That's enough for scanning include dependencies.
- </para>
-
- </section>
-
- </section>
-
- <section id="bbv2.extending.tools">
- <title>Tools and generators</title>
- <para>
- This section will describe how Boost.Build can be extended to support
- new tools.
- </para>
-
-    <para>For each additional tool, a Boost.Build object called a generator
-    must be created. That object knows the specific types of targets that it
-    accepts and produces. Using that information, Boost.Build is able
-    to invoke the generator automatically. For example, if you declare a
-    generator that takes a target of the type <literal>D</literal> and
-    produces a target of the type <literal>OBJ</literal>, then placing a
-    file with the extension <literal>.d</literal> in a list of sources will
-    cause Boost.Build to invoke your generator, and then to link the
-    resulting object file into an application. (Of course, this requires
-    that you specify that the <literal>.d</literal> extension corresponds
-    to the <literal>D</literal> type.)
-    </para>
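-    <para>For illustration only, here is a hedged sketch of such a module
-    (say, a hypothetical <filename>dmd.jam</filename>); the module name, the
-    generator id and the compiler command line are assumptions, not existing
-    Boost.Build code:</para>
-
-<programlisting>
-import type ;
-type.register D : d ;
-
-import generators ;
-# A standard generator: one D source produces one object file.
-generators.register-standard dmd.compile : D : OBJ ;
-
-# Hypothetical compiler command; "-of" names the output file.
-actions compile
-{
-    dmd -c -of$(&lt;) $(&gt;)
-}
-</programlisting>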
-
- <para>Each generator should be an instance of a class derived from the
- <code>generator</code> class. In the simplest case, you don't need to
- create a derived class, but simply create an instance of the
- <code>generator</code> class. Let's review the example we've seen in the
- <link linkend="bbv2.extender.intro">introduction</link>.
- <!-- Is the following supposed to be verbatim.jam? Tell the
- user so. You also need to describe the meanings of $(<)
- and $(>); this is the first time they're encountered. -->
-<programlisting>
-import generators ;
-generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
-actions inline-file
-{
- "./inline-file.py" $(&lt;) $(&gt;)
-}
-</programlisting>
- </para>
-
- <para>We declare a standard generator, specifying its id, the source type
- and the target type. When invoked, the generator will create a target
- of type <literal>CPP</literal> with a source target of
- type <literal>VERBATIM</literal> as the only source. But what command
- will be used to actually generate the file? In bjam, actions are
- specified using named "actions" blocks and the name of the action
-    block should be specified when creating targets. By convention,
-    generators use an actions block with the same name as their own id. So,
-    in the above example, the "inline-file" actions block will be used to
- convert the source into the target.
- </para>
-
- <para>
- There are two primary kinds of generators: standard and composing,
- which are registered with the
- <code>generators.register-standard</code> and the
- <code>generators.register-composing</code> rules, respectively. For
- example:
-<programlisting>
-generators.register-standard verbatim.inline-file : VERBATIM : CPP ;
-generators.register-composing mex.mex : CPP LIB : MEX ;
-</programlisting>
- The first (standard) generator takes a <emphasis>single</emphasis>
- source of type <code>VERBATIM</code> and produces a result. The second
- (composing) generator takes any number of sources, which can have either
- the <code>CPP</code> or the <code>LIB</code> type. Composing generators
-    are typically used for generating top-level target types. For example,
- the first generator invoked when building an <code>exe</code> target is
- a composing generator corresponding to the proper linker.
- </para>
-
- <para>You should also know about two specific functions for registering
- generators: <code>generators.register-c-compiler</code> and
- <code>generators.register-linker</code>. The first sets up header
-    dependency scanning for C files, and the second handles various
- complexities like searched libraries. For that reason, you should always
- use those functions when adding support for compilers and linkers.
- </para>
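-    <para>A hedged sketch of how such registrations typically look; the
-    "mycc" toolset name is hypothetical:</para>
-
-<programlisting>
-import generators ;
-# C++ compilation: also sets up header dependency scanning for the sources.
-generators.register-c-compiler mycc.compile.c++ : CPP : OBJ : &lt;toolset&gt;mycc ;
-# Linking: handles libraries among the sources.
-generators.register-linker mycc.link : OBJ LIB : EXE : &lt;toolset&gt;mycc ;
-</programlisting>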
-
- <para>(Need a note about UNIX)</para>
- <!-- What kind of note? Either write the note or don't, but remove this dross. -->
- <bridgehead>Custom generator classes</bridgehead>
-
-    <para>The standard generators allow you to specify source and target
- types, an action, and a set of flags. If you need anything more complex,
- <!-- What sort of flags? Command-line flags? What does the system do with them? -->
- you need to create a new generator class with your own logic. Then,
- you have to create an instance of that class and register it. Here's
-    an example of how you can create your own generator class:
-<programlisting>
-class custom-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-<!-- What is the point of this __init__ function?? -->
-}
-
-generators.register
- [ new custom-generator verbatim.inline-file : VERBATIM : CPP ] ;
-</programlisting>
- This generator will work exactly like the
- <code>verbatim.inline-file</code> generator we've defined above, but
- it's possible to customize the behaviour by overriding methods of the
- <code>generator</code> class.
- </para>
-
- <para>There are two methods of interest. The <code>run</code> method is
- responsible for the overall process - it takes a number of source targets,
- converts them to the right types, and creates the result. The
- <code>generated-targets</code> method is called when all sources are
- converted to the right types to actually create the result.
- </para>
-
- <para>The <code>generated-targets</code> method can be overridden when you
- want to add additional properties to the generated targets or use
- additional sources. For a real-life example, suppose you have a program
-    analysis tool that should be given the name of the executable and the list of
- all sources. Naturally, you don't want to list all source files
- manually. Here's how the <code>generated-targets</code> method can find
- the list of sources automatically:
-<programlisting>
-class itrace-generator : generator {
-....
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local leaves ;
- local temp = [ virtual-target.traverse $(sources[1]) : : include-sources ] ;<!-- You must explain include-sources! -->
- for local t in $(temp)
- {
- if ! [ $(t).action<!-- In what namespace is this evaluated? --> ]
- {
- leaves += $(t) ;
- }
- }
-        return [ generator.generated-targets $(sources) $(leaves)
- : $(property-set) : $(project) $(name) ] ;
- }
-}
-generators.register [ new itrace-generator nm.itrace : EXE : ITRACE ] ;
-</programlisting>
-    The <code>generated-targets</code> method will be called with a single
-    source target of type <literal>EXE</literal>. The call to
-    <code>virtual-target.traverse</code> will return all targets the
-    executable depends on, and we then keep only the targets that are not
-    produced by any action, that is, the leaf source files.
-    The found targets are added to the sources.
- </para>
-
-    <para>The <code>run</code> method can be overridden to completely
- customize the way the generator works. In particular, the conversion of
- sources to the desired types can be completely customized. Here's
- another real example. Tests for the Boost Python library usually
- consist of two parts: a Python program and a C++ file. The C++ file is
-    compiled to a Python extension that is loaded by the Python
- program. But in the likely case that both files have the same name,
- the created Python extension must be renamed. Otherwise, the Python
- program will import itself, not the extension. Here's how it can be
- done:
-<programlisting>
-rule run ( project name ? : property-set : sources * )
-{
- local python ;
- for local s in $(sources)
- {
- if [ $(s).type ] = PY
- {
- python = $(s) ;
- }
- }
- <!-- This is horrible code. Use a filter function, or at _least_ consolidate the two loops! -->
- local libs ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] LIB ]
- {
- libs += $(s) ;
- }
- }
-
- local new-sources ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] CPP ]
- {
- local name = [ $(s).name ] ; # get the target's basename
- if $(name) = [ $(python).name ]
- {
- name = $(name)_ext ; # rename the target
- }
- new-sources += [ generators.construct $(project) $(name) :
- PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
- }
- }
-
-    return [ construct-result $(python) $(new-sources) : $(project) $(name)
- : $(property-set) ] ;
-}
-</programlisting>
- <!-- Why are we doing this with a generator??? It seems
- insane. We could just use a nice front-end rule that
- calls some normal target-creation rules. No? -->
-
-    First, we separate all sources into Python files, libraries, and C++
- sources. For each C++ source we create a separate Python extension by
- calling <code>generators.construct</code> and passing the C++ source
- and the libraries. At this point, we also change the extension's name,
- if necessary.
- </para>
-
-
- </section>
-
- <section id="bbv2.extending.features">
- <title>Features</title>
- <para>
-      Often, we need to control the options passed to the invoked tools. This
- is done with features. Consider an example:
-<programlisting>
-# Declare a new free feature
-import feature : feature ;
-feature verbatim-options : : free ;
-
-# Cause the value of the 'verbatim-options' feature to be
-# available as 'OPTIONS' variable inside verbatim.inline-file
-import toolset : flags ;
-flags verbatim.inline-file OPTIONS &lt;verbatim-options&gt; ;<!-- You must tell the reader what the syntax of the flags rule is -->
-
-# Use the "OPTIONS" variable
-actions inline-file
-{
- "./inline-file.py" $(OPTIONS) $(&lt;) $(&gt;)
-}
-</programlisting>
- We first define a new feature. Then, the <code>flags</code> invocation
-    says that whenever the verbatim.inline-file action is run, the value of
- the <code>verbatim-options</code> feature will be added to the
- <code>OPTIONS</code> variable, and can be used inside the action body.
- You'd need to consult online help (--help) to find all the features of
- the <code>toolset.flags</code> rule.
- <!-- It's been a while since I wrote these notes, so I don't
- remember what I meant. But right here, I wrote "bad" and
- circled it. Maybe you can figure out what I meant. ;-)
- -->
- </para>
-
- <para>
- Although you can define any set of features and interpret their values
- in any way, Boost.Build suggests the following coding standard for
- designing features.
- </para>
-
- <para>Most features should have a fixed set of values that is portable
- (tool neutral) across the class of tools they are designed to work
-    with. The user does not have to adjust the values for a specific tool. For
- example, <code>&lt;optimization&gt;speed</code> has the same meaning for
- all C++ compilers and the user does not have to worry about the exact
- options passed to the compiler's command line.
- </para>
-
- <para>
- Besides such portable features there are special 'raw' features that
- allow the user to pass any value to the command line parameters for a
- particular tool, if so desired. For example, the
- <code>&lt;cxxflags&gt;</code> feature allows you to pass any command line
- options to a C++ compiler. The <code>&lt;include&gt;</code> feature
- allows you to pass any string preceded by <code>-I</code> and the interpretation
- is tool-specific. <!-- It's really tool-specific? That surprises me --> (See <xref
- linkend="bbv2.faq.external"/> for an example of very smart usage of that
- feature). Of course one should always strive to use portable
-    features, but these are still provided as a backdoor just to make
- sure Boost.Build does not take away any control from the user.
- </para>
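-    <para>As a brief illustration, a hypothetical Jamfile line using such raw
-    features might look like this:</para>
-
-<programlisting>
-exe app : app.cpp : &lt;cxxflags&gt;-fno-rtti &lt;include&gt;third_party/include ;
-</programlisting>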
-
- <para>
- Using portable features is a good idea because:
- <itemizedlist>
- <listitem>
- <para>When a portable feature is given a fixed set of
- values, you can build your project with two different
- settings of the feature and Boost.Build will automatically
- use two different directories for generated files.
- Boost.Build does not try to separate targets built with
- different raw options.
- <!-- It's a computer program. It doesn't "care" about options -->
- </para>
- </listitem>
-
- <listitem>
-          <para>Unlike with "raw" features, you don't need to use
- specific command-line flags in your Jamfile, and it will be
- more likely to work with other tools.
- </para>
- </listitem>
- </itemizedlist>
- </para>
-
-    <bridgehead>Steps for adding a feature</bridgehead>
- <!-- This section is redundant with the previous one -->
- <para>Adding a feature requires three steps:
-
- <orderedlist>
- <listitem><para>Declaring a feature. For that, the "feature.feature"
- rule is used. You have to decide on the set of <link
- linkend="bbv2.reference.features.attributes">feature
- attributes</link>:
-
- <itemizedlist>
-          <listitem><para>if you want a feature value set for one target
-          to automatically propagate to its dependent targets, then make it
-          "propagated". <!-- Examples needed. --></para></listitem>
-
- <listitem><para>if a feature does not have a fixed list of
-          values, it must be "free". For example, the <code>include
- </code> feature is a free feature.</para></listitem>
-
- <listitem><para>if a feature is used to refer to a path relative
-          to the Jamfile, it must be a "path" feature. Such features will
-          also get their values automatically converted to Boost.Build's
- internal path representation. For example, <code>include</code>
- is a path feature.</para></listitem>
-
-          <listitem><para>if a feature is used to refer to some target, it
-          must be a "dependency" feature. <!-- for example? --></para>
-
- <!-- Any other feature attributes? -->
- </listitem>
- </itemizedlist>
- </para>
- </listitem>
-
- <listitem><para>Representing the feature value in a
- target-specific variable. Build actions are command
- templates modified by Boost.Jam variable expansions. The
- <code>toolset.flags</code> rule sets a target-specific
- variable to the value of a feature.</para></listitem>
-
- <listitem><para>Using the variable. The variable set in step 2 can
- be used in a build action to form command parameters or
- files.</para></listitem>
-
- </orderedlist>
- </para>
-
- <bridgehead>Another example</bridgehead>
-
- <para>Here's another example.
- Let's see how we can make a feature that refers to a target. For example,
- when linking dynamic libraries on Windows, one sometimes needs to
- specify a "DEF file", telling what functions should be exported. It
- would be nice to use this file like this:
-<programlisting>
- lib a : a.cpp : &lt;def-file&gt;a.def ;
-</programlisting>
-<!-- Why would that be nice? It seems to me that having a.def in the sources is the obvious and much nicer thing to do:
-
- lib a : a.cpp a.def ;
--->
- Actually, this feature is already supported, but anyway...
- <!-- Something about saying that is very off-putting. I'm
- sorry that I can't put my finger on it -->
- </para>
-
- <orderedlist>
- <listitem>
- <para>Since the feature refers to a target, it must be "dependency".
-<programlisting>
-feature def-file : : free dependency ;
-</programlisting>
- </para></listitem>
-
- <listitem><para>One of the toolsets that cares about
- <!-- The toolset doesn't "care." What do your really mean? -->
- DEF files is msvc. The following line should be added to it.
- <!-- Are you saying the msvc toolset is broken (or that it
- doesn't use DEF files) as-shipped and the reader needs to
- fix it? -->
-
-<programlisting>
-flags msvc.link DEF_FILE &lt;def-file&gt; ;
-</programlisting>
- <!-- And that line does... what? -->
- </para></listitem>
-
- <listitem><para>Since the DEF_FILE variable is not used by the
-msvc.link action,
-<!-- It's not? You just told us that MSVC "cares" about DEF files. I
- presume that means that it uses them in some appropriate way? -->
-we need to modify it to be:
-
-<programlisting>
-actions link bind DEF_FILE
-{
- $(.LD) .... /DEF:$(DEF_FILE) ....
-}
-</programlisting>
- </para>
-
-
- <para> Note the <code>bind DEF_FILE</code> part. It tells
- bjam to translate the internal target name in
- <varname>DEF_FILE</varname> to a corresponding filename in
- the <code>link</code> action. Without it the expansion of
- <code>$(DEF_FILE)</code> would be a strange symbol that is
- not likely to make sense for the linker.
- </para>
-
- <!-- I have a note here that says: "none of this works for
- targets in general, only source files." I'm not sure
- what I meant by that; maybe you can figure it out. -->
- <para>
- We are almost done, but we should stop for a small workaround. Add the following
- code to msvc.jam
-
-<programlisting>
-rule link
-{
- DEPENDS $(&lt;) : [ on $(&lt;) return $(DEF_FILE) ] ;
-}
-</programlisting>
-<!-- You *must* explain the part in [...] above. It's completely opaque to the casual reader -->
-
-    This is needed to accommodate some bug in bjam, which hopefully
- will be fixed one day.
- <!-- This is *NOT* a bug!! Anyway, BBv2 shouild handle this automatically. Why doesn't it? -->
-</para></listitem>
-
- </orderedlist>
-
- <bridgehead>Variants and composite features.</bridgehead>
-
- <para>Sometimes you want to create a shortcut for some set of
- features. For example, <code>release</code> is a value of
- <code>&lt;variant&gt;</code> and is a shortcut for a set of features.
- </para>
-
- <para>It is possible to define your own build variants. For example:
-<programlisting>
-variant crazy : &lt;optimization&gt;speed &lt;inlining&gt;off
- &lt;debug-symbols&gt;on &lt;profiling&gt;on ;
-</programlisting>
- will define a new variant with the specified set of properties. You
- can also extend an existing variant:
-<programlisting>
-variant super_release : release : &lt;define&gt;USE_ASM ;
-</programlisting>
- In this case, <code>super_release</code> will expand to all properties
- specified by <code>release</code>, and the additional one you've specified.
- </para>
-
- <para>You are not restricted to using the <code>variant</code> feature
- only.
- <!-- What do you mean by that? How is defining a new feature related to what came before? -->
-    Here's an example that defines a brand new feature:
-<programlisting>
-feature parallelism : mpi fake none : composite link-incompatible ;
-feature.compose &lt;parallelism&gt;mpi : &lt;library&gt;/mpi//mpi/&lt;parallelism&gt;none ;
-feature.compose &lt;parallelism&gt;fake : &lt;library&gt;/mpi//fake/&lt;parallelism&gt;none ;
-</programlisting>
-<!-- The use of the <library>/mpi//mpi/<parallelism>none construct
- above is at best confusing and unexplained -->
-    This will allow you to specify the value of the feature
-    <code>parallelism</code>, which will expand to linking against the necessary
-    library.
- </para>
-
- </section>
-
- <section id="bbv2.extending.rules">
- <title>Main target rules</title>
- <para>
-      A main target rule (e.g. "<link linkend="bbv2.tasks.programs">exe</link>"
-      or "<link linkend="bbv2.tasks.libraries">lib</link>") creates a top-level target. It is quite likely that you'll want to declare your own, and
-      there are two ways to do that.
- <!-- Why did "that" get changed to "this" above? -->
- </para>
-
- <para id="bbv2.extending.rules.main-type">The first way applies when
-<!-- This is not a "way of defining a main target rule." Rephrase this and the previous sentence. -->
-    your target rule should just produce a target of a specific type. In that case, a
- rule is already defined for you! When you define a new type, Boost.Build
- automatically defines a corresponding rule. The name of the rule is
- obtained from the name of the type, by downcasing all letters and
- replacing underscores with dashes.
- <!-- This strikes me as needless complexity, and confusing. Why
- do we have the uppercase-underscore convention for target
- types? If we just dropped that, the rule names could be
- the same as the type names. -->
- For example, if you create a module
- <filename>obfuscate.jam</filename> containing:
-
-<programlisting>
-import type ;
-type.register OBFUSCATED_CPP : ocpp ;
-
-import generators ;
-generators.register-standard obfuscate.file : CPP : OBFUSCATED_CPP ;
-</programlisting>
- and import that module, you'll be able to use the rule "obfuscated-cpp"
-    in Jamfiles, which will convert sources to the OBFUSCATED_CPP type.
- </para>
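-    <para>A minimal sketch of using that auto-declared rule in a Jamfile; the
-    target and file names are made up:</para>
-
-<programlisting>
-# Produces hidden.ocpp from hidden.cpp via the obfuscate.file generator.
-obfuscated-cpp hidden : hidden.cpp ;
-</programlisting>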
-
- <para>
- The second way is to write a wrapper rule that calls any of the existing
- rules. For example, suppose you have only one library per directory and
- want all cpp files in the directory to be compiled into that library. You
- can achieve this effect using:
-<programlisting>
-lib codegen : [ glob *.cpp ] ;
-</programlisting>
- If you want to make it even simpler, you could add the following
- definition to the <filename>Jamroot.jam</filename> file:
-<programlisting>
-rule glib ( name : extra-sources * : requirements * )
-{
- lib $(name) : [ glob *.cpp ] $(extra-sources) : $(requirements) ;
-}
-</programlisting>
- allowing you to reduce the Jamfile to just
-<programlisting>
-glib codegen ;
-</programlisting>
- </para>
-
- <para>
- Note that because you can associate a custom generator with a target type,
- the logic of building can be rather complicated. For example, the
- <code>boostbook</code> module declares a target type
- <code>BOOSTBOOK_MAIN</code> and a custom generator for that type. You can
- use that as example if your main target rule is non-trivial.
- </para>
- </section>
-
- <section id="bbv2.extending.toolset_modules">
-
- <title>Toolset modules</title>
-
- <para>
- If your extensions will be used only on one project, they can be placed in
- a separate <filename>.jam</filename> file and imported by your
- <filename>Jamroot.jam</filename>. If the extensions will be used on many
- projects, users will thank you for a finishing touch.
- </para>
-
- <para>The <code>using</code> rule provides a standard mechanism
- for loading and configuring extensions. To make it work, your module
- <!-- "module" hasn't been defined yet. Furthermore you haven't
- said anything about where that module file must be
- placed. -->
- should provide an <code>init</code> rule. The rule will be called
- with the same parameters that were passed to the
- <code>using</code> rule. The set of allowed parameters is
- determined by you. For example, you can allow the user to specify
- paths, tool versions, and other options.
- <!-- But it's not entirely arbitrary. We have a standard
- parameter order which you should describe here for
- context. -->
- </para>
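-    <para>For illustration, a minimal sketch of such a module (a hypothetical
-    <filename>yfc.jam</filename>); a real module would normally also use the
-    helpers from the <code>common</code> module:</para>
-
-<programlisting>
-# yfc.jam, a hypothetical toolset module skeleton.
-rule init ( version ? : command * : options * )
-{
-    # Fall back to the bare tool name when no command was given.
-    command ?= yfc ;
-    .command = $(command) ;
-}
-</programlisting>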
-
- <para>Here are some guidelines that help to make Boost.Build more
- consistent:
- <itemizedlist>
- <listitem><para>The <code>init</code> rule should never fail. Even if
- the user provided an incorrect path, you should emit a warning and go
- on. Configuration may be shared between different machines, and
- wrong values on one machine can be OK on another.
- <!-- So why shouldn't init fail on machines where it's wrong?? -->
- </para></listitem>
-
- <listitem><para>Prefer specifying the command to be executed
- to specifying the tool's installation path. First of all, this
- gives more control: it's possible to specify
-<programlisting>
-/usr/bin/g++-snapshot
-time g++
-<!-- Is this meant to be a single command? If not, insert "or" -->
-</programlisting>
- as the command. Second, while some tools have a logical
- "installation root", it's better if the user doesn't have to remember whether
- a specific tool requires a full command or a path.
- <!-- But many tools are really collections: e.g. a
- compiler, a linker, and others. The idea that the
- "command to invoke" has any significance may be
- completely bogus. Plus if you want to allow "time
- /usr/bin/g++" the toolset may need to somehow parse
- the command and find the path when it needs to invoke
- some related executable. And in that case, will the
- command be ignored? This scheme doesn't scale and
- should be fixed. -->
- </para></listitem>
-
- <listitem><para>Check for multiple initialization. A user can try to
- initialize the module several times. You need to check for this
- and decide what to do. Typically, unless you support several
- versions of a tool, duplicate initialization is a user error.
- <!-- Why should that be typical? -->
- If the
- tool's version can be specified during initialization, make sure the
- version is either always specified, or never specified (in which
-      case the tool is initialized only once). For example, if you allow:
-<programlisting>
-using yfc ;
-using yfc : 3.3 ;
-using yfc : 3.4 ;
-</programlisting>
-      then it's not clear whether the first initialization corresponds to
- version 3.3 of the tool, version 3.4 of the tool, or some other
- version. This can lead to building twice with the same version.
- <!-- That would not be so terrible, and is much less harmful
- than this restriction, IMO. It makes site-config
- harder to maintain than necessary. -->
- </para></listitem>
-
- <listitem><para>If possible, <code>init</code> must be callable
-      with no parameters, in which case it should try to autodetect all
- the necessary information, for example, by looking for a tool in
- <envar>PATH</envar> or in common installation locations. Often this
- is possible and allows the user to simply write:
-<programlisting>
-using yfc ;
-</programlisting>
- </para></listitem>
-
- <listitem><para>Consider using facilities in the
- <code>tools/common</code> module. You can take a look at how
- <code>tools/gcc.jam</code> uses that module in the <code>init</code> rule.
- </para></listitem>
-
- </itemizedlist>
- </para>
-
-
-
-
- </section>
-
- </chapter>
-
-<!--
- Local Variables:
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
diff --git a/tools/build/v2/doc/src/faq.xml b/tools/build/v2/doc/src/faq.xml
deleted file mode 100644
index a648a3761c..0000000000
--- a/tools/build/v2/doc/src/faq.xml
+++ /dev/null
@@ -1,458 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<chapter id="bbv2.faq">
- <title>Frequently Asked Questions</title>
-
- <section id="bbv2.faq.featurevalue">
- <title>
-      How do I get the current value of a feature in a Jamfile?
- </title>
-
- <para>
-      This is not possible, since a Jamfile does not have a "current" value of
-      any feature, be it toolset, build variant or anything else. For a single
-      run of Boost.Build, any given main target can be built with several
-      property sets. For example, a user can request two build variants on the
-      command line, or one library may be built as shared when used from one
-      application and as static when used from another. Each Jamfile is read
-      only once, so generally there is no single value of a feature you can
-      access in a Jamfile.
- </para>
-
- <para>
- A feature has a specific value only when building a target, and there are
- two ways you can use that value:
- </para>
-
- <itemizedlist>
- <listitem>
- <simpara>
-          Use conditional requirements or indirect conditional requirements, as
-          sketched after this list. See
-          <xref linkend="bbv2.overview.targets.requirements.conditional"/>.
- </simpara>
- </listitem>
-      <listitem>
-        <simpara>
-          Define a custom generator and a custom main target type. The custom
-          generator can do arbitrary processing of properties. See the <xref
-          linkend="bbv2.extender">extender manual</xref>.
-        </simpara>
-      </listitem>
- </itemizedlist>
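-
-    <para>
-      As a sketch of the first approach (the <code>APP_ENABLE_TRACING</code>
-      macro name is purely illustrative), a conditional requirement lets a
-      target react to the <literal>variant</literal> feature without the
-      Jamfile ever reading its value:
-<programlisting>
-exe app : app.cpp : &lt;variant&gt;debug:&lt;define&gt;APP_ENABLE_TRACING ;
-</programlisting>
-    </para>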
- </section>
-
- <section id="bbv2.faq.duplicate">
- <title>
- I am getting a "Duplicate name of actual target" error. What does that
- mean?
- </title>
-
- <para>
- The most likely case is that you are trying to compile the same file
- twice, with almost the same, but differing properties. For example:
-<programlisting>
-exe a : a.cpp : &lt;include&gt;/usr/local/include ;
-exe b : a.cpp ;
-</programlisting>
- </para>
-
- <para>
- The above snippet requires two different compilations of
- <code>a.cpp</code>, which differ only in their <literal>include</literal>
- property. Since the <literal>include</literal> feature is declared as
-      <literal>free</literal>, Boost.Build does not create a separate build
- directory for each of its values and those two builds would both produce
- object files generated in the same build directory. Ignoring this and
- compiling the file only once would be dangerous as different includes
- could potentially cause completely different code to be compiled.
- </para>
-
- <para>
- To solve this issue, you need to decide if the file should be compiled
- once or twice.
- </para>
-
- <orderedlist>
- <listitem>
- <para>
- To compile the file only once, make sure that properties are the same
- for both target requests:
-<programlisting>
-exe a : a.cpp : &lt;include&gt;/usr/local/include ;
-exe b : a.cpp : &lt;include&gt;/usr/local/include ;
-</programlisting>
- or:
-<programlisting>
-alias a-with-include : a.cpp : &lt;include&gt;/usr/local/include ;
-exe a : a-with-include ;
-exe b : a-with-include ;
-</programlisting>
-          or if you want the <literal>include</literal> property not to affect
- how any other sources added for the built <code>a</code> and
- <code>b</code> executables would be compiled:
-<programlisting>
-obj a-obj : a.cpp : &lt;include&gt;/usr/local/include ;
-exe a : a-obj ;
-exe b : a-obj ;
-</programlisting>
- </para>
- <para>
-          Note that in all of these cases the <literal>include</literal>
- property will be applied only for building these object files and not
- any other sources that might be added for targets <code>a</code> and
- <code>b</code>.
- </para>
- </listitem>
-
- <listitem>
- <para>
- To compile the file twice, you can tell Boost.Build to compile it to
- two separate object files like so:
-<programlisting>
- obj a_obj : a.cpp : &lt;include&gt;/usr/local/include ;
- obj b_obj : a.cpp ;
- exe a : a_obj ;
- exe b : b_obj ;
-</programlisting>
- or you can make the object file targets local to the main target:
-<programlisting>
- exe a : [ obj a_obj : a.cpp : &lt;include&gt;/usr/local/include ] ;
- exe b : [ obj a_obj : a.cpp ] ;
-</programlisting>
- which will cause Boost.Build to actually change the generated object
- file names a bit for you and thus avoid any conflicts.
- </para>
- <para>
- Note that in both of these cases the <literal>include</literal>
- property will be applied only for building these object files and not
- any other sources that might be added for targets <code>a</code> and
- <code>b</code>.
- </para>
- </listitem>
- </orderedlist>
-
- <para>
-      A good question is why Boost.Build cannot use some of the above
-      approaches automatically. The problem is that such magic would only help
-      in half of the cases, while in the other half it would be silently doing
-      the wrong thing. It is simpler and safer to ask the user to clarify their
-      intention in such cases.
- </para>
- </section>
-
- <section id="bbv2.faq.envar">
- <title>
- Accessing environment variables
- </title>
-
- <para>
- Many users would like to use environment variables in Jamfiles, for
- example, to control the location of external libraries. In many cases it
- is better to declare those external libraries in the site-config.jam file,
- as documented in the <link linkend="bbv2.recipies.site-config">recipes
- section</link>. However, if the users already have the environment
- variables set up, it may not be convenient for them to set up their
-      site-config.jam files as well, and using the environment variables might
-      be reasonable.
- </para>
-
- <para>
- Boost.Jam automatically imports all environment variables into its
-      built-in .ENVIRON module, so users can read them from there directly, or
-      by using the helper os.environ rule. For example:
-<programlisting>
-import os ;
-local unga-unga = [ os.environ UNGA_UNGA ] ;
-ECHO $(unga-unga) ;
-</programlisting>
- or a bit more realistic:
-<programlisting>
-import os ;
-local SOME_LIBRARY_PATH = [ os.environ SOME_LIBRARY_PATH ] ;
-exe a : a.cpp : &lt;include&gt;$(SOME_LIBRARY_PATH) ;
-</programlisting>
- </para>
- </section>
-
- <section id="bbv2.faq.proporder">
- <title>
- How to control properties order?
- </title>
-
- <para>
- For internal reasons, Boost.Build sorts all the properties alphabetically.
- This means that if you write:
-<programlisting>
-exe a : a.cpp : &lt;include&gt;b &lt;include&gt;a ;
-</programlisting>
-      then the command line will first mention the <code>a</code> include
- directory, and then <code>b</code>, even though they are specified in the
- opposite order. In most cases, the user does not care. But sometimes the
- order of includes, or other properties, is important. For such cases, a
- special syntax is provided:
-<programlisting>
-exe a : a.cpp : &lt;include&gt;a&amp;&amp;b ;
-</programlisting>
- </para>
-
- <para>
- The <code>&amp;&amp;</code> symbols separate property values and specify
- that their order should be preserved. You are advised to use this feature
- only when the order of properties really matters and not as a convenient
- shortcut. Using it everywhere might negatively affect performance.
- </para>
- </section>
-
- <section id="bbv2.faq.liborder">
- <title>
- How to control the library linking order on Unix?
- </title>
-
- <para>
- On Unix-like operating systems, the order in which static libraries are
- specified when invoking the linker is important, because by default, the
-      linker uses one pass through the libraries list. Passing the libraries in
-      the incorrect order will lead to a link error. Further, this behaviour is
-      often used to make one library override symbols from another. So,
-      sometimes it is necessary to force a specific library linking order.
- </para>
-
- <para>
- Boost.Build tries to automatically compute the right order. The primary
- rule is that if library <code>a</code> "uses" library <code>b</code>, then
- library <code>a</code> will appear on the command line before library
- <code>b</code>. Library <code>a</code> is considered to use <code>b</code>
-      if <code>b</code> appears either among the sources of <code>a</code> or
-      in the requirements of <code>a</code>. To explicitly specify
- the <literal>use</literal> relationship one can use the
- <literal>&lt;use&gt;</literal> feature. For example, both of the following
- lines will cause <code>a</code> to appear before <code>b</code> on the
- command line:
-<programlisting>
-lib a : a.cpp b ;
-lib a : a.cpp : &lt;use&gt;b ;
-</programlisting>
- </para>
-
- <para>
- The same approach works for searched libraries as well:
-<programlisting>
-lib z ;
-lib png : : &lt;use&gt;z ;
-exe viewer : viewer png z ;
-</programlisting>
- </para>
- </section>
-
- <section id="bbv2.faq.external">
- <title>
-      Can I capture external program output in a Boost.Jam variable?
- </title>
-
- <para>
- The <literal>SHELL</literal> builtin rule may be used for this purpose:
-<programlisting>
-local gtk_includes = [ SHELL "gtk-config --cflags" ] ;
-</programlisting>
- </para>
- </section>
-
- <section id="bbv2.faq.projectroot">
- <title>
- How to get the project root (a.k.a. Jamroot) location?
- </title>
-
- <para>
- You might want to use your project's root location in your Jamfiles. To
-      access it, just declare a path constant in your Jamroot.jam file using:
-<programlisting>
-path-constant TOP : . ;
-</programlisting>
- After that, the <code>TOP</code> variable can be used in every Jamfile.
- </para>
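-
-    <para>
-      For instance (the <filename>include</filename> subdirectory here is just
-      an illustration), a Jamfile anywhere in the project could then write:
-<programlisting>
-exe app : app.cpp : &lt;include&gt;$(TOP)/include ;
-</programlisting>
-    </para>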
- </section>
-
- <section id="bbv2.faq.flags">
- <title>
- How to change compilation flags for one file?
- </title>
-
- <para>
- If one file must be compiled with special options, you need to explicitly
- declare an <code>obj</code> target for that file and then use that target
- in your <code>exe</code> or <code>lib</code> target:
-<programlisting>
-exe a : a.cpp b ;
-obj b : b.cpp : &lt;optimization&gt;off ;
-</programlisting>
-      Of course, you can use other properties, for example to specify particular
- C/C++ compiler options:
-<programlisting>
-exe a : a.cpp b ;
-obj b : b.cpp : &lt;cflags&gt;-g ;
-</programlisting>
- You can also use <link linkend="bbv2.tutorial.conditions">conditional
- properties</link> for finer control:
-<programlisting>
-exe a : a.cpp b ;
-obj b : b.cpp : &lt;variant&gt;release:&lt;optimization&gt;off ;
-</programlisting>
- </para>
- </section>
-
- <section id="bbv2.faq.dll-path">
- <title>
- Why are the <literal>dll-path</literal> and <literal>hardcode-dll-paths
- </literal> properties useful?
- </title>
- <note>
- <para>
- This entry is specific to Unix systems.
- </para>
- </note>
- <para>
- Before answering the questions, let us recall a few points about shared
- libraries. Shared libraries can be used by several applications, or other
-      libraries, without physically including the library in the application,
-      which can greatly decrease the total application size. It is also possible
- to upgrade a shared library when the application is already installed.
- </para>
-
- <para>
-      However, in order for an application depending on shared libraries to be
-      started, the OS may need to find the shared library when the application
-      starts. The dynamic linker will search in a system-defined list of paths,
-      load the library and resolve the symbols. This means that you should
-      either change the system-defined list, given by the <envar>LD_LIBRARY_PATH
- </envar> environment variable, or install the libraries to a system
- location. This can be inconvenient when developing, since the libraries
- are not yet ready to be installed, and cluttering system paths may be
- undesirable. Luckily, on Unix there is another way.
- </para>
-
- <para>
- An executable can include a list of additional library paths, which will
- be searched before system paths. This is excellent for development because
- the build system knows the paths to all libraries and can include them in
- the executables. That is done when the <literal>hardcode-dll-paths
- </literal> feature has the <literal>true</literal> value, which is the
- default. When the executables should be installed, the story is different.
- </para>
-
- <para>
-      Obviously, an installed executable should not contain hardcoded paths to your
- development tree. <!-- Make the following parenthised sentence a footer
- note --> (The <literal>install</literal> rule explicitly disables the
- <literal>hardcode-dll-paths</literal> feature for that reason.) However,
- you can use the <literal>dll-path</literal> feature to add explicit paths
- manually. For example:
-<programlisting>
-install installed : application : &lt;dll-path&gt;/usr/lib/snake
- &lt;location&gt;/usr/bin ;
-</programlisting>
- will allow the application to find libraries placed in the <filename>
- /usr/lib/snake</filename> directory.
- </para>
-
- <para>
- If you install libraries to a nonstandard location and add an explicit
-      path, you get more control over which libraries will be used. A library of
- the same name in a system location will not be inadvertently used. If you
- install libraries to a system location and do not add any paths, the
- system administrator will have more control. Each library can be
- individually upgraded, and all applications will use the new library.
- </para>
-
- <para>
- Which approach is best depends on your situation. If the libraries are
- relatively standalone and can be used by third party applications, they
- should be installed in the system location. If you have lots of libraries
- which can be used only by your application, it makes sense to install them
- to a nonstandard directory and add an explicit path, like the example
- above shows. Please also note that guidelines for different systems differ
- in this respect. For example, the Debian GNU guidelines prohibit any
- additional search paths while Solaris guidelines suggest that they should
- always be used.
- </para>
- </section>
-
- <section id="bbv2.recipies.site-config">
- <title>Targets in site-config.jam</title>
-
- <para>
- It is desirable to declare standard libraries available on a given system.
-      Putting such target declarations in a specific project's Jamfile is not
-      really good, since the locations of the libraries can vary between
-      different development machines and such declarations would then need to
-      be duplicated in different projects. The solution is to declare the targets
- in Boost.Build's <filename>site-config.jam</filename> configuration file:
-<programlisting>
-project site-config ;
-lib zlib : : &lt;name&gt;z ;
-</programlisting>
- </para>
-
- <para>
- Recall that both <filename>site-config.jam</filename> and
- <filename>user-config.jam</filename> are projects, and everything you can
- do in a Jamfile you can do in those files as well. So, you declare a
- project id and a target. Now, one can write:
-<programlisting>
-exe hello : hello.cpp /site-config//zlib ;
-</programlisting>
- in any Jamfile.
- </para>
- </section>
-
- <section id="bbv2.faq.header-only-libraries">
- <title>Header-only libraries</title>
-
- <para>
- In modern C++, libraries often consist of just header files, without any
- source files to compile. To use such libraries, you need to add proper
- includes and possibly defines to your project. But with a large number of
- external libraries it becomes problematic to remember which libraries are
- header only, and which ones you have to link to. However, with Boost.Build
-      a header-only library can be declared as a Boost.Build target and all
-      dependents can use such a library without having to remember whether it is
-      a header-only library or not.
- </para>
-
- <para>
- Header-only libraries may be declared using the <code>alias</code> rule,
- specifying their include path as a part of its usage requirements, for
- example:
-<programlisting>
-alias my-lib
- : # no sources
- : # no build requirements
- : # no default build
- : &lt;include&gt;whatever ;
-</programlisting>
-      The includes specified in the usage requirements of <code>my-lib</code>
-      are automatically added to all of its dependants' build properties. The
-      dependants need not care whether <code>my-lib</code> is header-only or
-      not, and it is possible to later turn <code>my-lib</code> into a regular
-      compiled library without having to change its dependants' declarations.
- </para>
-
- <para>
- If you already have proper usage requirements declared for a project where
- a header-only library is defined, you do not need to duplicate them for
- the <code>alias</code> target:
-<programlisting>
-project my : usage-requirements &lt;include&gt;whatever ;
-alias mylib ;
-</programlisting>
- </para>
- </section>
-</chapter>
-
-<!--
- Local Variables:
- mode: nxml
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
diff --git a/tools/build/v2/doc/src/install.xml b/tools/build/v2/doc/src/install.xml
deleted file mode 100644
index 545f786695..0000000000
--- a/tools/build/v2/doc/src/install.xml
+++ /dev/null
@@ -1,150 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
- <chapter id="bbv2.installation">
- <title>Installation</title>
-
- <para>
- To install Boost.Build from an official release or a nightly build, as
- available on the <ulink url="http://boost.org/boost-build2">official web site</ulink>,
- follow these steps:
- </para>
-
- <orderedlist>
- <listitem>
- <simpara>
- Unpack the release. On the command line, go to the root of the
- unpacked tree.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Run either <command>.\bootstrap.bat</command> (on Windows), or
- <command>./bootstrap.sh</command> (on other operating systems).
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Run
- <screen>./b2 install --prefix=<replaceable>PREFIX</replaceable></screen>
- where <replaceable>PREFIX</replaceable> is a directory where you
- want Boost.Build to be installed.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- Optionally, add <filename><replaceable>PREFIX</replaceable>/bin</filename>
- to your <envar>PATH</envar> environment variable.
- </simpara>
- </listitem>
- </orderedlist>
-
- <para>If you are not using a Boost.Build package, but rather the version
- bundled with the Boost C++ Libraries, the above commands should be run
- in the <filename>tools/build/v2</filename> directory.</para>
-
- <para>
- Now that Boost.Build is installed, you can try some of the examples. Copy
- <filename><replaceable>PREFIX</replaceable>/share/boost-build/examples/hello</filename>
- to a different directory, then change to that directory and run:
-<screen><filename><replaceable>PREFIX</replaceable>/bin/b2</filename></screen>
- A simple executable should be built.
- </para>
-
- <!--
- <simpara>
- Configure Boost.Build to recognize the build resources (such
- as compilers and libraries) you have installed on your
- system. Open the
- <filename>user-config.jam</filename> file in the Boost.Build
- root directory and follow the instructions there to describe
- your toolsets and libraries, and, if necessary, where they
- are located.
- </simpara>
- </listitem>
- -->
-
-  <!-- This part should not go into introduction docs, but we need to
-       place it somewhere.
-
-    <para>A slightly better way is to copy
-      <filename>new/user-config.jam</filename> into one of the locations
-      where it can be found (given in <link linkend=
-      "bbv2.reference.init.config">this table</link>). This prevents you
-      from accidentally overwriting your config when updating.</para>
-
- -->
-
- <!--
- <bridgehead>Information for distributors</bridgehead>
-
- <para>
- If you're planning to package Boost.Build for a Linux distribution,
- please follow these guidelines:
-
- <itemizedlist>
- <listitem><para>Create a separate package for Boost.Jam.</para></listitem>
-
- <listitem>
- <para>Create another package for Boost.Build, and make
- this package install all Boost.Build files to
- <filename>/usr/share/boost-build</filename> directory. After
- install, that directory should contain everything you see in
- Boost.Build release package, except for
- <filename>jam_src</filename> directory. If you're using Boost CVS
- to obtain Boost.Build, as opposed to release package, take
- everything from the <filename>tools/build/v2</filename> directory.
- For a check, make sure that
- <filename>/usr/share/boost-build/boost-build.jam</filename> is installed.
- </para>
-
- <para>Placing Boost.Build into <filename>/usr/share/boost-build</filename>
- will make sure that <command>bjam</command> will find Boost.Build
- without any additional setup.</para>
- </listitem>
-
- <listitem><para>Provide a
- <filename>/etc/site-config.jam</filename> configuration file that will
- contain:
-<programlisting>
-using gcc ;
-</programlisting>
- You might want to add dependency from Boost.Build package to gcc,
- to make sure that users can always build Boost.Build examples.
- </para></listitem>
- </itemizedlist>
- </para>
-
- <para>If those guidelines are met, users will be able to invoke
- <command>bjam</command> without any explicit configuration.
- </para>
-
-
- -->
-
-
- </chapter>
-
-<!--
- Local Variables:
- mode: nxml
- sgml-indent-data:t
- sgml-parent-document:("userman.xml" "chapter")
- sgml-set-face: t
- sgml-omittag:nil
- sgml-shorttag:nil
- sgml-namecase-general:t
- sgml-general-insert-case:lower
- sgml-minimize-attributes:nil
- sgml-always-quote-attributes:t
- sgml-indent-step:2
- sgml-exposed-tags:nil
- sgml-local-catalogs:nil
- sgml-local-ecat-files:nil
- End:
--->
-
diff --git a/tools/build/v2/doc/src/overview.xml b/tools/build/v2/doc/src/overview.xml
deleted file mode 100644
index 90b7c27227..0000000000
--- a/tools/build/v2/doc/src/overview.xml
+++ /dev/null
@@ -1,1684 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
- <chapter id="bbv2.overview">
- <title>Overview</title>
-
- <para>
- This section will provide the information necessary to create your own
- projects using Boost.Build. The information provided here is relatively
- high-level, and <xref linkend="bbv2.reference"/> as well as the on-line
- help system must be used to obtain low-level documentation (see <xref
- linkend="bbv2.reference.init.options.help"/>).
- </para>
-
- <para>
-    Boost.Build actually consists of two parts: Boost.Jam, a build engine
- with its own interpreted language, and Boost.Build itself, implemented in
- Boost.Jam's language. The chain of events when you type
- <command>b2</command> on the command line is as follows:
- <orderedlist>
- <listitem>
- <para>
- The Boost.Build executable tries to find Boost.Build modules and
- loads the top-level module. The exact process is described in <xref linkend=
- "bbv2.reference.init"/>
- </para>
- </listitem>
- <listitem>
- <para>
- The top-level module loads user-defined configuration files,
- <filename>user-config.jam</filename> and
- <filename>site-config.jam</filename>, which define available toolsets.
- </para>
- </listitem>
- <listitem>
- <para>
- The Jamfile in the current directory is read. That in turn might
- cause reading of further Jamfiles. As a result, a tree of projects
- is created, with targets inside projects.
- </para>
- </listitem>
- <listitem>
- <para>
- Finally, using the build request specified on the command line,
- Boost.Build decides which targets should be built and how. That
- information is passed back to Boost.Jam, which takes care of
- actually running the scheduled build action commands.
- </para>
- </listitem>
- </orderedlist>
- </para>
-
- <para>
- So, to be able to successfully use Boost.Build, you need to know only four
- things:
- <itemizedlist>
- <listitem>
- <para>
- <link linkend="bbv2.overview.configuration">How to configure
- Boost.Build</link>
- </para>
- </listitem>
- <listitem>
- <para>
- <link linkend="bbv2.overview.targets">How to declare targets in
- Jamfiles</link>
- </para>
- </listitem>
- <listitem>
- <para>
- <link linkend="bbv2.overview.build_process">How the build process
- works</link>
- </para>
- </listitem>
- <listitem>
- <para>
-          Some basics of the Boost.Jam language. See <xref linkend=
- "bbv2.overview.jam_language"/>.
- </para>
- </listitem>
- </itemizedlist>
- </para>
-
- <section id="bbv2.overview.concepts">
- <title>Concepts</title>
-
- <para>Boost.Build has a few unique concepts that are introduced in this section. The best
- way to explain the concepts is by comparison with more classical build tools.</para>
-
- <para>
- When using any flavour of make, you directly specify <firstterm>targets</firstterm>
-    and commands that are used to create them from other targets. The example below
- creates <filename>a.o</filename> from <filename>a.c</filename> using a hardcoded
- compiler invocation command.
-<programlisting>
-a.o: a.c
- g++ -o a.o -g a.c
-</programlisting>
- This is a rather low-level description mechanism and it's hard to adjust commands, options,
- and sets of created targets depending on the compiler and operating system used.
- </para>
-
- <para>
-    To improve portability, most modern build systems provide a set of higher-level
-    functions that can be used in build description files. Consider this example:
-<programlisting>
-add_program ("a", "a.c")
-</programlisting>
-    This is a function call that creates the targets necessary to create an executable file
-    from the source file <filename>a.c</filename>. Depending on configured properties,
-    different command lines may be used. However, while <code>add_program</code> is
-    higher-level, it is still a rather thin layer. All targets are created immediately when
-    the build description is parsed, which makes it impossible to perform multi-variant
-    builds. Often, a change in any build property requires a complete reconfiguration of
-    the build tree.
- </para>
-
- <para>
- In order to support true multivariant builds, Boost.Build introduces the concept of a
- <indexterm> <primary>metatarget</primary> <secondary>definition</secondary></indexterm>
-    <indexterm> <primary>main target</primary> <see>metatarget</see> </indexterm>
- <firstterm>metatarget</firstterm>&mdash;an object that is created when the build description
- is parsed and can be called later with specific build properties to generate
- actual targets.
- </para>
-
- <para>
- Consider an example:
-<programlisting>
-exe a : a.cpp ;
-</programlisting>
- When this declaration is parsed, Boost.Build creates a metatarget, but does not
- yet decide what files must be created, or what commands must be used. After
- all build files are parsed, Boost.Build considers the properties requested on the
-    command line. Suppose you have invoked Boost.Build with:
-<screen>
-b2 toolset=gcc toolset=msvc
-</screen>
- In that case, the metatarget will be called twice, once with <code>toolset=gcc</code>
- and once with <code>toolset=msvc</code>. Both invocations will produce concrete
-    targets that will have different extensions and use different command lines.
- </para>
-
- <para>
- Another key concept is
- <indexterm><primary>property</primary><secondary>definition</secondary></indexterm>
- <firstterm>build property</firstterm>. A build property is a variable
- that affects the build process. It can be specified on the command line, and is
- passed when calling a metatarget. While all build tools have a similar mechanism,
- Boost.Build differs by requiring that all build properties are declared in advance,
- and providing a large set of properties with portable semantics.
- </para>
-
- <para>
- The final concept is <indexterm><primary>property</primary><secondary>propagation</secondary></indexterm>
- <firstterm>property propagation</firstterm>. Boost.Build does not require that every
- metatarget is called with the same properties. Instead, the
- "top-level" metatargets are called with the properties specified on the command line.
- Each metatarget can elect to augment or override some properties (in particular,
- using the requirements mechanism, see <xref linkend="bbv2.overview.targets.requirements"/>).
- Then, the dependency metatargets are called with the modified properties and produce
- concrete targets that are then used in the build process. Of course, dependency metatargets
-    may in turn modify build properties and have dependencies of their own.
- </para>
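-
-    <para>
-    As a small sketch of this mechanism (the target names are illustrative),
-    the executable below is built with the properties requested on the command
-    line, while its dependency overrides one of them through its own
-    requirements:
-<programlisting>
-exe app : app.cpp util ;
-# util is always built as a static library, even when the command line
-# requests link=shared for the rest of the build.
-lib util : util.cpp : &lt;link&gt;static ;
-</programlisting>
-    </para>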
-
- <para>For a more in-depth treatment of the requirements and concepts, you may refer
- to <ulink url="http://syrcose.ispras.ru/2009/files/04_paper.pdf">SYRCoSE 2009 Boost.Build article</ulink>.
- </para>
-
- </section>
-
- <section id="bbv2.overview.jam_language">
- <title>Boost.Jam Language</title>
-
- <para>
- This section will describe the basics of the Boost.Jam language&#x2014;just
- enough for writing Jamfiles. For more information, please see the
- <link linkend="bbv2.jam">Boost.Jam</link> documentation.
- </para>
-
- <para>
- <link linkend="bbv2.jam">Boost.Jam</link> has an interpreted, procedural
- language. On the lowest level, a <link linkend="bbv2.jam">Boost.Jam
- </link> program consists of variables and <indexterm><primary>rule
- </primary></indexterm> <firstterm>rules</firstterm> (the Jam term for
- functions). They are grouped into modules&#x2014;there is one global
- module and a number of named modules. Besides that, a <link linkend=
- "bbv2.jam">Boost.Jam</link> program contains classes and class
- instances.
- </para>
-
- <para>
-      Syntactically, a <link linkend="bbv2.jam">Boost.Jam</link> program
-      consists of two kinds of elements&#x2014;keywords (which have a special
- meaning to <link linkend="bbv2.jam">Boost.Jam</link>) and literals.
- Consider this code:
-<programlisting>
-a = b ;
-</programlisting>
- which assigns the value <literal>b</literal> to the variable <literal>a
- </literal>. Here, <literal>=</literal> and <literal>;</literal> are
- keywords, while <literal>a</literal> and <literal>b</literal> are
- literals.
- <warning>
- <para>
- All syntax elements, even keywords, must be separated by spaces. For
- example, omitting the space character before <literal>;</literal>
- will lead to a syntax error.
- </para>
- </warning>
- If you want to use a literal value that is the same as some keyword, the
- value can be quoted:
-<programlisting>
-a = "=" ;
-</programlisting>
- </para>
-
- <para>
- All variables in <link linkend="bbv2.jam">Boost.Jam</link> have the same
-      type&#x2014;a list of strings. To define a variable, one assigns a value to
-      it, as in the previous example. An undefined variable is the same as a
- variable with an empty value. Variables can be accessed using the
- <code>$(<replaceable>variable</replaceable>)</code> syntax. For example:
-<programlisting>
-a = $(b) $(c) ;
-</programlisting>
- </para>
-
- <para>
- Rules are defined by specifying the rule name, the parameter names, and
- the allowed value list size for each parameter.
-<programlisting>
-rule <replaceable>example</replaceable>
- (
- <replaceable>parameter1</replaceable> :
- <replaceable>parameter2 ?</replaceable> :
- <replaceable>parameter3 +</replaceable> :
- <replaceable>parameter4 *</replaceable>
- )
- {
- # rule body
- }
- </programlisting>
- When this rule is called, the list passed as the first argument must
- have exactly one value. The list passed as the second argument can
-      either have one value or be empty. The two remaining arguments can be
- arbitrarily long, but the third argument may not be empty.
- </para>
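-
-    <para>
-      For instance (using the placeholder rule name literally), a valid call
-      could leave the second argument empty and omit the fourth one entirely:
-<programlisting>
-example first-value : : third-a third-b ;
-</programlisting>
-    </para>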
-
- <para>
-      An overview of the <link linkend="bbv2.jam">Boost.Jam</link> language
- statements is given below:
-<programlisting>
-helper 1 : 2 : 3 ;
-x = [ helper 1 : 2 : 3 ] ;
-</programlisting>
- This code calls the named rule with the specified arguments. When the
- result of the call must be used inside some expression, you need to add
-      brackets around the call, as shown on the second line.
-<programlisting>
-if cond { statements } [ else { statements } ]
-</programlisting>
- This is a regular if-statement. The condition is composed of:
- <itemizedlist>
- <listitem>
- <para>
- Literals (true if at least one string is not empty)
- </para>
- </listitem>
- <listitem>
- <para>
- Comparisons: <code>a <replaceable>operator</replaceable> b</code>
- where <replaceable>operator</replaceable> is one of
- <code>=</code>, <code>!=</code>, <code>&lt;</code>,
- <code>&gt;</code>, <code>&lt;=</code> or <code>&gt;=</code>. The
- comparison is done pairwise between each string in the left and
- the right arguments.
- </para>
- </listitem>
- <listitem>
- <para>
- Logical operations: <code>! a</code>, <code>a &amp;&amp; b</code>,
- <code>a || b</code>
- </para>
- </listitem>
- <listitem>
- <para>
- Grouping: <code>( cond )</code>
- </para>
- </listitem>
- </itemizedlist>
-<programlisting>
-for var in list { statements }
-</programlisting>
- Executes statements for each element in list, setting the variable
- <varname>var</varname> to the element value.
-<programlisting>
-while cond { statements }
-</programlisting>
- Repeatedly execute statements while cond remains true upon entry.
-<programlisting>
-return values ;
-</programlisting>
- This statement should be used only inside a rule and assigns
- <code>values</code> to the return value of the rule.
- <warning>
- <para>
- The <code>return</code> statement does not exit the rule. For
- example:
-<programlisting>
-rule test ( )
-{
- if 1 = 1
- {
- return "reasonable" ;
- }
- return "strange" ;
-}
-</programlisting>
- will return <literal>strange</literal>, not
- <literal>reasonable</literal>.
- </para>
- </warning>
-<programlisting>
-import <replaceable>module</replaceable> ;
-import <replaceable>module</replaceable> : <replaceable>rule</replaceable> ;
-</programlisting>
- The first form imports the specified module. All rules from that
- module are made available using the qualified name: <code><replaceable>
- module</replaceable>.<replaceable>rule</replaceable></code>. The second
- form imports the specified rules only, and they can be called using
- unqualified names.
- </para>
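-
-    <para>
-      As an illustrative sketch (the rule name and its purpose are invented
-      for this example), the statements above can be combined into an ordinary
-      rule:
-<programlisting>
-# Returns the elements of "sources" that are not listed in "exclude".
-rule filter-out ( sources * : exclude * )
-{
-    local result ;
-    for local s in $(sources)
-    {
-        if ! ( $(s) in $(exclude) )
-        {
-            result += $(s) ;
-        }
-    }
-    return $(result) ;
-}
-</programlisting>
-    </para>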
-
- <para id="bbv2.overview.jam_language.actions">
- Sometimes, you need to specify the actual command lines to be used
-      when creating targets. In the Jam language, you use named actions to do
- this. For example:
-<programlisting>
-actions create-file-from-another
-{
- create-file-from-another $(&lt;) $(&gt;)
-}
-</programlisting>
- This specifies a named action called <literal>
- create-file-from-another</literal>. The text inside braces is the
- command to invoke. The <literal>$(&lt;)</literal> variable will be
- expanded to a list of generated files, and the <literal>$(&gt;)
- </literal> variable will be expanded to a list of source files.
- </para>
-
- <para>
- To adjust the command line flexibly, you can define a rule with the same
- name as the action and taking three parameters&mdash;targets, sources and
- properties. For example:
-<programlisting>
-rule create-file-from-another ( targets * : sources * : properties * )
-{
- if &lt;variant&gt;debug in $(properties)
- {
- OPTIONS on $(targets) = --debug ;
- }
-}
-actions create-file-from-another
-{
- create-file-from-another $(OPTIONS) $(&lt;) $(&gt;)
-}
-</programlisting>
- In this example, the rule checks if a certain build property is specified.
-      If so, it sets the variable <varname>OPTIONS</varname> that is then used
-      inside the action. Note that the variables set "on a target" will be
-      visible only inside actions building that target, not globally. Were
-      they set globally, using a variable named <varname>OPTIONS</varname> in
- two unrelated actions would be impossible.
- </para>
-
- <para>
- More details can be found in the Jam reference, <xref
- linkend="jam.language.rules"/>.
- </para>
- </section>
-
- <section id="bbv2.overview.configuration">
- <title>Configuration</title>
-
- <para>
-      On startup, Boost.Build searches for and reads two configuration files:
- <filename>site-config.jam</filename> and <filename>user-config.jam</filename>.
- The first one is usually installed and maintained by a system administrator, and
- the second is for the user to modify. You can edit the one in the top-level
- directory of your Boost.Build installation or create a copy in your home
- directory and edit the copy. The following table explains where both files
- are searched.
- </para>
-
- <table id="bbv2.reference.init.config">
- <title>Search paths for configuration files</title>
-
- <tgroup cols="3">
- <thead>
-
- <row>
- <entry></entry>
-
- <entry>site-config.jam</entry>
-
- <entry>user-config.jam</entry>
- </row>
-
- </thead>
- <tbody>
-
- <row>
- <entry>Linux</entry>
-
- <entry>
- <simpara><code>/etc</code></simpara>
- <simpara><code>$HOME</code></simpara>
- <simpara><code>$BOOST_BUILD_PATH</code></simpara>
- </entry>
-
- <entry>
- <simpara><code>$HOME</code></simpara>
- <simpara><code>$BOOST_BUILD_PATH</code></simpara>
- </entry>
- </row>
-
- <row>
- <entry>Windows</entry>
-
- <entry>
- <simpara><code>%SystemRoot%</code></simpara>
- <simpara><code>%HOMEDRIVE%%HOMEPATH%</code></simpara>
- <simpara><code>%HOME%</code></simpara>
- <simpara><code>%BOOST_BUILD_PATH%</code></simpara>
- </entry>
-
- <entry>
- <simpara><code>%HOMEDRIVE%%HOMEPATH%</code></simpara>
- <simpara><code>%HOME%</code></simpara>
- <simpara><code>%BOOST_BUILD_PATH%</code></simpara>
- </entry>
- </row>
- </tbody>
- </tgroup>
- </table>
-
- <tip>
- <para>
- You can use the <command>--debug-configuration</command> option to
- find which configuration files are actually loaded.
- </para>
- </tip>
-
- <para>
- Usually, <filename>user-config.jam</filename> just defines the available compilers
- and other tools (see <xref linkend="bbv2.recipies.site-config"/> for more advanced
- usage). A tool is configured using the following syntax:
- </para>
-
-<programlisting>
-using <replaceable>tool-name</replaceable> : ... ;
-</programlisting>
-<para>
- The <code language="jam">using</code> rule is given the name of tool, and
- will make that tool available to Boost.Build. For example,
-<programlisting>
-using gcc ;
-</programlisting> will make the <ulink url="http://gcc.gnu.org">GCC</ulink> compiler available.
- </para>
-
- <para>
- All the supported tools are documented in <xref linkend="bbv2.reference.tools"/>,
- including the specific options they take. Some general notes that apply to most
- C++ compilers are below.
- </para>
-
- <para>
- For all the C++ compiler toolsets that Boost.Build supports
- out-of-the-box, the list of parameters to
- <code language="jam">using</code> is the same: <parameter
- class="function">toolset-name</parameter>, <parameter
- class="function">version</parameter>, <parameter
- class="function">invocation-command</parameter>, and <parameter
- class="function">options</parameter>.
- </para>
-
- <para>If you have a single compiler, and the compiler executable
- <itemizedlist>
- <listitem><para>has its &#x201C;usual name&#x201D; and is in the
- <envar>PATH</envar>, or</para></listitem>
- <listitem><para>was installed in a standard &#x201C;installation
- directory&#x201D;, or</para></listitem>
-        <listitem><para>can be found using a global system like the Windows
-        registry,</para></listitem>
-      </itemizedlist>
-      then it can be configured simply with:</para>
-<programlisting>
-using <replaceable>tool-name</replaceable> ;
-</programlisting>
- <!-- TODO: mention auto-configuration? -->
-
- <para>If the compiler is installed in a custom directory, you should provide the
- command that invokes the compiler, for example:</para>
-<programlisting>
-using gcc : : g++-3.2 ;
-using msvc : : "Z:/Programs/Microsoft Visual Studio/vc98/bin/cl" ;
-</programlisting>
- <para>
- Some Boost.Build toolsets will use that path to take additional actions
- required before invoking the compiler, such as calling vendor-supplied
- scripts to set up its required environment variables. When the compiler
- executables for C and C++ are different, the path to the C++ compiler
- executable must be specified. The command can
- be any command allowed by the operating system. For example:
-<programlisting>
-using msvc : : echo Compiling &#x26;&#x26; foo/bar/baz/cl ;
-</programlisting>
- will work.
- </para>
-
- <para>
- To configure several versions of a toolset, simply invoke the
- <code language="jam">using</code> rule multiple times:
-<programlisting>
-using gcc : 3.3 ;
-using gcc : 3.4 : g++-3.4 ;
-using gcc : 3.2 : g++-3.2 ;
-</programlisting>
- Note that in the first call to <code language="jam">using</code>, the
- compiler found in the <envar>PATH</envar> will be used, and there is no
- need to explicitly specify the command.
- </para>
-
-<!-- TODO: This is not actually relevant for gcc now, and we need to rethink this
- <para>As shown above, both the <parameter
- class="function">version</parameter> and <parameter
- class="function">invocation-command</parameter> parameters are
- optional, but there's an important restriction: if you configure
- the same toolset more than once, you must pass the <parameter
- class="function">version</parameter>
- parameter every time. For example, the following is not allowed:
-<programlisting>
-using gcc ;
-using gcc : 3.4 : g++-3.4 ;
-</programlisting>
- because the first <functionname>using</functionname> call does
- not specify a <parameter class="function">version</parameter>.
- </para> -->
-
- <para>
- Many of toolsets have an <parameter class="function">options</parameter>
- parameter to fine-tune the configuration. All of
- Boost.Build's standard compiler toolsets accept four options
- <varname>cflags</varname>, <varname>cxxflags</varname>,
- <varname>compileflags</varname> and <varname>linkflags</varname> as <parameter
- class="function">options</parameter> specifying flags that will be
- always passed to the corresponding tools. Values of the
- <varname>cflags</varname> feature are passed directly to the C
- compiler, values of the <varname>cxxflags</varname> feature are
- passed directly to the C++ compiler, and values of the
- <varname>compileflags</varname> feature are passed to both. For
- example, to configure a <command>gcc</command> toolset so that it
- always generates 64-bit code you could write:
-<programlisting>
- using gcc : 3.4 : : &lt;compileflags&gt;-m64 &lt;linkflags&gt;-m64 ;
-</programlisting>
- </para>
-
- <warning>
- <para>
- Although the syntax used to specify toolset options is very similar
- to syntax used to specify requirements in Jamfiles, the toolset options
- are not the same as features. Don't try to specify a feature value
- in toolset initialization.
- </para>
- </warning>
-
- </section>
-
- <section id="bbv2.overview.invocation">
- <title>Invocation</title>
-
- <para>To invoke Boost.Build, type <command>b2</command> on the command line. Three kinds
- of command-line tokens are accepted, in any order:</para>
- <variablelist>
- <varlistentry>
- <term>options</term>
-
- <listitem><para>Options start with either one or two dashes. The standard options
-        are listed below, and each project may add additional options.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term>properties</term>
-
- <listitem><para>Properties specify details of what you want to build (e.g. debug
- or release variant). Syntactically, all command line tokens with an equal sign in them
- are considered to specify properties. In the simplest form, a property looks like
- <command><replaceable>feature</replaceable>=<replaceable>value</replaceable></command>
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term>target</term>
-
- <listitem><para>All tokens that are neither options nor properties specify
- what targets to build. The available targets entirely depend on the project
- you are building.</para></listitem>
- </varlistentry>
- </variablelist>
-
- <section id="bbv2.overview.invocation.examples">
- <title>Examples</title>
-
- <para>To build all targets defined in the Jamfile in the current directory with the default properties, run:
-<screen>
-b2
-</screen>
- </para>
-
- <para>To build specific targets, specify them on the command line:
-<screen>
-b2 lib1 subproject//lib2
-</screen>
- </para>
-
- <para>To request a certain value for some property, add <literal>
- <replaceable>property</replaceable>=<replaceable>value</replaceable></literal> to the command line:
-<screen>
-b2 toolset=gcc variant=debug optimization=space
-</screen>
- </para>
- </section>
-
- <section id="bbv2.overview.invocation.options">
- <title>Options</title>
-
- <para>Boost.Build recognizes the following command line options.</para>
-
- <variablelist>
-
- <varlistentry id="bbv2.reference.init.options.help">
- <term><option>--help</option></term>
- <listitem>
- <para>Invokes the online help system. This prints general
- information on how to use the help system with additional
- --help* options.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--clean</option></term>
- <listitem>
- <para>Cleans all targets in the current directory and
- in any subprojects. Note that unlike the <literal>clean</literal>
- target in make, you can use <literal>--clean</literal>
- together with target names to clean specific targets.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--clean-all</option></term>
- <listitem>
- <para>Cleans all targets,
- no matter where they are defined. In particular, it will clean targets
- in parent Jamfiles, and targets defined under other project roots.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--build-dir</option></term>
- <listitem>
- <para>Changes the build directories for all project roots being built. When
- this option is specified, all Jamroot files must declare a project name.
-            The build directory for the project root will be computed by concatenating
- the value of the <option>--build-dir</option> option, the project name
- specified in Jamroot, and the build dir specified in Jamroot
- (or <literal>bin</literal>, if none is specified).
- </para>
-
- <para>The option is primarily useful when building from read-only
- media, when you can't modify Jamroot.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--version</option></term>
- <listitem>
- <para>Prints information on the Boost.Build and Boost.Jam
- versions.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-a</option></term>
- <listitem>
- <para>Causes all files to be rebuilt.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-n</option></term>
- <listitem>
-            <para>Do not execute the commands, only print them.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-q</option></term>
- <listitem>
- <para>Stop at the first error, as opposed to continuing to build targets
- that don't depend on the failed ones.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-j <replaceable>N</replaceable></option></term>
- <listitem>
- <para>Run up to <replaceable>N</replaceable> commands in parallel.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--debug-configuration</option></term>
- <listitem>
- <para>Produces debug information about the loading of Boost.Build
- and toolset files.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--debug-building</option></term>
- <listitem>
- <para>Prints what targets are being built and with what properties.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--debug-generators</option></term>
- <listitem>
- <para>Produces debug output from the generator search process.
- Useful for debugging custom generators.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>--ignore-config</option></term>
- <listitem>
- <para>Do not load <literal>site-config.jam</literal> or
- <literal>user-config.jam</literal>.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-d0</option></term>
- <listitem>
-            <para>Suppress all informational messages.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-d <replaceable>N</replaceable></option></term>
- <listitem>
-            <para>Enable cumulative debugging levels from 1 to N. Values are:
- <orderedlist>
- <listitem>Show the actions taken for building targets, as they are executed (the default).</listitem>
- <listitem>Show "quiet" actions and display all action text, as they are executed.</listitem>
- <listitem>Show dependency analysis, and target/source timestamps/paths.</listitem>
-              <listitem>Show arguments and timing of shell invocations.</listitem>
- <listitem>Show rule invocations and variable expansions.</listitem>
- <listitem>Show directory/header file/archive scans, and attempts at binding to targets.</listitem>
- <listitem>Show variable settings.</listitem>
-              <listitem>Show variable fetches, variable expansions, and evaluation of "if" expressions.</listitem>
- <listitem>Show variable manipulation, scanner tokens, and memory usage.</listitem>
- <listitem>Show profile information for rules, both timing and memory.</listitem>
- <listitem>Show parsing progress of Jamfiles.</listitem>
- <listitem>Show graph of target dependencies.</listitem>
- <listitem>Show change target status (fate).</listitem>
- </orderedlist>
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-d +<replaceable>N</replaceable></option></term>
- <listitem>
- <para>Enable debugging level <replaceable>N</replaceable>.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-o <replaceable>file</replaceable></option></term>
- <listitem>
- <para>Write the updating actions to the specified file instead of running them.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><option>-s <replaceable>var</replaceable>=<replaceable>value</replaceable></option></term>
- <listitem>
- <para>Set the variable <replaceable>var</replaceable> to
- <replaceable>value</replaceable> in the global scope of the jam
- language interpreter, overriding variables imported from the
- environment.
- </para>
- </listitem>
- </varlistentry>
- </variablelist>
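-
-      <para>For example (an illustrative combination of the options described above):
-<screen>
-b2 -j 4 -q --debug-configuration toolset=gcc variant=debug
-</screen>
-      </para>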
- </section>
-
- <section id="bbv2.overview.invocation.properties">
- <title>Properties</title>
-
-      <para>In the simplest case, the build is performed with a single set of properties
- that you specify on the command line with elements in the form
- <command><replaceable>feature</replaceable>=<replaceable>value</replaceable></command>.
- The complete list of features can be found in <xref linkend="bbv2.overview.builtins.features"/>.
- The most common features are summarized below.</para>
-
- <table>
- <tgroup cols="3">
- <thead>
-
- <row>
- <entry>Feature</entry>
-
- <entry>Allowed values</entry>
-
- <entry>Notes</entry>
- </row>
-
- </thead>
- <tbody>
-
- <row>
- <entry>variant</entry>
-
- <entry>debug,release</entry>
-
- <entry></entry>
- </row>
-
- <row>
- <entry>link</entry>
-
- <entry>shared,static</entry>
-
- <entry>Determines if Boost.Build creates shared or static libraries</entry>
- </row>
-
- <row>
- <entry>threading</entry>
-
- <entry>single,multi</entry>
-
-              <entry>Causes the produced binaries to be thread-safe. This requires proper support in the source code itself.</entry>
- </row>
-
- <row>
- <entry>address-model</entry>
-
- <entry>32,64</entry>
-
- <entry>Explicitly request either 32-bit or 64-bit code generation. This typically
- requires that your compiler is appropriately configured. Please refer to
- <xref linkend="bbv2.reference.tools.compilers"/> and your compiler documentation
- in case of problems.</entry>
- </row>
-
- <row>
- <entry>toolset</entry>
-
- <entry>(Depends on configuration)</entry>
-
- <entry>The C++ compiler to use. See <xref linkend="bbv2.reference.tools.compilers"/> for a detailed list.</entry>
- </row>
-
- <row>
- <entry>include</entry>
-
- <entry>(Arbitrary string)</entry>
-
- <entry>Additional include paths for C and C++ compilers.</entry>
- </row>
-
- <row>
- <entry>define</entry>
-
- <entry>(Arbitrary string)</entry>
-
- <entry>Additional macro definitions for C and C++ compilers. The string should be either
- <code>SYMBOL</code> or <code>SYMBOL=VALUE</code></entry>
- </row>
-
- <row>
- <entry>cxxflags</entry>
-
- <entry>(Arbitrary string)</entry>
-
- <entry>Custom options to pass to the C++ compiler.</entry>
- </row>
-
- <row>
- <entry>cflags</entry>
-
- <entry>(Arbitrary string)</entry>
-
- <entry>Custom options to pass to the C compiler.</entry>
- </row>
-
- <row>
- <entry>linkflags</entry>
-
- <entry>(Arbitrary string)</entry>
-
- <entry>Custom options to pass to the C++ linker.</entry>
- </row>
-
- <row>
- <entry>runtime-link</entry>
-
- <entry>shared,static</entry>
-
-              <entry>Determines whether the shared or static version of the C and C++ runtimes should be used.</entry>
- </row>
-
- </tbody>
- </tgroup>
- </table>
-
- <para>If you have more than one version of a given C++ toolset (e.g. configured in
- <filename>user-config.jam</filename>, or autodetected, as happens with msvc), you can
- request the specific version by passing
- <code><replaceable>toolset</replaceable>-<replaceable>version</replaceable></code> as
- the value of the <code>toolset</code> feature, for example <code>toolset=msvc-8.0</code>.
- </para>
-
-
- <para>
-        If a feature has a fixed set of values, it can be specified more than
-        once on the command line. <!-- define 'base' and link to it -->
-        In that case, everything will be built several times,
-        once for each specified value of the feature. For example, if you use
- </para>
-<screen>
-b2 link=static link=shared threading=single threading=multi
-</screen>
- <para>
-        then a total of 4 builds will be performed. For convenience,
- instead of specifying all requested values of a feature in separate command line elements,
- you can separate the values with commas, for example:
- </para>
-<screen>
-b2 link=static,shared threading=single,multi
-</screen>
- <para>
- The comma has this special meaning only if the feature has a fixed set of values, so
- </para>
-<screen>
-b2 include=static,shared
-</screen>
- <para>is not treated specially.</para>
-
- </section>
-
- <section id="bbv2.overview.invocation.targets">
- <title>Targets</title>
-
- <para>All command line elements that are neither options nor properties are the names of the
- targets to build. See <xref linkend="bbv2.reference.ids"/>. If no target is specified,
- the project in the current directory is built.</para>
- </section>
-
- </section>
-
- <section id="bbv2.overview.targets">
- <title>Declaring Targets</title>
-
- <para id="bbv2.overview.targets.main">
-      A <firstterm>main target</firstterm> is a user-defined named
- entity that can be built, for example an executable file.
- Declaring a main target is usually done using one of the main
- target rules described in <xref linkend=
- "bbv2.reference.rules"/>. The user can also declare
- custom main target rules as shown in <xref
- linkend="bbv2.extending.rules"/>.
- </para>
-
- <indexterm><primary>main target</primary><secondary>declaration
- syntax</secondary></indexterm>
- <para>Most main target rules in Boost.Build have the same common
- signature:</para>
-
- <!-- I think we maybe ought to be talking about a common
- _signature_ here, having already explained Boost.Jam function
- signatures at the beginning of this chapter. Then we could show
- ( main-target-name : sources * : requirements * : default-build * : usage-requirements * )
- instead. More precise.
-
- Also, I suggest replacing "default-build" by "default-properties" everywhere.
- -->
-
-<indexterm><primary>common signature</primary></indexterm>
-<anchor id="bbv2.main-target-rule-syntax"/>
-<programlisting>
-rule <replaceable>rule-name</replaceable> (
- main-target-name :
- sources + :
- requirements * :
- default-build * :
- usage-requirements * )
-</programlisting>
-
- <itemizedlist>
- <listitem>
- <simpara>
- <parameter>main-target-name</parameter> is the name used
-          to request the target on the command line and to use it from
- other main targets. A main target name may contain
- alphanumeric characters, dashes
- (&#x2018;<code>-</code>&#x2019;), and underscores
- (&#x2018;<code>_</code>&#x2019;).
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- <parameter>sources</parameter> is the list of source files and other main
- targets that must be combined.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- <parameter>requirements</parameter> is the list of properties that must always
- be present when this main target is built.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- <parameter>default-build</parameter> is the list of properties that will be used
- unless some other value of the same feature is already
- specified, e.g. on the command line or by propagation from a dependent target.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
- <parameter>usage-requirements</parameter> is the list of properties that will be
- propagated to all main targets that use this one, i.e. to all its
- dependents.
- </simpara>
- </listitem>
- </itemizedlist>
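-
-  <para>
-    For illustration, here is a hypothetical declaration that supplies all five
-    parameters of the common signature (the target name, source file and
-    property values are made up):
-  </para>
-<programlisting>
-lib logger
-    : logger.cpp          # sources
-    : &lt;link&gt;static        # requirements
-    : &lt;variant&gt;release    # default-build
-    : &lt;include&gt;.          # usage-requirements
-    ;
-</programlisting>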
-
- <para>
- Some main target rules have a different list of parameters as explicitly
- stated in their documentation.
- </para>
-
- <para>The actual requirements for a target are obtained by refining
- the requirements of the project where the target is declared with the
- explicitly specified requirements. The same is true for
- usage-requirements. More details can be found in
-      <xref linkend="bbv2.reference.variants.proprefine"/>.
- </para>
-
- <section>
- <title>Name</title>
-
- <!-- perphaps we should use 'name-target-name' to closer
- bind this description to the rule's signature. Here, and for
- other parameters. -->
-    <para>The name of a main target has two purposes. First, it's used to refer to this target from
-      other targets and from the command line. Second, it's used to compute the names of the generated files.
-      Typically, filenames are obtained from the main target name by appending system-dependent suffixes and
- prefixes.
- </para>
-
- <para>The name of a main target can contain alphanumeric characters,
-      dashes, underscores and dots. The entire
- name is significant when resolving references from other targets. For determining filenames, only the
- part before the first dot is taken. For example:</para>
-<programlisting>
-obj test.release : test.cpp : &lt;variant&gt;release ;
-obj test.debug : test.cpp : &lt;variant&gt;debug ;
-</programlisting>
- <para>will generate two files named <filename>test.obj</filename> (in two different directories), not
- two files named <filename>test.release.obj</filename> and <filename>test.debug.obj</filename>.
- </para>
-
- </section>
-
- <section>
- <title>Sources</title>
-
- <para>The list of sources specifies what should be processed to
- get the resulting targets. Most of the time, it's just a list of
- files. Sometimes, you'll want to automatically construct the
- list of source files rather than having to spell it out
- manually, in which case you can use the
- <link linkend="bbv2.reference.rules.glob">glob</link> rule.
- Here are two examples:</para>
-<programlisting>
-exe a : a.cpp ; # a.cpp is the only source file
-exe b : [ glob *.cpp ] ; # all .cpp files in this directory are sources
-</programlisting>
- <para>
- Unless you specify a file with an absolute path, the name is
- considered relative to the source directory&#x200A;&#x2014;&#x200A;which is typically
- the directory where the Jamfile is located, but can be changed as
- described in <xref linkend=
- "bbv2.overview.projects.attributes.projectrule"/>.
- </para>
-
- <para>
- <!-- use "project-id" here? -->
- The list of sources can also refer to other main targets. Targets in
- the same project can be referred to by name, while targets in other
- projects must be qualified with a directory or a symbolic project
- name. The directory/project name is separated from the target name by
- a double forward slash. There is no special syntax to distinguish the
- directory name from the project name&#x2014;the part before the double
- slash is first looked up as project name, and then as directory name.
- For example:
- </para>
-<programlisting>
-lib helper : helper.cpp ;
-exe a : a.cpp helper ;
-# Since all project ids start with a slash, ".." is a directory name.
-exe b : b.cpp ..//utils ;
-exe c : c.cpp /boost/program_options//program_options ;
-</programlisting>
- <para>
- The first exe uses the library defined in the same project. The second
- one uses some target (most likely a library) defined by a Jamfile one
- level higher. Finally, the third target uses a <ulink url=
- "http://boost.org">C++ Boost</ulink> library, referring to it using
- its absolute symbolic name. More information about target references
- can be found in <xref linkend="bbv2.tutorial.libs"/> and <xref
- linkend="bbv2.reference.ids"/>.
- </para>
- </section>
-
- <section id="bbv2.overview.targets.requirements">
- <title>Requirements</title>
- <indexterm><primary>requirements</primary></indexterm>
- <para>Requirements are the properties that should always be present when
- building a target. Typically, they are includes and defines:
-<programlisting>
-exe hello : hello.cpp : &lt;include&gt;/opt/boost &lt;define&gt;MY_DEBUG ;
-</programlisting>
- There are a number of other features, listed in
-    <xref linkend="bbv2.overview.builtins.features"/>. For example, if
- a library can only be built statically, or a file can't be compiled
- with optimization due to a compiler bug, one can use
-<programlisting>
-lib util : util.cpp : &lt;link&gt;static ;
-obj main : main.cpp : &lt;optimization&gt;off ;
-</programlisting>
- </para>
-
- <para id="bbv2.overview.targets.requirements.conditional">Sometimes, particular relationships need to be maintained
- among a target's build properties. This can be achieved with
- <firstterm>conditional
- requirements</firstterm>. For example, you might want to set
-    specific <code>#defines</code> when a library is built as shared,
-    or when a target is built in its <code>release</code> variant.
-<programlisting>
-lib network : network.cpp
-      : <emphasis role="bold">&lt;link&gt;shared:&lt;define&gt;NETWORK_LIB_SHARED</emphasis>
- &lt;variant&gt;release:&lt;define&gt;EXTRA_FAST
- ;
-</programlisting>
-
- In the example above, whenever <filename>network</filename> is
- built with <code>&lt;link&gt;shared</code>,
-    <code>&lt;define&gt;NETWORK_LIB_SHARED</code> will be in its
- properties, too.
- </para>
-
- <para>You can use several properties in the condition, for example:
-<programlisting>
-lib network : network.cpp
- : &lt;toolset&gt;gcc,&lt;optimization&gt;speed:&lt;define&gt;USE_INLINE_ASSEMBLER
- ;
-</programlisting>
- </para>
-
- <para id="bbv2.overview.targets.requirements.indirect">
- A more powerful variant of conditional requirements
- is <firstterm>indirect conditional requirements</firstterm>.
- You can provide a rule that will be called with the current build properties and can compute additional properties
- to be added. For example:
-<programlisting>
-lib network : network.cpp
- : &lt;conditional&gt;@my-rule
- ;
-rule my-rule ( properties * )
-{
- local result ;
- if &lt;toolset&gt;gcc &lt;optimization&gt;speed in $(properties)
- {
- result += &lt;define&gt;USE_INLINE_ASSEMBLER ;
- }
- return $(result) ;
-}
-</programlisting>
- This example is equivalent to the previous one, but for complex cases, indirect conditional
- requirements can be easier to write and understand.
- </para>
-
- <para>Requirements explicitly specified for a target are usually
- combined with the requirements specified for the containing project. You
- can cause a target to completely ignore a specific project requirement
-    by adding a minus sign before the property, for example:
-<programlisting>
-exe main : main.cpp : <emphasis role="bold">-&lt;define&gt;UNNECESSARY_DEFINE</emphasis> ;
-</programlisting>
- This syntax is the only way to ignore free properties, such as defines,
-    from a parent. It can also be useful for ordinary properties. Consider
- this example:
-<programlisting>
-project test : requirements &lt;threading&gt;multi ;
-exe test1 : test1.cpp ;
-exe test2 : test2.cpp : &lt;threading&gt;single ;
-exe test3 : test3.cpp : -&lt;threading&gt;multi ;
-</programlisting>
- Here, <code>test1</code> inherits the project requirements and will always
- be built in multi-threaded mode. The <code>test2</code> target
- <emphasis>overrides</emphasis> the project's requirements and will
- always be built in single-threaded mode. In contrast, the
- <code>test3</code> target <emphasis>removes</emphasis> a property
- from the project requirements and will be built either in single-threaded or
-    multi-threaded mode, depending on which <code>threading</code> value is requested by the
- user.</para>
-
- <para>Note that the removal of requirements is completely textual:
- you need to specify exactly the same property to remove it.</para>
-
- </section>
-
- <section>
- <title>Default Build</title>
-
- <para>The <varname>default-build</varname> parameter
- is a set of properties to be used if the build request does
- not otherwise specify a value for features in the set. For example:
-<programlisting>
-exe hello : hello.cpp : : &lt;threading&gt;multi ;
-</programlisting>
- would build a multi-threaded target unless the user
- explicitly requests a single-threaded version. The difference between
- the requirements and the default-build is that the requirements cannot be
- overridden in any way.
- </para>
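-
-  <para>
-    For instance, given the declaration above, a command line such as
-  </para>
-<screen>
-b2 threading=single
-</screen>
-  <para>
-    would produce a single-threaded build of <filename>hello</filename>,
-    because the explicit build request takes precedence over the
-    default-build value.
-  </para>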
- </section>
-
- <section>
- <title>Additional Information</title>
-
- <para>
- The ways a target is built can be so different that
- describing them using conditional requirements would be
- hard. For example, imagine that a library actually uses
- different source files depending on the toolset used to build
- it. We can express this situation using <firstterm>target
- alternatives</firstterm>:
-<programlisting>
-lib demangler : dummy_demangler.cpp ; # alternative 1
-lib demangler : demangler_gcc.cpp : &lt;toolset&gt;gcc ; # alternative 2
-lib demangler : demangler_msvc.cpp : &lt;toolset&gt;msvc ; # alternative 3
-</programlisting>
- In the example above, when built with <literal>gcc</literal>
- or <literal>msvc</literal>, <filename>demangler</filename>
- will use a source file specific to the toolset. Otherwise, it
- will use a generic source file,
- <filename>dummy_demangler.cpp</filename>.
- </para>
-
- <para>It is possible to declare a target inline, i.e. the "sources"
-      parameter may include calls to other main target rules. For example:</para>
-
-<programlisting>
-exe hello : hello.cpp
- [ obj helpers : helpers.cpp : &lt;optimization&gt;off ] ;</programlisting>
-
- <para>
-      will cause <filename>helpers.cpp</filename> to always be compiled without
- optimization. When referring to an inline main target, its declared
- name must be prefixed by its parent target's name and two dots. In
- the example above, to build only helpers, one should run
- <code>b2 hello..helpers</code>.
- </para>
-
- <para>When no target is requested on the command line, all targets in the
- current project will be built. If a target should be built only by
- explicit request, this can be expressed by the
- <link linkend="bbv2.reference.rules.explicit">explicit</link> rule:
- <programlisting>
-explicit install_programs ;</programlisting>
- </para>
-
- </section>
- </section>
-
- <section id="bbv2.overview.projects">
- <title>Projects</title>
-
- <para>As mentioned before, targets are grouped into projects,
- and each Jamfile is a separate project. Projects are useful
- because they allow us to group related targets together, define
- properties common to all those targets, and assign a symbolic
- name to the project that can be used in referring to its
- targets.
- </para>
-
- <para>Projects are named using the
- <code language="jam">project</code> rule, which has the
- following syntax:
-<programlisting>
-project <replaceable>id</replaceable> : <replaceable>attributes</replaceable> ;
-</programlisting>
- Here, <replaceable>attributes</replaceable> is a sequence of
- rule arguments, each of which begins with an attribute-name
- and is followed by any number of build properties. The list
-      of attribute names, along with their handling, is shown in
- the table below. For example, it is possible to write:
-<programlisting>
-project tennis
- : requirements &lt;threading&gt;multi
- : default-build release
- ;
-</programlisting>
- </para>
-
- <para>The possible attributes are listed below.</para>
-
- <para><emphasis>Project id</emphasis> is a short way to denote a project, as
- opposed to the Jamfile's pathname. It is a hierarchical path,
-      unrelated to the filesystem, such as "boost/thread". <link linkend=
- "bbv2.reference.ids">Target references</link> make use of project ids to
- specify a target.</para>
- <!--
- This is actually spelled "project-id," isn't it? You
- have to fix all of these and use a code font. Also below
- in the table.
- -->
-
- <para><emphasis>Source location</emphasis> specifies the directory where sources
- for the project are located.</para>
-
- <para><emphasis>Project requirements</emphasis> are requirements that apply to
-      all the targets in the project as well as all subprojects.</para>
-
-    <para><emphasis>Default build</emphasis> is the set of properties to be
-    used when the build request does not otherwise specify values for the corresponding features.</para>
- <!--
- This contradicts your earlier description of default
- build and I believe it is incorrect. Specifying a build
- request does not neccessarily render default build
- ineffective, because it may cover different features.
- This description is repeated too many times in the
- documentation; you almost *had* to get it wrong once.
- -->
-
- <para id="bbv2.overview.projects.attributes.projectrule">
- The default values for those attributes are
- given in the table below.
-
- <table>
- <title/>
- <tgroup cols="4">
- <thead>
- <row>
- <entry>Attribute</entry>
-
- <entry>Name</entry>
-
- <entry>Default value</entry>
-
- <entry>Handling by the <code language="jam">project</code>
- rule</entry>
-
- </row>
- </thead>
-
- <tbody>
-
- <row>
- <entry>Project id</entry>
-
- <entry>none</entry>
-
- <entry>none</entry>
-
- <entry>Assigned from the first parameter of the 'project' rule.
-                It is assumed to denote an absolute project id.</entry>
- </row>
-
- <row>
- <entry>Source location</entry>
-
- <entry><literal>source-location</literal></entry>
-
-              <entry>The location of the Jamfile for the project</entry>
-
-              <entry>Set to the passed value</entry>
- </row>
-
- <row>
- <entry>Requirements</entry>
-
- <entry><literal>requirements</literal></entry>
-
- <entry>The parent's requirements</entry>
-
- <entry>The parent's requirements are refined with the passed
-                requirements and the result is used as the project
- requirements.</entry>
- </row>
-
- <row>
- <entry>Default build</entry>
-
- <entry><literal>default-build</literal></entry>
-
- <entry>none</entry>
-
-              <entry>Set to the passed value</entry>
- </row>
-
- <row>
- <entry>Build directory</entry>
-
- <entry><literal>build-dir</literal></entry>
-
- <entry>Empty if the parent has no build directory set.
- Otherwise, the parent's build directory with the
- relative path from parent to the current project
- appended to it.
- </entry>
-
-              <entry>Set to the passed value, interpreted as relative to the
- project's location.</entry>
- </row>
- </tbody>
- </tgroup>
- </table>
- </para>
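-
-  <para>
-    For illustration, a project might set several of these attributes at once
-    (the id, directory names and requirements below are only examples):
-  </para>
-<programlisting>
-project example
-    : source-location src
-    : requirements &lt;include&gt;include &lt;threading&gt;multi
-    : default-build release
-    : build-dir bin
-    ;
-</programlisting>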
-
- <para>Besides defining projects and main targets, Jamfiles
- often invoke various utility rules. For the full list of rules
-      that can be directly used in a Jamfile, see
- <xref linkend="bbv2.reference.rules"/>.
- </para>
-
- <para>Each subproject inherits attributes, constants and rules
- from its parent project, which is defined by the nearest
- Jamfile in an ancestor directory above
- the subproject. The top-level project is declared in a file
- called <filename>Jamroot</filename> rather than
- <filename>Jamfile</filename>. When loading a project,
- Boost.Build looks for either <filename>Jamroot</filename> or
-      <filename>Jamfile</filename>. They are handled identically, except
- that if the file is called <filename>Jamroot</filename>, the
- search for a parent project is not performed.
- </para>
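-
-    <para>
-      For illustration, a minimal layout might look like this (the project,
-      directory and file names are made up; the two files are shown one after
-      the other):
-    </para>
-<programlisting>
-# Jamroot, at the project root
-project example : requirements &lt;warnings&gt;all ;
-
-# util/Jamfile, a subproject; it inherits &lt;warnings&gt;all from Jamroot
-exe tool : tool.cpp ;
-</programlisting>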
-
- <para>Even when building in a subproject directory, parent
- project files are always loaded before those of their
- subprojects, so that every definition made in a parent project
- is always available to its children. The loading order of any
- other projects is unspecified. Even if one project refers to
-      another via the <code>use-project</code> rule or a target reference,
- no specific order should be assumed.
- </para>
-
- <note>
- <para>Giving the root project the special name
- &#x201C;<filename>Jamroot</filename>&#x201D; ensures that
- Boost.Build won't misinterpret a directory above it as the
- project root just because the directory contains a Jamfile.
- <!-- The logic of the previous reasoning didn't hang together -->
- </para>
- </note>
-
- <!-- All this redundancy with the tutorial is bad. The tutorial
- should just be made into the introductory sections of this
- document, which should be called the "User Guide." It's
- perfectly appropriate to start a user guide with that kind
- of material. -->
- </section>
-
- <section id="bbv2.overview.build_process">
- <title>The Build Process</title>
-
- <para>When you've described your targets, you want Boost.Build to run the
- right tools and create the needed targets.
- <!-- That sentence is awkward and doesn't add much. -->
-      This section describes
- two things: how you specify what to build, and how the main targets are
- actually constructed.
- </para>
-
- <para>The most important thing to note is that in Boost.Build, unlike
- other build tools, the targets you declare do not correspond to specific
-      files. What you declare in a Jamfile is more like a &#x201C;metatarget.&#x201D;
-      <!-- Do we need a new word? We already have &#x201C;main target.&#x201D; If
-      you're going to introduce &#x201C;metatarget&#x201D; you should at least
-      tie it together with the main target concept. It's too
-      strange to have been saying &#x201C;main target&#x201D; all along and now
-      suddenly start saying &#x201C;what you declare in a jamfile&#x201D; -->
- Depending on the properties you specify on the command line,
- each metatarget will produce a set of real targets corresponding
- to the requested properties. It is quite possible that the same
- metatarget is built several times with different properties,
- producing different files.
- </para>
- <tip>
- <para>
- This means that for Boost.Build, you cannot directly obtain a build
- variant from a Jamfile. There could be several variants requested by the
- user, and each target can be built with different properties.
- </para>
- </tip>
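-
-    <para>
-      For instance, a build request that names two values of a feature (as
-      described in <xref linkend="bbv2.overview.invocation"/>), such as
-    </para>
-<screen>
-b2 variant=debug variant=release
-</screen>
-    <para>
-      causes each requested metatarget to be built twice, once per requested
-      variant, producing two sets of real targets.
-    </para>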
-
- <section id="bbv2.overview.build_request">
- <title>Build Request</title>
-
- <para>
- The command line specifies which targets to build and with which
- properties. For example:
-<programlisting>
-b2 app1 lib1//lib1 toolset=gcc variant=debug optimization=full
-</programlisting>
- would build two targets, "app1" and "lib1//lib1" with the specified
-      properties. You can refer to any target using a
-      <link linkend="bbv2.reference.ids">target id</link> and specify arbitrary
- properties. Some of the properties are very common, and for them the name
- of the property can be omitted. For example, the above can be written as:
-<programlisting>
-b2 app1 lib1//lib1 gcc debug optimization=full
-</programlisting>
- The complete syntax, which has some additional shortcuts, is
- described in <xref linkend="bbv2.overview.invocation"/>.
- </para>
- </section>
-
- <section><title>Building a main target</title>
-
- <para>When you request, directly or indirectly, a build of a main target
-      with specific requirements, the following steps are performed. Some brief
- explanation is provided, and more details are given in <xref
- linkend="bbv2.reference.buildprocess"/>.
- <orderedlist>
-
- <listitem><para>Applying default build. If the default-build
- property of a target specifies a value of a feature that is not
- present in the build request, that value is added.</para>
- <!--
- Added to what? Don't say “the build request!†The
- request is what was requested; if its meaning changes
- the reader will be confused.
- -->
- </listitem>
-
- <listitem><para>Selecting the main target alternative to use. For
-            each alternative, we look at how many properties are present both in
-            the alternative's requirements and in the build request. The
-            alternative with the largest number of matching properties is selected.
- </para></listitem>
-
- <listitem><para>Determining "common" properties.
- <!-- It would be nice to have a better name for this. But
- even more importantly, unless you say something about
- the reason for choosing whatever term you use, the
- reader is going to wonder what it means. -->
- The build request
- is <link linkend="bbv2.reference.variants.proprefine">refined</link>
-            with the target's requirements.
- <!-- It's good that you have the links here and below,
- but I'm concerned that it doesn't communicate well
- in print and there's not enough information for the
- print reader. Maybe we need separate XSL for PDF
- printing that generates a readable footnote. -->
- The conditional properties in
- requirements are handled as well. Finally, default values of
- features are added.
- </para></listitem>
-
-        <listitem><para>Building targets referred to by the sources list and
-            dependency properties. The list of sources and the properties
-            can refer to other targets using <link
- linkend="bbv2.reference.ids">target references</link>. For each
- reference, we take all <link
- linkend="bbv2.reference.features.attributes.propagated">propagated</link>
- properties, refine them by explicit properties specified in the
- target reference, and pass the resulting properties as build
- request to the other target.
- </para></listitem>
-
- <listitem><para>Adding the usage requirements produced when building
- dependencies to the "common" properties. When dependencies are
- built in the previous step, they return
- <!-- don't assume reader has a mental model for BB internals! -->
- both the set of created
- "real" targets, and usage requirements. The usage requirements
- are added to the common properties and the resulting property
- set will be used for building the current target.
- </para></listitem>
-
- <listitem><para>Building the target using generators. To convert the
-            sources to the desired type, Boost.Build uses "generators"&#x2014;
- objects that correspond to tools like compilers and linkers. Each
- generator declares what type of targets it can produce and what
- type of sources it requires. Using this information, Boost.Build
- determines which generators must be run to produce a specific
- target from specific sources. When generators are run, they return
- the "real" targets.
- </para></listitem>
-
- <listitem><para>Computing the usage requirements to be returned. The
- conditional properties in usage requirements are expanded
- <!-- what does "expanded" mean? -->
- and the result is returned.</para></listitem>
- </orderedlist>
- </para>
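-
-    <para>
-      As an illustration of the dependency-related steps above, consider this
-      sketch (the target and file names are made up):
-    </para>
-<programlisting>
-lib greet : greet.cpp : : : &lt;include&gt;. ;  # usage requirement
-exe app : app.cpp greet ;                  # app uses the library
-</programlisting>
-    <para>
-      When <filename>app</filename> is built, the propagated properties are
-      passed to <filename>greet</filename> as its build request, and the usage
-      requirement <code>&lt;include&gt;.</code> returned by
-      <filename>greet</filename> is added to the properties used to compile
-      <filename>app.cpp</filename>.
-    </para>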
- </section>
-
- <section><title>Building a Project</title>
-
- <para>Often, a user builds a complete project, not just one main
- target. In fact, invoking <command>b2</command> without
- arguments
- <!-- do you know the difference between parameters and
- arguments? I only learned this year -->
- builds the project defined in the current
- directory.</para>
-
- <para>When a project is built, the build request is passed without
- modification to all main targets in that project.
- <!-- What does it mean to pass a build request to a target?
- -->
-      It is possible to
- prevent implicit building of a target in a project with the
- <code>explicit</code> rule:
-<programlisting>
-explicit hello_test ;
-</programlisting>
- would cause the <code>hello_test</code> target to be built only if
- explicitly requested by the user or by some other target.
- </para>
-
- <para>The Jamfile for a project can include a number of
- <code>build-project</code> rule calls that specify additional projects to
- be built.
- </para>
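-
-    <para>
-      For example (the subdirectory name is illustrative):
-    </para>
-<programlisting>
-build-project tests ;
-</programlisting>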
-
- </section>
-
- </section>
-
- </chapter>
-
-<!--
- Local Variables:
- mode: nxml
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
diff --git a/tools/build/v2/doc/src/path.xml b/tools/build/v2/doc/src/path.xml
deleted file mode 100644
index 31f47a8dca..0000000000
--- a/tools/build/v2/doc/src/path.xml
+++ /dev/null
@@ -1,248 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE section PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<section id="bbv2.reference.modules.path">
-
- <title>path</title>
- <indexterm>
- <primary>path</primary>
- </indexterm>
-
- <para>
- Performs various path manipulations. Paths are always in a 'normalized'
- representation. In it, a path may be either:
-
- <itemizedlist>
- <listitem><para><code>'.'</code>, or</para></listitem>
- <listitem>
- <para>
- <code>['/'] [ ( '..' '/' )* (token '/')* token ]</code>
- </para>
- </listitem>
- </itemizedlist>
-
-    In plain English, a path can be rooted, <code>'..'</code>
-    elements are allowed only at the beginning, and it never
-    ends in a slash, except for the path consisting of a slash only.
- </para>
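-
-  <para>
-    For example, <code>.</code>, <code>/a/b</code> and <code>../../a</code> are
-    normalized paths, while <code>a/b/</code> (trailing slash) and
-    <code>a/../b</code> (<code>'..'</code> not at the beginning) are not.
-  </para>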
-
- <orderedlist>
-
- <listitem id="bbv2.reference.modules.path.make">
- <indexterm zone="bbv2.reference.modules.path.make">
- <primary>make</primary>
- <secondary>path</secondary>
- </indexterm>
- <code language="jam">rule make ( native )</code>
- <para>Converts the native path into normalized form.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.native">
- <indexterm zone="bbv2.reference.modules.path.native">
- <primary>native</primary>
- </indexterm>
- <code language="jam">rule native ( path )</code>
- <para>Builds the native representation of the path.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.is-rooted">
- <indexterm zone="bbv2.reference.modules.path.is-rooted">
- <primary>is-rooted</primary>
- </indexterm>
- <code language="jam">rule is-rooted ( path )</code>
- <para>Tests if a path is rooted.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.has-parent">
- <indexterm zone="bbv2.reference.modules.path.has-parent">
- <primary>has-parent</primary>
- </indexterm>
- <code language="jam">rule has-parent ( path )</code>
- <para>Tests if a path has a parent.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.basename">
- <indexterm zone="bbv2.reference.modules.path.basename">
- <primary>basename</primary>
- </indexterm>
- <code language="jam">rule basename ( path )</code>
- <para>Returns the path without any directory components.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.parent">
- <indexterm zone="bbv2.reference.modules.path.parent">
- <primary>parent</primary>
- </indexterm>
- <code language="jam">rule parent ( path )</code>
- <para>Returns the parent directory of the path. If no parent exists, an error is issued.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.reverse">
- <indexterm zone="bbv2.reference.modules.path.reverse">
- <primary>reverse</primary>
- </indexterm>
- <code language="jam">rule reverse ( path )</code>
- <para>
- Returns <code language="jam">path2</code> such that
- <code language="jam">[ join path path2 ] = "."</code>.
-      The path may not contain a <code language="jam">".."</code>
- element or be rooted.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.join">
- <indexterm zone="bbv2.reference.modules.path.join">
- <primary>join</primary>
- </indexterm>
- <code language="jam">rule join ( elements + )</code>
- <para>
- Concatenates the passed path elements. Generates an error if any
- element other than the first one is rooted. Skips any empty or
- undefined path elements.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.root">
- <indexterm zone="bbv2.reference.modules.path.root">
- <primary>root</primary>
- </indexterm>
- <code language="jam">rule root ( path root )</code>
- <para>
- If <code language="jam">path</code> is relative, it is rooted at
- <code language="jam">root</code>. Otherwise, it is unchanged.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.pwd">
- <indexterm zone="bbv2.reference.modules.path.pwd">
- <primary>pwd</primary>
- </indexterm>
- <code language="jam">rule pwd ( )</code>
- <para>Returns the current working directory.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.glob">
- <indexterm zone="bbv2.reference.modules.path.glob">
- <primary>glob</primary>
- </indexterm>
- <code language="jam">rule glob ( dirs * : patterns + : exclude-patterns * )</code>
- <para>
-      Returns the list of files matching any of the given patterns in the specified
-      directories. Both directories and patterns are supplied as portable paths. Each
- pattern should be a non-absolute path, and can't contain "." or ".." elements.
- Each slash separated element of a pattern can contain the following special
- characters:
- <itemizedlist>
- <listitem>
- <para>'?' matches any character</para>
- </listitem>
- <listitem>
- <para>'*' matches an arbitrary number of characters</para>
- </listitem>
- </itemizedlist>
- A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches the pattern p1/p2/p3 if and
- only if e1 matches p1, e2 matches p2 and so on.
-
- For example:
-<programlisting language="jam">
-[ glob . : *.cpp ]
-[ glob . : */build/Jamfile ]
-</programlisting>
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.glob-tree">
- <indexterm zone="bbv2.reference.modules.path.glob-tree">
- <primary>glob-tree</primary>
- </indexterm>
- <code language="jam">rule glob-tree ( roots * : patterns + : exclude-patterns * )</code>
- <para>
- Recursive version of <link linkend="bbv2.reference.modules.path.glob">glob</link>.
- Builds the glob of files while also searching in
- the subdirectories of the given roots. An optional set of exclusion patterns
- will filter out the matching entries from the result. The exclusions also
- apply to the subdirectory scanning, such that directories that match the
- exclusion patterns will not be searched.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.exists">
- <indexterm zone="bbv2.reference.modules.path.exists">
- <primary>exists</primary>
- </indexterm>
- <code language="jam">rule exists ( file )</code>
- <para>Returns true if the specified file exists.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.all-parents">
- <indexterm zone="bbv2.reference.modules.path.all-parents">
- <primary>all-parents</primary>
- </indexterm>
- <code language="jam">rule all-parents ( path : upper_limit ? : cwd ? )</code>
- <para>
-      Determines the absolute name of the path and returns the list of all the parents,
- starting with the immediate one. Parents are returned as relative names. If
- <code language="jam">upper_limit</code> is specified, directories above it
- will be pruned.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.glob-in-parents">
- <indexterm zone="bbv2.reference.modules.path.glob-in-parents">
- <primary>glob-in-parents</primary>
- </indexterm>
- <code language="jam">rule glob-in-parents ( dir : patterns + : upper-limit ? )</code>
- <para>
-      Searches for <code language="jam">patterns</code> in parent directories
-      of <code language="jam">dir</code>, up to and including
-      <code language="jam">upper_limit</code>, if it is specified, or
-      up to the filesystem root otherwise.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.relative">
- <indexterm zone="bbv2.reference.modules.path.relative">
- <primary>relative</primary>
- </indexterm>
- <code language="jam">rule relative ( child parent : no-error ? )</code>
- <para>
- Assuming <code language="jam">child</code> is a subdirectory of
-      <code language="jam">parent</code>, returns the relative path from
- <code language="jam">parent</code> to <code language="jam">child</code>.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.relative-to">
- <indexterm zone="bbv2.reference.modules.path.relative-to">
- <primary>relative-to</primary>
- </indexterm>
- <code language="jam">rule relative-to ( path1 path2 )</code>
-    <para>Returns the minimal path to path2 that is relative to path1.</para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.programs-path">
- <indexterm zone="bbv2.reference.modules.path.programs-path">
- <primary>programs-path</primary>
- </indexterm>
- <code language="jam">rule programs-path ( )</code>
- <para>
- Returns the list of paths which are used by the operating system for
- looking up programs.
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.path.mkdirs">
- <indexterm zone="bbv2.reference.modules.path.mkdirs">
- <primary>mkdirs</primary>
- </indexterm>
- <code language="jam">rule makedirs ( path )</code>
- <para>
- Creates a directory and all parent directories that do not
- already exist.
- </para>
- </listitem>
-
- </orderedlist>
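-
-  <para>
-    For illustration, a Jamfile could use some of the rules above like this
-    (a sketch; the paths are made up and the results are shown in comments):
-  </para>
-<programlisting language="jam">
-import path ;
-
-local p = [ path.join src lib ] ;   # "src/lib"
-ECHO [ path.parent $(p) ] ;         # "src"
-ECHO [ path.basename $(p) ] ;       # "lib"
-ECHO [ path.native $(p) ] ;         # e.g. "src\lib" on Windows
-</programlisting>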
-
-</section>
diff --git a/tools/build/v2/doc/src/recipes.xml b/tools/build/v2/doc/src/recipes.xml
deleted file mode 100644
index 7230aabdf7..0000000000
--- a/tools/build/v2/doc/src/recipes.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE appendix PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
- <!-- The file is empty. It's not clear if it will be needed in
- future or FAQ completely supercedes it. -->
-
- <appendix id="bbv2.recipies">
- <title>Boost Build System V2 recipes</title>
-
- </appendix>
diff --git a/tools/build/v2/doc/src/reference.xml b/tools/build/v2/doc/src/reference.xml
deleted file mode 100644
index deee17c980..0000000000
--- a/tools/build/v2/doc/src/reference.xml
+++ /dev/null
@@ -1,2545 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd"
-[
-<!ENTITY toolset_ops "<optional><replaceable>version</replaceable></optional> : <optional><replaceable>c++-compile-command</replaceable></optional> : <optional><replaceable>compiler options</replaceable></optional>">
-<!ENTITY option_list_intro "<para>The following options can be provided, using <literal>&lt;<replaceable>option-name</replaceable>&gt;<replaceable>option-value</replaceable></literal> syntax:</para>">
-<!ENTITY using_repeation "<para>This statement may be repeated several times, if you want to configure several versions of the compiler.</para>">
-]>
-
-<chapter id="bbv2.reference"
- xmlns:xi="http://www.w3.org/2001/XInclude">
- <title>Reference</title>
-
- <section id="bbv2.reference.general">
- <title>General information</title>
-
- <section id="bbv2.reference.init">
- <title>Initialization</title>
-
- <para>bjam's first job upon startup is to load the Jam code that
- implements the build system. To do this, it searches for a file
- called <filename>boost-build.jam</filename>, first in the invocation directory, then
- in its parent and so forth up to the filesystem root, and finally
- in the directories specified by the environment variable
- BOOST_BUILD_PATH. When found, the file is interpreted, and should
- specify the build system location by calling the boost-build
- rule:</para>
-
-<programlisting>
-rule boost-build ( location ? )
-</programlisting>
-
- <para>
- If location is a relative path, it is treated as relative to
- the directory of <filename>boost-build.jam</filename>. The directory specified by
- that location and the directories in BOOST_BUILD_PATH are then searched for
- a file called <filename>bootstrap.jam</filename>, which is expected to
- bootstrap the build system. This arrangement allows the build
- system to work without any command-line or environment variable
- settings. For example, if the build system files were located in a
- directory "build-system/" at your project root, you might place a
- <filename>boost-build.jam</filename> at the project root containing:
-
-<programlisting>
-boost-build build-system ;
-</programlisting>
-
- In this case, running <command>b2</command> anywhere in the project tree will
- automatically find the build system.</para>
-
- <para>The default <filename>bootstrap.jam</filename>, after loading some standard
-    definitions, loads two configuration files: <filename>site-config.jam</filename> and <filename>user-config.jam</filename>.</para>
-
- </section>
-
- </section>
-
- <section id="bbv2.reference.rules">
- <title>Builtin rules</title>
-
- <para>This section contains the list of all rules that
-    can be used in a Jamfile&#x2014;both rules that define new
- targets and auxiliary rules.</para>
-
- <variablelist>
- <varlistentry>
- <term><literal>exe</literal></term>
-
- <listitem><para>Creates an executable file. See
- <xref linkend="bbv2.tasks.programs"/>.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>lib</literal></term>
-
-      <listitem><para>Creates a library file. See
- <xref linkend="bbv2.tasks.libraries"/>.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>install</literal></term>
-
- <listitem><para>Installs built targets and other files. See
- <xref linkend="bbv2.tasks.installing"/>.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>alias</literal></term>
-
- <listitem><para>Creates an alias for other targets. See
- <xref linkend="bbv2.tasks.alias"/>.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>unit-test</literal></term>
-
- <listitem><para>Creates an executable that will be automatically run. See
- <xref linkend="bbv2.builtins.testing"/>.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>compile</literal></term>
- <term><literal>compile-fail</literal></term>
- <term><literal>link</literal></term>
- <term><literal>link-fail</literal></term>
- <term><literal>run</literal></term>
- <term><literal>run-fail</literal></term>
-
- <listitem><para>Specialized rules for testing. See
- <xref linkend="bbv2.builtins.testing"/>.</para></listitem>
- </varlistentry>
-
-
- <varlistentry>
- <term><literal>obj</literal></term>
-
- <listitem><para>Creates an object file. Useful when a single source
- file must be compiled with special properties.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>preprocessed</literal></term>
- <indexterm><primary>preprocessed</primary></indexterm>
-
-      <listitem><para>Creates a preprocessed source file. The arguments follow the
- <link linkend="bbv2.main-target-rule-syntax">common syntax</link>.</para></listitem>
- </varlistentry>
-
- <varlistentry id="bbv2.reference.rules.glob">
- <term><literal>glob</literal></term>
-
-      <listitem><para>The <code>glob</code> rule takes a list of shell patterns
-        and returns the list of files in the project's source directory that
-        match any of the patterns. For example:
- <programlisting>
-lib tools : [ glob *.cpp ] ;
- </programlisting>
-        It is also possible to pass a second argument&#x2014;the list of
-        exclude patterns. The result will then include the list of
-        files matching any of the include patterns, and not matching any
- of the exclude patterns. For example:
- <programlisting>
-lib tools : [ glob *.cpp : file_to_exclude.cpp bad*.cpp ] ;
- </programlisting>
- </para></listitem>
- </varlistentry>
-
- <varlistentry id="bbv2.reference.glob-tree">
- <indexterm><primary>glob-tree</primary></indexterm>
- <term><literal>glob-tree</literal></term>
-
-      <listitem><para>The <code>glob-tree</code> rule is similar to
-        <code>glob</code>, except that it operates recursively from
- the directory of the containing Jamfile. For example:
- <programlisting>
-ECHO [ glob-tree *.cpp : .svn ] ;
- </programlisting>
- will print the names of all C++ files in your project. The
- <literal>.svn</literal> exclude pattern prevents the
- <code>glob-tree</code> rule from entering administrative
- directories of the Subversion version control system.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>project</literal></term>
-
- <listitem><para>Declares project id and attributes, including
- project requirements. See <xref linkend="bbv2.overview.projects"/>.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>use-project</literal></term>
-
- <listitem><para>Assigns a symbolic project ID to a project at
- a given path. This rule must be better documented!
- </para></listitem>
- </varlistentry>
-
- <varlistentry id="bbv2.reference.rules.explicit">
- <term><literal>explicit</literal></term>
-
- <listitem><para>The <literal>explicit</literal> rule takes a single
- parameter&#x2014;a list of target names. The named targets will
- be marked explicit, and will be built only if they are explicitly
- requested on the command line, or if their dependents are built.
-        Compare this to ordinary targets, which are built implicitly when
- their containing project is built.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>always</literal></term>
- <indexterm><primary>always building a metatarget</primary></indexterm>
-
-      <listitem><para>The <literal>always</literal> rule takes a single
-        parameter&#x2014;a list of metatarget names. The top-level targets produced
-        by the named metatargets will always be considered out of date. Consider this example:
- </para>
-<programlisting>
-exe hello : hello.cpp ;
-exe bye : bye.cpp ;
-always hello ;
-</programlisting>
- <para>If a build of <filename>hello</filename> is requested, then the binary will
- always be relinked. The object files will not be recompiled, though. Note that if
- a build of <filename>hello</filename> is not requested, for example you specify just
- <filename>bye</filename> on the command line, <filename>hello</filename> will not
- be relinked.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>constant</literal></term>
-
-      <listitem><para>Sets a project-wide constant. Takes two
-        parameters: a variable name and a value, and makes the specified
- variable name accessible in this Jamfile and any child Jamfiles.
- For example:
- <programlisting>
-constant VERSION : 1.34.0 ;
- </programlisting>
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>path-constant</literal></term>
-
- <listitem><para>Same as <literal>constant</literal> except that
-        the value is treated as a path relative to the Jamfile location. For example,
- if <command>b2</command> is invoked in the current directory,
- and Jamfile in <filename>helper</filename> subdirectory has:
- <programlisting>
-path-constant DATA : data/a.txt ;
- </programlisting>
- then the variable <varname>DATA</varname> will be set to
- <literal>helper/data/a.txt</literal>, and if <command>b2</command>
- is invoked from the <filename>helper</filename> directory, then
- the variable <varname>DATA</varname> will be set to
- <literal>data/a.txt</literal>.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>build-project</literal></term>
-
-      <listitem><para>Causes some other project to be built. This rule
- takes a single parameter&#x2014;a directory name relative to
- the containing Jamfile. When the containing Jamfile is built,
- the project located at that directory will be built as well.
- At the moment, the parameter to this rule should be a directory
- name. Project ID or general target references are not allowed.
- </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>test-suite</literal></term>
-
- <listitem><para>This rule is deprecated and equivalent to
- <code>alias</code>.</para></listitem>
- </varlistentry>
-
- </variablelist>
-
- </section>
-
- <section id="bbv2.overview.builtins.features">
- <title>Builtin features</title>
-
- <para>This section documents the features that are built-in into
- Boost.Build. For features with a fixed set of values, that set is
- provided, with the default value listed first.</para>
-
- <indexterm><primary>features</primary><secondary>builtin</secondary></indexterm>
-
- <variablelist>
- <varlistentry><term><literal>variant</literal></term>
- <indexterm><primary>variant</primary></indexterm>
-
- <listitem>
- <para>
- A feature combining several low-level features, making it easy to
- request common build configurations.
- </para>
-
- <para>
- <emphasis role="bold">Allowed values:</emphasis>
- <literal>debug</literal>, <literal>release</literal>,
- <literal>profile</literal>.
- </para>
-
- <para>
- The value <literal>debug</literal> expands to
- </para>
-
-<programlisting>
-&lt;optimization&gt;off &lt;debug-symbols&gt;on &lt;inlining&gt;off &lt;runtime-debugging&gt;on
-</programlisting>
-
- <para>
- The value <literal>release</literal> expands to
- </para>
-
-<programlisting>
-&lt;optimization&gt;speed &lt;debug-symbols&gt;off &lt;inlining&gt;full &lt;runtime-debugging&gt;off
-</programlisting>
-
- <para>
- The value <literal>profile</literal> expands to the same as
- <literal>release</literal>, plus:
- </para>
-
-<programlisting>
-&lt;profiling&gt;on &lt;debug-symbols&gt;on
-</programlisting>
-
- <para>
- Users can define their own build variants using the
- <code>variant</code> rule from the <code>common</code> module.
- </para>
-
- <para>
- <emphasis role="bold">Note:</emphasis> Runtime debugging is on in
- debug builds to suit the expectations of people used to various
- IDEs.
- <!-- Define "runtime debugging". Why will those people expect it to
- be on in debug builds? -->
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry id="bbv2.overview.builtins.features.link">
- <term><literal>link</literal></term>
- <indexterm><primary>link</primary></indexterm>
-
- <listitem>
-
- <para><emphasis role="bold">Allowed values:</emphasis> <literal>shared</literal>,
- <literal>static</literal></para>
-
- <simpara>
-          A feature controlling how libraries are built.
- </simpara>
-
- </listitem>
- </varlistentry>
-
- <varlistentry id="bbv2.overview.builtins.features.runtime-link">
- <indexterm><primary>runtime linking</primary></indexterm>
- <term><literal>runtime-link</literal></term>
-
- <listitem>
- <para><emphasis role="bold">Allowed values:</emphasis> <literal>shared</literal>,
- <literal>static</literal></para>
-
- <simpara>
-          Controls whether a static or shared C/C++ runtime should be used. There
-          are some restrictions on how this feature can be used; for example,
-          on some compilers an application using a static runtime should
-          not use shared libraries at all, and on some compilers,
-          mixing static and shared runtimes requires extreme care. Check
- your compiler documentation for more details.
- </simpara>
-
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>threading</literal></term>
- <indexterm><primary>threading</primary></indexterm>
-
- <listitem>
-
- <para><emphasis role="bold">Allowed values:</emphasis> <literal>single</literal>,
- <literal>multi</literal></para>
-
- <simpara>
-          Controls whether the project should be built in multi-threaded mode. This feature does not
-          necessarily change code generation in the compiler, but it causes the compiler to link
-          to additional or different runtime libraries, and to define additional preprocessor
- symbols (for example, <code>_MT</code> on Windows and <code>_REENTRANT</code> on Linux).
- How those symbols affect the compiled code depends on the code itself.
- </simpara>
-
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>source</literal></term>
- <indexterm><primary>source</primary></indexterm>
-
- <listitem>
- <simpara>
- The <code>&lt;source&gt;X</code> feature has the same effect on
- building a target as putting X in the list of sources. It is useful
- when you want to add the same source to all targets in the project
- (you can put &lt;source&gt; in requirements) or to conditionally
- include a source (using conditional requirements, see <xref linkend=
- "bbv2.tutorial.conditions"/>). See also the <code>&lt;library&gt;
- </code> feature.
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>library</literal></term>
- <indexterm><primary>library</primary></indexterm>
-
- <listitem>
- <simpara>
- This feature is almost equivalent to the <code>&lt;source&gt;</code>
- feature, except that it takes effect only for linking. When you want
-          to link all targets in a Jamfile to a certain library, the
- <code>&lt;library&gt;</code> feature is preferred over
- <code>&lt;source&gt;X</code>&mdash;the latter will add the library to
- all targets, even those that have nothing to do with libraries.
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><anchor id="bbv2.builtin.features.dependency"/>
- <literal>dependency</literal></term>
- <indexterm><primary>dependency</primary></indexterm>
-
- <listitem>
- <simpara>
- Introduces a dependency on the target named by the value of this
- feature (so it will be brought up-to-date whenever the target being
- declared is). The dependency is not used in any other way.
-
- <!--
- ====================================================================
- An example and a motivation is needed here. Below is some commented
- out content that used to be here but did not make any sense and
- seems to have been left unfinished in some previous revision. Should
- be fixed and this whole feature should be retested and fixed as
- needed.
- ====================================================================
- For example, in application with plugins, the plugins are not used
- when linking the application, application might have a dependency on
- its plugins, even though
-
- and
- adds its usage requirements to the build properties
- of the target being declared.
-
- The primary use case is when you want
- the usage requirements (such as <code>#include</code> paths) of some
- library to be applied, but do not want to link to it.
-
- It is hard to picture why anyone would want to do that. Please flesh
- out this motivation.
- ====================================================================
- -->
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><anchor id="bbv2.builtin.features.implicit-dependency"/>
- <literal>implicit-dependency</literal></term>
- <indexterm><primary>implicit-dependency</primary></indexterm>
-
- <listitem>
- <simpara>
- Indicates that the target named by the value of this feature
- may produce files that are included by the sources of the
- target being declared. See <xref linkend="bbv2.reference.generated_headers"/>
- for more information.
- </simpara>
- </listitem>
- </varlistentry>
-
-
- <varlistentry><term><anchor id="bbv2.builtin.features.use"/>
- <literal>use</literal></term>
- <indexterm><primary>use</primary></indexterm>
-
- <listitem>
- <simpara>
- Introduces a dependency on the target named by the value of this
- feature (so it will be brought up-to-date whenever the target being
- declared is), and adds its usage requirements to the build
- properties
- <!-- Do you really mean "to the requirements?" -->
- of the target being declared. The dependency is not used in any
- other way. The primary use case is when you want the usage
- requirements (such as <code>#include</code> paths) of some library
- to be applied, but do not want to link to it.
- <!-- It is hard to picture why anyone would want to do that. Please
- flesh out this motivation. -->
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><anchor id="bbv2.reference.features.dll-path"/>
- <literal>dll-path</literal></term>
- <indexterm><primary>dll-path</primary></indexterm>
-
- <listitem>
- <simpara>
-          Specifies an additional directory where the system should
- look for shared libraries when the executable or shared
- library is run. This feature only affects Unix
- compilers. Plase see <xref linkend="bbv2.faq.dll-path"/>
- in <xref linkend="bbv2.faq"/> for details.
- </simpara>
- </listitem></varlistentry>
-
- <varlistentry>
- <term><literal>hardcode-dll-paths</literal></term>
- <indexterm><primary>hardcode-dll-paths</primary></indexterm>
-
- <listitem>
- <simpara>
- Controls automatic generation of dll-path properties.
- </simpara>
-
- <para><emphasis role="bold">Allowed values:</emphasis>
- <literal>true</literal>, <literal>false</literal>. This property is
- specific to Unix systems. If an executable is built with
- <code>&lt;hardcode-dll-paths&gt;true</code>, the generated binary
- will contain the list of all the paths to the used shared libraries.
-          As a result, the executable can be run without changing system
-          paths to shared libraries or installing the libraries to system
-          paths. This <!-- you need an antecedent. This _what_? --> is very
-          convenient during development. Please see the <link linkend=
- "bbv2.faq.dll-path">FAQ entry</link> for details. Note that on Mac
- OSX, the paths are unconditionally hardcoded by the linker, and it
- is not possible to disable that behaviour.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>cflags</literal></term>
- <term><literal>cxxflags</literal></term>
- <term><literal>linkflags</literal></term>
-
- <listitem>
- <simpara>
- The value of those features is passed without modification to the
- corresponding tools. For <code>cflags</code> that is both the C and
- C++ compilers, for <code>cxxflags</code> that is the C++ compiler
- and for <code>linkflags</code> that is the linker. The features are
- handy when you are trying to do something special that cannot be
- achieved by a higher-level feature in Boost.Build.
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>include</literal></term>
- <indexterm><primary>include</primary></indexterm>
-
- <listitem>
- <simpara>
- Specifies an additional include path that is to be passed to C and
- C++ compilers.
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>define</literal></term>
- <indexterm><primary>define</primary></indexterm>
-
- <listitem>
- <simpara>
-          Specifies a preprocessor symbol that should be defined on the command
-          line. You may either specify just the symbol, which will be defined
-          without any value, or both the symbol and the value, separated by
-          an equals sign.
- </simpara>
- </listitem>
- </varlistentry>
-
-
- <varlistentry><term><literal>warnings</literal></term>
- <listitem>
- <simpara>
- The <code>&lt;warnings&gt;</code> feature controls the warning level
- of compilers. It has the following values:
- <itemizedlist>
- <listitem><para><code>off</code> - disables all warnings.</para></listitem>
- <listitem><para><code>on</code> - enables default warning level for the tool.</para></listitem>
- <listitem><para><code>all</code> - enables all warnings.</para></listitem>
- </itemizedlist>
-          The default value is <code>all</code>.
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>warnings-as-errors</literal></term>
- <listitem>
- <simpara>
-          The <code>&lt;warnings-as-errors&gt;</code> feature makes it possible to
- treat warnings as errors and abort compilation on a warning. The
- value <code>on</code> enables this behaviour. The default value is
- <code>off</code>.
- </simpara>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>build</literal></term>
-
- <listitem>
- <para><emphasis role="bold">Allowed values:</emphasis> <literal>no</literal></para>
-
- <para>
- The <code>build</code> feature is used to conditionally disable
- build of a target. If <code>&lt;build&gt;no</code> is in properties
- when building a target, build of that target is skipped. Combined
- with conditional requirements this allows you to skip building some
- target in configurations where the build is known to fail.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>tag</literal></term>
-
- <listitem><para>The <literal>tag</literal> feature is used to customize
- the name of the generated files. The value should have the form:
-<programlisting>@<replaceable>rulename</replaceable></programlisting> where
- <replaceable>rulename</replaceable> should be a name of a rule with the
- following signature:
-<programlisting>rule tag ( name : type ? : property-set )</programlisting>
-   The rule will be called for each target with the default name computed
-   by Boost.Build, the type of the target, and the property set. The rule
-   can either return a string to be used as the name of the target, or
-   an empty string, in which case the default name will be used.
- </para>
-
-   <para>The most typical use of the <literal>tag</literal> feature is to
-   encode build properties, or the library version, in library target
-   names. You should take care to return a non-empty string from the tag
-   rule only for types you care about &#x2014; otherwise, you might end up
-   modifying the names of object files, generated header files and other
-   targets for which changing names does not make sense.</para>
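-   <para>A minimal sketch of such a rule (the rule name
-   <code>my-tag</code> and the naming scheme are illustrative only):</para>
- <programlisting>
-rule my-tag ( name : type ? : property-set )
-{
-    if $(type) in STATIC_LIB SHARED_LIB
-    {
-        # Append the variant to library names only; returning nothing
-        # keeps the default name for all other target types.
-        local variant = [ $(property-set).get &lt;variant&gt; ] ;
-        return $(name)-$(variant) ;
-    }
-}
-</programlisting>
-   <para>It would then be referenced in a target's requirements as
-   <code>&lt;tag&gt;@my-tag</code>.</para>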
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>debug-symbols</literal></term>
-
- <listitem>
- <para><emphasis role="bold">Allowed values:</emphasis> <literal>on</literal>, <literal>off</literal>.</para>
-
-   <para>The <literal>debug-symbols</literal> feature specifies whether
-   produced object files, executables and libraries should include
-   debug information.
-   Typically, the value of this feature is implicitly set by the
-   <literal>variant</literal> feature, but it can be explicitly
-   specified by the user. The most common usage is to build the
-   release variant with debugging information.</para>
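-   <para>For example, a sketch of requesting this on the command line:</para>
- <programlisting>
-b2 release debug-symbols=on
-</programlisting>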
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>target-os</literal></term>
- <listitem>
-
- <anchor id="bbv2.reference.features.target-os"/>
-
- <para>
-      The operating system for which the code is to be generated. The
-      compiler used should be a compiler for that operating
-      system. This option causes Boost.Build to use naming conventions
-      suitable for that operating system, and to adjust the build process
-      accordingly. For example, with gcc, it controls whether import
-      libraries are produced for shared libraries or not.
- </para>
-
- <para>The complete list of possible values for this feature is:
- aix, bsd, cygwin, darwin, freebsd, hpux, iphone, linux, netbsd,
- openbsd, osf, qnx, qnxnto, sgi, solaris, unix, unixware, windows.
- </para>
-
-    <para>See <xref linkend="bbv2.tasks.crosscompile"/> for details on
-    cross-compilation.</para>
-
- </listitem>
- </varlistentry>
-
-
- <varlistentry><term><literal>architecture</literal></term>
- <listitem>
-
-    <para>The <literal>architecture</literal> feature specifies
-    the general processor family to generate code for.</para>
-
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>instruction-set</literal></term>
- <indexterm><primary>instruction-set</primary></indexterm>
- <listitem>
- <para>
- <emphasis role="bold">Allowed values:</emphasis> depend on the used
- toolset.
- </para>
-
-    <para>The <literal>instruction-set</literal> feature specifies the
-    specific instruction set for which the code should be generated. The
-    code in general might not run on processors with older or different
-    instruction sets.</para>
-
- <para>While Boost.Build allows a large set of possible values
-    for this feature, whether a given value works depends on which
- compiler you use. Please see
- <xref linkend="bbv2.reference.tools.compilers"/> for details.
- </para>
-
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>address-model</literal></term>
- <indexterm><primary>64-bit compilation</primary></indexterm>
- <listitem>
- <para><emphasis role="bold">Allowed values:</emphasis> <literal>32</literal>, <literal>64</literal>.</para>
-
-    <para>The <literal>address-model</literal> feature specifies whether
-    32-bit or 64-bit code should be generated by the compiler. Whether
-    this feature works depends on the compiler used, its version, how the
-    compiler is configured, and the values of the
-    <literal>architecture</literal> and <literal>instruction-set</literal>
-    features. Please see <xref linkend="bbv2.reference.tools.compilers"/>
-    for details.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>c++-template-depth</literal></term>
- <listitem>
- <para>
- <emphasis role="bold">Allowed values:</emphasis> Any positive
- integer.
- </para>
-
- <para>
- This feature allows configuring a C++ compiler with the maximal
- template instantiation depth parameter. Specific toolsets may or may
- not provide support for this feature depending on whether their
- compilers provide a corresponding command-line option.
- </para>
-
- <para>
- <emphasis role="bold">Note:</emphasis> Due to some internal details
- in the current Boost Build implementation it is not possible to have
- features whose valid values are all positive integer. As a
- workaround a large set of allowed values has been defined for this
- feature and, if a different one is needed, user can easily add it by
- calling the feature.extend rule.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry><term><literal>embed-manifest</literal></term>
- <listitem>
-
- <indexterm><primary>manifest file</primary><secondary>embedding</secondary></indexterm>
- <indexterm><primary>embed-manifest</primary></indexterm>
-
- <para>
- <emphasis role="bold">Allowed values:</emphasis> on, off.
- </para>
-
- <para>This feature is specific to the msvc toolset (see
- <xref linkend="bbv2.reference.tools.compiler.msvc"/>),
- and controls whether the manifest files should be embedded inside
- executables and shared libraries, or placed alongside them. This
- feature corresponds to the IDE option found in the project settings dialog,
- under <menuchoice><guimenu>Configuration Properties</guimenu>
- <guisubmenu>Manifest Tool</guisubmenu>
- <guisubmenu>Input and Output</guisubmenu>
- <guimenuitem>Embed manifest</guimenuitem> </menuchoice>.
- </para>
-
- </listitem>
- </varlistentry>
-
-
- </variablelist>
- </section>
-
- <section id="bbv2.reference.tools">
- <title>Builtin tools</title>
-
-    <para>Boost.Build comes with support for a large number of C++ compilers
-    and other tools. This section documents how to use those tools.</para>
-
- <para>Before using any tool, you must declare your intention, and possibly
- specify additional information about the tool's configuration. This is
- done by calling the <code>using</code> rule, typically in your
- <filename>user-config.jam</filename>, for example:</para>
-<programlisting>
-using gcc ;
-</programlisting>
-  <para>Additional parameters can be passed just as for other rules, for example:</para>
-<programlisting>
-using gcc : 4.0 : g++-4.0 ;
-</programlisting>
-
-
-
- <para>The options that can be passed to each tool are documented in the
- subsequent sections.</para>
-
- <section id="bbv2.reference.tools.compilers">
-
- <title>C++ Compilers</title>
-
- <para>This section lists all Boost.Build modules that support C++
- compilers and documents how each one can be initialized. The name
-      of a compiler's support module is also the value of
-      the <code>toolset</code> feature that can be used to explicitly
-      request that compiler.</para>
-
- <section id="bbv2.reference.tools.compiler.gcc">
-
- <title>GNU C++</title>
-
- <para>The <code>gcc</code> module supports the
- <ulink url="http://gcc.gnu.org">GNU C++ compiler</ulink>
-      on Linux, on a number of Unix-like systems including SunOS, and on Windows
-      (either <ulink url="http://www.cygwin.com">Cygwin</ulink> or
-      <ulink url="http://www.mingw.org">MinGW</ulink>). On Mac OS X, it is recommended
-      to use the system gcc; see <xref linkend="bbv2.reference.tools.compiler.darwin"/>.
- </para>
-
- <para>The <code>gcc</code> module is initialized using the following
- syntax:</para>
- <programlisting>
-using gcc : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <!-- FIXME: mention everywhere what is the semantic
- of version is -->
-
- <para>
- If the version is not explicitly specified, it will be
- automatically detected by running the compiler with the <code>-v</code>
- option. If the command is not specified, the <command>g++</command>
- binary will be searched in <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('root_option')/*)"
- parse="xml"/>
-
- <varlistentry>
- <term><literal>rc</literal></term>
-
- <listitem>
- <para>Specifies the resource compiler command
- that will be used with the version of gcc that is being
- configured. This setting makes sense only for Windows and only
- if you plan to use resource files. By
- default <command>windres</command> will be used.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>rc-type</literal></term>
-
- <listitem>
-          <para>Specifies the type of resource compiler. The value can
-          be either <code>windres</code> for the msvc resource compiler,
-          or <code>rc</code> for Borland's resource compiler.</para>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- <indexterm><primary>64-bit compilation</primary>
- <secondary>gcc</secondary></indexterm>
-
- In order to compile 64-bit applications, you have to specify
- <code>address-model=64</code>, and the <code>instruction-set</code>
-      feature should refer to a 64-bit processor. Currently, those
- include <literal>nocona</literal>, <literal>opteron</literal>,
- <literal>athlon64</literal> and <literal>athlon-fx</literal>.
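-
-      <para>For example, a sketch of such a command line (the instruction
-      set shown is illustrative only):</para>
- <programlisting>
-b2 toolset=gcc address-model=64 instruction-set=opteron
-</programlisting>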
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.darwin">
-
- <title>Apple Darwin gcc</title>
-
- <para>The <code>darwin</code> module supports the version of gcc that is
- modified and provided by Apple. The configuration is essentially identical
- to that of the gcc module.
- </para>
-
- <para>
- <indexterm><primary>fat binaries</primary></indexterm>
-      The darwin toolset can generate so-called "fat"
-      binaries&#x2014;binaries that support more than one
-      architecture or address model. To build a binary that can run both
-      on Intel and PowerPC processors, specify
-      <code>architecture=combined</code>. To build a binary that can run
-      both in 32-bit and 64-bit modes, specify
-      <code>address-model=32_64</code>. If you specify both of those
-      properties, a "4-way" fat binary will be generated.
- </para>
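-
-      <para>For example, a sketch of a command line requesting a 4-way fat
-      binary:</para>
- <programlisting>
-b2 toolset=darwin architecture=combined address-model=32_64
-</programlisting>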
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.msvc">
-
- <title>Microsoft Visual C++</title>
-
- <para>The <code>msvc</code> module supports the
- <ulink url="http://msdn.microsoft.com/visualc/">Microsoft Visual
- C++</ulink> command-line tools on Microsoft Windows. The supported
- products and versions of command line tools are listed below:</para>
- <itemizedlist>
- <listitem><para>Visual Studio 2010&#x2014;10.0</para></listitem>
- <listitem><para>Visual Studio 2008&#x2014;9.0</para></listitem>
- <listitem><para>Visual Studio 2005&#x2014;8.0</para></listitem>
- <listitem><para>Visual Studio .NET 2003&#x2014;7.1</para></listitem>
- <listitem><para>Visual Studio .NET&#x2014;7.0</para></listitem>
- <listitem><para>Visual Studio 6.0, Service Pack 5&#x2014;6.5</para></listitem>
- </itemizedlist>
-
- <para>The <code>msvc</code> module is initialized using the following
- syntax:</para>
- <programlisting>
-using msvc : &toolset_ops; ;
- </programlisting>
- &using_repeation;
- <para>If the version is not explicitly specified, the most recent
- version found in the registry will be used instead. If the special
- value <code>all</code> is passed as the version, all versions found in
- the registry will be configured. If a version is specified, but the
- command is not, the compiler binary will be searched in standard
- installation paths for that version, followed by <envar>PATH</envar>.
- </para>
-
- <para>The compiler command should be specified using forward slashes,
- and quoted.</para>
-
- &option_list_intro;
- <variablelist>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
-
- <varlistentry>
- <term><literal>assembler</literal></term>
-
- <listitem><para>The command that compiles assembler sources. If
-          not specified, <command>ml</command> will be used. The command
-          will be invoked after the setup script has been executed and has
-          adjusted the <envar>PATH</envar> variable.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>compiler</literal></term>
-
- <listitem><para>The command that compiles C and C++ sources. If
-          not specified, <command>cl</command> will be used. The command
-          will be invoked after the setup script has been executed and has
-          adjusted the <envar>PATH</envar> variable.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>compiler-filter</literal></term>
-
-          <listitem><para>A command through which to pipe the output of
-          running the compiler, for example to pass the output to STLfilt.
-          </para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>idl-compiler</literal></term>
-
- <listitem><para>The command that compiles Microsoft COM interface
- definition files. If not specified, <command>midl</command> will
-          be used. The command will be invoked after the setup script has
-          been executed and has adjusted the <envar>PATH</envar> variable.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>linker</literal></term>
-
- <listitem><para>The command that links executables and dynamic
- libraries. If not specified, <command>link</command> will be used.
-          The command will be invoked after the setup script has been executed
-          and has adjusted the <envar>PATH</envar> variable.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>mc-compiler</literal></term>
-
- <listitem><para>The command that compiles Microsoft message
- catalog files. If not specified, <command>mc</command> will be
-          used. The command will be invoked after the setup script has been
-          executed and has adjusted the <envar>PATH</envar> variable.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>resource-compiler</literal></term>
-
- <listitem><para>The command that compiles resource files. If not
- specified, <command>rc</command> will be used. The command will be
-          invoked after the setup script has been executed and has adjusted the
-          <envar>PATH</envar> variable.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>setup</literal></term>
-
-          <listitem><para>The filename of the global environment setup
-          script to run before invoking any of the tools defined in this
-          toolset. It will not be used if a target platform specific
-          script has been explicitly specified for the current target
-          platform. The setup script used will be passed the target platform
-          identifier (x86, x86_amd64, x86_ia64, amd64 or ia64) as a
-          parameter. If not specified, a default script is chosen based on
-          the compiler binary used, e.g. <command>vcvars32.bat</command> or
-          <command>vsvars32.bat</command>.</para></listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>setup-amd64</literal></term>
- <term><literal>setup-i386</literal></term>
- <term><literal>setup-ia64</literal></term>
-
- <listitem><para>The filename of the target platform specific
- environment setup script to run before invoking any of the tools
- defined in this toolset. If not specified the global environment
- setup script is used.</para></listitem>
- </varlistentry>
- </variablelist>
-
- <section id="v2.reference.tools.compiler.msvc.64">
- <title>64-bit support</title>
-
- <indexterm><primary>64-bit compilation</primary>
- <secondary>Microsoft Visual Studio</secondary></indexterm>
-
-        <para>Starting with version 8.0, Microsoft Visual Studio can
-        generate binaries for 64-bit processors, both 64-bit flavours of x86
-        (codenamed AMD64/EM64T) and Itanium (codenamed IA64). In addition,
-        compilers that themselves run in 64-bit mode, for better
-        performance, are provided. The complete list of compiler
-        configurations is as follows (we abbreviate AMD64/EM64T to just
-        AMD64):</para>
-
- <itemizedlist>
- <listitem><para>32-bit x86 host, 32-bit x86 target</para>
- </listitem>
- <listitem><para>32-bit x86 host, 64-bit AMD64 target</para>
- </listitem>
- <listitem><para>32-bit x86 host, 64-bit IA64 target</para>
- </listitem>
- <listitem><para>64-bit AMD64 host, 64-bit AMD64 target</para>
- </listitem>
- <listitem><para>64-bit IA64 host, 64-bit IA64 target</para>
- </listitem>
- </itemizedlist>
- <para>
-          The 32-bit host compilers can always be used, even on 64-bit
-          Windows. In contrast, the 64-bit host compilers require both a 64-bit
-          host processor and 64-bit Windows, but can be faster. By default,
-          only the 32-bit host, 32-bit target compiler is installed, and
-          additional compilers need to be installed explicitly.
- </para>
-
-        <para>To use 64-bit compilation you should (see the example after
-        this list):</para>
-        <orderedlist>
-          <listitem><para>Configure your compiler as usual. If you provide a
-          path to the compiler explicitly, provide the path to the 32-bit
-          compiler. If you try to specify the path to any of the 64-bit
-          compilers, configuration will not work.</para></listitem>
-
-          <listitem><para>When compiling, use <code>address-model=64</code>
-          to generate AMD64 code.</para></listitem>
-
-          <listitem><para>To generate IA64 code, use
-          <code>architecture=ia64</code>.</para></listitem>
- </orderedlist>
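-
-        <para>For example, a sketch of the corresponding command lines:</para>
- <programlisting>
-b2 toolset=msvc address-model=64     # AMD64 code
-b2 toolset=msvc architecture=ia64    # IA64 code
-</programlisting>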
-
- <para>The (AMD64 host, AMD64 target) compiler will be used
- automatically when you are generating AMD64 code and are running
-        64-bit Windows on AMD64. The (IA64 host, IA64 target) compiler will
-        never be used, since nobody has an IA64 machine to test on.</para>
-
- <para>It is believed that AMD64 and EM64T targets are essentially
- compatible. The compiler options <code>/favor:AMD64</code> and
- <code>/favor:EM64T</code>, which are accepted only by AMD64
- targeting compilers, cause the generated code to be tuned to a
- specific flavor of 64-bit x86. Boost.Build will make use of those
-        options depending on the value of the <code>instruction-set</code>
- feature.</para>
- </section>
- </section>
-
- <section id="bbv2.reference.tools.compiler.intel">
-
- <title>Intel C++</title>
-
- <para>The <code>intel-linux</code> and <code>intel-win</code> modules
- support the Intel C++ command-line compiler&#x2014;the <ulink url=
- "http://www.intel.com/software/products/compilers/clin/index.htm">Linux</ulink>
- and <ulink url=
- "http://www.intel.com/cd/software/products/asmo-na/eng/compilers/284527.htm">
- Windows</ulink> versions respectively.</para>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using intel-linux : &toolset_ops; ;</programlisting>
- <para>or</para>
- <programlisting>
-using intel-win : &toolset_ops; ;</programlisting>
- <para>respectively.</para>
-
- &using_repeation;
-
- <para>
-        If the compiler command is not specified, Boost.Build will
-        look in <envar>PATH</envar> for an executable <command>icpc</command>
-        (on Linux) or <command>icc.exe</command> (on Windows).
- </para>
-
- &option_list_intro;
- <variablelist>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
-
- </variablelist>
-
- <para>The Linux version supports the following additional options:</para>
- <variablelist>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('root_option')/*)"
- parse="xml"/>
-
- </variablelist>
-
- <!-- the compatibility option appears to be messed up -->
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.acc">
-
- <title>HP aC++ compiler</title>
-
- <para>The <code>acc</code> module supports the
-<ulink url="http://h21007.www2.hp.com/dspp/tech/tech_TechSoftwareDetailPage_IDX/1,1703,1740,00.html">HP aC++ compiler</ulink>
- for the HP-UX operating system.</para>
-
- <para>The module is initialized using the following
- syntax:</para>
- <programlisting>
-using acc : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
-
- <para>
- If the command is not specified, the <command>aCC</command>
- binary will be searched in <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.borland">
-
- <title>Borland C++ Compiler</title>
-
- <para>The <code>borland</code> module supports the command line
- C++ compiler included in
- <ulink url="http://www.borland.com/us/products/cbuilder/index.html">C++ Builder 2006</ulink>
-      product and earlier versions of it, running on Microsoft Windows.</para>
-
-      <para>The supported products are listed below. The version reported
-      by the command line tools is also listed for reference:</para>
- <itemizedlist>
- <listitem><para>C++ Builder 2006&#x2014;5.8.2</para></listitem>
- <listitem><para>CBuilderX&#x2014;5.6.5, 5.6.4 (depending on release)</para></listitem>
- <listitem><para>CBuilder6&#x2014;5.6.4</para></listitem>
- <listitem><para>Free command line tools&#x2014;5.5.1</para></listitem>
- </itemizedlist>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using borland : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <para>If the command is not specified, Boost.Build will search for
- a binary named <command>bcc32</command> in <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.como">
-
- <title>Comeau C/C++ Compiler</title>
-
-      <para>The <code>como-linux</code> and the <code>como-win</code>
-      modules support the
- <ulink url="http://www.comeaucomputing.com/">Comeau C/C++ Compiler</ulink>
- on Linux and Windows respectively.</para>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using como-linux : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <para>If the command is not specified, Boost.Build will search for
- a binary named <command>como</command> in
- <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
- </variablelist>
-
-      <para>Before using the Windows version of the compiler, you need to
-      set up the necessary environment variables as described in the
-      compiler's documentation. In particular, the
-      <envar>COMO_XXX_INCLUDE</envar> variable should be set, where
-      <envar>XXX</envar> corresponds to the backend C compiler used.</para>
- </section>
-
- <section id="bbv2.reference.tools.compiler.cw">
-
- <title>Code Warrior</title>
-
-      <para>The <code>cw</code> module supports the CodeWarrior compiler,
-      originally produced by Metrowerks and presently developed by
-      Freescale. Boost.Build supports only the versions of the compiler that
-      target x86 processors. All such versions were released by Metrowerks
-      before the acquisition and are no longer sold. The last version known
-      to work is 9.4.</para>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using cw : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <para>If the command is not specified, Boost.Build will search for a
- binary named <command>mwcc</command> in default installation paths and
- in <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
-
- <xi:include href="fragments.xml" xpointer="xpointer(id('root_option')/*)"
- parse="xml"/>
-
- <varlistentry>
- <term><literal>setup</literal></term>
-
- <listitem><para>The command that sets up environment variables
- prior to invoking the compiler. If not specified,
- <command>cwenv.bat</command> alongside the compiler binary
- will be used.</para>
- </listitem>
- </varlistentry>
-
-
- <varlistentry>
- <term><literal>compiler</literal></term>
-
- <listitem><para>The command that compiles C and C++ sources.
- If not specified, <command>mwcc</command> will be used. The
-          command will be invoked after the setup script has been
-          executed and has adjusted the <envar>PATH</envar> variable.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <term><literal>linker</literal></term>
-
- <listitem><para>The command that links executables and dynamic
- libraries.
- If not specified, <command>mwld</command> will be used. The
-          command will be invoked after the setup script has been
-          executed and has adjusted the <envar>PATH</envar> variable.</para>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.dmc">
-
- <title>Digital Mars C/C++ Compiler</title>
-
- <para>The <code>dmc</code> module supports the
- <ulink url="http://www.digitalmars.com/">Digital Mars C++ compiler.</ulink>
- </para>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using dmc : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <para>If the command is not specified, Boost.Build will search for
- a binary named <command>dmc</command> in
- <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.hp_cxx">
-
- <title>HP C++ Compiler for Tru64 Unix</title>
-
-      <para>The <code>hp_cxx</code> module supports the
- <ulink url="http://h30097.www3.hp.com/cplus/?jumpid=reg_R1002_USEN">
- HP C++ Compiler</ulink> for Tru64 Unix.</para>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using hp_cxx : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <para>If the command is not specified, Boost.Build will search for
- a binary named <command>hp_cxx</command> in <envar>PATH</envar>.</para>
-
- &option_list_intro;
- <variablelist>
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.sun">
-
- <title>Sun Studio</title>
-
- <para>The <code>sun</code> module supports the
- <ulink url="http://developers.sun.com/sunstudio/index.jsp">
- Sun Studio</ulink> C++ compilers for the Solaris OS.</para>
-
- <para>The module is initialized using the following syntax:</para>
- <programlisting>
-using sun : &toolset_ops; ;</programlisting>
-
- &using_repeation;
-
- <para>If the command is not specified, Boost.Build will search for
- a binary named <command>CC</command>
- in <filename>/opt/SUNWspro/bin</filename> and in
- <envar>PATH</envar>.</para>
-
- <para>When using this compiler on complex C++ code, such as the
- <ulink url="http://boost.org">Boost C++ library</ulink>, it is
-      recommended to specify the following options when initializing the
- <code>sun</code> module:
- <screen>
--library=stlport4 -features=tmplife -features=tmplrefstatic
- </screen> See the <ulink url="http://blogs.sun.com/sga/entry/command_line_options">
- Sun C++ Frontend Tales</ulink> for details.</para>
-
- &option_list_intro;
- <variablelist>
- <xi:include href="fragments.xml" xpointer="xpointer(id('common_options')/*)"
- parse="xml"/>
- </variablelist>
-
- <indexterm><primary>64-bit compilation</primary>
- <secondary>Sun Studio</secondary></indexterm>
- Starting with Sun Studio 12, you can create 64-bit applications
- by using the <code>address-model=64</code> property.
-
- </section>
-
- <section id="bbv2.reference.tools.compiler.vacpp">
-
- <title>IBM Visual Age</title>
- <para>The <code>vacpp</code> module supports the
- <ulink url="http://www.ibm.com/software/ad/vacpp">IBM Visual
- Age</ulink> C++ Compiler, for the AIX operating system. Versions
- 7.1 and 8.0 are known to work.</para>
-
- <para>The module is initialized using the following
- syntax:</para>
- <programlisting>
-using vacpp ;</programlisting>
-
- <para>The module does not accept any initialization options. The
- compiler should be installed in the <filename>/usr/vacpp/bin</filename>
- directory.</para>
-
-      <para>Later versions of Visual Age are known as XL C/C++. They
-      were not tested with the <code>vacpp</code> module.</para>
-
- </section>
-
- </section>
-
- <section>
- <title>Third-party libraries</title>
-
- <para>Boost.Build provides special support for some
- third-party C++ libraries, documented below.</para>
-
- <section id="bbv2.reference.tools.libraries.stlport">
- <title>STLport library</title>
- <indexterm><primary>STLport</primary></indexterm>
-
- <para>The <ulink url="http://stlport.org">STLport</ulink> library
- is an alternative implementation of C++ runtime library. Boost.Build
- supports using that library on Windows platfrom. Linux is
- hampered by different naming of libraries in each STLport
- version and is not officially supported.</para>
-
- <para>Before using STLport, you need to configure it in
- <filename>user-config.jam</filename> using the following syntax:
- </para>
- <programlisting>
-using stlport : <optional><replaceable>version</replaceable></optional> : <replaceable>header-path</replaceable> : <optional><replaceable>library-path</replaceable></optional> ;
-</programlisting>
-      <para>
-      Where <replaceable>version</replaceable> is the version of
-      STLport, for example <literal>5.1.4</literal>,
-      <replaceable>header-path</replaceable> is the location where
-      STLport headers can be found, and <replaceable>library-path</replaceable>
-      is the location where STLport libraries can be found.
-      The version should always be provided, and the library path should
-      be provided if you're using STLport's implementation of
-      iostreams. Note that STLport 5.* always uses its own iostream
-      implementation, so the library path is required.
-      </para>
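-
-      <para>For example, a sketch of such a configuration (the version and
-      paths shown are illustrative only):</para>
- <programlisting>
-using stlport : 5.1.4 : /usr/local/include/stlport : /usr/local/lib ;
-</programlisting>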
-
- <para>When STLport is configured, you can build with STLport by
- requesting <literal>stdlib=stlport</literal> on the command line.
- </para>
-
- </section>
-
- </section>
-
- <section>
- <title>Documentation tools</title>
-
- <para>Boost.Build support for the Boost documentation tools is
- documented below.
- </para>
-
- <section id="bbv2.reference.tools.doc.xsltproc">
- <title>xsltproc</title>
- <indexterm><primary>xsltproc</primary></indexterm>
-
- <para>To use xsltproc, you first need to configure it using the following syntax:</para>
- <programlisting>
-using xsltproc : <optional><replaceable>xsltproc</replaceable></optional> ;
-</programlisting>
- <para>
- Where <replaceable>xsltproc</replaceable> is the xsltproc executable.
- If <replaceable>xsltproc</replaceable> is not specified, and the
- variable XSLTPROC is set, the value of XSLTPROC will be used.
- Otherwise, xsltproc will be searched for in PATH.
- </para>
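-
-      <para>For example, a sketch pointing at an explicit executable (the
-      path shown is illustrative only):</para>
- <programlisting>
-using xsltproc : /usr/bin/xsltproc ;
-</programlisting>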
-
-
- &option_list_intro;
- <variablelist>
-
- <varlistentry>
- <indexterm><primary>xsl:param</primary></indexterm>
- <term><literal>xsl:param</literal></term>
- <listitem>
- <para>Values should have the form
- <replaceable>name</replaceable>=<replaceable>value</replaceable></para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <indexterm><primary>xsl:path</primary></indexterm>
- <term><literal>xsl:path</literal></term>
- <listitem>
- <para>Sets an additional search path for xi:include elements.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <indexterm><primary>catalog</primary></indexterm>
- <term><literal>catalog</literal></term>
- <listitem>
-            <para>A catalog file used to rewrite remote URLs to a local copy.</para>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- <para>The xsltproc module provides the following rules. Note that
- these operate on jam targets and are intended to be used by another
- toolset, such as boostbook, rather than directly by users.
- </para>
- <variablelist>
-
- <varlistentry>
- <indexterm><primary>xslt</primary></indexterm>
- <term><literal>xslt</literal></term>
- <listitem>
- <programlisting>
-rule xslt ( target : source stylesheet : properties * )
-</programlisting>
- <para>Runs xsltproc to create a single output file.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <indexterm><primary>xslt-dir</primary></indexterm>
- <term><literal>xslt-dir</literal></term>
- <listitem>
- <programlisting>
-rule xslt-dir ( target : source stylesheet : properties * : dirname )
-</programlisting>
- <para>Runs xsltproc to create multiple outputs in a directory.
- <literal>dirname</literal> is unused, but exists for
- historical reasons. The output directory is determined from the
- target.
- </para>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.doc.boostbook">
- <title>boostbook</title>
- <indexterm><primary>boostbook</primary><secondary>module</secondary></indexterm>
-
- <para>To use boostbook, you first need to configure it using the following syntax:</para>
- <programlisting>
-using boostbook : <optional><replaceable>docbook-xsl-dir</replaceable></optional> : <optional><replaceable>docbook-dtd-dir</replaceable></optional> : <optional><replaceable>boostbook-dir</replaceable></optional> ;
-</programlisting>
- <para>
- <replaceable>docbook-xsl-dir</replaceable> is the DocBook XSL stylesheet
- directory. If not provided, we use DOCBOOK_XSL_DIR from the environment
- (if available) or look in standard locations. Otherwise, we let the
- XML processor load the stylesheets remotely.
- </para>
-
- <para>
- <replaceable>docbook-dtd-dir</replaceable> is the DocBook DTD directory.
-        If not provided, we use DOCBOOK_DTD_DIR from the environment (if
- available) or look in standard locations. Otherwise, we let the XML
- processor load the DTD remotely.
- </para>
-
- <para>
- <replaceable>boostbook-dir</replaceable> is the BoostBook directory
- with the DTD and XSL subdirs.
- </para>
-
- <para>The boostbook module depends on xsltproc. For pdf or ps output,
- it also depends on fop.
- </para>
-
- &option_list_intro;
- <variablelist>
-
- <varlistentry>
- <indexterm><primary>format</primary></indexterm>
- <indexterm><primary>html</primary></indexterm>
- <indexterm><primary>xhtml</primary></indexterm>
- <indexterm><primary>htmlhelp</primary></indexterm>
- <indexterm><primary>onehtml</primary></indexterm>
- <indexterm><primary>man</primary></indexterm>
- <indexterm><primary>pdf</primary></indexterm>
- <indexterm><primary>ps</primary></indexterm>
- <indexterm><primary>docbook</primary></indexterm>
- <indexterm><primary>fo</primary></indexterm>
- <indexterm><primary>tests</primary></indexterm>
- <term><literal>format</literal></term>
- <listitem>
- <para>
- <emphasis role="bold">Allowed values:</emphasis>
- <literal>html</literal>, <literal>xhtml</literal>,
- <literal>htmlhelp</literal>, <literal>onehtml</literal>,
- <literal>man</literal>, <literal>pdf</literal>,
- <literal>ps</literal>, <literal>docbook</literal>,
- <literal>fo</literal>, <literal>tests</literal>.
- </para>
-
-
- <para>The <literal>format</literal> feature determines the type
- of output produced by the boostbook rule.</para>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- <para>The boostbook module defines a rule for creating a target
- following the common syntax.</para>
-
- <variablelist>
-
- <varlistentry>
- <indexterm><primary>boostbook</primary><secondary>rule</secondary></indexterm>
- <term><literal>boostbook</literal></term>
- <listitem>
- <programlisting>
-rule boostbook ( target-name : sources * : requirements * : default-build * )
-</programlisting>
- <para>Creates a boostbook target.</para>
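-            <para>For example, a sketch of a typical invocation in a Jamfile
-            (the target and source names are illustrative only):</para>
- <programlisting>
-boostbook mydoc : mydoc.xml : &lt;format&gt;html ;
-</programlisting>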
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.doc.doxygen">
- <title>doxygen</title>
- <indexterm><primary>doxygen</primary></indexterm>
-
- <para>To use doxygen, you first need to configure it using the following syntax:</para>
- <programlisting>
-using doxygen : <optional><replaceable>name</replaceable></optional> ;
-</programlisting>
- <para>
- <replaceable>name</replaceable> is the doxygen command.
- If it is not specified, it will be found in the PATH.
- </para>
-
- <para>The doxygen module depends on the boostbook module when
- generating BoostBook XML.
- </para>
-
- &option_list_intro;
- <variablelist>
-
- <varlistentry>
- <indexterm><primary>doxygen:param</primary></indexterm>
- <term><literal>doxygen:param</literal></term>
- <listitem>
- <para>All the values of <literal>doxygen:param</literal>
- are added to the doxyfile.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <indexterm><primary>prefix</primary></indexterm>
- <term><literal>prefix</literal></term>
- <listitem>
- <para>Specifies the common prefix of all headers
- when generating BoostBook XML. Everything before
- this will be stripped off.
- </para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <indexterm><primary>reftitle</primary></indexterm>
- <term><literal>reftitle</literal></term>
- <listitem>
- <para>Specifies the title of the library-reference section,
- when generating BoostBook XML.</para>
- </listitem>
- </varlistentry>
-
- <varlistentry>
- <indexterm><primary>doxygen:xml-imagedir</primary></indexterm>
- <term><literal>doxygen:xml-imagedir</literal></term>
- <listitem>
- <para>When generating BoostBook XML, specifies the
- directory in which to place the images generated
-            from LaTeX formulae.</para>
- <warning><para>The path is interpreted relative to the
- current working directory, not relative to the Jamfile.
- This is necessary to match the behavior of BoostBook.
- </para></warning>
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- <para>The doxygen module defines a rule for creating a target
- following the common syntax.</para>
-
- <variablelist>
-
- <varlistentry>
- <indexterm><primary>doxygen</primary><secondary>rule</secondary></indexterm>
- <term><literal>doxygen</literal></term>
- <listitem>
- <programlisting>
-rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
-</programlisting>
- <para>Creates a doxygen target. If the target name
- ends with .html, then this will generate an html
- directory. Otherwise it will generate BoostBook XML.
- </para>
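-            <para>For example, a sketch of generating reference documentation
-            from a set of headers (the names, paths and options shown are
-            illustrative only):</para>
- <programlisting>
-doxygen autodoc
-    :
-        [ glob include/mylib/*.hpp ]
-    :
-        &lt;doxygen:param&gt;EXTRACT_ALL=YES
-        &lt;reftitle&gt;"MyLib Reference"
-    ;
-</programlisting>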
- </listitem>
- </varlistentry>
-
- </variablelist>
-
- </section>
-
- <section id="bbv2.reference.tools.doc.quickbook">
- <title>quickbook</title>
- <indexterm><primary>quickbook</primary></indexterm>
-
- <para>The quickbook module provides a generator to convert from
- Quickbook to BoostBook XML.</para>
-
- <para>To use quickbook, you first need to configure it using the following syntax:</para>
- <programlisting>
-using quickbook : <optional><replaceable>command</replaceable></optional> ;
-</programlisting>
- <para>
- <replaceable>command</replaceable> is the quickbook executable.
- If it is not specified, Boost.Build will compile it from source.
- If it is unable to find the source it will search for a quickbook
- executable in PATH.
- </para>
-
- </section>
-
- <section id="bbv2.reference.tools.doc.fop">
- <title>fop</title>
- <indexterm><primary>fop</primary></indexterm>
-
- <para>The fop module provides generators to convert from
- XSL formatting objects to Postscript and PDF.</para>
-
- <para>To use fop, you first need to configure it using the following syntax:</para>
- <programlisting>
-using fop : <optional><replaceable>fop-command</replaceable></optional> : <optional><replaceable>java-home</replaceable></optional> : <optional><replaceable>java</replaceable></optional> ;
-</programlisting>
- <para>
- <replaceable>fop-command</replaceable> is the command to run fop.
- If it is not specified, Boost.Build will search for it in PATH and
- FOP_HOME.
- </para>
- <para>
- Either <replaceable>java-home</replaceable> or
- <replaceable>java</replaceable>
- can be used to specify where to find java.
- </para>
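-
-      <para>For example, a sketch of such a configuration (the paths shown
-      are illustrative only):</para>
- <programlisting>
-using fop : /opt/fop/fop : : /usr/bin/java ;
-</programlisting>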
-
- </section>
-
- </section>
-
- </section>
-
- <section id="bbv2.reference.modules">
- <title>Builtin modules</title>
-
- <para>
- This section describes the modules that are provided
- by Boost.Build. The import rule allows rules from
- one module to be used in another module or Jamfile.
- </para>
-
- <section id="bbv2.reference.modules.modules">
- <title>modules</title>
- <indexterm><primary>modules</primary></indexterm>
-
- <para>
- The <code>modules</code> module defines basic functionality
- for handling modules.
- </para>
-
- <para>
- A module defines a number of rules that can be used in other
- modules. Modules can contain code at the top level to initialize
- the module. This code is executed the first time the
- module is loaded.
- <note>
- <para>
-          A Jamfile is a special kind of module which is managed by
-          the build system. Although Jamfiles cannot be loaded directly
-          by users, the other features of modules are still useful
-          for Jamfiles.
- </para>
- </note>
- </para>
-
- <para>
- Each module has its own namespaces for variables and rules. If two
- modules A and B both use a variable named X, each one gets its own
- copy of X. They won't interfere with each other in any way.
- Similarly, importing rules into one module has no effect on any other
- module.
- </para>
-
- <para>
- Every module has two special variables.
- <code>$(__file__)</code> contains the name of the file that
- the module was loaded from and <code>$(__name__)</code>
- contains the name of the module.
- <note><para><code>$(__file__)</code> does not contain
- the full path to the file. If you need this, use
- <code>modules.binding</code>.</para></note>
- </para>
-
- <orderedlist>
-
- <listitem id="bbv2.reference.modules.modules.binding">
- <indexterm zone="bbv2.reference.modules.modules.binding"><primary>binding</primary></indexterm>
- <code language="jam">rule binding ( module-name )</code>
- <para>Returns the filesystem binding of the given module.</para>
- <para>For example, a module can get its own location with:
- <programlisting language="jam">me = [ modules.binding $(__name__) ] ;</programlisting>
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.modules.poke">
- <indexterm zone="bbv2.reference.modules.modules.poke"><primary>poke</primary></indexterm>
- <code language="jam">rule poke ( module-name ? : variables + : value * )</code>
- <para>Sets the module-local value of a variable.</para>
- <para>For example, to set a variable in the global module:
- <programlisting language="jam">modules.poke : ZLIB_INCLUDE : /usr/local/include ;</programlisting>
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.modules.peek">
- <indexterm zone="bbv2.reference.modules.modules.peek"><primary>peek</primary></indexterm>
- <code language="jam">rule peek ( module-name ? : variables + )</code>
- <para>Returns the module-local value of a variable.</para>
- <para>
- For example, to read a variable from the global module:
- <programlisting language="jam">local ZLIB_INCLUDE = [ modules.peek : ZLIB_INCLUDE ] ;</programlisting>
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.modules.call-in">
- <indexterm zone="bbv2.reference.modules.modules.call-in"><primary>call-in</primary></indexterm>
- <code language="jam">rule call-in ( module-name ? : rule-name args * : * ) </code>
- <para>Call the given rule locally in the given module. Use
- this for rules accepting rule names as arguments, so that
- the passed rule may be invoked in the context of the rule's
- caller (for example, if the rule accesses module globals or
- is a local rule).
-        <note><para>Rules called this way may accept at most
- 8 parameters.</para></note></para>
- <para>Example:
-<programlisting language="jam">
-rule filter ( f : values * )
-{
- local m = [ CALLER_MODULE ] ;
- local result ;
- for v in $(values)
- {
- if [ modules.call-in $(m) : $(f) $(v) ]
- {
- result += $(v) ;
- }
- }
- return result ;
-}
-</programlisting>
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.modules.load">
- <indexterm zone="bbv2.reference.modules.modules.load"><primary>load</primary></indexterm>
- <code language="jam">rule load ( module-name : filename ? : search * )</code>
- <para>Load the indicated module if it is not already loaded.</para>
- <variablelist>
- <varlistentry>
- <term><literal>module-name</literal></term>
- <listitem><para>Name of module to load.</para></listitem>
- </varlistentry>
- </variablelist>
- <variablelist>
- <varlistentry>
- <term><literal>filename</literal></term>
-            <listitem><para>(Partial) path to the file; defaults to <code>$(module-name).jam</code>.</para></listitem>
- </varlistentry>
- </variablelist>
- <variablelist>
- <varlistentry>
- <term><literal>search</literal></term>
- <listitem><para>Directories in which to search for filename.
- Defaults to <code>$(BOOST_BUILD_PATH)</code>.</para></listitem>
- </varlistentry>
- </variablelist>
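-        <para>For example, a sketch (the module name and search path are
-        illustrative only):</para>
-        <programlisting language="jam">modules.load my-utils : : $(extra-search-path) ;</programlisting>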
- </listitem>
-
- <listitem id="bbv2.reference.modules.modules.import">
- <indexterm zone="bbv2.reference.modules.modules.import"><primary>import</primary></indexterm>
- <code language="jam">rule import ( module-names + : rules-opt * : rename-opt * )</code>
- <para>Load the indicated module and import rule names into the
- current module. Any members of <code>rules-opt</code> will be
- available without qualification in the caller's module. Any
- members of <code>rename-opt</code> will be taken as the names
- of the rules in the caller's module, in place of the names they
- have in the imported module. If <code>rules-opt = '*'</code>,
- all rules from the indicated module are imported into the
- caller's module. If <code>rename-opt</code> is supplied, it must have the
- same number of elements as <code>rules-opt</code>.</para>
- <note><para>The <literal>import</literal> rule is available
- without qualification in all modules.</para></note>
- <para>Examples:
-<programlisting language="jam">
-import path ;
-import path : * ;
-import path : join ;
-import path : native make : native-path make-path ;
-</programlisting>
- </para>
- </listitem>
-
- <listitem id="bbv2.reference.modules.modules.clone-rules">
- <indexterm zone="bbv2.reference.modules.modules.clone-rules"><primary>clone-rules</primary></indexterm>
- <code language="jam">rule clone-rules ( source-module target-module )</code>
- <para>Define exported copies in <code>$(target-module)</code>
- of all rules exported from <code>$(source-module)</code>. Also
- make them available in the global module with qualification,
- so that it is just as though the rules were defined originally
- in <code>$(target-module)</code>.</para>
- </listitem>
-
- </orderedlist>
-
- </section>
-
- <xi:include href="path.xml"/>
- <xi:include href="type.xml"/>
-
- </section>
-
- <section id="bbv2.reference.class">
- <title>Builtin classes</title>
- <xi:include href="abstract-target.xml"/>
- <xi:include href="project-target.xml"/>
- <xi:include href="main-target.xml"/>
- <xi:include href="basic-target.xml"/>
- <xi:include href="typed-target.xml"/>
- <xi:include href="property-set.xml"/>
- </section>
-
- <section id="bbv2.reference.buildprocess">
- <title>Build process</title>
-
- <para>The general overview of the build process was given in the
- <link linkend="bbv2.overview.build_process">user documentation</link>.
- This section provides additional details, and some specific rules.
- </para>
-
- <para>To recap, building a target with specific properties includes the
- following steps:
- <orderedlist>
-
- <listitem><para>applying default build,</para></listitem>
-
- <listitem><para>selecting the main target alternative to use,
- </para></listitem>
-
- <listitem><para>determining "common" properties,</para></listitem>
-
- <listitem><para>building targets referred by the sources list and
- dependency properties,</para></listitem>
-
-      <listitem><para>adding the usage requirements produced when building
-      dependencies to the "common" properties,</para></listitem>
-
- <listitem><para>building the target using generators,</para></listitem>
-
- <listitem><para>computing the usage requirements to be returned.</para></listitem>
-
- </orderedlist>
- </para>
-
- <section id="bbv2.reference.buildprocess.alternatives">
- <title>Alternative selection</title>
-
- <para>When there are several alternatives, one of them must be
- selected. The process is as follows:</para>
-
- <orderedlist>
- <listitem>
- <simpara>
-          For each alternative, the <emphasis>condition</emphasis> is defined
-          as the set of base properties in its requirements. [Note: it might
-          be better to specify the condition explicitly, as in conditional
-          requirements].
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-          An alternative is viable only if all properties in its condition
-          are present in the build request.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-          If there is exactly one viable alternative, it is chosen. Otherwise,
-          an attempt is made to find the best alternative. An alternative
-          a is better than another alternative b if and only if the set of
-          properties in b's condition is a strict subset of the set of
-          properties in a's condition. If there is one viable alternative
-          that is better than all the others, it is selected. Otherwise, an
-          error is reported.
- </simpara>
- </listitem>
- </orderedlist>
-
- </section>
-
- <section id="bbv2.reference.buildprocess.common">
- <title>Determining common properties</title>
-
- <para>The "common" properties is a somewhat artificial term. Those are
- the intermediate property set from which both the build request for
- dependencies and properties for building the target are derived.
- </para>
-
-      <para>Since the default build and alternatives are already handled, we
-      have only two inputs: the build request and the requirements. Here are
-      the rules for determining the common properties.
- </para>
-
- <orderedlist>
-        <listitem><para>A non-free feature can have only one
-        value.</para></listitem>
-
-        <listitem><para>A non-conditional property in the requirements is
-        always present in the common properties.</para></listitem>
-
-        <listitem><para>A property in the build request is present in the
-        common properties, unless (2) says otherwise.</para></listitem>
-
-        <listitem><para>If either the build request or the requirements
-        (non-conditional or conditional) include an expandable property
-        (either a composite property, or a property with a specified
-        subfeature value), the behaviour is equivalent to explicitly adding
-        all expanded properties to the build request or the
-        requirements.</para></listitem>
-
-        <listitem><para>If the requirements include a conditional property,
-        and the condition of this property is true in the context of the
-        common properties, then the conditional property is in the common
-        properties as well.</para></listitem>
-
-        <listitem><para>If no value for a feature is given by the other rules
-        here, it has its default value in the common properties.</para></listitem>
- </orderedlist>
-
-      <para>Those rules are declarative; they do not specify how to compute
-      the common properties. However, they provide enough information for
-      the user. The important point is the handling of conditional
-      requirements. The condition can be satisfied either by a property in
-      the build request, by non-conditional requirements, or even by another
-      conditional property. For example, the following works as
-      expected:
-<programlisting>
-exe a : a.cpp
- : &lt;toolset&gt;gcc:&lt;variant&gt;release
- &lt;variant&gt;release:&lt;define&gt;FOO ;
-</programlisting>
- </para>
-
- </section>
-
- <section id="bbv2.reference.buildprocess.targetpath">
- <title>Target Paths</title>
-
- <para>Several factors determine the location of a concrete
- file target. All files in a project are built under
-    the directory bin unless this is overridden by the build-dir project
- attribute. Under bin is a path that depends on the properties
- used to build each target. This path is uniquely determined by
- all non-free, non-incidental properties. For example,
- given a property set containing:
- <code>&lt;toolset&gt;gcc &lt;toolset-gcc:version&gt;4.6.1 &lt;variant&gt;debug
- &lt;warnings&gt;all &lt;define&gt;_DEBUG &lt;include&gt;/usr/local/include
- &lt;link&gt;static</code>,
- the path will be gcc-4.6.1/debug/link-static. &lt;warnings&gt; is an
- incidental feature and &lt;define&gt; and &lt;include&gt; are
- free features, so they do not affect the path.</para>
-
-    <para>Sometimes the paths produced by Boost.Build can become excessively
-    long. There are a couple of command line options that can help with this.
-    <literal>--abbreviate-paths</literal> reduces each element to no more than
-    five characters. For example, link-static becomes lnk-sttc. The
-    <literal>--hash</literal> option reduces the path to a single directory
-    using an MD5 hash.</para>
-
- <para>There are two features that affect the build
- directory. The &lt;location&gt; feature completely
- overrides the default build directory. For example,
- <programlisting>exe a : a.cpp : &lt;location&gt;. ;</programlisting>
- builds all the files produced by <code>a</code>
- in the directory of the Jamfile. This is generally
- discouraged, as it precludes variant builds.</para>
-
- <para>The &lt;location-prefix&gt; feature adds a
- prefix to the path, under the project's build
- directory. For example,
- <programlisting>exe a : a.cpp : &lt;location-prefix&gt;subdir ;</programlisting>
-    will create the files for <code>a</code> in bin/subdir/gcc-4.6.1/debug.</para>
-
- </section>
-
- </section>
-
-
-
- <section id="bbv2.reference.definitions">
-
- <title>Definitions</title>
-
- <section id="bbv2.reference.features">
- <title>Features and properties</title>
-
- <para>A <emphasis>feature</emphasis> is a normalized (toolset-independent)
- aspect of a build configuration, such as whether inlining is
- enabled. Feature names may not contain the '<literal>&gt;</literal>'
- character.</para>
-
- <!--
- And what about dash?
- -->
-
- <para>Each feature in a build configuration has one or more
- associated <emphasis>value</emphasis>s. Feature values for non-free features
- may not contain the '<literal>&lt;</literal>', '<literal>:</literal>', or
- '<literal>=</literal>' characters. Feature values for free features may not
- contain the '<literal>&lt;</literal>' character.</para>
-
- <para>A <emphasis>property</emphasis> is a (feature,value) pair, expressed as
- &lt;feature&gt;value.</para>
-
- <para>A <emphasis>subfeature</emphasis> is a feature that only exists in the
- presence of its parent feature, and whose identity can be derived
- (in the context of its parent) from its value. A subfeature's
- parent can never be another subfeature. Thus, features and their
- subfeatures form a two-level hierarchy.</para>
-
- <para>A <emphasis>value-string</emphasis> for a feature <emphasis role="bold">F</emphasis> is a string of
- the form
- <literal>value-subvalue1-subvalue2</literal>...<literal>-subvalueN</literal>, where
- <literal>value</literal> is a legal value for <emphasis role="bold">F</emphasis> and
- <literal>subvalue1</literal>...<literal>subvalueN</literal> are legal values of some
- of <emphasis role="bold">F</emphasis>'s subfeatures. For example, the properties
- <literal>&lt;toolset&gt;gcc &lt;toolset-version&gt;3.0.1</literal> can be
-  expressed more concisely using a value-string, as
- <literal>&lt;toolset&gt;gcc-3.0.1</literal>.</para>
-
- <para>A <emphasis>property set</emphasis> is a set of properties (i.e. a
- collection without duplicates), for instance:
- <literal>&lt;toolset&gt;gcc &lt;runtime-link&gt;static</literal>.</para>
-
- <para>A <emphasis>property path</emphasis> is a property set whose elements have
- been joined into a single string separated by slashes. A property
- path representation of the previous example would be
- <literal>&lt;toolset&gt;gcc/&lt;runtime-link&gt;static</literal>.</para>
-
- <para>A <emphasis>build specification</emphasis> is a property set that fully
- describes the set of features used to build a target.</para>
-
- <section id="bbv2.reference.features.validity">
- <title>Property Validity</title>
-
- <para>
- For <link linkend=
- "bbv2.reference.features.attributes.free">free</link>
- features, all values are valid. For all other features,
- the valid values are explicitly specified, and the build
- system will report an error for the use of an invalid
- feature-value. Subproperty validity may be restricted so
- that certain values are valid only in the presence of
- certain other subproperties. For example, it is possible
- to specify that the <code>&lt;gcc-target&gt;mingw</code>
- property is only valid in the presence of
- <code>&lt;gcc-version&gt;2.95.2</code>.
- </para>
-
- </section>
- <section id="bbv2.reference.features.attributes">
- <title>Feature Attributes</title>
-
- <para>Each feature has a collection of zero or more of the following
- attributes. Feature attributes are low-level descriptions of how the
- build system should interpret a feature's values when they appear in
- a build request. We also refer to the attributes of properties, so
- that an <emphasis>incidental</emphasis> property, for example, is
- one whose feature has the <emphasis>incidental</emphasis>
- attribute.</para>
-
- <itemizedlist>
- <listitem>
- <para><emphasis>incidental</emphasis></para>
-
- <para>Incidental features are assumed not to affect build
- products at all. As a consequence, the build system may use
- the same file for targets whose build specification differs
- only in incidental features. A feature that controls a
- compiler's warning level is one example of a likely
- incidental feature.</para>
-
- <para>Non-incidental features are assumed to affect build
- products, so the files for targets whose build specification
- differs in non-incidental features are placed in different
- directories as described in <xref linkend="bbv2.reference.buildprocess.targetpath"/>.
- </para>
- </listitem>
-
- <listitem>
- <para>
- <anchor id="bbv2.reference.features.attributes.propagated"/>
- <emphasis>propagated</emphasis>
- </para>
-
- <para>Features of this kind are
- propagated to dependencies. That is, if a <link linkend=
- "bbv2.overview.targets.main">main target</link> is built using a
- propagated
-          property, the build system attempts to use the same property
-          when building any of its dependencies as part of that main
-          target. For instance, when an optimized executable is
- requested, one usually wants it to be linked with optimized
- libraries. Thus, the <literal>&lt;optimization&gt;</literal> feature is
- propagated.</para>
- </listitem>
-
- <listitem>
- <para>
- <anchor id="bbv2.reference.features.attributes.free"/>
- <emphasis>free</emphasis>
- </para>
-
- <para>Most features have a finite set of allowed values, and can
- only take on a single value from that set in a given build
- specification. Free features, on the other hand, can have
- several values at a time and each value can be an arbitrary
- string. For example, it is possible to have several
- preprocessor symbols defined simultaneously:</para>
-
-<programlisting>
-&lt;define&gt;NDEBUG=1 &lt;define&gt;HAS_CONFIG_H=1
-</programlisting>
-
- </listitem>
-
- <listitem>
- <para><emphasis>optional</emphasis></para>
-
- <para>An optional feature is a feature that is not required to
- appear in a build specification. Every non-optional non-free
- feature has a default value that is used when a value for
-          the feature is not otherwise specified, either in a target's
-          requirements or in the user's build request. A feature's
-          default value is given by the first value listed in the
-          feature's declaration.</para>
- </listitem>
-
- <listitem>
- <para><emphasis>symmetric</emphasis></para>
-
- <para>Normally a feature only generates a subvariant directory
- when its value differs from its default value,
-          leading to an asymmetric subvariant directory structure for
- certain values of the feature. A symmetric feature
- always generates a corresponding
- subvariant directory.</para>
- </listitem>
-
- <listitem>
- <para><emphasis>path</emphasis></para>
-
- <para>The value of a path feature specifies a path. The path is
-          treated as relative to the directory of the Jamfile where the path
-          feature is used, and is translated appropriately by the build
-          system when the build is invoked from a different
-          directory.</para>
- </listitem>
-
- <listitem>
- <para><emphasis>implicit</emphasis></para>
-
- <para>Values of implicit features alone identify the feature.
- For example, a user is not required to write
- "&lt;toolset&gt;gcc", but can simply write "gcc". Implicit
- feature names also don't appear in variant paths, although
- the values do. Thus: bin/gcc/... as opposed to
- bin/toolset-gcc/.... There should typically be only a few
- such features, to avoid possible name clashes.</para>
- </listitem>
-
- <listitem>
- <para><emphasis>composite</emphasis></para>
-
- <para>Composite features actually correspond to groups of
- properties. For example, a build variant is a composite
- feature. When generating targets from a set of build
- properties, composite features are recursively expanded and
- <emphasis>added</emphasis> to the build property set, so rules can find
- them if necessary. Non-composite non-free features override
- components of composite features in a build property set.</para>
- </listitem>
-
- <listitem>
- <para><emphasis>dependency</emphasis></para>
-
-          <para>The value of a dependency feature is a target reference.
-          When used for building a main target, the value of a
-          dependency feature is treated as an additional dependency.</para>
-
-          <para>For example, dependency features allow you to state that
-          library A depends on library B. As a result, whenever an
-          application links to A, it will also link to B.
-          Specifying B as a dependency of A is different from adding B to
-          the sources of A (see the sketch after this list).</para>
- </listitem>
- </itemizedlist>
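-
-        <para>For illustration, a minimal sketch (with hypothetical target
-        names): <literal>&lt;library&gt;</literal> is a dependency feature, so
-        placing it in the usage requirements of <code>a</code> makes every
-        target that links to <code>a</code> also link to <code>b</code>,
-        without adding <code>b</code> to <code>a</code>'s sources:</para>
-
-<programlisting>
-lib b : b.cpp ;
-lib a : a.cpp : &lt;use&gt;b : : &lt;library&gt;b ;
-</programlisting>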
-
- <para>Features that are neither free nor incidental are called
- <emphasis>base</emphasis> features.</para>
-
-
- </section>
- <section id="bbv2.reference.features.declaration">
- <title>Feature Declaration</title>
-
- <para>The low-level feature declaration interface is the
- <literal>feature</literal> rule from the
- <literal>feature</literal> module:
-
-<programlisting>
-rule feature ( name : allowed-values * : attributes * )
-</programlisting>
-
- A feature's allowed-values may be extended with the
- <code>feature.extend</code> rule.
- </para>
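-
-        <para>For example, a sketch that declares a new feature and later
-        extends its set of allowed values (the feature name and values are
-        hypothetical; the first listed value becomes the default):</para>
-
-<programlisting>
-import feature ;
-
-# Declare a propagated feature; "vanilla" is the default value.
-feature.feature my-flavor : vanilla strawberry : propagated ;
-
-# Later, allow one more value.
-feature.extend my-flavor : chocolate ;
-</programlisting>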
-
- </section>
- </section>
-
- <section id="bbv2.reference.variants.proprefine">
- <title>Property refinement</title>
-
-    <para>When a target is requested with certain properties, and that
-    target itself requires some set of properties, the build system must
-    determine the final set of properties to use for building. This process
-    is called <emphasis>property refinement</emphasis> and is performed by
-    the following rules:</para>
-
- <orderedlist>
-
- <listitem>
- <simpara>
- Each property in the required set is added to the original
- property set
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-          If the original property set includes a property with a different
-          value of the same non-free feature, that property is removed.
- </simpara>
- </listitem>
- </orderedlist>
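-
-    <para>For example (an illustrative sketch with arbitrary property values),
-    refining the original property set
-    <literal>&lt;toolset&gt;gcc &lt;optimization&gt;speed &lt;define&gt;MY_MACRO</literal>
-    with the requirements
-    <literal>&lt;optimization&gt;space &lt;define&gt;NDEBUG</literal>
-    yields
-    <literal>&lt;toolset&gt;gcc &lt;optimization&gt;space &lt;define&gt;MY_MACRO &lt;define&gt;NDEBUG</literal>:
-    the value of the non-free <literal>&lt;optimization&gt;</literal> feature
-    is replaced, while the values of the free <literal>&lt;define&gt;</literal>
-    feature accumulate.</para>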
- </section>
-
- <section id="bbv2.reference.variants.propcond">
- <title>Conditional properties</title>
-
-    <para>Sometimes it is desirable to apply certain requirements only for
-    a specific combination of other properties. For example, one of the
-    compilers that you use issues a pointless warning that you want to
-    suppress by passing a command-line option to it. You would not
- want to pass that option to other compilers. Conditional
- properties allow you to do just that. Their syntax is:</para>
-
- <programlisting>
- property ( "," property ) * ":" property
- </programlisting>
-
- <para>
- For example, the problem above would be solved by:
-
-<programlisting>
-exe hello : hello.cpp : &lt;toolset&gt;yfc:&lt;cxxflags&gt;-disable-pointless-warning ;
-</programlisting>
- </para>
-
- <para>The syntax also allows several properties in the condition, for
- example:
-<programlisting>
-exe hello : hello.cpp : &lt;os&gt;NT,&lt;toolset&gt;gcc:&lt;link&gt;static ;
-</programlisting>
- </para>
-
- </section>
-
- <section id="bbv2.reference.ids">
- <title>Target identifiers and references</title>
-
-    <para>A <emphasis>target identifier</emphasis> is used to denote a
- target. The syntax is:</para>
-
-<programlisting>
-target-id -&gt; (project-id | target-name | file-name )
- | (project-id | directory-name) "//" target-name
-project-id -&gt; path
-target-name -&gt; path
-file-name -&gt; path
-directory-name -&gt; path
-</programlisting>
-
- <para>
-      This grammar allows some elements to be recognized as any of the following:
-
- <itemizedlist>
- <listitem>
- <simpara>
-          a project id (at this point, all project ids start with a slash).
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-          the name of a target declared in the current Jamfile (note that
-          target names may include slashes).
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-          a regular file, denoted by an absolute name or a name relative to
-          the project's source location.
- </simpara>
- </listitem>
- </itemizedlist>
-
-      To determine the real meaning, the build system first checks whether a
-      project with the specified id exists, and then whether a main target of
-      that name exists. For example, valid target ids might be:
-
-<screen>
-a -- target in current project
-lib/b.cpp -- regular file
-/boost/thread -- project "/boost/thread"
-/home/ghost/build/lr_library//parser -- target in specific project
-</screen>
-
- </para>
-
-    <para><emphasis role="bold">Rationale:</emphasis> A target is separated from its project by a special
-    separator (not just a slash), because:</para>
-
- <itemizedlist>
- <listitem>
- <simpara>
- It emphasises that projects and targets are different things.
- </simpara>
- </listitem>
-
- <listitem>
- <simpara>
-          It allows main target names to contain slashes.
-
- <!-- The motivation for which is:
-
- So, to summarize:
-
- 1. The project that extract tarfile may extract all possible kinds
- of targets, and it's reasonable to use them directly from other
- project.
-
- 2. The rule for unpacking tar is inplemented in terms of
- "patch-file", for maintainability, and therefore, must use main
- target name that contains slashes?
-
- 3. Using sub-Jamfile in "foo" to declare extracted file "foo/b" is
- not an option, because you should not change existing tree
-
- That makes good rationale for why main target must contain names.
- -->
- </simpara>
- </listitem>
- </itemizedlist>
-
- <para id="bbv2.reference.targets.references">
-      A <emphasis>target reference</emphasis> is used to
- specify a source target, and may additionally specify desired
- properties for that target. It has this syntax:</para>
-
-<programlisting>
-target-reference -&gt; target-id [ "/" requested-properties ]
-requested-properties -&gt; property-path
-</programlisting>
-
- <para>
- For example,
-
- <programlisting>
- exe compiler : compiler.cpp libs/cmdline/&lt;optimization&gt;space ;
- </programlisting>
-
-      would cause the version of the <literal>cmdline</literal> library
-      optimized for space to be linked in, even if the
-      <literal>compiler</literal> executable is built with optimization for
-      speed.
- </para>
- </section>
-
- </section>
-
-</chapter>
-
-<!--
- Local Variables:
- mode: nxml
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
diff --git a/tools/build/v2/doc/src/standalone.xml b/tools/build/v2/doc/src/standalone.xml
deleted file mode 100644
index 22ffff1ee1..0000000000
--- a/tools/build/v2/doc/src/standalone.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE book PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<book xmlns:xi="http://www.w3.org/2001/XInclude"
- id="bbv2" last-revision="$Date: 2011-01-25 10:06:12 -0800 (Tue, 25 Jan 2011) $">
- <bookinfo>
- <copyright>
- <year>2006</year>
- <year>2007</year>
- <year>2008</year>
- <year>2009</year>
- <holder>Vladimir Prus</holder>
- </copyright>
-
- <legalnotice>
- <para>Distributed under the Boost Software License, Version 1.0.
- (See accompanying file <filename>LICENSE_1_0.txt</filename> or copy at
- <ulink
- url="http://www.boost.org/LICENSE_1_0.txt">http://www.boost.org/LICENSE_1_0.txt</ulink>)
- </para>
- </legalnotice>
- </bookinfo>
-
- <title>Boost.Build V2 User Manual</title>
-
- <!-- Chapters -->
- <xi:include href="howto.xml"/>
- <xi:include href="install.xml"/>
- <xi:include href="tutorial.xml"/>
- <xi:include href="overview.xml"/>
- <xi:include href="tasks.xml"/>
- <xi:include href="reference.xml"/>
- <xi:include href="extending.xml"/>
- <xi:include href="faq.xml"/>
-
- <!-- Appendicies -->
-<!-- <xi:include href="architecture.xml"/> -->
- <appendix id="bbv2.jam">
- <title>Boost.Jam Documentation</title>
- <xi:include href="jam_docs.xml" parse="xml"
- xpointer="xpointer(id('jam.building')|id('jam.building')/following-sibling::*)"/>
- </appendix>
-
- <index/>
-
-</book>
diff --git a/tools/build/v2/doc/src/tasks.xml b/tools/build/v2/doc/src/tasks.xml
deleted file mode 100644
index d6419d4d8f..0000000000
--- a/tools/build/v2/doc/src/tasks.xml
+++ /dev/null
@@ -1,782 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<!-- Copyright 2006 Vladimir Prus -->
-<!-- Distributed under the Boost Software License, Version 1.0. -->
-<!-- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -->
-
-<chapter id="bbv2.tasks">
- <title>Common tasks</title>
-
- <para>
-    This section describes the main target types that Boost.Build supports
-    out of the box. Unless otherwise noted, all mentioned main target rules have
- the common signature, described in <xref linkend="bbv2.overview.targets"/>.
- </para>
-
- <section id="bbv2.tasks.programs">
- <title>Programs</title>
-
- <indexterm><primary>exe</primary></indexterm>
- <para>
- Programs are created using the <code>exe</code> rule, which follows the
- <link linkend="bbv2.main-target-rule-syntax">common syntax</link>. For
- example:
-<programlisting>
-exe hello : hello.cpp some_library.lib /some_project//library
- : &lt;threading&gt;multi
- ;
-</programlisting>
- This will create an executable file from the sources&mdash;in this case, one
- C++ file, one library file present in the same directory, and another
- library that is created by Boost.Build. Generally, sources can include C
- and C++ files, object files and libraries. Boost.Build will automatically
- try to convert targets of other types.
- </para>
-
- <tip>
- <para>
- On Windows, if an application uses shared libraries, and both the
- application and the libraries are built using Boost.Build, it is not
- possible to immediately run the application, because the <literal>PATH
-        </literal> environment variable must include the path to the
-        libraries. This means you have to either add the paths manually or have
- the build place the application and the libraries into the same
- directory. See <xref linkend="bbv2.tasks.installing"/>.
- </para>
- <!-- We should be emphasizing the use of the built-in testing rules
- rather than continually discussing these quirks of running programs
- with shared libraries. -->
- </tip>
- </section>
-
- <section id="bbv2.tasks.libraries">
- <title>Libraries</title>
-
- <indexterm>
- <primary>library</primary>
- <secondary>target</secondary>
- </indexterm>
-
- <para>
- Library targets are created using the <code>lib</code> rule, which
- follows the <link linkend="bbv2.main-target-rule-syntax">common syntax
- </link>. For example:
-<programlisting>
-lib helpers : helpers.cpp ;
-</programlisting>
- This will define a library target named <code>helpers</code> built from
- the <code>helpers.cpp</code> source file.
- It can be either a static library or a shared library,
- depending on the value of the <link linkend="bbv2.overview.builtins.features.link">&lt;link&gt;</link> feature.
- </para>
- <para>
- Library targets can represent:
- <itemizedlist>
- <listitem>
- <para>
- Libraries that should be built from source,
- as in the example above.
- </para>
- </listitem>
- <listitem>
- <para>
- Prebuilt libraries which already exist on the system.
- Such libraries can be searched for by the tools using them (typically
- with the linker's <option>-l</option> option) or their paths can be
- known in advance by the build system.
- </para>
- </listitem>
- </itemizedlist>
- </para>
-
- <para>
- The syntax for prebuilt libraries is given below:
-<programlisting>
-lib z : : &lt;name&gt;z &lt;search&gt;/home/ghost ;
-lib compress : : &lt;file&gt;/opt/libs/compress.a ;
-</programlisting>
- The <code>name</code> property specifies the name of the library
- without the standard prefixes and suffixes. For example, depending
- on the system, <code>z</code> could refer to a file called
- z.so, libz.a, or z.lib, etc. The <code>search</code> feature
- specifies paths in which to search for the library in addition
- to the default compiler paths. <code>search</code> can be specified
- several times or it can be omitted, in which case only the default
- compiler paths will be searched. The <code>file</code> property
- specifies the file location.
- </para>
-
- <para>
- The difference between using the <code>file</code> feature and
- using a combination of the <code>name</code> and <code>search</code>
- features is that <code>file</code> is more precise.
-
- <warning>
- <para>
- The value of the <code>search</code> feature is just added to the
- linker search path. When linking to multiple libraries,
- the paths specified by <code>search</code> are combined without
- regard to which <code>lib</code> target each path came from.
- Thus, given
-<programlisting>
-lib a : : &lt;name&gt;a &lt;search&gt;/pool/release ;
-lib b : : &lt;name&gt;b &lt;search&gt;/pool/debug ;
-</programlisting>
-          If /pool/release/a.so, /pool/release/b.so, /pool/debug/a.so,
-          and /pool/debug/b.so all exist, the linker will probably
- take both <code>a</code> and <code>b</code> from the same
- directory, instead of finding <code>a</code> in /pool/release
- and <code>b</code> in /pool/debug. If you need to distinguish
- between multiple libraries with the same name, it's safer
- to use <code>file</code>.
- </para>
- </warning>
- </para>
-
- <para>
- For convenience, the following syntax is allowed:
-<programlisting>
-lib z ;
-lib gui db aux ;
-</programlisting>
- which has exactly the same effect as:
-<programlisting>
-lib z : : &lt;name&gt;z ;
-lib gui : : &lt;name&gt;gui ;
-lib db : : &lt;name&gt;db ;
-lib aux : : &lt;name&gt;aux ;
-</programlisting>
- </para>
-
- <para>
- When a library references another library you should put that other
- library in its list of sources. This will do the right thing in all cases.
- <!--Add a link to the notes below. --> For portability, you should specify
-      library dependencies even for searched and prebuilt libraries; otherwise,
- static linking on Unix will not work. For example:
-<programlisting>
-lib z ;
-lib png : z : &lt;name&gt;png ;
-</programlisting>
- </para>
-
- <note>
- <para>
-      When a library has a shared library as a source, or a static
-      library has another static library as a source, any target
-      linking to the first library will automatically link to its source
-      library as well.
- </para>
- <para>
- On the other hand, when a shared library has a static library as
- a source then the first library will be built so that it completely
- includes the second one.
- </para>
- <para>
- If you do not want a shared library to include all the libraries specified
- in its sources (especially statically linked ones), you would need to
- use the following:
-<programlisting>
-lib b : a.cpp ;
-lib a : a.cpp : &lt;use&gt;b : : &lt;library&gt;b ;
-</programlisting>
- This specifies that library <code>a</code> uses library <code>b</code>,
- and causes all executables that link to <code>a</code> to link to
- <code>b</code> also. In this case, even for shared linking, the
- <code>a</code> library will not refer to <code>b</code>.
- </para>
- </note>
-
- <para>
- <!-- FIXME: After adding a full subsection on usage requirements, link to it -->
- <link linkend="bbv2.overview.targets">Usage requirements</link> are often
- very useful for defining library targets. For example, imagine that
- you want you build a <code>helpers</code> library and its interface is
- described in its <code>helpers.hpp</code> header file located in the same
- directory as the <code>helpers.cpp</code> source file. Then you could add
- the following to the Jamfile located in that same directory:
-<programlisting>
-lib helpers : helpers.cpp : : : &lt;include&gt;. ;
-</programlisting>
- which would automatically add the directory where the target has been
- defined (and where the library's header file is located) to the compiler's
- include path for all targets using the <code>helpers</code> library. This
- feature greatly simplifies Jamfiles.
- </para>
- </section>
-
- <section id="bbv2.tasks.alias">
- <title>Alias</title>
-
- <para>
-      The <code language="jam">alias</code> rule gives an alternative name to a
-      group of targets. For example, the following code gives the name
-      <filename>core</filename> to a group of three other targets:
-<programlisting>
-alias core : im reader writer ;
-</programlisting>
- Using <filename>core</filename> on the command line, or in the source list
- of any other target is the same as explicitly using <filename>im
- </filename>, <filename>reader</filename>, and <filename>writer</filename>.
- </para>
-
- <para>
- Another use of the <code>alias</code> rule is to change build properties.
-      For example, if you want to link statically to the Boost Threads
- library, you can write the following:
-<programlisting>
-alias threads : /boost/thread//boost_thread : &lt;link&gt;static ;
-</programlisting>
- and use only the <code>threads</code> alias in your Jamfiles.
- </para>
-
- <para>
- You can also specify usage requirements for the <code>alias</code> target.
- If you write the following:
-<programlisting>
-alias header_only_library : : : : &lt;include&gt;/usr/include/header_only_library ;
-</programlisting>
- then using <code>header_only_library</code> in sources will only add an
- include path. Also note that when an alias has sources, their usage
- requirements are propagated as well. For example:
-<programlisting>
-lib library1 : library1.cpp : : : &lt;include&gt;/library/include1 ;
-lib library2 : library2.cpp : : : &lt;include&gt;/library/include2 ;
-alias static_libraries : library1 library2 : &lt;link&gt;static ;
-exe main : main.cpp static_libraries ;
-</programlisting>
- will compile <filename>main.cpp</filename> with additional includes
- required for using the specified static libraries.
- </para>
- </section>
-
- <section id="bbv2.tasks.installing">
- <title>Installing</title>
-
- <para>
-      This section describes various ways to install built targets and arbitrary
- files.
- </para>
-
- <bridgehead>Basic install</bridgehead>
-
- <para>
- For installing a built target you should use the <code>install</code>
- rule, which follows the <link linkend="bbv2.main-target-rule-syntax">
- common syntax</link>. For example:
-<programlisting>
-install dist : hello helpers ;
-</programlisting>
- will cause the targets <code>hello</code> and <code>helpers</code> to be
-      copied to the <filename>dist</filename> directory, relative to the
- Jamfile's directory. The directory can be changed using the
- <code>location</code> property:
-<programlisting>
-install dist : hello helpers : &lt;location&gt;/usr/bin ;
-</programlisting>
- While you can achieve the same effect by changing the target name to
- <filename>/usr/bin</filename>, using the <code>location</code> property is
- better as it allows you to use a mnemonic target name.
- </para>
-
- <para>
- The <code>location</code> property is especially handy when the location
- is not fixed, but depends on the build variant or environment variables:
-<programlisting>
-install dist : hello helpers :
- &lt;variant&gt;release:&lt;location&gt;dist/release
- &lt;variant&gt;debug:&lt;location&gt;dist/debug ;
-install dist2 : hello helpers : &lt;location&gt;$(DIST) ;
-</programlisting>
- See also <link linkend="bbv2.reference.variants.propcond">conditional
- properties</link> and <link linkend="bbv2.faq.envar">environment
-      variables</link>.
- </para>
-
- <bridgehead>Installing with all dependencies</bridgehead>
-
- <para>
-      Specifying the names of all libraries to install can be tedious. The
-      <code>install</code> rule allows you to specify only the top-level executable
-      targets to install, and to install all their dependencies automatically:
-<programlisting>
-install dist : hello
- : &lt;install-dependencies&gt;on &lt;install-type&gt;EXE
- &lt;install-type&gt;LIB
- ;
-</programlisting>
-      will find all targets that <code>hello</code> depends on, and install all
-      of those which are either executables or libraries. More specifically, for
-      each target, other targets that were specified as sources or as dependency
-      properties will be recursively found. One exception is that targets
-      referred to with the <link linkend="bbv2.builtin.features.use">
-      <code>use</code></link> feature are not considered, as that feature is
-      typically used to refer to header-only libraries. If a set of target
-      types is specified, only targets of those types will be installed;
-      otherwise, all found targets will be installed.
- </para>
-
- <bridgehead>Preserving Directory Hierarchy</bridgehead>
-
- <indexterm><primary>install-source-root</primary></indexterm>
-
- <para>
- By default, the <code>install</code> rule will strip paths from its
-      sources. So, if the sources include <filename>a/b/c.h</filename>, the
- <filename>a/b</filename> part will be ignored. To make the
- <code>install</code> rule preserve the directory hierarchy you need to
- use the <literal>&lt;install-source-root&gt;</literal> feature to specify
- the root of the hierarchy you are installing. Relative paths from that
- root will be preserved. For example, if you write:
-<programlisting>
-install headers
- : a/b/c.h
- : &lt;location&gt;/tmp &lt;install-source-root&gt;a
- ;
-</programlisting>
-      a file named <filename>/tmp/b/c.h</filename> will be created.
- </para>
-
- <para>
- The <link linkend="bbv2.reference.glob-tree">glob-tree</link> rule can be
- used to find all files below a given directory, making it easy to install
- an entire directory tree.
- </para>
-
- <bridgehead>Installing into Several Directories</bridgehead>
-
- <para>
- The <link linkend="bbv2.tasks.alias"><code>alias</code></link> rule can be
- used when targets need to be installed into several directories:
-<programlisting>
-alias install : install-bin install-lib ;
-install install-bin : applications : &lt;location&gt;/usr/bin ;
-install install-lib : helper : &lt;location&gt;/usr/lib ;
-</programlisting>
- </para>
-
- <para>
- Because the <code>install</code> rule just copies targets, most free
- features <footnote><para>see the definition of "free" in <xref
- linkend="bbv2.reference.features.attributes"/>.</para></footnote> have no
- effect when used in requirements of the <code>install</code> rule. The
- only two that matter are <link linkend="bbv2.builtin.features.dependency">
- <varname>dependency</varname></link> and, on Unix, <link
- linkend="bbv2.reference.features.dll-path"><varname>dll-path</varname>
- </link>.
- </para>
-
- <note>
- <para>
- (Unix specific) On Unix, executables built using Boost.Build typically
- contain the list of paths to all used shared libraries. For installing,
- this is not desired, so Boost.Build relinks the executable with an empty
- list of paths. You can also specify additional paths for installed
- executables using the <varname>dll-path</varname> feature.
- </para>
- </note>
- </section>
-
- <section id="bbv2.builtins.testing">
- <title>Testing</title>
-
- <para>
- Boost.Build has convenient support for running unit tests. The simplest
- way is the <code>unit-test</code> rule, which follows the <link
- linkend="bbv2.main-target-rule-syntax">common syntax</link>. For example:
-<programlisting>
-unit-test helpers_test : helpers_test.cpp helpers ;
-</programlisting>
- </para>
-
- <para>
- The <code language="jam">unit-test</code> rule behaves like the
- <link linkend="bbv2.tasks.programs">exe</link> rule, but after the executable is created
- it is also run. If the executable returns an error code, the build system
- will also return an error and will try running the executable on the next
-      invocation until it runs successfully. This behaviour ensures that you
-      cannot miss a unit test failure.
- </para>
-
- <para>
- By default, the executable is run directly. Sometimes, it is desirable to
- run the executable using some helper command. You should use the <literal>
- testing.launcher</literal> property to specify the name of the helper
- command. For example, if you write:
-<programlisting>
-unit-test helpers_test
- : helpers_test.cpp helpers
- : <emphasis role="bold">&lt;testing.launcher&gt;valgrind</emphasis>
- ;
-</programlisting>
- The command used to run the executable will be:
-<screen>
-<emphasis role="bold">valgrind</emphasis> bin/$toolset/debug/helpers_test
-</screen>
- </para>
-
- <para>
-      There are a few specialized testing rules, listed below:
-<programlisting>
-rule compile ( sources : requirements * : target-name ? )
-rule compile-fail ( sources : requirements * : target-name ? )
-rule link ( sources + : requirements * : target-name ? )
-rule link-fail ( sources + : requirements * : target-name ? )
-</programlisting>
- They are given a list of sources and requirements. If the target name is
- not provided, the name of the first source file is used instead. The
- <literal>compile*</literal> tests try to compile the passed source. The
- <literal>link*</literal> rules try to compile and link an application from
- all the passed sources. The <literal>compile</literal> and <literal>link
- </literal> rules expect that compilation/linking succeeds. The <literal>
- compile-fail</literal> and <literal>link-fail</literal> rules expect that
- the compilation/linking fails.
- </para>
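-
-    <para>
-      A minimal usage sketch (the source file names are hypothetical, and the
-      rules are assumed to be made available by importing the
-      <literal>testing</literal> module):
-<programlisting>
-import testing ;
-
-compile has_feature_x.cpp ;        # must compile
-compile-fail bad_conversion.cpp ;  # must fail to compile
-link app.cpp helpers.cpp ;         # must compile and link
-link-fail missing_symbol.cpp ;     # must fail to link
-</programlisting>
-    </para>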
-
- <para>
- There are two specialized rules for running applications, which are more
- powerful than the <code>unit-test</code> rule. The <code>run</code> rule
- has the following signature:
-<programlisting>
-rule run ( sources + : args * : input-files * : requirements * : target-name ?
- : default-build * )
-</programlisting>
-      The rule builds an application from the provided sources and runs it, passing
-      <varname>args</varname> and <varname>input-files</varname> as command-line
-      arguments. The <varname>args</varname> parameter is passed verbatim, and
-      the values of the <varname>input-files</varname> parameter are treated as
-      paths relative to the containing Jamfile, and are adjusted if <command>bjam
-      </command> is invoked from a different directory. The
- <code>run-fail</code> rule is identical to the <code>run</code> rule,
- except that it expects that the run fails.
- </para>
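-
-    <para>
-      For example (a sketch with hypothetical file names), the following
-      builds a tester from one source file and runs it, passing a literal
-      flag and one input file on the command line:
-<programlisting>
-run tester.cpp : --verbose : data/input.txt ;
-</programlisting>
-    </para>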
-
- <para>
- All rules described in this section, if executed successfully, create a
- special manifest file to indicate that the test passed. For the
-      <code>unit-test</code> rule the file is named <filename><replaceable>
- target-name</replaceable>.passed</filename> and for the other rules it is
- called <filename><replaceable>target-name</replaceable>.test</filename>.
- The <code>run*</code> rules also capture all output from the program, and
- store it in a file named <filename><replaceable>
- target-name</replaceable>.output</filename>.
- </para>
-
- <para>
- <indexterm><primary>preserve-test-targets</primary></indexterm>
- If the <literal>preserve-test-targets</literal> feature has the value
-      <literal>off</literal>, then the <code>run</code> and <code>run-fail</code>
-      rules will remove the executable after running it. This somewhat decreases
-      disk space requirements for continuous testing environments. The default
-      value of the <literal>preserve-test-targets</literal> feature is <literal>on</literal>.
- </para>
-
- <para>
- It is possible to print the list of all test targets (except for
- <code>unit-test</code>) declared in your project, by passing the <literal>
- --dump-tests</literal> command-line option. The output will consist of
- lines of the form:
-<screen>
-boost-test(<replaceable>test-type</replaceable>) <replaceable>path</replaceable> : <replaceable>sources</replaceable>
-</screen>
- </para>
-
- <para>
-      It is possible to process the list of tests, the Boost.Build output,
-      and the presence/absence of the <filename>*.test</filename>
-      files created when tests pass into a human-readable status table of tests.
- Such processing utilities are not included in Boost.Build.
- </para>
- </section>
-
- <section id="bbv2.builtins.raw">
- <title>Custom commands</title>
-
- <para>
- For most main target rules, Boost.Build automatically figures out
- the commands to run. When you want to use new
- file types or support new tools, one approach is to extend Boost.Build to
- support them smoothly, as documented in <xref linkend="bbv2.extender"/>.
- However, if the new tool is only used in a single place, it
- might be easier just to specify the commands to run explicitly.
- </para>
-
- <para>
- <!-- This paragraph requires links to where the terms 'virtual target' &
- 'target' are defined. -->
- Three main target rules can be used for that. The <code language="jam">make
- </code> rule allows you to construct a single file from any number
-      of source files, by running a command you specify. The <code language="jam">
-      notfile</code> rule allows you to run an arbitrary command,
-      without creating any files. And finally, the <code language="jam">generate
- </code> rule allows you to describe a transformation using
- Boost.Build's virtual targets. This is higher-level than the file names that
- the <code language="jam">make</code> rule operates with and allows you to
- create more than one target, create differently named targets depending on
- properties or use more than one tool.
- </para>
-
- <para>
- The <code language="jam">make</code> rule is used when you want to create
- one file from a number of sources using some specific command. The
- <code language="jam">notfile</code> is used to unconditionally run a
- command.
- </para>
-
- <!-- We need to specify somewhere that the user can get rules like make,
- notfile & generate defined in his Jamfiles by importing an appropriate
- Boost.Build module. Also, each of those rules should get a separate
- documentation page explicitly listing which module needs to be imported for
- them to become accessible. -->
-
- <para>
- Suppose you want to create the file <filename>file.out</filename> from
- the file <filename>file.in</filename> by running the command <command>
- in2out</command>. Here is how you would do this in Boost.Build:
-<programlisting>
-make file.out : file.in : @in2out ;
-actions in2out
-{
- in2out $(&lt;) $(&gt;)
-}
-</programlisting>
- If you run <command>b2</command> and <filename>file.out</filename> does
- not exist, Boost.Build will run the <command>in2out</command> command to
- create that file. For more details on specifying actions, see <xref
- linkend="bbv2.overview.jam_language.actions"/>.
- </para>
-
- <para>
- It could be that you just want to run some command unconditionally, and
- that command does not create any specific files. For that you can use the
- <code language="jam">notfile</code> rule. For example:
-<programlisting>
-notfile echo_something : @echo ;
-actions echo
-{
- echo "something"
-}
-</programlisting>
- The only difference from the <code language="jam">make</code> rule is
- that the name of the target is not considered a name of a file, so
- Boost.Build will unconditionally run the action.
- </para>
-
- <para>
- <!-- This paragraph requires links to where terms like 'virtual target',
- 'target', 'project-target' & 'property-set' are defined. -->
- The <code language="jam">generate</code> rule is used when you want to
- express transformations using Boost.Build's virtual targets, as opposed to
- just filenames. The <code language="jam">generate</code> rule has the
- standard main target rule signature, but you are required to specify the
- <literal>generating-rule</literal> property. The value of the property
- should be in the form <literal>
-      @<replaceable>rule-name</replaceable></literal>; the named rule should
- have the following signature:
-<programlisting>
-rule generating-rule ( project name : property-set : sources * )
-</programlisting>
- and will be called with an instance of the <code>project-target</code>
- class, the name of the main target, an instance of the
- <code>property-set</code> class containing build properties, and the list
- of instances of the <code>virtual-target</code> class corresponding to
- sources. The rule must return a list of <code>virtual-target</code>
- instances. The interface of the <code>virtual-target</code> class can be
- learned by looking at the <filename>build/virtual-target.jam</filename>
- file. The <filename>generate</filename> example contained in the
- Boost.Build distribution illustrates how the <literal>generate</literal>
- rule can be used.
- </para>
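-
-    <para>
-      As an illustrative skeleton only (the rule and target names are
-      hypothetical, and it is assumed that the rule is made available by
-      importing the <literal>generate</literal> module), a Jamfile using
-      <code language="jam">generate</code> might look like this:
-<programlisting>
-import generate ;
-
-generate formatted : input.txt : &lt;generating-rule&gt;@my-gen ;
-
-rule my-gen ( project name : property-set : sources * )
-{
-    # Inspect the property set and sources, construct virtual targets, and
-    # return them as a list; see build/virtual-target.jam for the interface.
-}
-</programlisting>
-    </para>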
- </section>
-
- <section id="bbv2.reference.precompiled_headers">
- <title>Precompiled Headers</title>
-
- <para>
-      Precompiled headers are a mechanism to speed up compilation by creating a
-      partially processed version of some header files, and then using that
-      version during compilation rather than repeatedly parsing the original
-      headers. Boost.Build supports precompiled headers with the gcc and msvc
- toolsets.
- </para>
-
- <para>
-      To use precompiled headers, follow these steps:
- </para>
-
- <orderedlist>
- <listitem>
- <para>
- Create a header that includes headers used by your project that you
- want precompiled. It is better to include only headers that are
- sufficiently stable &#x2014; like headers from the compiler and
-          external libraries. Please wrap the header in <code>#ifdef
-          BOOST_BUILD_PCH_ENABLED</code>, so that the potentially expensive
-          inclusion of headers is not done when PCH is not enabled (a minimal
-          sketch of such a header follows this list). Include the
-          new header at the top of your source files.
- </para>
- </listitem>
-
- <listitem>
- <para>
- Declare a new Boost.Build target for the precompiled header and add
- that precompiled header to the sources of the target whose compilation
- you want to speed up:
-<programlisting>
-cpp-pch pch : pch.hpp ;
-exe main : main.cpp pch ;
-</programlisting>
- You can use the <code language="jam">c-pch</code> rule if you want to
- use the precompiled header in C programs.
- </para></listitem>
- </orderedlist>
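-
-    <para>
-      For illustration, the wrapper header from step 1 might look like this
-      (the file name and the particular headers included are only an
-      example):
-<programlisting>
-// pch.hpp -- include the expensive, stable headers only when PCH is enabled.
-#ifdef BOOST_BUILD_PCH_ENABLED
-#include &lt;string&gt;
-#include &lt;vector&gt;
-#include &lt;boost/shared_ptr.hpp&gt;
-#endif
-</programlisting>
-    </para>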
-
- <para>
- The <filename>pch</filename> example in Boost.Build distribution can be
- used as reference.
- </para>
-
- <para>
- Please note the following:
- </para>
-
- <itemizedlist>
- <listitem>
- <para>
- The inclusion of the precompiled header must be the first thing in a
- source file, before any code or preprocessor directives.
- </para>
- </listitem>
-
- <listitem>
- <para>
- The build properties used to compile the source files and the
- precompiled header must be the same. Consider using project
-          requirements to ensure this.
- </para>
- </listitem>
-
- <listitem>
- <para>
-          Precompiled headers must be used purely as a way to improve
-          compilation time, not as a way to reduce the number of <code>#include</code>
- statements. If a source file needs to include some header, explicitly
- include it in the source file, even if the same header is included
- from the precompiled header. This makes sure that your project will
- build even if precompiled headers are not supported.
- </para>
- </listitem>
-
- <listitem>
- <para>
- On the gcc compiler, the name of the header being precompiled must be
- equal to the name of the <code>cpp-pch</code> target. This is a gcc
- requirement.
- </para>
- </listitem>
-
- <listitem>
- <para>
- Prior to version 4.2, the gcc compiler did not allow anonymous
- namespaces in precompiled headers, which limits their utility. See the
- <ulink url="http://gcc.gnu.org/bugzilla/show_bug.cgi?id=29085"> bug
- report</ulink> for details.
- </para>
- </listitem>
- </itemizedlist>
- </section>
-
- <section id="bbv2.reference.generated_headers">
- <title>Generated headers</title>
-
- <para>
-      Usually, Boost.Build handles implicit dependencies completely
- automatically. For example, for C++ files, all <literal>#include</literal>
- statements are found and handled. The only aspect where user help might be
- needed is implicit dependency on generated files.
- </para>
-
- <para>
- By default, Boost.Build handles such dependencies within one main target.
- For example, assume that main target "app" has two sources, "app.cpp" and
- "parser.y". The latter source is converted into "parser.c" and "parser.h".
- Then, if "app.cpp" includes "parser.h", Boost.Build will detect this
-      dependency. Moreover, since "parser.h" will be generated into a build
-      directory, the path to that directory will automatically be added to the
-      include path.
- </para>
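-
-    <para>
-      In Jamfile terms, the example above is simply (a sketch, assuming a
-      suitable tool is configured so that <filename>parser.y</filename> can
-      be converted):
-<programlisting>
-exe app : app.cpp parser.y ;
-</programlisting>
-    </para>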
-
- <para>
-      Making this mechanism work across main target boundaries is possible, but
-      imposes a certain overhead. For that reason, if there is an implicit dependency
- on files from other main targets, the <literal>&lt;implicit-dependency&gt;
- </literal> feature must be used, for example:
-<programlisting>
-lib parser : parser.y ;
-exe app : app.cpp : &lt;implicit-dependency&gt;parser ;
-</programlisting>
- The above example tells the build system that when scanning all sources of
- "app" for implicit-dependencies, it should consider targets from "parser"
- as potential dependencies.
- </para>
- </section>
-
- <section id="bbv2.tasks.crosscompile">
- <title>Cross-compilation</title>
-
- <indexterm><primary>cross compilation</primary></indexterm>
-
- <para>Boost.Build supports cross compilation with the gcc and msvc
- toolsets.</para>
-
- <para>
- When using gcc, you first need to specify your cross compiler
- in <filename>user-config.jam</filename> (see <xref linkend="bbv2.overview.configuration"/>),
- for example:</para>
-<programlisting>
-using gcc : arm : arm-none-linux-gnueabi-g++ ;
-</programlisting>
- <para>
-    After that, if the host and target OS are the same, for example Linux, you can
-    just request that this compiler version be used:
- </para>
-<screen>
-b2 toolset=gcc-arm
-</screen>
-
- <para>
-    If you want to target a different operating system from the host, you need
- to additionally specify the value for the <code>target-os</code> feature, for
- example:
- </para>
-<screen>
-# On windows box
-b2 toolset=gcc-arm <emphasis role="bold">target-os=linux</emphasis>
-# On Linux box
-b2 toolset=gcc-mingw <emphasis role="bold">target-os=windows</emphasis>
-</screen>
- <para>
-    For the complete list of allowed operating system names, please see the documentation for
- <link linkend="bbv2.reference.features.target-os">target-os feature</link>.
- </para>
-
- <para>
-    When using the msvc compiler, it is only possible to cross-compile to a 64-bit system
- on a 32-bit host. Please see <xref linkend="v2.reference.tools.compiler.msvc.64"/> for
- details.
- </para>
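-
-  <para>
-    For example (a sketch, assuming the 64-bit compiler tools are installed
-    and configured):
-  </para>
-<screen>
-b2 toolset=msvc address-model=64
-</screen>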
-
- </section>
-
-</chapter>
-
-<!--
- Local Variables:
- mode: nxml
- sgml-indent-data: t
- sgml-parent-document: ("userman.xml" "chapter")
- sgml-set-face: t
- End:
--->
diff --git a/tools/build/v2/doc/src/tutorial.xml b/tools/build/v2/doc/src/tutorial.xml
deleted file mode 100644
index 141ed9196f..0000000000
--- a/tools/build/v2/doc/src/tutorial.xml
+++ /dev/null
@@ -1,682 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE chapter PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<?psgml nofill screen programlisting literallayout?>
-
-<chapter id="bbv2.tutorial">
- <title>Tutorial</title>
-
-<!-- You can't launch into this stuff without describing how to configure -->
-<!-- Boost.Build... unless of course you think it's likely to work with -->
-<!-- no configuration. But even if you do you have to tell people how to -->
-<!-- configure their installation in case it doesn't work. -->
-<!--
- VP: need also mention the examples which correspond to specific
- sections.
--->
-
- <para>
-    This section will guide you through the most basic features of Boost.Build
- V2. We will start with the &#x201C;Hello, world&#x201D; example, learn how
- to use libraries, and finish with testing and installing features.
- </para>
-
- <section id="bbv2.tutorial.hello">
- <title>Hello, world</title>
-
- <para>
-    The simplest project that Boost.Build can construct is stored in the
-    <filename>example/hello/</filename> directory. The project is described by
- a file called <filename>Jamroot</filename> that contains:
-
-<programlisting language="jam">
-exe hello : hello.cpp ;
-</programlisting>
-
- Even with this simple setup, you can do some interesting things. First of
- all, just invoking <command>b2</command> will build the <filename>hello
- </filename> executable by compiling and linking <filename>hello.cpp
- </filename>. By default, the debug variant is built. Now, to build the release
- variant of <filename>hello</filename>, invoke
-
-<screen>
-b2 release
-</screen>
-
- Note that the debug and release variants are created in different directories,
- so you can switch between variants or even build multiple variants at
- once, without any unnecessary recompilation. Let us extend the example by
- adding another line to our project's <filename>Jamroot</filename>:
-
-<programlisting language="jam">
-exe hello2 : hello.cpp ;
-</programlisting>
-
- Now let us build both the debug and release variants of our project again:
-
-<screen>
-b2 debug release
-</screen>
-
- Note that two variants of <filename>hello2</filename> are linked. Since we
- have already built both variants of <filename>hello</filename>, hello.cpp
- will not be recompiled; instead the existing object files will just be
- linked into the corresponding variants of <filename>hello2</filename>. Now
- let us remove all the built products:
-
-<screen>
-b2 --clean debug release
-</screen>
-
- It is also possible to build or clean specific targets. The following two
- commands, respectively, build or clean only the debug version of
- <filename>hello2</filename>.
-
-<screen>
-b2 hello2
-b2 --clean hello2
-</screen>
- </para>
- </section>
-
- <section id="bbv2.tutorial.properties">
- <title>Properties</title>
-
- <para>
- To represent aspects of target configuration such as
- debug and release variants, or single- and multi-threaded
- builds portably, Boost.Build uses <firstterm>features</firstterm> with
- associated <firstterm>values</firstterm>. For
- example, the <code>debug-symbols</code> feature can have a value of <code>on</code> or
- <code>off</code>. A <firstterm>property</firstterm> is just a (feature,
- value) pair. When a user initiates a build, Boost.Build
- automatically translates the requested properties into appropriate
- command-line flags for invoking toolset components like compilers
- and linkers.
- </para>
-
- <para>
- There are many built-in features that can be combined to
- produce arbitrary build configurations. The following command
- builds the project's <code>release</code> variant with inlining
- disabled and debug symbols enabled:
-<screen>
-b2 release inlining=off debug-symbols=on
-</screen>
- </para>
-
- <para>
- Properties on the command-line are specified with the syntax:
-
-<screen>
-<replaceable>feature-name</replaceable>=<replaceable>feature-value</replaceable>
-</screen>
- </para>
-
- <para>
- The <option>release</option> and <option>debug</option> that we have seen
- in <command>b2</command> invocations are just a shorthand way to specify
- values of the <varname>variant</varname> feature. For example, the
- command above could also have been written this way:
-
- <screen>
-b2 variant=release inlining=off debug-symbols=on
- </screen>
- </para>
-
- <para>
- <varname>variant</varname> is so commonly-used that it has been given
- special status as an <firstterm>implicit</firstterm> feature&#x2014;
- Boost.Build will deduce its identity just from the name of one of its
- values.
- </para>
-
- <para>
- A complete description of features can be found in <xref linkend="bbv2.reference.features"/>.
- </para>
-
- <section id="bbv2.tutorial.properties.requirements">
- <title>Build Requests and Target Requirements</title>
-
- <para>
- The set of properties specified on the command line constitutes
- a <firstterm>build request</firstterm>&#x2014;a description of
- the desired properties for building the requested targets (or,
- if no targets were explicitly requested, the project in the
- current directory). The <emphasis>actual</emphasis>
- properties used for building targets are typically a
- combination of the build request and properties derived from
- the project's <filename>Jamroot</filename> (and its other
- Jamfiles, as described in <xref
- linkend="bbv2.tutorial.hierarchy"/>). For example, the
- locations of <code>#include</code>d header files are normally
- not specified on the command-line, but described in
- Jamfiles as <firstterm>target
- requirements</firstterm> and automatically combined with the
- build request for those targets. Multithread-enabled
- compilation is another example of a typical target
- requirement. The Jamfile fragment below
- illustrates how these requirements might be specified.
- </para>
-
-<programlisting language="jam">
-exe hello
- : hello.cpp
- : &lt;include&gt;boost &lt;threading&gt;multi
- ;
-</programlisting>
-
- <para>
- When <filename>hello</filename> is built, the two requirements specified
- above will always be present. If the build request given on the
-        <command>b2</command> command-line explicitly contradicts a target's
-        requirements, the target requirements usually override (or, in the case
-        of &#x201C;free&#x201D; features like
- <varname>&lt;include&gt;</varname>,
- <footnote>
- <para>
- See <xref linkend="bbv2.reference.features.attributes"/>
- </para>
- </footnote>
- augments) the build request.
- </para>
-
- <tip>
- <para>
- The value of the <varname>&lt;include&gt;</varname> feature is
- relative to the location of <filename>Jamroot</filename> where it is
- used.
- </para>
- </tip>
- </section>
-
- <section id="bbv2.tutorial.properties.project_attributes">
- <title>Project Attributes</title>
-
- <para>
- If we want the same requirements for our other target,
- <filename>hello2</filename>, we could simply duplicate them. However,
- as projects grow, that approach leads to a great deal of repeated
- boilerplate in Jamfiles.
-
- Fortunately, there's a better way. Each project can specify a set of
- <firstterm>attributes</firstterm>, including requirements:
-
-<programlisting language="jam">
-project
- : requirements &lt;include&gt;/home/ghost/Work/boost &lt;threading&gt;multi
- ;
-
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp ;</programlisting>
-
- The effect would be as if we specified the same requirement for both
- <filename>hello</filename> and <filename>hello2</filename>.
- </para>
- </section>
- </section>
-
- <section id="bbv2.tutorial.hierarchy">
- <title>Project Hierarchies</title>
-
- <para>
- So far we have only considered examples with one project, with
- one user-written Boost.Jam file, <filename>Jamroot</filename>. A typical
- large codebase would be composed of many projects organized into a tree.
- The top of the tree is called the <firstterm>project root</firstterm>.
- Every subproject is defined by a file called <filename>Jamfile</filename>
- in a descendant directory of the project root. The parent project of a
- subproject is defined by the nearest <filename>Jamfile</filename> or
- <filename>Jamroot</filename> file in an ancestor directory. For example,
- in the following directory layout:
-
-<screen>
-top/
- |
- +-- Jamroot
- |
- +-- app/
- | |
- | +-- Jamfile
- | `-- app.cpp
- |
- `-- util/
- |
- +-- foo/
- . |
- . +-- Jamfile
- . `-- bar.cpp
-</screen>
-
- the project root is <filename>top/</filename>. The projects in
- <filename>top/app/</filename> and <filename>top/util/foo/</filename> are
- immediate children of the root project.
-
- <note>
- <para>
- When we refer to a &#x201C;Jamfile,&#x201D; set in normal
- type, we mean a file called either
- <filename>Jamfile</filename> or
- <filename>Jamroot</filename>. When we need to be more
- specific, the filename will be set as
- &#x201C;<filename>Jamfile</filename>&#x201D; or
- &#x201C;<filename>Jamroot</filename>.&#x201D;
- </para>
- </note>
- </para>
-
- <para>
- Projects inherit all attributes (such as requirements)
- from their parents. Inherited requirements are combined with
- any requirements specified by the subproject.
- For example, if <filename>top/Jamroot</filename> has
-
-<programlisting language="jam">
-&lt;include&gt;/home/ghost/local
-</programlisting>
-
- in its requirements, then all of its subprojects will have it
- in their requirements, too. Of course, any project can add
- include paths to those specified by its parents. <footnote>
- <para>Many
- features will be overridden,
- rather than added-to, in subprojects. See <xref
- linkend="bbv2.reference.features.attributes"/> for more
-      information.</para>
- </footnote>
- More details can be found in
- <xref linkend= "bbv2.overview.projects"/>.
- </para>
-
- <para>
- Invoking <command>b2</command> without explicitly specifying
- any targets on the command line builds the project rooted in the
- current directory. Building a project does not automatically
- cause its subprojects to be built unless the parent project's
- Jamfile explicitly requests it. In our example,
- <filename>top/Jamroot</filename> might contain:
-
-<programlisting language="jam">
-build-project app ;
-</programlisting>
-
- which would cause the project in <filename>top/app/</filename>
- to be built whenever the project in <filename>top/</filename> is
- built. However, targets in <filename>top/util/foo/</filename>
- will be built only if they are needed by targets in
- <filename>top/</filename> or <filename>top/app/</filename>.
- </para>
- </section>
-
- <section id="bbv2.tutorial.libs">
- <title>Dependent Targets</title>
-
- <para>
- When building a target <filename>X</filename> that depends on first
- building another target <filename>Y</filename> (such as a
- library that must be linked with <firstterm>X</firstterm>),
- <filename>Y</filename> is called a
- <firstterm>dependency</firstterm> of <filename>X</filename> and
- <filename>X</filename> is termed a
- <firstterm>dependent</firstterm> of <filename>Y</filename>.
- </para>
-
- <para>To get a feeling of target dependencies, let's continue the
- above example and see how <filename>top/app/Jamfile</filename> can
- use libraries from <filename>top/util/foo</filename>. If
- <filename>top/util/foo/Jamfile</filename> contains
-
-<programlisting language="jam">
-lib bar : bar.cpp ;
-</programlisting>
-
- then to use this library in <filename>top/app/Jamfile</filename>, we can
- write:
-
-<programlisting language="jam">
-exe app : app.cpp ../util/foo//bar ;
-</programlisting>
-
- While <code>app.cpp</code> refers to a regular source file,
- <code>../util/foo//bar</code> is a reference to another target:
- a library <filename>bar</filename> declared in the Jamfile at
- <filename>../util/foo</filename>.
- </para>
-
- <tip>
-      <para>Some other build systems have special syntax for listing dependent
-      libraries, for example a <varname>LIBS</varname> variable. In Boost.Build,
- you just add the library to the list of sources.
- </para>
- </tip>
-
- <para>Suppose we build <filename>app</filename> with:
- <screen>
-b2 app optimization=full define=USE_ASM
- </screen>
- Which properties will be used to build <code>foo</code>? The answer is
- that some features are
- <firstterm>propagated</firstterm>&#x2014;Boost.Build attempts to use
- dependencies with the same value of propagated features. The
- <varname>&lt;optimization&gt;</varname> feature is propagated, so both
- <filename>app</filename> and <filename>foo</filename> will be compiled
- with full optimization. But <varname>&lt;define&gt;</varname> is not
- propagated: its value will be added as-is to the compiler flags for
-      <filename>app.cpp</filename>, but won't affect <filename>foo</filename>.
- </para>
-
-
- <para>
- Let's improve this project further. The library probably has some headers
- that must be used when compiling <filename>app.cpp</filename>. We could
- manually add the necessary <code>#include</code> paths to
- <filename>app</filename>'s requirements as values of the
- <varname>&lt;include&gt; </varname> feature, but then this work will be
- repeated for all programs that use <filename>foo</filename>. A better
- solution is to modify <filename>util/foo/Jamfile</filename> in this way:
-
- <programlisting language="jam">
-project
- : usage-requirements &lt;include&gt;.
- ;
-
-lib foo : foo.cpp ;</programlisting>
-
- Usage requirements are applied not to the target being declared but to its
- dependants. In this case, <literal>&lt;include&gt;.</literal> will be
- applied to all targets that directly depend on <filename>foo</filename>.
- </para>
-
- <para>
- Another improvement is using symbolic identifiers to refer to the library,
- as opposed to <filename>Jamfile</filename> location. In a large project, a
- library can be used by many targets, and if they all use <filename>Jamfile
- </filename> location, a change in directory organization entails much
- work. The solution is to use project ids&#x2014;symbolic names not tied to
- directory layout. First, we need to assign a project id by adding this
- code to <filename>Jamroot</filename>:
- </para>
-
- <programlisting language="jam">
-use-project /library-example/foo : util/foo ;</programlisting>
-
- <para>
- Second, we modify <filename>app/Jamfile</filename> to use the project id:
- <programlisting>
-exe app : app.cpp /library-example/foo//bar ;</programlisting>
-
- The <filename>/library-example/foo//bar</filename> syntax is used to refer
- to the target <filename>bar</filename> in the project with id <filename>
- /library-example/foo</filename>. We've achieved our goal&#x2014;if the
- library is moved to a different directory, only <filename>Jamroot
- </filename> must be modified. Note that project ids are global&#x2014;two
- Jamfiles are not allowed to assign the same project id to different
- directories.
- </para>
-
- <tip>
- <para>If you want all applications in some project to link to a certain
-      library, you can avoid having to specify it directly in the sources of
- every target by using the <varname>&lt;library&gt;</varname> property.
- For example, if <filename>/boost/filesystem//fs</filename> should be
- linked to all applications in your project, you can add
- <code>&lt;library&gt;/boost/filesystem//fs</code> to the project's
- requirements, like this:
- </para>
-
- <programlisting language="jam">
-project
- : requirements &lt;library&gt;/boost/filesystem//fs
- ;</programlisting>
- </tip>
- </section>
-
- <section id="bbv2.tutorial.linkage">
-    <title>Static and shared libraries</title>
-
- <para>
- Libraries can be either <emphasis>static</emphasis>, which means they are
- included in executable files that use them, or <emphasis>shared</emphasis>
- (a.k.a. <emphasis>dynamic</emphasis>), which are only referred to from
- executables, and must be available at run time. Boost.Build can create and
- use both kinds.
- </para>
-
- <para>
- The kind of library produced from a <code>lib</code> target is determined
-      by the value of the <varname>link</varname> feature. The default value is
- <literal>shared</literal>, and to build a static library, the value should
- be <literal>static</literal>. You can request a static build either on the
- command line:
- <programlisting>b2 link=static</programlisting>
- or in the library's requirements:
- <programlisting language="jam">lib l : l.cpp : &lt;link&gt;static ;</programlisting>
- </para>
-
- <para>
- We can also use the <varname>&lt;link&gt;</varname> property to express
- linking requirements on a per-target basis. For example, if a particular
- executable can be correctly built only with the static version of a
- library, we can qualify the executable's <link
- linkend="bbv2.reference.targets.references">target reference</link> to the
- library as follows:
-
-<!-- There has been no earlier indication that target references can contain
- properties. You can't assume that the reader will recognize that strange
- incantation as a target reference, or that she'll know what it means. You
- also can't assume that hyperlinks will help the reader, because she may be
- working from a printout, as I was.
- VP: to be addressed when this section is moved. See comment below.
--->
-
- <programlisting language="jam">
-exe important : main.cpp helpers/&lt;link&gt;static ;</programlisting>
-
- No matter what arguments are specified on the <command>b2</command>
- command line, <filename>important</filename> will only be linked with the
- static version of <filename>helpers</filename>.
- </para>
-
- <para>
- Specifying properties in target references is especially useful if you use
- a library defined in some other project (one you can't change) but you
- still want static (or dynamic) linking to that library in all cases. If
- that library is used by many targets, you <emphasis>could</emphasis> use
- target references everywhere:
-
- <programlisting language="jam">
-exe e1 : e1.cpp /other_project//bar/&lt;link&gt;static ;
-exe e10 : e10.cpp /other_project//bar/&lt;link&gt;static ;</programlisting>
-
- but that's far from being convenient. A better approach is to introduce a
- level of indirection. Create a local <type>alias</type> target that refers
-      to the static (or dynamic) version of <filename>bar</filename>:
-
-    <programlisting language="jam">
-alias foo : /other_project//bar/&lt;link&gt;static ;
-exe e1 : e1.cpp foo ;
-exe e10 : e10.cpp foo ;</programlisting>
-
- The <link linkend="bbv2.tasks.alias">alias</link> rule is specifically
- used to rename a reference to a target and possibly change the
- properties.
-
- <!-- You should introduce the alias rule in an earlier section, before
- describing how it applies to this specific use-case, and the
- foregoing sentence should go there.
- VP: we've agreed that this section should be moved further in the
- docs, since it's more like advanced reading. When I move it, I'll
- make sure 'alias' is already mentioned.
- -->
- </para>
-
- <tip>
- <para>
- When one library uses another, you put the second library in the source
- list of the first. For example:
- <programlisting language="jam">
-lib utils : utils.cpp /boost/filesystem//fs ;
-lib core : core.cpp utils ;
-exe app : app.cpp core ;</programlisting>
-      This works no matter what kind of linking is used. When <filename>core
-      </filename> is built as a shared library, <filename>utils</filename> is
-      linked directly into it. Static libraries can't link to other
- libraries, so when <filename>core</filename> is built as a static
- library, its dependency on <filename>utils</filename> is passed along to
- <filename>core</filename>'s dependents, causing <filename>app</filename>
- to be linked with both <filename>core</filename> and <filename>utils
- </filename>.
- </para>
- </tip>
-
- <note>
- <para>
-      (Note for non-UNIX systems.) Typically, shared libraries must be
- installed to a directory in the dynamic linker's search path. Otherwise,
- applications that use shared libraries can't be started. On Windows, the
- dynamic linker's search path is given by the <envar>PATH</envar>
- environment variable. This restriction is lifted when you use
- Boost.Build testing facilities&#x2014;the <envar>PATH</envar> variable
- will be automatically adjusted before running the executable.
- <!-- Need ref here to 'testing facilities' -->
- </para>
- </note>
- </section>
-
- <section id="bbv2.tutorial.conditions">
- <title>Conditions and alternatives</title>
-
- <para>
- Sometimes, particular relationships need to be maintained among a target's
-      build properties. For example, you might want to set a specific <code>
-      #define</code> when a library is built as shared, or when a target's
- <code>release</code> variant is built. This can be achieved using
- <firstterm>conditional requirements</firstterm>.
-
- <programlisting language="jam">
-lib network : network.cpp
- : <emphasis role="bold">&lt;link&gt;shared:&lt;define&gt;NEWORK_LIB_SHARED</emphasis>
- &lt;variant&gt;release:&lt;define&gt;EXTRA_FAST
- ;</programlisting>
-
- In the example above, whenever <filename>network</filename> is built with
- <code language="jam">&lt;link&gt;shared</code>, <code language="jam">&lt;define&gt;NEWORK_LIB_SHARED
- </code> will be in its properties, too. Also, whenever its release variant
- is built, <code>&lt;define&gt;EXTRA_FAST</code> will appear in its
- properties.
- </para>
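-
-    <para>
-      For instance, a build request along these lines (an illustrative
-      invocation) satisfies both conditions at once, so both macros would be
-      defined:
-    <screen>
-b2 link=shared variant=release
-    </screen>
-    </para>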
-
- <para>
- Sometimes the ways a target is built are so different that describing them
- using conditional requirements would be hard. For example, imagine that a
- library actually uses different source files depending on the toolset used
- to build it. We can express this situation using <firstterm>target
- alternatives</firstterm>:
- <programlisting language="jam">
-lib demangler : dummy_demangler.cpp ; # alternative 1
-lib demangler : demangler_gcc.cpp : &lt;toolset&gt;gcc ; # alternative 2
-lib demangler : demangler_msvc.cpp : &lt;toolset&gt;msvc ; # alternative 3</programlisting>
-      When building <filename>demangler</filename>, Boost.Build will compare
-      the requirements of each alternative with the build properties to find
-      the best match. For example, when building with <code language="jam">&lt;toolset&gt;gcc</code>,
-      alternative 2 will be selected, and when building with
-      <code language="jam">&lt;toolset&gt;msvc</code>, alternative 3 will be selected. In all
-      other cases, the most generic alternative 1 will be built.
- </para>
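-
-    <para>
-      As a sketch of how the selection is requested in practice, the toolset is
-      simply named in the build request:
-    <screen>
-b2 toolset=gcc
-b2 toolset=msvc
-    </screen>
-      The first request would select alternative 2, the second alternative 3;
-      with any other toolset, the generic alternative 1 is built.
-    </para>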
- </section>
-
- <section id="bbv2.tutorial.prebuilt">
- <title>Prebuilt targets</title>
-
- <para>
- To link to libraries whose build instructions aren't given in a Jamfile,
- you need to create <code>lib</code> targets with an appropriate
- <varname>file</varname> property. Target alternatives can be used to
- associate multiple library files with a single conceptual target. For
- example:
- <programlisting language="jam">
-# util/lib2/Jamfile
-lib lib2
- :
- : &lt;file&gt;lib2_release.a &lt;variant&gt;release
- ;
-
-lib lib2
- :
- : &lt;file&gt;lib2_debug.a &lt;variant&gt;debug
- ;</programlisting>
-
- This example defines two alternatives for <filename>lib2</filename>, and
- for each one names a prebuilt file. Naturally, there are no sources.
- Instead, the <varname>&lt;file&gt;</varname> feature is used to specify
- the file name.
- </para>
-
- <para>
- Once a prebuilt target has been declared, it can be used just like any
- other target:
-
- <programlisting language="jam">
-exe app : app.cpp ../util/lib2//lib2 ;</programlisting>
-
-      As with any target, the alternative selected depends on the properties
-      propagated from <filename>lib2</filename>'s dependents. If we build the
-      release and debug versions of <filename>app</filename>, it will be linked
-      with <filename>lib2_release.a</filename> and <filename>lib2_debug.a
-      </filename>, respectively.
- </para>
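-
-    <para>
-      As an illustrative sketch, the two variants can be requested explicitly:
-    <screen>
-b2 variant=release
-b2 variant=debug
-    </screen>
-      The first request would link <filename>app</filename> against
-      <filename>lib2_release.a</filename>, the second against
-      <filename>lib2_debug.a</filename>.
-    </para>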
-
- <para>
- System libraries&#x2014;those that are automatically found by the toolset
- by searching through some set of predetermined paths&#x2014;should be
- declared almost like regular ones:
-
- <programlisting language="jam">
-lib pythonlib : : &lt;name&gt;python22 ;</programlisting>
-
- We again don't specify any sources, but give a <varname>name</varname>
- that should be passed to the compiler. If the gcc toolset were used to
- link an executable target to <filename>pythonlib</filename>,
- <option>-lpython22</option> would appear in the command line (other
- compilers may use different options).
- </para>
-
- <para>
- We can also specify where the toolset should look for the library:
-
- <programlisting language="jam">
-lib pythonlib : : &lt;name&gt;python22 &lt;search&gt;/opt/lib ;</programlisting>
-
- And, of course, target alternatives can be used in the usual way:
-
- <programlisting language="jam">
-lib pythonlib : : &lt;name&gt;python22 &lt;variant&gt;release ;
-lib pythonlib : : &lt;name&gt;python22_d &lt;variant&gt;debug ;</programlisting>
- </para>
-
- <para>
- A more advanced use of prebuilt targets is described in <xref linkend=
- "bbv2.recipies.site-config"/>.
- </para>
- </section>
-</chapter>
-
-<!--
- Local Variables:
- mode: nxml
- sgml-indent-data:t
- sgml-parent-document:("userman.xml" "chapter")
- sgml-set-face: t
- sgml-omittag:nil
- sgml-shorttag:nil
- sgml-namecase-general:t
- sgml-general-insert-case:lower
- sgml-minimize-attributes:nil
- sgml-always-quote-attributes:t
- sgml-indent-step:2
- sgml-exposed-tags:nil
- sgml-local-catalogs:nil
- sgml-local-ecat-files:nil
- End:
--->
diff --git a/tools/build/v2/doc/src/userman.xml b/tools/build/v2/doc/src/userman.xml
deleted file mode 100644
index c3f070f39a..0000000000
--- a/tools/build/v2/doc/src/userman.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE part PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN"
- "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
-
-<part xmlns:xi="http://www.w3.org/2001/XInclude"
- id="bbv2" last-revision="$Date: 2011-01-25 10:06:12 -0800 (Tue, 25 Jan 2011) $">
- <partinfo>
- <copyright>
- <year>2006</year>
- <year>2007</year>
- <year>2008</year>
- <year>2009</year>
- <holder>Vladimir Prus</holder>
- </copyright>
-
- <legalnotice>
- <para>Distributed under the Boost Software License, Version 1.0.
- (See accompanying file <filename>LICENSE_1_0.txt</filename> or copy at
- <ulink
- url="http://www.boost.org/LICENSE_1_0.txt">http://www.boost.org/LICENSE_1_0.txt</ulink>)
- </para>
- </legalnotice>
- </partinfo>
-
- <title>Boost.Build V2 User Manual</title>
-
- <!-- Chapters -->
- <xi:include href="howto.xml"/>
- <xi:include href="install.xml"/>
- <xi:include href="tutorial.xml"/>
- <xi:include href="overview.xml"/>
- <xi:include href="tasks.xml"/>
- <xi:include href="reference.xml"/>
- <xi:include href="extending.xml"/>
- <xi:include href="faq.xml"/>
-
-  <!-- Appendices -->
-<!-- <xi:include href="architecture.xml"/> -->
-
-</part>
diff --git a/tools/build/v2/engine/build.bat b/tools/build/v2/engine/build.bat
deleted file mode 100644
index 2982fb9c93..0000000000
--- a/tools/build/v2/engine/build.bat
+++ /dev/null
@@ -1,560 +0,0 @@
-@ECHO OFF
-
-REM ~ Copyright 2002-2007 Rene Rivera.
-REM ~ Distributed under the Boost Software License, Version 1.0.
-REM ~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-setlocal
-goto Start
-
-
-:Set_Error
-color 00
-goto :eof
-
-
-:Clear_Error
-ver >nul
-goto :eof
-
-
-:Error_Print
-REM Output an error message and set the errorlevel to indicate failure.
-setlocal
-ECHO ###
-ECHO ### %1
-ECHO ###
-ECHO ### You can specify the toolset as the argument, i.e.:
-ECHO ### .\build.bat msvc
-ECHO ###
-ECHO ### Toolsets supported by this script are: borland, como, gcc, gcc-nocygwin,
-ECHO ### intel-win32, metrowerks, mingw, msvc, vc7, vc8, vc9, vc10, vc11
-ECHO ###
-call :Set_Error
-endlocal
-goto :eof
-
-
-:Test_Path
-REM Tests for the presence of the given file (executable) in the directories in
-REM the PATH environment variable. Additionally sets FOUND_PATH to the path of
-REM the found file.
-call :Clear_Error
-setlocal
-set test=%~$PATH:1
-endlocal
-if not errorlevel 1 set FOUND_PATH=%~dp$PATH:1
-goto :eof
-
-
-:Test_Option
-REM Tests whether the given string is in the form of an option: "--*"
-call :Clear_Error
-setlocal
-set test=%1
-if not defined test (
- call :Set_Error
- goto Test_Option_End
-)
-set test=###%test%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-if not "-" == "%test:~1,1%" call :Set_Error
-:Test_Option_End
-endlocal
-goto :eof
-
-
-:Test_Empty
-REM Tests whether the given string is not empty
-call :Clear_Error
-setlocal
-set test=%1
-if not defined test (
- call :Clear_Error
- goto Test_Empty_End
-)
-set test=###%test%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-if not "" == "%test%" call :Set_Error
-:Test_Empty_End
-endlocal
-goto :eof
-
-
-:Call_If_Exists
-if EXIST %1 call %*
-goto :eof
-
-
-:Guess_Toolset
-REM Try and guess the toolset to bootstrap the build with...
-REM Sets BOOST_JAM_TOOLSET to the first found toolset.
-REM May also set BOOST_JAM_TOOLSET_ROOT to the
-REM location of the found toolset.
-
-call :Clear_Error
-call :Test_Empty %ProgramFiles%
-if not errorlevel 1 set ProgramFiles=C:\Program Files
-
-call :Clear_Error
-if NOT "_%VS110COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc11"
- set "BOOST_JAM_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 11.0\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc11"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 11.0\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS100COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc10"
- set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 10.0\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc10"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 10.0\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS90COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc9"
- set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 9.0\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc9"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 9.0\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS80COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc8"
- set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio 8\VC\VCVARSALL.BAT" (
- set "BOOST_JAM_TOOLSET=vc8"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio 8\VC\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VS71COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%\..\..\VC7\"
- goto :eof)
-call :Clear_Error
-if NOT "_%VCINSTALLDIR%_" == "__" (
- REM %VCINSTALLDIR% is also set for VC9 (and probably VC8)
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%VCINSTALLDIR%\VC7\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET 2003\VC7\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio .NET\VC7\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=vc7"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio .NET\VC7\"
- goto :eof)
-call :Clear_Error
-if NOT "_%MSVCDir%_" == "__" (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual Studio\VC98\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual Studio\VC98\"
- goto :eof)
-call :Clear_Error
-if EXIST "%ProgramFiles%\Microsoft Visual C++\VC98\bin\VCVARS32.BAT" (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%ProgramFiles%\Microsoft Visual C++\VC98\"
- goto :eof)
-call :Clear_Error
-call :Test_Path cl.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=msvc"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- goto :eof)
-call :Clear_Error
-call :Test_Path vcvars32.bat
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=msvc"
- call "%FOUND_PATH%VCVARS32.BAT"
- set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
- goto :eof)
-call :Clear_Error
-if EXIST "C:\Borland\BCC55\Bin\bcc32.exe" (
- set "BOOST_JAM_TOOLSET=borland"
- set "BOOST_JAM_TOOLSET_ROOT=C:\Borland\BCC55\"
- goto :eof)
-call :Clear_Error
-call :Test_Path bcc32.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=borland"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- goto :eof)
-call :Clear_Error
-call :Test_Path icl.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=intel-win32"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- goto :eof)
-call :Clear_Error
-if EXIST "C:\MinGW\bin\gcc.exe" (
- set "BOOST_JAM_TOOLSET=mingw"
- set "BOOST_JAM_TOOLSET_ROOT=C:\MinGW\"
- goto :eof)
-call :Clear_Error
-if NOT "_%CWFolder%_" == "__" (
- set "BOOST_JAM_TOOLSET=metrowerks"
- set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
- goto :eof )
-call :Clear_Error
-call :Test_Path mwcc.exe
-if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET=metrowerks"
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\..\"
- goto :eof)
-call :Clear_Error
-call :Error_Print "Could not find a suitable toolset."
-goto :eof
-
-
-:Guess_Yacc
-REM Tries to find bison or yacc in common places so we can build the grammar.
-call :Clear_Error
-call :Test_Path yacc.exe
-if not errorlevel 1 (
- set "YACC=yacc -d"
- goto :eof)
-call :Clear_Error
-call :Test_Path bison.exe
-if not errorlevel 1 (
- set "YACC=bison -d --yacc"
- goto :eof)
-call :Clear_Error
-if EXIST "C:\Program Files\GnuWin32\bin\bison.exe" (
- set "YACC=C:\Program Files\GnuWin32\bin\bison.exe" -d --yacc
- goto :eof)
-call :Clear_Error
-call :Error_Print "Could not find Yacc to build the Jam grammar."
-goto :eof
-
-
-:Start
-set BOOST_JAM_TOOLSET=
-set BOOST_JAM_ARGS=
-
-REM If no arguments guess the toolset;
-REM or if first argument is an option guess the toolset;
-REM otherwise the argument is the toolset to use.
-call :Clear_Error
-call :Test_Empty %1
-if not errorlevel 1 (
- call :Guess_Toolset
- if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
-)
-
-call :Clear_Error
-call :Test_Option %1
-if not errorlevel 1 (
- call :Guess_Toolset
- if not errorlevel 1 ( goto Setup_Toolset ) else ( goto Finish )
-)
-
-call :Clear_Error
-set BOOST_JAM_TOOLSET=%1
-shift
-goto Setup_Toolset
-
-
-:Setup_Toolset
-REM Setup the toolset command and options. This bit of code
-REM needs to be flexible enough to handle both when
-REM the toolset was guessed at and found, or when the toolset
-REM was indicated in the command arguments.
-REM NOTE: The strange multiple "if ?? == _toolset_" tests are that way
-REM because in BAT variables are substituted only once during a single
-REM command. A complete "if ... ( commands ) else ( commands )"
-REM is a single command, even though it's in multiple lines here.
-:Setup_Args
-call :Clear_Error
-call :Test_Empty %1
-if not errorlevel 1 goto Config_Toolset
-call :Clear_Error
-call :Test_Option %1
-if errorlevel 1 (
- set BOOST_JAM_ARGS=%BOOST_JAM_ARGS% %1
- shift
- goto Setup_Args
-)
-:Config_Toolset
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_metrowerks_" goto Skip_METROWERKS
-if NOT "_%CWFolder%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%CWFolder%\"
- )
-set "PATH=%BOOST_JAM_TOOLSET_ROOT%Other Metrowerks Tools\Command Line Tools;%PATH%"
-set "BOOST_JAM_CC=mwcc -runtime ss -cwd include -DNT -lkernel32.lib -ladvapi32.lib -luser32.lib"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_METROWERKS
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_msvc_" goto Skip_MSVC
-if NOT "_%MSVCDir%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%MSVCDir%\"
- )
-call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
-if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- )
-set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_MSVC
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc7_" goto Skip_VC7
-if NOT "_%VS71COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS71COMNTOOLS%..\..\VC7\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%bin\VCVARS32.BAT"
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /GZ /Zi /MLd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC7
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc8_" goto Skip_VC8
-if NOT "_%VS80COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS80COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC8
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc9_" goto Skip_VC9
-if NOT "_%VS90COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS90COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC9
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc10_" goto Skip_VC10
-if NOT "_%VS100COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS100COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC10
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_vc11_" goto Skip_VC11
-if NOT "_%VS110COMNTOOLS%_" == "__" (
- set "BOOST_JAM_TOOLSET_ROOT=%VS110COMNTOOLS%..\..\VC\"
- )
-if "_%VCINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%VCVARSALL.BAT" %BOOST_JAM_ARGS%
-if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if "_%VCINSTALLDIR%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- ) )
-set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_VC11
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_borland_" goto Skip_BORLAND
-if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- call :Test_Path bcc32.exe )
-if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- if not errorlevel 1 (
- set "BOOST_JAM_TOOLSET_ROOT=%FOUND_PATH%..\"
- ) )
-if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%Bin;%PATH%"
- )
-set "BOOST_JAM_CC=bcc32 -WC -w- -q -I%BOOST_JAM_TOOLSET_ROOT%Include -L%BOOST_JAM_TOOLSET_ROOT%Lib /DNT -nbootstrap"
-set "BOOST_JAM_OPT_JAM=-ejam0"
-set "BOOST_JAM_OPT_MKJAMBASE=-emkjambasejam0"
-set "BOOST_JAM_OPT_YYACC=-eyyacc0"
-set "_known_=1"
-:Skip_BORLAND
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_como_" goto Skip_COMO
-set "BOOST_JAM_CC=como -DNT"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_COMO
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc_" goto Skip_GCC
-set "BOOST_JAM_CC=gcc -DNT"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_GCC
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_gcc-nocygwin_" goto Skip_GCC_NOCYGWIN
-set "BOOST_JAM_CC=gcc -DNT -mno-cygwin"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_GCC_NOCYGWIN
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_intel-win32_" goto Skip_INTEL_WIN32
-set "BOOST_JAM_CC=icl -DNT /nologo kernel32.lib advapi32.lib user32.lib"
-set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
-set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
-set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0"
-set "_known_=1"
-:Skip_INTEL_WIN32
-if NOT "_%BOOST_JAM_TOOLSET%_" == "_mingw_" goto Skip_MINGW
-if not "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
- set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%"
- )
-set "BOOST_JAM_CC=gcc -DNT"
-set "BOOST_JAM_OPT_JAM=-o bootstrap\jam0.exe"
-set "BOOST_JAM_OPT_MKJAMBASE=-o bootstrap\mkjambase0.exe"
-set "BOOST_JAM_OPT_YYACC=-o bootstrap\yyacc0.exe"
-set "_known_=1"
-:Skip_MINGW
-call :Clear_Error
-if "_%_known_%_" == "__" (
- call :Error_Print "Unknown toolset: %BOOST_JAM_TOOLSET%"
-)
-if errorlevel 1 goto Finish
-
-echo ###
-echo ### Using '%BOOST_JAM_TOOLSET%' toolset.
-echo ###
-
-set YYACC_SOURCES=yyacc.c
-set MKJAMBASE_SOURCES=mkjambase.c
-set BJAM_SOURCES=
-set BJAM_SOURCES=%BJAM_SOURCES% command.c compile.c constants.c debug.c execnt.c filent.c function.c glob.c hash.c
-set BJAM_SOURCES=%BJAM_SOURCES% hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c
-set BJAM_SOURCES=%BJAM_SOURCES% object.c option.c output.c parse.c pathunix.c regexp.c
-set BJAM_SOURCES=%BJAM_SOURCES% rules.c scan.c search.c subst.c timestamp.c variable.c modules.c
-set BJAM_SOURCES=%BJAM_SOURCES% strings.c filesys.c builtins.c md5.c pwd.c class.c w32_getreg.c native.c
-set BJAM_SOURCES=%BJAM_SOURCES% modules/set.c modules/path.c modules/regex.c
-set BJAM_SOURCES=%BJAM_SOURCES% modules/property-set.c modules/sequence.c modules/order.c
-
-set BJAM_UPDATE=
-:Check_Update
-call :Test_Empty %1
-if not errorlevel 1 goto Check_Update_End
-call :Clear_Error
-setlocal
-set test=%1
-set test=###%test%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-if "%test%" == "--update" goto Found_Update
-endlocal
-shift
-if not "_%BJAM_UPDATE%_" == "_update_" goto Check_Update
-:Found_Update
-endlocal
-set BJAM_UPDATE=update
-:Check_Update_End
-if "_%BJAM_UPDATE%_" == "_update_" (
- if not exist ".\bootstrap\jam0.exe" (
- set BJAM_UPDATE=
- )
-)
-
-@echo ON
-@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Bootstrap
-if exist bootstrap rd /S /Q bootstrap
-md bootstrap
-@if not exist jamgram.y goto Bootstrap_GrammarPrep
-@if not exist jamgramtab.h goto Bootstrap_GrammarPrep
-@goto Skip_GrammarPrep
-:Bootstrap_GrammarPrep
-%BOOST_JAM_CC% %BOOST_JAM_OPT_YYACC% %YYACC_SOURCES%
-@if not exist ".\bootstrap\yyacc0.exe" goto Skip_GrammarPrep
-.\bootstrap\yyacc0 jamgram.y jamgramtab.h jamgram.yy
-:Skip_GrammarPrep
-@if not exist jamgram.c goto Bootstrap_GrammarBuild
-@if not exist jamgram.h goto Bootstrap_GrammarBuild
-@goto Skip_GrammarBuild
-:Bootstrap_GrammarBuild
-@echo OFF
-if "_%YACC%_" == "__" (
- call :Guess_Yacc
-)
-if errorlevel 1 goto Finish
-@echo ON
-%YACC% jamgram.y
-@if errorlevel 1 goto Finish
-del /f jamgram.c
-rename y.tab.c jamgram.c
-del /f jamgram.h
-rename y.tab.h jamgram.h
-:Skip_GrammarBuild
-@echo ON
-@if exist jambase.c goto Skip_Jambase
-%BOOST_JAM_CC% %BOOST_JAM_OPT_MKJAMBASE% %MKJAMBASE_SOURCES%
-@if not exist ".\bootstrap\mkjambase0.exe" goto Skip_Jambase
-.\bootstrap\mkjambase0 jambase.c Jambase
-:Skip_Jambase
-%BOOST_JAM_CC% %BOOST_JAM_OPT_JAM% %BJAM_SOURCES%
-:Skip_Bootstrap
-@if not exist ".\bootstrap\jam0.exe" goto Skip_Jam
-@set args=%*
-@echo OFF
-:Set_Args
-setlocal
-call :Test_Empty %args%
-if not errorlevel 1 goto Set_Args_End
-set test=###%args:~0,2%###
-set test=%test:"###=%
-set test=%test:###"=%
-set test=%test:###=%
-set test=%test:~0,1%
-if "-" == "%test%" goto Set_Args_End
-endlocal
-set args=%args:~1%
-goto Set_Args
-:Set_Args_End
-@echo ON
-@if "_%BJAM_UPDATE%_" == "_update_" goto Skip_Clean
-.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " %args% clean
-:Skip_Clean
-.\bootstrap\jam0 -f build.jam --toolset=%BOOST_JAM_TOOLSET% "--toolset-root=%BOOST_JAM_TOOLSET_ROOT% " %args%
-:Skip_Jam
-
-:Finish
diff --git a/tools/build/v2/engine/build.jam b/tools/build/v2/engine/build.jam
deleted file mode 100644
index 1efc3f1f0b..0000000000
--- a/tools/build/v2/engine/build.jam
+++ /dev/null
@@ -1,1013 +0,0 @@
-#~ Copyright 2002-2007 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Clean env vars of any "extra" empty values.
-for local v in ARGV CC CFLAGS LIBS
-{
- local values ;
- for local x in $($(v))
- {
- if $(x) != ""
- {
- values += $(x) ;
- }
- }
- $(v) = $(values) ;
-}
-
-# Platform related specifics.
-if $(OS) = NT { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; }
-else { rule .path { return "$(<:J=/)" ; } }
-
-. = "." ;
-./ ?= "" ;
-
-# Info about what we are building.
-_VERSION_ = 3 1 19 ;
-NAME = boost-jam ;
-VERSION = $(_VERSION_:J=$(.)) ;
-RELEASE = 1 ;
-LICENSE = LICENSE_1_0 ;
-
-# Generate development debug binaries?
-if --debug in $(ARGV)
-{
- debug = true ;
-}
-
-if --profile in $(ARGV)
-{
- profile = true ;
-}
-
-# Attempt to generate and/or build the grammar?
-if --grammar in $(ARGV)
-{
- grammar = true ;
-}
-
-# Do we need to add a default build type argument?
-if ! ( --release in $(ARGV) ) &&
- ! ( --debug in $(ARGV) ) &&
- ! ( --profile in $(ARGV) )
-{
- ARGV += --release ;
-}
-
-# Enable, and configure, Python hooks.
-with-python = ;
-python-location = [ MATCH --with-python=(.*) : $(ARGV) ] ;
-if $(python-location)
-{
- with-python = true ;
-}
-if $(with-python)
-{
- if $(OS) = NT
- {
- --python-include = [ .path $(python-location) include ] ;
- --python-lib = ;
- for local v in 27 26 25 24 23 22
- {
- --python-lib ?=
- [ GLOB [ .path $(python-location) libs ] : "python$(v).lib" ]
- [ GLOB $(python-location) [ .path $(python-location) libs ]
- $(Path) $(PATH) $(path) : "python$(v).dll" ]
- ;
- if ! $(--python-lib[2])
- {
- --python-lib = ;
- }
- }
- --python-lib = $(--python-lib[1]) ;
- }
- else if $(OS) = MACOSX
- {
- --python-include = [ .path $(python-location) Headers ] ;
- --python-lib = $(python-location) Python ;
- }
- else
- {
- --python-include = ;
- --python-lib = ;
- for local v in 2.7 2.6 2.5 2.4 2.3 2.2
- {
- local inc = [ GLOB [ .path $(python-location) include ] : python$(v) ] ;
- local lib = [ GLOB [ .path $(python-location) lib ] : libpython$(v)* ] ;
- if $(inc) && $(lib)
- {
- --python-include ?= $(inc) ;
- --python-lib ?= $(lib[1]:D) python$(v) ;
- }
- }
- }
-}
-
-# Boehm GC?
-if --gc in $(ARGV)
-{
- --boehm-gc = true ;
-}
-if $(--boehm-gc)
-{
- --extra-include += [ .path [ PWD ] "boehm_gc" "include" ] ;
-}
-
-# Duma?
-if --duma in $(ARGV)
-{
- --duma = true ;
-}
-if $(--duma)
-{
- --extra-include += [ .path [ PWD ] "duma" ] ;
-}
-
-# An explicit root for the toolset? (trim spaces)
-toolset-root = [ MATCH --toolset-root=(.*) : $(ARGV) ] ;
-{
- local t = [ MATCH "[ ]*(.*)" : $(toolset-root:J=" ") ] ;
- toolset-root = ;
- while $(t)
- {
- t = [ MATCH "([^ ]+)([ ]*)(.*)" : $(t) ] ;
- toolset-root += $(t[1]) ;
- if $(t[3]) { toolset-root += $(t[2]) ; }
- t = $(t[3]) ;
- }
- toolset-root = $(toolset-root:J="") ;
-}
-
-# Configure the implemented toolsets. These are minimal
-# commands and options to compile the full Jam. When
-# adding new toolsets make sure to add them to the
-# "known" list also.
-
-rule toolset ( name command .type ? : opt.out + : opt.define * : flags * : linklibs * )
-{
- .type ?= "" ;
- tool.$(name)$(.type).cc ?= $(command) ;
- tool.$(name)$(.type).opt.out ?= $(opt.out) ;
- tool.$(name)$(.type).opt.define ?= $(opt.define) ;
- tool.$(name)$(.type).flags ?= $(flags) ;
- tool.$(name)$(.type).linklibs ?= $(linklibs) ;
- if ! $(name) in $(toolsets) { toolsets += $(name) ; }
-}
-
-rule if-os ( os + : yes-opt * : no-opt * )
- { if $(os) in $(OS) { return $(yes-opt) ; } else { return $(no-opt) ; } }
-
-rule opt ( type : yes-opt * : no-opt * )
- { if $(type) in $(ARGV) { return $(yes-opt) ; } else { return $(no-opt) ; } }
-
-## HP-UX aCC compiler
-toolset acc cc : "-o " : -D
- : -Ae
- [ opt --release : -s -O3 ]
- [ opt --debug : -g -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Borland C++ 5.5.x
-toolset borland bcc32 : -e -n : /D
- : -WC -w- -q "-I$(toolset-root)Include" "-L$(toolset-root)Lib"
- [ opt --release : -O2 -vi -w-inl ]
- [ opt --debug : -v -Od -vi- ]
- -I$(--python-include) -I$(--extra-include)
- : $(--python-lib[1]) ;
-## Generic Unix cc
-if ! $(CC) { CC = cc ; }
-toolset cc $(CC) : "-o " : -D
- : $(CFLAGS)
- [ opt --release : -s -O ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : $(LIBS) -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Comeau C/C++ 4.x
-toolset como como : "-o " : -D
- : --c
- [ opt --release : --inlining ]
- [ opt --debug : --no_inlining ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Clang Linux 2.8+
-toolset clang clang : "-o " : -D
- : -Wno-unused -Wno-format
- [ opt --release : -Os ]
- [ opt --debug : -g -O0 -fno-inline ]
- [ opt --profile : -finline-functions -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## MacOSX Darwin, using GCC 2.9.x, 3.x
-toolset darwin cc : "-o " : -D
- :
- [ opt --release : -Wl,-x -O3 -finline-functions ]
- [ opt --debug : -g -O0 -fno-inline -pg ]
- [ opt --profile : -Wl,-x -O3 -finline-functions -g -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## GCC 2.x, 3.x, 4.x
-toolset gcc gcc : "-o " : -D
- : -pedantic -fno-strict-aliasing
- [ opt --release : [ opt --symbols : -g : -s ] -O3 ]
- [ opt --debug : -g -O0 -fno-inline ]
- [ opt --profile : -O3 -g -pg ]
- -I$(--python-include) -I$(--extra-include) -Wno-long-long
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## GCC 2.x, 3.x on CYGWIN but without cygwin1.dll
-toolset gcc-nocygwin gcc : "-o " : -D
- : -s -O3 -mno-cygwin
- [ opt --release : -finline-functions ]
- [ opt --debug : -s -O3 -fno-inline -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Intel C/C++ for Darwin
-toolset intel-darwin icc : "-o " : -D
- :
- [ opt --release : -O3 ]
- [ opt --debug : -g -O0 -p ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Intel C/C++ for Linux
-toolset intel-linux icc : "-o " : -D
- :
- [ opt --release : -Xlinker -s -O3 ]
- [ opt --debug : -g -O0 -p ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Intel C/C++ for Win32
-toolset intel-win32 icl : /Fe : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /GB ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## KCC ?
-toolset kcc KCC : "-o " : -D
- :
- [ opt --release : -s +K2 ]
- [ opt --debug : -g +K0 ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Borland Kylix
-toolset kylix bc++ : -o : -D
- : -tC -q
- [ opt --release : -O2 -vi -w-inl ]
- [ opt --debug : -v -Od -vi- ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Metrowerks CodeWarrior 8.x
-{
- # Even though CW can compile all files at once, it crashes if it tries in the bjam case.
- local mwcc = ; if $(OS) = NT { mwcc = mwcc ; } else { mwcc = mwc$(OSPLAT:L) ; }
- mwcc ?= mwcc ;
- toolset metrowerks $(mwcc) : "-o " : -D
- : -c -lang c -subsystem console -cwd include
- [ opt --release : -runtime ss -opt full -inline all ]
- [ opt --debug : -runtime ssd -opt none -inline off ]
- -I$(--python-include) -I$(--extra-include) ;
- toolset metrowerks $(mwcc) .link : "-o " :
- : -subsystem console -lkernel32.lib -ladvapi32.lib -luser32.lib
- [ opt --release : -runtime ss ]
- [ opt --debug : -runtime ssd ]
- : $(--python-lib[1]) ;
-}
-## MINGW GCC
-toolset mingw gcc : "-o " : -D
- :
- [ opt --release : -s -O3 -finline-functions ]
- [ opt --debug : -g -O0 -fno-inline -pg ]
- -I$(--python-include) -I$(--extra-include)
- : $(--python-lib[2]) ;
-## MIPS Pro
-toolset mipspro cc : "-o " : -D
- :
- [ opt --release : -s -O3 -g0 -INLINE:none ]
- [ opt --debug : -g -O0 -INLINE ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Microsoft Visual Studio C++ 6.x
-toolset msvc cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
- [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## QNX 6.x GCC 3.x/2.95.3
-toolset qcc qcc : "-o " : -D
- : -Wc,-pedantic -Wc,-fno-strict-aliasing
- [ opt --release : [ opt --symbols : -g ] -O3 -Wc,-finline-functions ]
- [ opt --debug : -g -O0 -Wc,-fno-inline ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Qlogic Pathscale 2.4
-toolset pathscale pathcc : "-o " : -D
- :
- [ opt --release : -s -Ofast -O3 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Portland Group Pgi 6.2
-toolset pgi pgcc : "-o " : -D
- :
- [ opt --release : -s -O3 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Sun Workshop 6 C++
-toolset sun cc : "-o " : -D
- :
- [ opt --release : -s -xO3 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Sun Workshop 6 C++ (old alias)
-toolset sunpro cc : "-o " : -D
- :
- [ opt --release : -s -xO3 ]
- [ opt --debug : -g ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## Compaq Alpha CXX
-toolset tru64cxx cc : "-o " : -D
- :
- [ opt --release : -s -O5 -inline speed ]
- [ opt --debug : -g -O0 -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) ;
-## IBM VisualAge C++
-toolset vacpp xlc : "-o " : -D
- :
- [ opt --release : -s -O3 -qstrict -qinline ]
- [ opt --debug : -g -qNOOPTimize -qnoinline -pg ]
- -I$(--python-include) -I$(--extra-include)
- : -L$(--python-lib[1]) -l$(--python-lib[2]) [ if-os AIX : -bmaxdata:0x40000000 ] ;
-## Microsoft Visual C++ .NET 7.x
-toolset vc7 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /ML /O2 /Ob2 /Gy /GF /GA /GB ]
- [ opt --debug : /MLd /DEBUG /Z7 /Od /Ob0 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## Microsoft Visual C++ 2005
-toolset vc8 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## Microsoft Visual C++ 2008
-toolset vc9 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-## Microsoft Visual C++ 2010
-toolset vc10 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-toolset vc11 cl : /Fe /Fe /Fd /Fo : -D
- : /nologo
- [ opt --release : /MT /O2 /Ob2 /Gy /GF /GA /wd4996 ]
- [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ]
- -I$(--python-include) -I$(--extra-include)
- : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ;
-
-# First set the build commands and options according to the
-# preset toolset.
-toolset = [ MATCH --toolset=(.*) : $(ARGV) ] ;
-if ! $(toolset)
-{
- # For some reason, the following test does not catch empty toolset.
- ECHO "###" ;
- ECHO "###" No toolset specified. Please use --toolset option. ;
- ECHO "###" ;
- ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
- EXIT "###" ;
-}
-if ! $(toolset) in $(toolsets)
-{
- ECHO "###" ;
- ECHO "###" Unknown toolset: $(toolset) ;
- ECHO "###" ;
- ECHO "###" Known toolsets are: $(toolsets:J=", ") ;
- EXIT "###" ;
-}
---cc = $(tool.$(toolset).cc) ;
-if $(tool.$(toolset).opt.out[2])
-{
- if $(tool.$(toolset).opt.out[1]) = $(tool.$(toolset).opt.out[2])
- {
- --out = $(tool.$(toolset).opt.out[1]) ;
- --dir = $(tool.$(toolset).opt.out[3-]) ;
- }
- else
- {
- --bin = $(tool.$(toolset).opt.out[1]) ;
- --dir = $(tool.$(toolset).opt.out[2-]) ;
- }
-}
-else
-{
- --out = $(tool.$(toolset).opt.out) ;
-}
---def = $(tool.$(toolset).opt.define) ;
---flags = $(tool.$(toolset).flags) ;
---defs = $(tool.$(toolset).defines) ;
---libs = $(tool.$(toolset).linklibs) ;
-if $(tool.$(toolset).link.cc)
-{
- --link = $(tool.$(toolset).link.cc) ;
- if $(tool.$(toolset).link.opt.out[2])
- {
- if $(tool.$(toolset).link.opt.out[1]) = $(tool.$(toolset).link.opt.out[2])
- {
- --link-out = $(tool.$(toolset).link.opt.out[1]) ;
- --link-dir = $(tool.$(toolset).link.opt.out[3-]) ;
- }
- else
- {
- --link-bin = $(tool.$(toolset).link.opt.out[1]) ;
- --link-dir = $(tool.$(toolset).link.opt.out[2-]) ;
- }
- }
- else
- {
- --link-out = $(tool.$(toolset).link.opt.out) ;
- }
- --link-def = $(tool.$(toolset).link.opt.define) ;
- --link-flags = $(tool.$(toolset).link.flags) ;
- --link-defs = $(tool.$(toolset).link.defines) ;
- --link-libs = $(tool.$(toolset).link.linklibs) ;
-}
-
-# Put executables in platform-specific subdirectory.
-locate-target = $(LOCATE_TARGET) ;
-if $(OSPLAT)
-{
- locate-target ?= bin$(.)$(OS:L)$(OSPLAT:L) ;
- platform = $(OS:L)$(OSPLAT:L) ;
-}
-else
-{
- locate-target ?= bin$(.)$(OS:L) ;
- platform = $(OS:L) ;
-}
-if $(debug)
-{
- locate-target = [ .path $(locate-target)$(.)debug ] ;
-}
-if $(profile)
-{
- locate-target = [ .path $(locate-target)$(.)profile ] ;
-}
-else
-{
- locate-target = [ .path $(locate-target) ] ;
-}
-
-if --show-locate-target in $(ARGV)
-{
- ECHO $(locate-target) ;
-}
-
-# We have some different files for UNIX, and NT.
-jam.source =
- command.c compile.c constants.c debug.c function.c glob.c
- hash.c hcache.c headers.c hdrmacro.c
- jam.c jambase.c jamgram.c
- lists.c make.c make1.c mem.c object.c
- option.c output.c parse.c regexp.c rules.c
- scan.c search.c subst.c w32_getreg.c
- timestamp.c variable.c modules.c strings.c filesys.c
- builtins.c pwd.c class.c native.c md5.c modules/set.c
- modules/path.c modules/regex.c modules/property-set.c
- modules/sequence.c modules/order.c
- ;
-if $(OS) = NT
-{
- jam.source += execnt.c filent.c pathunix.c ;
-}
-else
-{
- jam.source += execunix.c fileunix.c pathunix.c ;
-}
-
-# Debug assertions, or not.
-if ! $(debug) || --noassert in $(ARGV)
-{
- --defs += NDEBUG ;
-}
-
-# Enable some optional features.
---defs += OPT_HEADER_CACHE_EXT ;
---defs += OPT_GRAPH_DEBUG_EXT ;
---defs += OPT_SEMAPHORE ;
---defs += OPT_AT_FILES ;
---defs += OPT_DEBUG_PROFILE ;
-
-# Bug fixes
---defs += OPT_FIX_TARGET_VARIABLES_EXT ;
-#~ --defs += OPT_NO_EXTERNAL_VARIABLE_SPLIT ;
-
-# Improvements
---defs += OPT_IMPROVED_PATIENCE_EXT ;
-
-# Use Boehm GC memory allocator?
-if $(--boehm-gc)
-{
- --defs += OPT_BOEHM_GC ;
- if $(debug)
- {
- --defs += GC_DEBUG ;
- }
-}
-
-if $(--duma)
-{
- --defs += OPT_DUMA ;
-}
-
-if ( $(OS) = NT ) && ! NT in $(--defs)
-{
- --defs += NT ;
-}
---defs += YYSTACKSIZE=5000 ;
-
-if $(with-python)
-{
- --defs += HAVE_PYTHON ;
-}
-
-if $(debug)
-{
- --defs += BJAM_NEWSTR_NO_ALLOCATE ;
-}
-
-
-# The basic symbolic targets...
-NOTFILE all clean dist ;
-ALWAYS clean ;
-
-# Utility rules and actions...
-rule .clean
-{
- [DELETE] clean : $(<) ;
-}
-if $(OS) = NT { actions piecemeal together existing [DELETE] {
- del /F /Q "$(>)"
-} }
-if $(UNIX) = true { actions piecemeal together existing [DELETE] {
- rm -f "$(>)"
-} }
-if $(OS) = NT {
- --chmod+w = "attrib -r " ;
-}
-if $(UNIX) = true {
- --chmod+w = "chmod +w " ;
-}
-
-rule .mkdir
-{
- NOUPDATE $(<) ;
- if $(<:P) { DEPENDS $(<) : $(<:P) ; .mkdir $(<:P) ; }
- if ! $(md<$(<)>) { [MKDIR] $(<) ; md<$(<)> = - ; }
-}
-if $(OS) = NT { actions [MKDIR] {
- md "$(<)"
-} }
-if $(UNIX) = true { actions [MKDIR] {
- mkdir "$(<)"
-} }
-
-rule .exe
-{
- local exe = $(<) ;
- if $(OS) = NT || ( $(UNIX) = true && $(OS) = CYGWIN ) { exe = $(exe:S=.exe) ; }
- LOCATE on $(exe) = $(locate-target) ;
- DEPENDS all : $(exe) ;
- .mkdir $(locate-target) ;
- if $(--link)
- {
- local objs = ;
- for local s in $(>)
- {
- # Translate any subdir elements into a simple file name.
- local o = [ MATCH "([^/]+)[/]?(.+)" : $(s) ] ;
- o = $(o:J=_) ;
- o = $(o:S=.o) ;
- objs += $(o) ;
- LOCATE on $(o) = $(locate-target) ;
- DEPENDS $(exe) : $(o) ;
- DEPENDS $(o) : $(s) ;
- DEPENDS $(o) : $(locate-target) ;
- [COMPILE] $(o) : $(s) ;
- .clean $(o) ;
- }
- DEPENDS $(exe) : $(objs) ;
- DEPENDS $(exe) : $(locate-target) ;
- [COMPILE.LINK] $(exe) : $(objs) ;
- .clean $(exe) ;
- }
- else
- {
- DEPENDS $(exe) : $(>) ;
- DEPENDS $(exe) : $(locate-target) ;
- [COMPILE] $(exe) : $(>) ;
- .clean $(exe) ;
- }
- return $(exe) ;
-}
-if ! $(--def[2]) { actions [COMPILE] {
- "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def)$(--defs)" "$(--flags)" "$(>)" "$(--libs)"
-} }
-else { actions [COMPILE] {
- "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def[1])$(--defs:J=$(--def[2]))$(--def[3])" "$(--flags)" "$(>)" "$(--libs)"
-} }
-
-actions [COMPILE.LINK] {
- "$(--link)" "$(--link-bin)$(<:D=)" "$(--link-dir)$(<:D)$(./)" "$(--link-out)$(<)" "$(--link-def)$(--link-defs)" "$(--link-flags)" "$(>)" "$(--link-libs)"
-}
-
-rule .link
-{
- DEPENDS all : $(<) ;
- DEPENDS $(<) : $(>) ;
- [LINK] $(<) : $(>) ;
- .clean $(<) ;
-}
-if $(OS) = NT { actions [LINK] {
- copy "$(>)" "$(<)"
-} }
-if $(UNIX) = true { actions [LINK] {
- ln -fs "$(>)" "$(<)"
-} }
-
-rule .copy
-{
- DEPENDS all : $(<) ;
- DEPENDS $(<) : $(>) ;
- [COPY] $(<) : $(>) ;
- .clean $(<) ;
-}
-
-# Will be redefined later.
-actions [COPY]
-{
-}
-
-
-rule .move
-{
- DEPENDS $(<) : $(>) ;
- [MOVE] $(<) : $(>) ;
-}
-if $(OS) = NT { actions [MOVE] {
- del /f "$(<)"
- rename "$(>)" "$(<)"
-} }
-if $(UNIX) = true { actions [MOVE] {
- mv -f "$(>)" "$(<)"
-} }
-
-# Generate the grammar tokens table, and the real yacc grammar.
-rule .yyacc
-{
- local exe = [ .exe yyacc : yyacc.c ] ;
- NOUPDATE $(exe) ;
- DEPENDS $(<) : $(exe) $(>) ;
- LEAVES $(<) ;
- yyacc.exe on $(<) = $(exe:R=$(locate-target)) ;
- [YYACC] $(<) : $(>) ;
-}
-actions [YYACC] {
- $(--chmod+w)$(<[1])
- $(--chmod+w)$(<[2])
- "$(yyacc.exe)" "$(<)" "$(>)"
-}
-if $(grammar)
-{
- .yyacc jamgram.y jamgramtab.h : jamgram.yy ;
-}
-else if $(debug)
-{
- .exe yyacc : yyacc.c ;
-}
-
-# How to build the grammar.
-if $(OS) = NT
-{
- SUFEXE = .exe ;
- # try some other likely spellings...
- PATH ?= $(Path) ;
- PATH ?= $(path) ;
-}
-SUFEXE ?= "" ;
-
-yacc ?= [ GLOB $(PATH) : yacc$(SUFEXE) ] ;
-yacc ?= [ GLOB $(PATH) : bison$(SUFEXE) ] ;
-yacc ?= [ GLOB "$(ProgramFiles:J= )\\GnuWin32\\bin" "C:\\Program Files\\GnuWin32\\bin" : bison$(SUFEXE) ] ;
-yacc = $(yacc[1]) ;
-switch $(yacc:D=:S=)
-{
- case bison : yacc += -d --yacc ;
- case yacc : yacc += -d ;
-}
-if $(debug) && $(yacc)
-{
- yacc += -t -v ;
-}
-yacc += $(YACCFLAGS) ;
-
-rule .yacc
-{
- DEPENDS $(<) : $(>) ;
- LEAVES $(<) ;
- [YACC] $(<) : $(>) ;
-}
-if $(OS) = NT { actions [YACC] {
- "$(yacc)" "$(>)"
- if not errorlevel 1 (
- del /f "$(<[1])"
- rename y.tab$(<[1]:S) "$(<[1])"
- del /f $(<[2])
- rename y.tab$(<[2]:S) "$(<[2])"
- ) else set _error_ =
-} }
-if $(UNIX) = true { actions [YACC] {
- if ` "$(yacc)" "$(>)" ` ; then
- mv -f y.tab$(<[1]:S) "$(<[1])"
- mv -f y.tab$(<[2]:S) "$(<[2])"
- else
- exit 1
- fi
-} }
-if $(grammar) && ! $(yacc)
-{
- EXIT "Could not find the 'yacc' tool, and therefore can not build the grammar." ;
-}
-if $(grammar) && $(yacc)
-{
- .yacc jamgram.c jamgram.h : jamgram.y ;
-}
-
-# How to build the compiled in jambase.
-rule .mkjambase
-{
- local exe = [ .exe mkjambase : mkjambase.c ] ;
- DEPENDS $(<) : $(exe) $(>) ;
- LEAVES $(<) ;
- mkjambase.exe on $(<) = $(exe:R=$(locate-target)) ;
- [MKJAMBASE] $(<) : $(>) ;
-}
-actions [MKJAMBASE] {
- $(--chmod+w)$(<)
- $(mkjambase.exe) "$(<)" "$(>)"
-}
-if $(debug)
-{
- .mkjambase jambase.c : Jambase ;
-}
-
-# How to build Jam.
-rule .jam
-{
- $(>).exe = [ .exe $(>) : $(jam.source) ] ;
- DEPENDS all : $($(>).exe) ;
-
- # Make a copy under the old name.
- $(<).exe = $(<:S=$($(>).exe:S)) ;
- LOCATE on $($(<).exe) = $(locate-target) ;
- .copy $($(<).exe) : $($(>).exe) ;
- DEPENDS all : $($(<).exe) ;
-}
-.jam bjam : b2 ;
-
-
-# Scan sources for header dependencies.
-# WARNING: Yes those are *REAL TABS* below. DO NOT CHANGE,
-# under any circumstances, to spaces!! And the tabs
-# indenting this are so that if someone is in the mood to
-# replace tabs they hit this comment, and hopefully notice
-# their error.
-rule .scan
-{
- HDRRULE on $(<:D=) = .hdr.scan ;
- HDRSCAN on $(<:D=) = "^[ ]*#[ ]*include[ ]*([<\"][^\">]*[\">]).*$" ;
-}
-rule .hdr.scan
-{
- local hdrs = [ GLOB . : $(>:D=) ] ;
- INCLUDES $(<:D=) : $(hdrs:D=) ;
- HDRRULE on $(>:D=) = .hdr.scan ;
- HDRSCAN on $(>:D=) = "^[ ]*#[ ]*include[ ]*([<\"][^\">]*[\">]).*$" ;
-}
-.scan [ GLOB . : *.c ] ;
-
-# Distribution making from here on out. Assumes that
-# the docs are already built as html at ../doc/html. If
-# they aren't, then the docs are not included in the dist
-# archive.
-dist.license =
- [ GLOB . : $(LICENSE).txt ]
- ;
-dist.license = $(dist.license:D=)
- [ GLOB [ .path .. .. .. ] : $(LICENSE).txt ]
- [ GLOB [ .path .. boost ] : $(LICENSE).txt ] ;
-dist.docs =
- [ GLOB . : *.png *.css *.html ]
- ;
-dist.docs = $(dist.docs:D=)
- [ GLOB [ .path images ] : *.png ]
- [ GLOB [ .path jam ] : *.html ]
- ;
-dist.source =
- [ GLOB . : *.c *.h ]
- ;
-dist.source = $(dist.source:D=)
- $(dist.license[1])
- $(dist.docs)
- build.jam build.bat build.sh
- Jambase
- jamgram.y jamgram.yy
- [ .path modules set.c ]
- [ .path modules path.c ]
- [ .path modules regex.c ]
- [ .path modules property-set.c ]
- [ .path modules sequence.c ]
- [ .path modules order.c ]
- [ GLOB [ .path boehm_gc ] : * ]
- [ GLOB [ .path boehm_gc include ] : * ]
- [ GLOB [ .path boehm_gc include private ] : * ]
- [ GLOB [ .path boehm_gc cord ] : * ]
- [ GLOB [ .path boehm_gc Mac_files ] : * ]
- [ GLOB [ .path boehm_gc tests ] : * ]
- [ GLOB [ .path boehm_gc doc ] : * ]
- ;
-dist.bin =
- bjam
- ;
-dist.bin =
- $(dist.license[1])
- $(dist.bin:S=$(bjam.exe:S))
- ;
-
-if $(OS) = NT
-{
- zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7z.exe" ] ;
- zip ?= [ GLOB "$(ProgramFiles:J= )\\7-ZIP" "C:\\Program Files\\7-ZIP" : "7zn.exe" ] ;
- zip ?= [ GLOB $(PATH) : zip.exe ] ;
- zip ?= zip ;
- zip = $(zip[1]) ;
- switch $(zip:D=:S=)
- {
- case 7z* : zip += a -r -tzip -mx=9 ;
- case zip : zip += -9r ;
- }
- actions piecemeal [PACK] {
- "$(zip)" "$(<)" "$(>)"
- }
- actions piecemeal [ZIP] {
- "$(zip)" "$(<)" "$(>)"
- }
- actions piecemeal [COPY] {
- copy /Y "$(>)" "$(<)" >NUL:
- }
-}
-if $(UNIX) = true
-{
- tar ?= [ GLOB $(PATH) : star bsdtar tar ] ;
- tar = $(tar[1]) ;
- switch $(tar:D=:S=)
- {
- case star : tar += -c artype=pax -D -d -to-stdout ;
- case * : tar += -c -f - ;
- }
- actions [PACK] {
- "$(tar)" "$(>)" | gzip -c9 > "$(<)"
- }
- #~ actions [PACK] {
- #~ tar cf "$(<:S=.tar)" "$(>)"
- #~ }
- actions [ZIP] {
- gzip -c9 "$(>)" > "$(<)"
- }
- actions [COPY] {
- cp -Rpf "$(>)" "$(<)"
- }
-}
-
-# The single binary, compressed.
-rule .binary
-{
- local zip = ;
- if $(OS) = NT { zip = $($(<).exe:S=.zip) ; }
- if $(UNIX) = true { zip = $($(<).exe:S=.tgz) ; }
- zip = $(zip:S=)-$(VERSION)-$(RELEASE)-$(platform)$(zip:S) ;
- DEPENDS $(zip) : $($(<).exe) ;
- DEPENDS dist : $(zip) ;
- #~ LOCATE on $(zip) = $(locate-target) ;
- if $(OS) = NT { [ZIP] $(zip) : $($(<).exe) ; }
- if $(UNIX) = true { [PACK] $(zip) : $($(<).exe) ; }
- .clean $(zip) ;
-}
-
-# Package some file.
-rule .package ( dst-dir : src-files + )
-{
- local dst-files ;
- local src-files-actual ;
- for local src-path in $(src-files)
- {
- if ! [ GLOB $(src-path:P) : $(src-path:B) ] || [ CHECK_IF_FILE $(src-path) ]
- {
- local src-subdir = $(src-path:D) ;
- local src-file = $(src-path) ;
- while $(src-subdir:D) { src-subdir = $(src-subdir:D) ; }
- if $(src-subdir) = ".."
- {
- src-file = $(src-file:D=) ;
- }
- dst-files += $(src-file:R=$(dst-dir)) ;
- src-files-actual += $(src-path) ;
- }
- }
-
- local pack = ;
- if $(OS) = NT { pack = $(dst-dir).zip ; }
- if $(UNIX) = true { pack = $(dst-dir).tgz ; }
-
- DEPENDS dist : $(pack) ;
- DEPENDS $(pack) : $(dst-files) ;
-
- local dst-files-queue = $(dst-files) ;
- for local src-path in $(src-files-actual)
- {
- local dst-file = $(dst-files-queue[1]) ;
- dst-files-queue = $(dst-files-queue[2-]) ;
- DEPENDS $(dst-file) : $(src-path) $(dst-file:D) ;
- .mkdir $(dst-file:D) ;
-
- [COPY] $(dst-file) : $(src-path) ;
- .clean $(dst-file) ;
- }
-
- [PACK] $(pack) : $(dst-files) ;
- .clean $(pack) ;
-}
-
-# RPM distro file.
-rpm-tool = [ GLOB $(PATH) : "rpmbuild" ] ;
-rpm-tool ?= [ GLOB $(PATH) : "rpm" ] ;
-rpm-tool = $(rpm-tool[1]) ;
-rule .rpm ( name : source )
-{
- local rpm-arch = ;
- switch $(OSPLAT)
- {
- case X86 : rpm-arch ?= i386 ;
- case PPC : rpm-arch ?= ppc ;
- case AXP : rpm-arch ?= alpha ;
-        # no guarantee for these:
- case IA64 : rpm-arch ?= ia64 ;
- case ARM : rpm-arch ?= arm ;
- case SPARC : rpm-arch ?= sparc ;
- case * : rpm-arch ?= other ;
- }
- local target = $(name)-rpm ;
- NOTFILE $(target) ;
- DEPENDS dist : $(target) ;
- DEPENDS $(target) : $(name).$(rpm-arch).rpm $(name).src.rpm ;
- DEPENDS $(name).$(rpm-arch).rpm : $(source) ;
- DEPENDS $(name).src.rpm : $(name).$(rpm-arch).rpm ;
- docs on $(target) = $(dist.docs:J=" ") ;
- arch on $(target) = $(rpm-arch) ;
- if $(rpm-arch) = ppc { target-opt on $(target) = --target= ; }
- else { target-opt on $(target) = "--target " ; }
- [RPM] $(target) : $(source) ;
- .clean $(name).$(rpm-arch).rpm $(name).src.rpm ;
-}
-actions [RPM] {
- set -e
- export BOOST_JAM_TOOLSET="$(toolset)"
- $(rpm-tool) -ta $(target-opt)$(arch) $(>) | tee rpm.out
- cp `grep -e '^Wrote:' rpm.out | sed 's/^Wrote: //'` .
- rm -f rpm.out
-}
-
-# The distribution targets. Don't bother with the targets if a
-# distribution build is not requested.
-if dist in $(ARGV)
-{
- #~ .binary bjam ;
- .package $(NAME)-$(VERSION) : $(dist.source) ;
- .package $(NAME)-$(VERSION)-$(RELEASE)-$(platform) : $(dist.bin) ;
- if $(rpm-tool)
- {
- #~ .rpm $(NAME)-$(VERSION)-$(RELEASE) : $(NAME)-$(VERSION).tgz ;
- }
-}
diff --git a/tools/build/v2/engine/build.sh b/tools/build/v2/engine/build.sh
deleted file mode 100755
index e3a4498b7a..0000000000
--- a/tools/build/v2/engine/build.sh
+++ /dev/null
@@ -1,303 +0,0 @@
-#!/bin/sh
-
-#~ Copyright 2002-2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Reset the toolset.
-BOOST_JAM_TOOLSET=
-
-# Run a command, and echo before doing so. Also checks the exit
-# status and quits if there was an error.
-echo_run ()
-{
- echo "$@"
- $@
- r=$?
- if test $r -ne 0 ; then
- exit $r
- fi
-}
-
-# Print an error message, and exit with a status of 1.
-error_exit ()
-{
- echo "###"
- echo "###" "$@"
- echo "###"
- echo "### You can specify the toolset as the argument, i.e.:"
- echo "### ./build.sh gcc"
- echo "###"
- echo "### Toolsets supported by this script are:"
- echo "### acc, como, darwin, gcc, intel-darwin, intel-linux, kcc, kylix,"
- echo "### mipspro, mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp"
- echo "###"
- echo "### A special toolset; cc, is available which is used as a fallback"
- echo "### when a more specific toolset is not found and the cc command is"
- echo "### detected. The 'cc' toolset will use the CC, CFLAGS, and LIBS"
- echo "### envrironment variables, if present."
- echo "###"
- exit 1
-}
-
-# Check that a command is in the PATH.
-test_path ()
-{
- if `command -v command 1>/dev/null 2>/dev/null`; then
- command -v $1 1>/dev/null 2>/dev/null
- else
- hash $1 1>/dev/null 2>/dev/null
- fi
-}
-
-# Check that the OS name, as returned by "uname", is as given.
-test_uname ()
-{
- if test_path uname; then
- test `uname` = $*
- fi
-}
-
-# Try and guess the toolset to bootstrap the build with...
-Guess_Toolset ()
-{
- if test -r /mingw/bin/gcc ; then
- BOOST_JAM_TOOLSET=mingw
- BOOST_JAM_TOOLSET_ROOT=/mingw/
- elif test_uname Darwin ; then BOOST_JAM_TOOLSET=darwin
- elif test_uname IRIX ; then BOOST_JAM_TOOLSET=mipspro
- elif test_uname IRIX64 ; then BOOST_JAM_TOOLSET=mipspro
- elif test_uname OSF1 ; then BOOST_JAM_TOOLSET=tru64cxx
- elif test_uname QNX && test_path qcc ; then BOOST_JAM_TOOLSET=qcc
- elif test_path gcc ; then BOOST_JAM_TOOLSET=gcc
- elif test_path icc ; then BOOST_JAM_TOOLSET=intel-linux
- elif test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0
- elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80
- elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
- elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
- elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET=intel-linux
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
- elif test_path pgcc ; then BOOST_JAM_TOOLSET=pgi
- elif test_path pathcc ; then BOOST_JAM_TOOLSET=pathscale
- elif test_path xlc ; then BOOST_JAM_TOOLSET=vacpp
- elif test_path como ; then BOOST_JAM_TOOLSET=como
- elif test_path KCC ; then BOOST_JAM_TOOLSET=kcc
- elif test_path bc++ ; then BOOST_JAM_TOOLSET=kylix
- elif test_path aCC ; then BOOST_JAM_TOOLSET=acc
- elif test_uname HP-UX ; then BOOST_JAM_TOOLSET=acc
- elif test -r /opt/SUNWspro/bin/cc ; then
- BOOST_JAM_TOOLSET=sunpro
- BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
- # Test for "cc" as the default fallback.
- elif test_path $CC ; then BOOST_JAM_TOOLSET=cc
- elif test_path cc ; then
- BOOST_JAM_TOOLSET=cc
- CC=cc
- fi
- if test "$BOOST_JAM_TOOLSET" = "" ; then
- error_exit "Could not find a suitable toolset."
- fi
-}
-
-# The one option we support in the invocation
-# is the name of the toolset to force building
-# with.
-case "$1" in
- --guess-toolset) Guess_Toolset ; echo "$BOOST_JAM_TOOLSET" ; exit 1 ;;
- -*) Guess_Toolset ;;
- ?*) BOOST_JAM_TOOLSET=$1 ; shift ;;
- *) Guess_Toolset ;;
-esac
-BOOST_JAM_OPT_JAM="-o bootstrap/jam0"
-BOOST_JAM_OPT_MKJAMBASE="-o bootstrap/mkjambase0"
-BOOST_JAM_OPT_YYACC="-o bootstrap/yyacc0"
-case $BOOST_JAM_TOOLSET in
- mingw)
- if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/gcc ; then
- export PATH=${BOOST_JAM_TOOLSET_ROOT}bin:$PATH
- fi
- BOOST_JAM_CC="gcc -DNT"
- ;;
-
- gcc)
- BOOST_JAM_CC=gcc
- ;;
-
- darwin)
- BOOST_JAM_CC=cc
- ;;
-
- intel-darwin)
- BOOST_JAM_CC=icc
- ;;
-
- intel-linux)
- if test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/cc/9.0/
- elif test -r /opt/intel_cc_80/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel_cc_80/
- elif test -r /opt/intel/compiler70/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler70/ia32/
- elif test -r /opt/intel/compiler60/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler60/ia32/
- elif test -r /opt/intel/compiler50/ia32/bin/iccvars.sh ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/
- fi
- if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh ; then
- # iccvars doesn't change LD_RUN_PATH. We adjust LD_RUN_PATH
- # here in order not to have to rely on ld.so.conf knowing the
- # icc library directory. We do this before running iccvars.sh
- # in order to allow a user to add modifications to LD_RUN_PATH
- # in iccvars.sh.
- if test -z "${LD_RUN_PATH}"; then
- LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib"
- else
- LD_RUN_PATH="${BOOST_JAM_TOOLSET_ROOT}lib:${LD_RUN_PATH}"
- fi
- export LD_RUN_PATH
- . ${BOOST_JAM_TOOLSET_ROOT}bin/iccvars.sh
- fi
- BOOST_JAM_CC=icc
- ;;
-
- vacpp)
- BOOST_JAM_CC=xlc
- ;;
-
- como)
- BOOST_JAM_CC="como --c"
- ;;
-
- kcc)
- BOOST_JAM_CC=KCC
- ;;
-
- kylix)
- BOOST_JAM_CC=bc++
- ;;
-
- mipspro)
- BOOST_JAM_CC=cc
- ;;
-
- pathscale)
- BOOST_JAM_CC=pathcc
- ;;
-
- pgi)
- BOOST_JAM_CC=pgcc
- ;;
-
- sun*)
- if test -z "${BOOST_JAM_TOOLSET_ROOT}" -a -r /opt/SUNWspro/bin/cc ; then
- BOOST_JAM_TOOLSET_ROOT=/opt/SUNWspro/
- fi
- if test -r "${BOOST_JAM_TOOLSET_ROOT}bin/cc" ; then
- PATH=${BOOST_JAM_TOOLSET_ROOT}bin:${PATH}
- export PATH
- fi
- BOOST_JAM_CC=cc
- ;;
-
- clang*)
- BOOST_JAM_CC="clang -Wno-unused -Wno-format"
- BOOST_JAM_TOOLSET=clang
- ;;
-
- tru64cxx)
- BOOST_JAM_CC=cc
- ;;
-
- acc)
- BOOST_JAM_CC="cc -Ae"
- ;;
-
- cc)
- if test -z "$CC" ; then CC=cc ; fi
- BOOST_JAM_CC=$CC
- BOOST_JAM_OPT_JAM="$BOOST_JAM_OPT_JAM $CFLAGS $LIBS"
- BOOST_JAM_OPT_MKJAMBASE="$BOOST_JAM_OPT_MKJAMBASE $CFLAGS $LIBS"
- BOOST_JAM_OPT_YYACC="$BOOST_JAM_OPT_YYACC $CFLAGS $LIBS"
- ;;
-
- qcc)
- BOOST_JAM_CC=qcc
- ;;
-
- *)
- error_exit "Unknown toolset: $BOOST_JAM_TOOLSET"
- ;;
-esac
-
-echo "###"
-echo "### Using '$BOOST_JAM_TOOLSET' toolset."
-echo "###"
-
-YYACC_SOURCES="yyacc.c"
-MKJAMBASE_SOURCES="mkjambase.c"
-BJAM_SOURCES="\
- command.c compile.c constants.c debug.c function.c glob.c hash.c\
- hdrmacro.c headers.c jam.c jambase.c jamgram.c lists.c make.c make1.c\
- object.c option.c output.c parse.c pathunix.c regexp.c\
- rules.c scan.c search.c subst.c timestamp.c variable.c modules.c\
- strings.c filesys.c builtins.c pwd.c class.c native.c md5.c w32_getreg.c\
- modules/set.c modules/path.c modules/regex.c modules/property-set.c\
- modules/sequence.c modules/order.c"
-case $BOOST_JAM_TOOLSET in
- mingw)
- BJAM_SOURCES="${BJAM_SOURCES} execnt.c filent.c"
- ;;
-
- *)
- BJAM_SOURCES="${BJAM_SOURCES} execunix.c fileunix.c"
- ;;
-esac
-
-BJAM_UPDATE=
-if test "$1" = "--update" -o "$2" = "--update" -o "$3" = "--update" -o "$4" = "--update" ; then
- BJAM_UPDATE="update"
-fi
-if test "${BJAM_UPDATE}" = "update" -a ! -x "./bootstrap/jam0" ; then
- BJAM_UPDATE=
-fi
-
-if test "${BJAM_UPDATE}" != "update" ; then
- echo_run rm -rf bootstrap
- echo_run mkdir bootstrap
- if test ! -r jamgram.y -o ! -r jamgramtab.h ; then
- echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_YYACC} ${YYACC_SOURCES}
- if test -x "./bootstrap/yyacc0" ; then
- echo_run ./bootstrap/yyacc0 jamgram.y jamgramtab.h jamgram.yy
- fi
- fi
- if test ! -r jamgram.c -o ! -r jamgram.h ; then
- if test_path yacc ; then YACC="yacc -d"
- elif test_path bison ; then YACC="bison -y -d --yacc"
- fi
- echo_run $YACC jamgram.y
- mv -f y.tab.c jamgram.c
- mv -f y.tab.h jamgram.h
- fi
- if test ! -r jambase.c ; then
- echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_MKJAMBASE} ${MKJAMBASE_SOURCES}
- if test -x "./bootstrap/mkjambase0" ; then
- echo_run ./bootstrap/mkjambase0 jambase.c Jambase
- fi
- fi
- echo_run ${BOOST_JAM_CC} ${BOOST_JAM_OPT_JAM} ${BJAM_SOURCES}
-fi
-if test -x "./bootstrap/jam0" ; then
- if test "${BJAM_UPDATE}" != "update" ; then
- echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" "$@" clean
- fi
- echo_run ./bootstrap/jam0 -f build.jam --toolset=$BOOST_JAM_TOOLSET "--toolset-root=$BOOST_JAM_TOOLSET_ROOT" "$@"
-fi
diff --git a/tools/build/v2/engine/builtins.c b/tools/build/v2/engine/builtins.c
deleted file mode 100644
index 07eaa1534a..0000000000
--- a/tools/build/v2/engine/builtins.c
+++ /dev/null
@@ -1,2312 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "builtins.h"
-#include "rules.h"
-#include "filesys.h"
-#include "object.h"
-#include "regexp.h"
-#include "frames.h"
-#include "hash.h"
-#include "strings.h"
-#include "pwd.h"
-#include "pathsys.h"
-#include "make.h"
-#include "hdrmacro.h"
-#include "compile.h"
-#include "native.h"
-#include "variable.h"
-#include "timestamp.h"
-#include "md5.h"
-#include "constants.h"
-#include <ctype.h>
-
-#if defined(USE_EXECUNIX)
-# include <sys/types.h>
-# include <sys/wait.h>
-#else
-/*
- NT does not have wait() and associated macros; it uses the return value
- of system() instead. Status code groups are documented at
- http://msdn.microsoft.com/en-gb/library/ff565436.aspx
-*/
-# define WIFEXITED(w) (((w) & 0XFFFFFF00) == 0)
-# define WEXITSTATUS(w)(w)
-#endif
-
-/*
- * builtins.c - builtin jam rules
- *
- * External routines:
- *
- * load_builtin() - define builtin rules
- *
- * Internal routines:
- *
- * builtin_depends() - DEPENDS/INCLUDES rule.
- * builtin_echo() - ECHO rule.
- * builtin_exit() - EXIT rule.
- * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule.
- * builtin_glob() - GLOB rule.
- * builtin_match() - MATCH rule.
- *
- * 01/10/01 (seiwald) - split from compile.c
- */
-
-
-/*
- * compile_builtin() - define builtin rules
- */
-
-#define P0 (PARSE *)0
-#define C0 (OBJECT *)0
-
-#if defined( OS_NT ) || defined( OS_CYGWIN )
- LIST * builtin_system_registry ( FRAME *, int );
- LIST * builtin_system_registry_names( FRAME *, int );
-#endif
-
-int glob( const char * s, const char * c );
-
-void backtrace ( FRAME * );
-void backtrace_line ( FRAME * );
-void print_source_line( FRAME * );
-
-
-RULE * bind_builtin( const char * name_, LIST * (* f)( FRAME *, int flags ), int flags, const char * * args )
-{
- FUNCTION * func;
- RULE * result;
- OBJECT * name = object_new( name_ );
-
- func = function_builtin( f, flags, args );
-
- result = new_rule_body( root_module(), name, func, 1 );
-
- function_free( func );
-
- object_free( name );
-
- return result;
-}
-
-
-RULE * duplicate_rule( const char * name_, RULE * other )
-{
- OBJECT * name = object_new( name_ );
- RULE * result = import_rule( other, root_module(), name );
- object_free( name );
- return result;
-}
-
-
-void load_builtins()
-{
- duplicate_rule( "Always",
- bind_builtin( "ALWAYS",
- builtin_flags, T_FLAG_TOUCHED, 0 ) );
-
- duplicate_rule( "Depends",
- bind_builtin( "DEPENDS",
- builtin_depends, 0, 0 ) );
-
- duplicate_rule( "echo",
- duplicate_rule( "Echo",
- bind_builtin( "ECHO",
- builtin_echo, 0, 0 ) ) );
-
- {
- const char * args[] = { "message", "*", ":", "result-value", "?", 0 };
- duplicate_rule( "exit",
- duplicate_rule( "Exit",
- bind_builtin( "EXIT",
- builtin_exit, 0, args ) ) );
- }
-
- {
- const char * args[] = { "directories", "*", ":", "patterns", "*", ":", "case-insensitive", "?", 0 };
- duplicate_rule( "Glob",
- bind_builtin( "GLOB", builtin_glob, 0, args ) );
- }
-
- {
- const char * args[] = { "patterns", "*", 0 };
- bind_builtin( "GLOB-RECURSIVELY",
- builtin_glob_recursive, 0, args );
- }
-
- duplicate_rule( "Includes",
- bind_builtin( "INCLUDES",
- builtin_depends, 1, 0 ) );
-
- {
- const char * args[] = { "targets", "*", ":", "targets-to-rebuild", "*", 0 };
- bind_builtin( "REBUILDS",
- builtin_rebuilds, 0, args );
- }
-
- duplicate_rule( "Leaves",
- bind_builtin( "LEAVES",
- builtin_flags, T_FLAG_LEAVES, 0 ) );
-
- duplicate_rule( "Match",
- bind_builtin( "MATCH",
- builtin_match, 0, 0 ) );
-
- {
- const char * args[] = { "string", ":", "delimiters" };
- bind_builtin( "SPLIT_BY_CHARACTERS",
- builtin_split_by_characters, 0, 0 );
- }
-
- duplicate_rule( "NoCare",
- bind_builtin( "NOCARE",
- builtin_flags, T_FLAG_NOCARE, 0 ) );
-
- duplicate_rule( "NOTIME",
- duplicate_rule( "NotFile",
- bind_builtin( "NOTFILE",
- builtin_flags, T_FLAG_NOTFILE, 0 ) ) );
-
- duplicate_rule( "NoUpdate",
- bind_builtin( "NOUPDATE",
- builtin_flags, T_FLAG_NOUPDATE, 0 ) );
-
- duplicate_rule( "Temporary",
- bind_builtin( "TEMPORARY",
- builtin_flags, T_FLAG_TEMP, 0 ) );
-
- bind_builtin( "ISFILE",
- builtin_flags, T_FLAG_ISFILE, 0 );
-
- duplicate_rule( "HdrMacro",
- bind_builtin( "HDRMACRO",
- builtin_hdrmacro, 0, 0 ) );
-
- /* FAIL_EXPECTED is used to indicate that the result of a target build
- * action should be inverted (ok <=> fail); this can be useful when
- * performing test runs from Jamfiles.
- */
- bind_builtin( "FAIL_EXPECTED",
- builtin_flags, T_FLAG_FAIL_EXPECTED, 0 );
-
- bind_builtin( "RMOLD",
- builtin_flags, T_FLAG_RMOLD, 0 );
-
- {
- const char * args[] = { "targets", "*", 0 };
- bind_builtin( "UPDATE",
- builtin_update, 0, args );
- }
-
- {
- const char * args[] = { "targets", "*",
- ":", "log", "?",
- ":", "ignore-minus-n", "?",
- ":", "ignore-minus-q", "?", 0 };
- bind_builtin( "UPDATE_NOW",
- builtin_update_now, 0, args );
- }
-
- {
- const char * args[] = { "string", "pattern", "replacements", "+", 0 };
- duplicate_rule( "subst",
- bind_builtin( "SUBST",
- builtin_subst, 0, args ) );
- }
-
- {
- const char * args[] = { "module", "?", 0 };
- bind_builtin( "RULENAMES",
- builtin_rulenames, 0, args );
- }
-
-
- {
- const char * args[] = { "module", "?", 0 };
- bind_builtin( "VARNAMES",
- builtin_varnames, 0, args );
- }
-
- {
- const char * args[] = { "module", "?", 0 };
- bind_builtin( "DELETE_MODULE",
- builtin_delete_module, 0, args );
- }
-
- {
- const char * args[] = { "source_module", "?",
- ":", "source_rules", "*",
- ":", "target_module", "?",
- ":", "target_rules", "*",
- ":", "localize", "?", 0 };
- bind_builtin( "IMPORT",
- builtin_import, 0, args );
- }
-
- {
- const char * args[] = { "module", "?", ":", "rules", "*", 0 };
- bind_builtin( "EXPORT",
- builtin_export, 0, args );
- }
-
- {
- const char * args[] = { "levels", "?", 0 };
- bind_builtin( "CALLER_MODULE",
- builtin_caller_module, 0, args );
- }
-
- {
- const char * args[] = { "levels", "?", 0 };
- bind_builtin( "BACKTRACE",
- builtin_backtrace, 0, args );
- }
-
- {
- const char * args[] = { 0 };
- bind_builtin( "PWD",
- builtin_pwd, 0, args );
- }
-
- {
- const char * args[] = { "modules_to_import", "+", ":", "target_module", "?", 0 };
- bind_builtin( "IMPORT_MODULE",
- builtin_import_module, 0, args );
- }
-
- {
- const char * args[] = { "module", "?", 0 };
- bind_builtin( "IMPORTED_MODULES",
- builtin_imported_modules, 0, args );
- }
-
- {
- const char * args[] = { "instance_module", ":", "class_module", 0 };
- bind_builtin( "INSTANCE",
- builtin_instance, 0, args );
- }
-
- {
- const char * args[] = { "sequence", "*", 0 };
- bind_builtin( "SORT",
- builtin_sort, 0, args );
- }
-
- {
- const char * args[] = { "path_parts", "*", 0 };
- bind_builtin( "NORMALIZE_PATH",
- builtin_normalize_path, 0, args );
- }
-
- {
- const char * args[] = { "args", "*", 0 };
- bind_builtin( "CALC",
- builtin_calc, 0, args );
- }
-
- {
- const char * args[] = { "module", ":", "rule", 0 };
- bind_builtin( "NATIVE_RULE",
- builtin_native_rule, 0, args );
- }
-
- {
- const char * args[] = { "module", ":", "rule", ":", "version", 0 };
- bind_builtin( "HAS_NATIVE_RULE",
- builtin_has_native_rule, 0, args );
- }
-
- {
- const char * args[] = { "module", "*", 0 };
- bind_builtin( "USER_MODULE",
- builtin_user_module, 0, args );
- }
-
- {
- const char * args[] = { 0 };
- bind_builtin( "NEAREST_USER_LOCATION",
- builtin_nearest_user_location, 0, args );
- }
-
- {
- const char * args[] = { "file", 0 };
- bind_builtin( "CHECK_IF_FILE",
- builtin_check_if_file, 0, args );
- }
-
-#ifdef HAVE_PYTHON
- {
- const char * args[] = { "python-module", ":", "function", ":",
- "jam-module", ":", "rule-name", 0 };
- bind_builtin( "PYTHON_IMPORT_RULE",
- builtin_python_import_rule, 0, args );
- }
-#endif
-
-# if defined( OS_NT ) || defined( OS_CYGWIN )
- {
- const char * args[] = { "key_path", ":", "data", "?", 0 };
- bind_builtin( "W32_GETREG",
- builtin_system_registry, 0, args );
- }
-
- {
- const char * args[] = { "key_path", ":", "result-type", 0 };
- bind_builtin( "W32_GETREGNAMES",
- builtin_system_registry_names, 0, args );
- }
-# endif
-
- {
- const char * args[] = { "command", ":", "*", 0 };
- duplicate_rule( "SHELL",
- bind_builtin( "COMMAND",
- builtin_shell, 0, args ) );
- }
-
- {
- const char * args[] = { "string", 0 };
- bind_builtin( "MD5",
- builtin_md5, 0, args ) ;
- }
-
- {
- const char * args[] = { "name", ":", "mode", 0 };
- bind_builtin( "FILE_OPEN",
- builtin_file_open, 0, args );
- }
-
- {
- const char * args[] = { "string", ":", "width", 0 };
- bind_builtin( "PAD",
- builtin_pad, 0, args );
- }
-
- {
- const char * args[] = { "targets", "*", 0 };
- bind_builtin( "PRECIOUS",
- builtin_precious, 0, args );
- }
-
- {
- const char * args [] = { 0 };
- bind_builtin( "SELF_PATH", builtin_self_path, 0, args );
- }
-
- {
- const char * args [] = { "path", 0 };
- bind_builtin( "MAKEDIR", builtin_makedir, 0, args );
- }
-
- /* Initialize builtin modules. */
- init_set();
- init_path();
- init_regex();
- init_property_set();
- init_sequence();
- init_order();
-}
-
-
-/*
- * builtin_calc() - CALC rule.
- *
- * The CALC rule performs simple mathematical operations on two arguments.
- */
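-
-/* A minimal usage sketch (the variable name is hypothetical): CALC takes the
- * left operand, the operator ("+" or "-") and the right operand as one list
- * and returns the result as a string, e.g.
- *
- *     local sum = [ CALC 2 + 3 ] ;   # "5"
- */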
-
-LIST * builtin_calc( FRAME * frame, int flags )
-{
- LIST * arg = lol_get( frame->args, 0 );
-
- LIST * result = L0;
- long lhs_value;
- long rhs_value;
- long result_value;
- char buffer [ 16 ];
- char const * lhs;
- char const * op;
- char const * rhs;
- LISTITER iter = list_begin( arg ), end = list_end( arg );
-
- if ( iter == end ) return L0;
- lhs = object_str( list_item( iter ) );
-
- iter = list_next( iter );
- if ( iter == end ) return L0;
- op = object_str( list_item( iter ) );
-
- iter = list_next( iter );
- if ( iter == end ) return L0;
- rhs = object_str( list_item( iter ) );
-
- lhs_value = atoi( lhs );
- rhs_value = atoi( rhs );
-
- if ( strcmp( "+", op ) == 0 )
- {
- result_value = lhs_value + rhs_value;
- }
- else if ( strcmp( "-", op ) == 0 )
- {
- result_value = lhs_value - rhs_value;
- }
- else
- {
- return L0;
- }
-
- sprintf( buffer, "%ld", result_value );
- result = list_push_back( result, object_new( buffer ) );
- return result;
-}
-
-
-/*
- * builtin_depends() - DEPENDS/INCLUDES rule.
- *
- * The DEPENDS/INCLUDES builtin rule appends each of the listed sources to the
- * dependency/includes list of each of the listed targets. It binds both the
- * targets and sources as TARGETs.
- */
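-
-/* A minimal usage sketch (target names are hypothetical):
- *
- *     DEPENDS app : main.o util.o ;   # app now depends on both object files
- */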
-
-LIST * builtin_depends( FRAME * frame, int flags )
-{
- LIST * targets = lol_get( frame->args, 0 );
- LIST * sources = lol_get( frame->args, 1 );
- LISTITER iter, end;
-
- iter = list_begin( targets ), end = list_end( targets );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
-
- /* If doing INCLUDES, switch to the TARGET's include */
- /* TARGET, creating it if needed. The internal include */
- /* TARGET shares the name of its parent. */
-
- if ( flags )
- {
- if ( !t->includes )
- {
- t->includes = copytarget( t );
- t->includes->original_target = t;
- }
- t = t->includes;
- }
-
- t->depends = targetlist( t->depends, sources );
- }
-
- /* Enter reverse links */
- iter = list_begin( sources ), end = list_end( sources );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * s = bindtarget( list_item( iter ) );
- s->dependants = targetlist( s->dependants, targets );
- }
-
- return L0;
-}
-
-
-/*
- * builtin_rebuilds() - REBUILDS rule.
- *
- * The REBUILDS builtin rule appends each of the rebuild-targets listed in its
- * second argument to the rebuilds list of each of the targets listed in its
- * first argument.
- */
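-
-/* A minimal usage sketch (target names are hypothetical):
- *
- *     REBUILDS libfoo : test-results ;   # updating libfoo forces test-results to be rebuilt
- */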
-
-LIST * builtin_rebuilds( FRAME * frame, int flags )
-{
- LIST * targets = lol_get( frame->args, 0 );
- LIST * rebuilds = lol_get( frame->args, 1 );
- LISTITER iter = list_begin( targets ), end = list_end( targets );
-
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
- t->rebuilds = targetlist( t->rebuilds, rebuilds );
- }
-
- return L0;
-}
-
-
-/*
- * builtin_echo() - ECHO rule.
- *
- * The ECHO builtin rule echoes the targets to the user. No other actions are
- * taken.
- */
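-
-/* A minimal usage sketch (the variable name is hypothetical):
- *
- *     ECHO "building in" $(TOP) ;   # prints the message and the variable's value
- */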
-
-LIST * builtin_echo( FRAME * frame, int flags )
-{
- list_print( lol_get( frame->args, 0 ) );
- printf( "\n" );
- fflush( stdout );
- return L0;
-}
-
-
-/*
- * builtin_exit() - EXIT rule.
- *
- * The EXIT builtin rule echoes the targets to the user and exits the program
- * with a failure status.
- */
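-
-/* A minimal usage sketch: the optional second list sets the exit status.
- *
- *     EXIT "configuration failed" : 2 ;
- */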
-
-LIST * builtin_exit( FRAME * frame, int flags )
-{
- LIST * code = lol_get( frame->args, 1 );
- list_print( lol_get( frame->args, 0 ) );
- printf( "\n" );
- if ( !list_empty( code ) )
- {
- exit( atoi( object_str( list_front( code ) ) ) );
- }
- else
- {
- exit( EXITBAD ); /* yeech */
- }
- return L0;
-}
-
-
-/*
- * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule.
- *
- * Builtin_flags() marks the target with the appropriate flag, for use by make0().
- * It binds each target as a TARGET.
- */
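-
-/* A minimal usage sketch (the target name is hypothetical):
- *
- *     NOTFILE install ;   # mark the pseudo-target so no file is expected for it
- */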
-
-LIST * builtin_flags( FRAME * frame, int flags )
-{
- LIST * l = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( l ), end = list_end( l );
- for ( ; iter != end; iter = list_next( iter ) )
- bindtarget( list_item( iter ) )->flags |= flags;
- return L0;
-}
-
-
-/*
- * builtin_globbing() - GLOB rule.
- */
-
-struct globbing
-{
- LIST * patterns;
- LIST * results;
- LIST * case_insensitive;
-};
-
-
-static void downcase_inplace( char * p )
-{
- for ( ; *p; ++p )
- *p = tolower( *p );
-}
-
-
-static void builtin_glob_back
-(
- void * closure,
- OBJECT * file,
- int status,
- time_t time
-)
-{
- PROFILE_ENTER( BUILTIN_GLOB_BACK );
-
- struct globbing * globbing = (struct globbing *)closure;
- LIST * l;
- PATHNAME f;
- string buf[ 1 ];
- LISTITER iter, end;
-
- /* Null out directory for matching. We wish we had file_dirscan() pass up a
- * PATHNAME.
- */
- path_parse( object_str( file ), &f );
- f.f_dir.len = 0;
-
- /* For globbing, we unconditionally ignore current and parent directory
- * items. Since these items always exist, there is no reason why a caller of
- * GLOB would want to see them. We could also change file_dirscan(), but
- * then paths with embedded "." and ".." would not work anywhere.
- */
- if ( !strcmp( f.f_base.ptr, "." ) || !strcmp( f.f_base.ptr, ".." ) )
- {
- PROFILE_EXIT( BUILTIN_GLOB_BACK );
- return;
- }
-
- string_new( buf );
- path_build( &f, buf, 0 );
-
- if ( globbing->case_insensitive )
- downcase_inplace( buf->value );
-
- iter = list_begin( globbing->patterns ), end = list_end( globbing->patterns );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- if ( !glob( object_str( list_item( iter ) ), buf->value ) )
- {
- globbing->results = list_push_back( globbing->results, object_copy( file ) );
- break;
- }
- }
-
- string_free( buf );
-
- PROFILE_EXIT( BUILTIN_GLOB_BACK );
-}
-
-
-static LIST * downcase_list( LIST * in )
-{
- LIST * result = L0;
- LISTITER iter = list_begin( in ), end = list_end( in );
-
- string s[ 1 ];
- string_new( s );
-
- for ( ; iter != end; iter = list_next( iter ) )
- {
- string_append( s, object_str( list_item( iter ) ) );
- downcase_inplace( s->value );
- result = list_push_back( result, object_new( s->value ) );
- string_truncate( s, 0 );
- }
-
- string_free( s );
- return result;
-}
-
-
-LIST * builtin_glob( FRAME * frame, int flags )
-{
- LIST * l = lol_get( frame->args, 0 );
- LIST * r = lol_get( frame->args, 1 );
-
- LISTITER iter, end;
- struct globbing globbing;
-
- globbing.results = L0;
- globbing.patterns = r;
-
- globbing.case_insensitive
-# if defined( OS_NT ) || defined( OS_CYGWIN )
- = l; /* Always case-insensitive if any files can be found. */
-# else
- = lol_get( frame->args, 2 );
-# endif
-
- if ( globbing.case_insensitive )
- globbing.patterns = downcase_list( r );
-
- iter = list_begin( l ), end = list_end( l );
- for ( ; iter != end; iter = list_next( iter ) )
- file_dirscan( list_item( iter ), builtin_glob_back, &globbing );
-
- if ( globbing.case_insensitive )
- list_free( globbing.patterns );
-
- return globbing.results;
-}
-
-
-static int has_wildcards( char const * str )
-{
- size_t const index = strcspn( str, "[]*?" );
- return str[ index ] == '\0' ? 0 : 1;
-}
-
-
-/*
- * If 'file' exists, append 'file' to 'list'. Returns 'list'.
- */
-
-static LIST * append_if_exists( LIST * list, OBJECT * file )
-{
- time_t time;
- timestamp( file, &time );
- return time > 0
- ? list_push_back( list, object_copy( file ) )
- : list;
-}
-
-
-LIST * glob1( OBJECT * dirname, OBJECT * pattern )
-{
- LIST * plist = list_new( object_copy(pattern) );
- struct globbing globbing;
-
- globbing.results = L0;
- globbing.patterns = plist;
-
- globbing.case_insensitive
-# if defined( OS_NT ) || defined( OS_CYGWIN )
- = plist; /* always case-insensitive if any files can be found */
-# else
- = L0;
-# endif
-
- if ( globbing.case_insensitive )
- globbing.patterns = downcase_list( plist );
-
- file_dirscan( dirname, builtin_glob_back, &globbing );
-
- if ( globbing.case_insensitive )
- list_free( globbing.patterns );
-
- list_free( plist );
-
- return globbing.results;
-}
-
-
-LIST * glob_recursive( const char * pattern )
-{
- LIST * result = L0;
-
- /* Check if there are metacharacters in the pattern. */
- if ( !has_wildcards( pattern ) )
- {
- /* No metacharacters. Check if the path exists. */
- OBJECT * p = object_new( pattern );
- result = append_if_exists( result, p );
- object_free( p );
- }
- else
- {
- /* Have metacharacters in the pattern. Split into dir/name. */
- PATHNAME path[ 1 ];
- path_parse( pattern, path );
-
- if ( path->f_dir.ptr )
- {
- LIST * dirs = L0;
- string dirname[ 1 ];
- string basename[ 1 ];
- string_new( dirname );
- string_new( basename );
-
- string_append_range( dirname, path->f_dir.ptr,
- path->f_dir.ptr + path->f_dir.len );
-
- path->f_grist.ptr = 0;
- path->f_grist.len = 0;
- path->f_dir.ptr = 0;
- path->f_dir.len = 0;
- path_build( path, basename, 0 );
-
- dirs = has_wildcards( dirname->value )
- ? glob_recursive( dirname->value )
- : list_push_back( dirs, object_new( dirname->value ) );
-
- if ( has_wildcards( basename->value ) )
- {
- OBJECT * b = object_new( basename->value );
- LISTITER iter = list_begin( dirs ), end = list_end( dirs );
- for ( ; iter != end; iter = list_next( iter ) )
- result = list_append( result, glob1( list_item( iter ), b ) );
- object_free( b );
- }
- else
- {
- LISTITER iter = list_begin( dirs ), end = list_end( dirs );
- string file_string[ 1 ];
- string_new( file_string );
-
- /* No wildcard in basename. */
- for ( ; iter != end; iter = list_next( iter ) )
- {
- OBJECT * p;
- path->f_dir.ptr = object_str( list_item( iter ) );
- path->f_dir.len = strlen( object_str( list_item( iter ) ) );
- path_build( path, file_string, 0 );
-
- p = object_new( file_string->value );
-
- result = append_if_exists( result, p );
-
- object_free( p );
-
- string_truncate( file_string, 0 );
- }
-
- string_free( file_string );
- }
-
- string_free( dirname );
- string_free( basename );
-
- list_free( dirs );
- }
- else
- {
- /** No directory, just a pattern. */
- OBJECT * p = object_new( pattern );
- result = list_append( result, glob1( constant_dot, p ) );
- object_free( p );
- }
- }
-
- return result;
-}
-
-
-LIST * builtin_glob_recursive( FRAME * frame, int flags )
-{
- LIST * result = L0;
- LIST * l = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( l ), end = list_end( l );
- for ( ; iter != end; iter = list_next( iter ) )
- result = list_append( result, glob_recursive( object_str( list_item( iter ) ) ) );
- return result;
-}
-
-
-/*
- * builtin_match() - MATCH rule, regexp matching.
- */
-
-LIST * builtin_match( FRAME * frame, int flags )
-{
- LIST * l;
- LIST * r;
- LIST * result = L0;
- LISTITER l_iter, l_end, r_iter, r_end;
-
- string buf[ 1 ];
- string_new( buf );
-
- /* For each pattern */
-
- l = lol_get( frame->args, 0 );
- l_iter = list_begin( l ), l_end = list_end( l );
- for (; l_iter != l_end; l_iter = list_next( l_iter ) )
- {
- /* Result is cached and intentionally never freed. */
- regexp * re = regex_compile( list_item( l_iter ) );
-
- /* For each string to match against. */
- r = lol_get( frame->args, 1 );
- r_iter = list_begin( r ), r_end = list_end( r );
- for ( ; r_iter != r_end; r_iter = list_next( r_iter ) )
- {
- if ( regexec( re, object_str( list_item( r_iter ) ) ) )
- {
- int i;
- int top;
-
- /* Find highest parameter */
-
- for ( top = NSUBEXP; top-- > 1; )
- if ( re->startp[ top ] )
- break;
-
- /* And add all parameters up to highest onto list. */
- /* Must have parameters to have results! */
- for ( i = 1; i <= top; ++i )
- {
- string_append_range( buf, re->startp[ i ], re->endp[ i ] );
- result = list_push_back( result, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
- }
- }
- }
-
- string_free( buf );
- return result;
-}
-
-LIST * builtin_split_by_characters( FRAME * frame, int flags )
-{
- LIST * l1 = lol_get( frame->args, 0 );
- LIST * l2 = lol_get( frame->args, 1 );
-
- LIST * result = L0;
-
- string buf[ 1 ];
-
- const char * delimiters = object_str( list_front( l2 ) );
- char * t;
-
- string_copy( buf, object_str( list_front( l1 ) ) );
-
- t = strtok( buf->value, delimiters) ;
- while ( t )
- {
- result = list_push_back( result, object_new( t ) );
- t = strtok( NULL, delimiters );
- }
-
- string_free( buf );
-
- return result;
-}
-
-LIST * builtin_hdrmacro( FRAME * frame, int flags )
-{
- LIST * l = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( l ), end = list_end( l );
-
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
-
- /* Scan file for header filename macro definitions. */
- if ( DEBUG_HEADER )
- printf( "scanning '%s' for header file macro definitions\n",
- object_str( list_item( iter ) ) );
-
- macro_headers( t );
- }
-
- return L0;
-}
-
-
-/*
- * builtin_rulenames() - RULENAMES ( MODULE ? ).
- *
- * Returns a list of the non-local rule names in the given MODULE. If MODULE is
- * not supplied, returns the list of rule names in the global module.
- */
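-
-/* A minimal usage sketch (the module name is hypothetical):
- *
- *     local names = [ RULENAMES mymodule ] ;
- */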
-
-static void add_rule_name( void * r_, void * result_ )
-{
- RULE * r = (RULE *)r_;
- LIST * * result = (LIST * *)result_;
- if ( r->exported )
- *result = list_push_back( *result, object_copy( r->name ) );
-}
-
-
-LIST * builtin_rulenames( FRAME * frame, int flags )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- LIST * result = L0;
- module_t * source_module = bindmodule( !list_empty( arg0 ) ? list_front( arg0 ) : 0 );
-
- if ( source_module->rules )
- hashenumerate( source_module->rules, add_rule_name, &result );
- return result;
-}
-
-
-/*
- * builtin_varnames() - VARNAMES ( MODULE ? ).
- *
- * Returns a list of the variable names in the given MODULE. If MODULE is not
- * supplied, returns the list of variable names in the global module.
- */
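-
-/* A minimal usage sketch (the module name is hypothetical):
- *
- *     local vars = [ VARNAMES mymodule ] ;
- */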
-
-/* Helper function for builtin_varnames(), below. Used with hashenumerate(), it
- * appends the key of each element to the result list.
- */
-static void add_hash_key( void * np, void * result_ )
-{
- LIST * * result = (LIST * *)result_;
- *result = list_push_back( *result, object_copy( *(OBJECT * *)np ) );
-}
-
-
-LIST * builtin_varnames( FRAME * frame, int flags )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- LIST * result = L0;
- module_t * source_module = bindmodule( !list_empty(arg0) ? list_front(arg0) : 0 );
-
- struct hash * vars = source_module->variables;
-
- if ( vars )
- hashenumerate( vars, add_hash_key, &result );
- return result;
-}
-
-
-/*
- * builtin_delete_module() - DELETE_MODULE ( MODULE ? ).
- *
- * Clears all rules and variables from the given module.
- */
-
-LIST * builtin_delete_module( FRAME * frame, int flags )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- LIST * result = L0;
- module_t * source_module = bindmodule( !list_empty(arg0) ? list_front(arg0) : 0 );
- delete_module( source_module );
- return result;
-}
-
-
-static void unknown_rule( FRAME * frame, const char * key, module_t * module, OBJECT * rule_name )
-{
- const char * module_name = module->name ? object_str( module->name ) : "";
- backtrace_line( frame->prev );
- if ( module->name )
- {
- printf( "%s error: rule \"%s\" unknown in module \"%s.\"\n", key, object_str( rule_name ), object_str( module->name ) );
- }
- else
- {
- printf( "%s error: rule \"%s\" unknown in module \"\"\n", key, object_str( rule_name ) );
- }
- backtrace( frame->prev );
- exit( 1 );
-}
-
-
-/*
- * builtin_import() - IMPORT
- * (
- * SOURCE_MODULE ? :
- * SOURCE_RULES * :
- * TARGET_MODULE ? :
- * TARGET_RULES * :
- * LOCALIZE ?
- * )
- *
- * The IMPORT rule imports rules from the SOURCE_MODULE into the TARGET_MODULE
- * as local rules. If either SOURCE_MODULE or TARGET_MODULE is not supplied, it
- * refers to the global module. SOURCE_RULES specifies which rules from the
- * SOURCE_MODULE to import; TARGET_RULES specifies the names to give those rules
- * in TARGET_MODULE. If SOURCE_RULES contains a name which doesn't correspond to
- * a rule in SOURCE_MODULE, or if it contains a different number of items than
- * TARGET_RULES, an error is issued. If LOCALIZE is specified, the rules will be
- * executed in TARGET_MODULE, with corresponding access to its module local
- * variables.
- */
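-
-/* A minimal usage sketch (module and rule names are hypothetical): import the
- * rule "run" from module "testing" into the global module under the new name
- * "run-test":
- *
- *     IMPORT testing : run : : run-test ;
- */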
-
-LIST * builtin_import( FRAME * frame, int flags )
-{
- LIST * source_module_list = lol_get( frame->args, 0 );
- LIST * source_rules = lol_get( frame->args, 1 );
- LIST * target_module_list = lol_get( frame->args, 2 );
- LIST * target_rules = lol_get( frame->args, 3 );
- LIST * localize = lol_get( frame->args, 4 );
-
- module_t * target_module =
- bindmodule( !list_empty( target_module_list ) ? list_front( target_module_list ) : 0 );
- module_t * source_module =
- bindmodule( !list_empty( source_module_list ) ? list_front( source_module_list ) : 0 );
-
- LISTITER source_iter = list_begin( source_rules ), source_end = list_end( source_rules );
- LISTITER target_iter = list_begin( target_rules ), target_end = list_end( target_rules );
-
- for ( ;
- source_iter != source_end && target_iter != target_end;
- source_iter = list_next( source_iter ),
- target_iter = list_next( target_iter ) )
- {
- RULE * r;
- RULE * imported;
-
- if ( !source_module->rules ||
- !(r = (RULE *)hash_find( source_module->rules, list_item( source_iter ) ) ) )
- unknown_rule( frame, "IMPORT", source_module, list_item( source_iter ) );
-
- imported = import_rule( r, target_module, list_item( target_iter ) );
- if ( !list_empty( localize ) )
- rule_localize( imported, target_module );
- /* This rule is really part of some other module. Just refer to it here,
- * but do not let it out.
- */
- imported->exported = 0;
- }
-
- if ( source_iter != source_end || target_iter != target_end )
- {
- backtrace_line( frame->prev );
- printf( "import error: length of source and target rule name lists don't match!\n" );
- printf( " source: " );
- list_print( source_rules );
- printf( "\n target: " );
- list_print( target_rules );
- printf( "\n" );
- backtrace( frame->prev );
- exit( 1 );
- }
-
- return L0;
-}
-
-
-/*
- * builtin_export() - EXPORT ( MODULE ? : RULES * ).
- *
- * The EXPORT rule marks RULES from the SOURCE_MODULE as non-local (and thus
- * exportable). If an element of RULES does not name a rule in MODULE, an error
- * is issued.
- */
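-
-/* A minimal usage sketch (module and rule names are hypothetical):
- *
- *     EXPORT mymodule : myrule ;   # myrule may now be imported elsewhere
- */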
-
-LIST * builtin_export( FRAME * frame, int flags )
-{
- LIST * module_list = lol_get( frame->args, 0 );
- LIST * rules = lol_get( frame->args, 1 );
- module_t * m = bindmodule( !list_empty( module_list ) ? list_front( module_list ) : 0 );
-
- LISTITER iter = list_begin( rules ), end = list_end( rules );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- RULE * r;
-
- if ( !m->rules || !(r = (RULE *)hash_find( m->rules, list_item( iter ) ) ) )
- unknown_rule( frame, "EXPORT", m, list_item( iter ) );
-
- r->exported = 1;
- }
- return L0;
-}
-
-
-/*
- * get_source_line() - Retrieve the file and line number that should be
- * indicated for a given procedure in debug output or an error backtrace.
- */
-
-static void get_source_line( FRAME * frame, const char * * file, int * line )
-{
- if ( frame->file )
- {
- const char * f = object_str( frame->file );
- int l = frame->line;
- if ( !strcmp( f, "+" ) )
- {
- f = "jambase.c";
- l += 3;
- }
- *file = f;
- *line = l;
- }
- else
- {
- *file = "(builtin)";
- *line = -1;
- }
-}
-
-
-void print_source_line( FRAME * frame )
-{
- const char * file;
- int line;
-
- get_source_line( frame, &file, &line );
- if ( line < 0 )
- printf( "(builtin):" );
- else
- printf( "%s:%d:", file, line );
-}
-
-
-/*
- * backtrace_line() - print a single line of error backtrace for the given
- * frame.
- */
-
-void backtrace_line( FRAME * frame )
-{
- if ( frame == 0 )
- {
- printf( "(no frame):" );
- }
- else
- {
- print_source_line( frame );
- printf( " in %s\n", frame->rulename );
- }
-}
-
-
-/*
- * backtrace() - Print the entire backtrace from the given frame to the Jambase
- * which invoked it.
- */
-
-void backtrace( FRAME * frame )
-{
- if ( !frame ) return;
- while ( ( frame = frame->prev ) )
- backtrace_line( frame );
-}
-
-
-/*
- * builtin_backtrace() - A Jam version of the backtrace function, taking an
- * optional number of levels and returning a list of quadruples: FILENAME LINE
- * MODULE RULENAME describing each frame. Note that the module name is always
- * followed by a period.
- */
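-
-/* A minimal usage sketch: limit the trace to the three most recent frames.
- *
- *     local bt = [ BACKTRACE 3 ] ;
- */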
-
-LIST * builtin_backtrace( FRAME * frame, int flags )
-{
- LIST * levels_arg = lol_get( frame->args, 0 );
- int levels = !list_empty( levels_arg ) ? atoi( object_str( list_front( levels_arg ) ) ) : (int)( (unsigned int)(-1) >> 1 ) ;
-
- LIST * result = L0;
- for ( ; ( frame = frame->prev ) && levels ; --levels )
- {
- const char * file;
- int line;
- char buf[32];
- string module_name[1];
- get_source_line( frame, &file, &line );
- sprintf( buf, "%d", line );
- string_new( module_name );
- if ( frame->module->name )
- {
- string_append( module_name, object_str( frame->module->name ) );
- string_append( module_name, "." );
- }
- result = list_push_back( result, object_new( file ) );
- result = list_push_back( result, object_new( buf ) );
- result = list_push_back( result, object_new( module_name->value ) );
- result = list_push_back( result, object_new( frame->rulename ) );
- string_free( module_name );
- }
- return result;
-}
-
-
-/*
- * builtin_caller_module() - CALLER_MODULE ( levels ? )
- *
- * If levels is not supplied, returns the name of the module of the rule which
- * called the one calling this one. If levels is supplied, it is interpreted as
- * an integer specifying a number of additional levels of call stack to traverse
- * in order to locate the module in question. If no such module exists, returns
- * the empty list. Also returns the empty list when the module in question is
- * the global module. This rule is needed for implementing module import
- * behavior.
- */
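-
-/* A minimal usage sketch:
- *
- *     local caller = [ CALLER_MODULE ] ;   # module of the caller's caller
- */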
-
-LIST * builtin_caller_module( FRAME * frame, int flags )
-{
- LIST * levels_arg = lol_get( frame->args, 0 );
- int levels = !list_empty( levels_arg ) ? atoi( object_str( list_front( levels_arg ) ) ) : 0 ;
-
- int i;
- for ( i = 0; ( i < levels + 2 ) && frame->prev; ++i )
- frame = frame->prev;
-
- if ( frame->module == root_module() )
- return L0;
- else
- return list_new( object_copy( frame->module->name ) );
-}
-
-
-/*
- * Return the current working directory.
- *
- * Usage: pwd = [ PWD ] ;
- */
-
-LIST * builtin_pwd( FRAME * frame, int flags )
-{
- return pwd();
-}
-
-
-/*
- * Adds targets to the list of targets that jam will attempt to update.
- */
-
-LIST * builtin_update( FRAME * frame, int flags )
-{
- LIST * result = list_copy( targets_to_update() );
- LIST * arg1 = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( arg1 ), end = list_end( arg1 );
- clear_targets_to_update();
- for ( ; iter != end; iter = list_next( iter ) )
- mark_target_for_updating( object_copy( list_item( iter ) ) );
- return result;
-}
-
-extern int anyhow;
-int last_update_now_status;
-
-/* Takes a list of target names as its first argument and immediately
- updates them.
- The second parameter, if specified, is the descriptor (converted to a string)
- of a log file to which all build output is redirected.
- The third parameter, if non-empty, specifies that the -n option should have
- no effect -- that is, all out-of-date targets should be rebuilt.
- The fourth parameter, if non-empty, specifies that the -q option should have
- no effect -- that is, the build should not stop after the first failure.
-*/
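-
-/* A minimal usage sketch (the target name is hypothetical):
- *
- *     UPDATE_NOW docs : : ignore-minus-n ;   # build "docs" now, even under -n
- */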
-LIST * builtin_update_now( FRAME * frame, int flags )
-{
- LIST * targets = lol_get( frame->args, 0 );
- LIST * log = lol_get( frame->args, 1 );
- LIST * force = lol_get( frame->args, 2 );
- LIST * continue_ = lol_get( frame->args, 3 );
- int status;
- int original_stdout = 0;
- int original_stderr = 0;
- int original_noexec = 0;
- int original_quitquick = 0;
-
-
- if ( !list_empty( log ) )
- {
- int fd = atoi( object_str( list_front( log ) ) );
- /* Temporarily redirect stdout and stderr to the log file. */
- original_stdout = dup( 1 );
- original_stderr = dup( 2 );
- dup2 ( fd, 1 );
- dup2 ( fd, 2 );
- }
-
- if ( !list_empty( force ) )
- {
- original_noexec = globs.noexec;
- globs.noexec = 0;
- original_quitquick = globs.quitquick;
- globs.quitquick = 0;
- }
-
- if ( !list_empty( continue_ ) )
- {
- original_quitquick = globs.quitquick;
- globs.quitquick = 0;
- }
-
- status = make( targets, anyhow );
-
- if ( !list_empty( force ) )
- {
- globs.noexec = original_noexec;
- globs.quitquick = original_quitquick;
- }
-
- if ( !list_empty( continue_ ) )
- {
- globs.quitquick = original_quitquick;
- }
-
- if ( !list_empty( log ) )
- {
- /* Flush whatever stdio might have buffered, while descriptors
- 1 and 2 still refer to the log file. */
- fflush( stdout );
- fflush( stderr );
- dup2( original_stdout, 1 );
- dup2( original_stderr, 2 );
- close( original_stdout );
- close( original_stderr );
- }
-
- last_update_now_status = status;
-
- if ( status == 0 )
- return list_new( object_copy( constant_ok ) );
- else
- return L0;
-}
-
-
-LIST * builtin_import_module( FRAME * frame, int flags )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- LIST * arg2 = lol_get( frame->args, 1 );
- module_t * m = !list_empty( arg2 ) ? bindmodule( list_front( arg2 ) ) : root_module();
- import_module( arg1, m );
- return L0;
-}
-
-
-LIST * builtin_imported_modules( FRAME * frame, int flags )
-{
- LIST * arg0 = lol_get( frame->args, 0 );
- return imported_modules( bindmodule( !list_empty( arg0 ) ? list_front( arg0 ) : 0 ) );
-}
-
-
-LIST * builtin_instance( FRAME * frame, int flags )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- LIST * arg2 = lol_get( frame->args, 1 );
- module_t * const instance = bindmodule( list_front( arg1 ) );
- module_t * const class_module = bindmodule( list_front( arg2 ) );
- instance->class_module = class_module;
- module_set_fixed_variables( instance, class_module->num_fixed_variables );
- return L0;
-}
-
-
-LIST * builtin_sort( FRAME * frame, int flags )
-{
- LIST * arg1 = lol_get( frame->args, 0 );
- return list_sort( arg1 );
-}
-
-
-LIST * builtin_normalize_path( FRAME * frame, int flags )
-{
- LIST * arg = lol_get( frame->args, 0 );
-
- /* First, we iterate over all '/'-separated elements, starting from the end
- * of the string. If we see a '..', we remove the previous path element. If we
- * see a '.', we remove it. The removal is done by overwriting data using '\1'
- * in the string. After the whole string has been processed, we do a second
- * pass, removing all the entered '\1' characters.
- */
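-
- /* For instance (a hypothetical call), [ NORMALIZE_PATH a/b/.. : c ] joins the
- * arguments into "/a/b/../c" and, after both passes, yields "a/c".
- */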
-
- string in[ 1 ];
- string out[ 1 ];
- /* Last character of the part of string still to be processed. */
- char * end;
- /* Working pointer. */
- char * current;
- /* Number of '..' elements seen and not processed yet. */
- int dotdots = 0;
- int rooted = 0;
- OBJECT * result = 0;
- LISTITER arg_iter = list_begin( arg ), arg_end = list_end( arg );
-
- /* Make a copy of input: we should not change it. Prepend a '/' before it as
- * a guard for the algorithm later on and remember whether it was originally
- * rooted or not.
- */
- string_new( in );
- string_push_back( in, '/' );
- for ( ; arg_iter != arg_end; arg_iter = list_next( arg_iter ) )
- {
- if ( object_str( list_item( arg_iter ) )[ 0 ] != '\0' )
- {
- if ( in->size == 1 )
- rooted = ( ( object_str( list_item( arg_iter ) )[ 0 ] == '/' ) ||
- ( object_str( list_item( arg_iter ) )[ 0 ] == '\\' ) );
- else
- string_append( in, "/" );
- string_append( in, object_str( list_item( arg_iter ) ) );
- }
- }
-
- /* Convert \ into /. On Windows, paths using / and \ are equivalent, and we
- * want this function to obtain a canonic representation.
- */
- for ( current = in->value, end = in->value + in->size;
- current < end; ++current )
- if ( *current == '\\' )
- *current = '/';
-
- /* Now we remove any extra path elements by overwriting them with '\1'
- * characters and count how many more unused '..' path elements there are
- * remaining. Note that each remaining path element always starts with
- * a '/' character.
- */
- for ( end = in->value + in->size - 1; end >= in->value; )
- {
- /* Set 'current' to the next occurrence of '/', which always exists. */
- for ( current = end; *current != '/'; --current );
-
- if ( current == end )
- {
- /* Found a trailing or duplicate '/'. Remove it. */
- *current = '\1';
- }
- else if ( ( end - current == 1 ) && ( *(current + 1) == '.' ) )
- {
- /* Found '/.'. Remove them all. */
- *current = '\1';
- *(current + 1) = '\1';
- }
- else if ( ( end - current == 2 ) && ( *(current + 1) == '.' ) && ( *(current + 2) == '.' ) )
- {
- /* Found '/..'. Remove them all. */
- *current = '\1';
- *(current + 1) = '\1';
- *(current + 2) = '\1';
- ++dotdots;
- }
- else if ( dotdots )
- {
- memset( current, '\1', end - current + 1 );
- --dotdots;
- }
- end = current - 1;
- }
-
- string_new( out );
-
- /* Now we know that we need to add exactly dotdots '..' path elements to the
- * front and that our string is either empty or has a '/' as its first
- * significant character. If we have any dotdots remaining then the passed
- * path must not have been rooted, or else it is invalid and we return an
- * empty list.
- */
- if ( dotdots )
- {
- if ( rooted )
- {
- string_free( out );
- string_free( in );
- return L0;
- }
- do
- string_append( out, "/.." );
- while ( --dotdots );
- }
-
- /* Now we actually remove all the path characters marked for removal. */
- for ( current = in->value; *current; ++current )
- if ( *current != '\1' )
- string_push_back( out, *current );
-
- /* Here we know that our string contains no '\1' characters and is either
- * empty or has a '/' as its initial character. If the original path was not
- * rooted and we have a non-empty path we need to drop the initial '/'. If
- * the original path was rooted and we have an empty path we need to add
- * back the '/'.
- */
- result = object_new( out->size ? out->value + !rooted : ( rooted ? "/" : "." ) );
-
- string_free( out );
- string_free( in );
-
- return list_new( result );
-}
-
-
-LIST * builtin_native_rule( FRAME * frame, int flags )
-{
- LIST * module_name = lol_get( frame->args, 0 );
- LIST * rule_name = lol_get( frame->args, 1 );
-
- module_t * module = bindmodule( list_front( module_name ) );
-
- native_rule_t * np;
- if ( module->native_rules && (np = (native_rule_t *)hash_find( module->native_rules, list_front( rule_name ) ) ) )
- {
- new_rule_body( module, np->name, np->procedure, 1 );
- }
- else
- {
- backtrace_line( frame->prev );
- printf( "error: no native rule \"%s\" defined in module \"%s.\"\n",
- object_str( list_front( rule_name ) ), object_str( module->name ) );
- backtrace( frame->prev );
- exit( 1 );
- }
- return L0;
-}
-
-
-LIST * builtin_has_native_rule( FRAME * frame, int flags )
-{
- LIST * module_name = lol_get( frame->args, 0 );
- LIST * rule_name = lol_get( frame->args, 1 );
- LIST * version = lol_get( frame->args, 2 );
-
- module_t * module = bindmodule( list_front( module_name ) );
-
- native_rule_t * np;
- if ( module->native_rules && (np = (native_rule_t *)hash_find( module->native_rules, list_front( rule_name ) ) ) )
- {
- int expected_version = atoi( object_str( list_front( version ) ) );
- if ( np->version == expected_version )
- return list_new( object_copy( constant_true ) );
- }
- return L0;
-}
-
-
-LIST * builtin_user_module( FRAME * frame, int flags )
-{
- LIST * module_name = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( module_name ), end = list_end( module_name );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- module_t * m = bindmodule( list_item( iter ) );
- m->user_module = 1;
- }
- return L0;
-}
-
-
-LIST * builtin_nearest_user_location( FRAME * frame, int flags )
-{
- FRAME * nearest_user_frame =
- frame->module->user_module ? frame : frame->prev_user;
- if ( !nearest_user_frame )
- return L0;
-
- {
- LIST * result = L0;
- const char * file;
- int line;
- char buf[32];
-
- get_source_line( nearest_user_frame, &file, &line );
- sprintf( buf, "%d", line );
- result = list_push_back( result, object_new( file ) );
- result = list_push_back( result, object_new( buf ) );
- return result;
- }
-}
-
-
-LIST * builtin_check_if_file( FRAME * frame, int flags )
-{
- LIST * name = lol_get( frame->args, 0 );
- return file_is_file( list_front( name ) ) == 1
- ? list_new( object_copy( constant_true ) )
- : L0 ;
-}
-
-
-LIST * builtin_md5( FRAME * frame, int flags )
-{
- LIST * l = lol_get( frame->args, 0 );
- const char* s = object_str( list_front( l ) );
-
- md5_state_t state;
- md5_byte_t digest[16];
- char hex_output[16*2 + 1];
-
- int di;
-
- md5_init( &state );
- md5_append( &state, (const md5_byte_t *)s, strlen(s) );
- md5_finish( &state, digest );
-
- for (di = 0; di < 16; ++di)
- sprintf( hex_output + di * 2, "%02x", digest[di] );
-
- return list_new( object_new( hex_output ) );
-}
-
-LIST *builtin_file_open( FRAME * frame, int flags )
-{
- const char * name = object_str( list_front( lol_get( frame->args, 0 ) ) );
- const char * mode = object_str( list_front( lol_get( frame->args, 1 ) ) );
- int fd;
- char buffer[sizeof("4294967295")];
-
- if ( strcmp(mode, "w") == 0 )
- {
- fd = open( name, O_WRONLY|O_CREAT|O_TRUNC, 0666 );
- }
- else
- {
- fd = open( name, O_RDONLY );
- }
-
- if (fd != -1)
- {
- sprintf( buffer, "%d", fd );
- return list_new( object_new( buffer ) );
- }
- else
- {
- return L0;
- }
-}
-
-LIST *builtin_pad( FRAME * frame, int flags )
-{
- OBJECT * string = list_front( lol_get( frame->args, 0 ) );
- const char * width_s = object_str( list_front( lol_get( frame->args, 1 ) ) );
-
- int current = strlen( object_str( string ) );
- int desired = atoi( width_s );
- if (current >= desired)
- return list_new( object_copy( string ) );
- else
- {
- char * buffer = BJAM_MALLOC( desired + 1 );
- int i;
- LIST * result;
-
- strcpy( buffer, object_str( string ) );
- for ( i = current; i < desired; ++i )
- buffer[i] = ' ';
- buffer[desired] = '\0';
- result = list_new( object_new( buffer ) );
- BJAM_FREE( buffer );
- return result;
- }
-}
-
-LIST *builtin_precious( FRAME * frame, int flags )
-{
- LIST * targets = lol_get(frame->args, 0);
-
- LISTITER iter = list_begin( targets ), end = list_end( targets );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET* t = bindtarget( list_item( iter ) );
- t->flags |= T_FLAG_PRECIOUS;
- }
-
- return L0;
-}
-
-LIST *builtin_self_path( FRAME * frame, int flags )
-{
- extern const char * saved_argv0;
- char * p = executable_path( saved_argv0 );
- if ( p )
- {
- LIST* result = list_new( object_new( p ) );
- free( p );
- return result;
- }
- else
- {
- return L0;
- }
-}
-
-LIST *builtin_makedir( FRAME * frame, int flags )
-{
- LIST * path = lol_get( frame->args, 0 );
-
- if ( file_mkdir( object_str( list_front( path ) ) ) == 0 )
- {
- LIST * result = list_new( object_copy( list_front( path ) ) );
- return result;
- }
- else
- {
- return L0;
- }
-}
-
-#ifdef HAVE_PYTHON
-
-LIST * builtin_python_import_rule( FRAME * frame, int flags )
-{
- static int first_time = 1;
- const char * python_module = object_str( list_front( lol_get( frame->args, 0 ) ) );
- const char * python_function = object_str( list_front( lol_get( frame->args, 1 ) ) );
- OBJECT * jam_module = list_front( lol_get( frame->args, 2 ) );
- OBJECT * jam_rule = list_front( lol_get( frame->args, 3 ) );
-
- PyObject * pName;
- PyObject * pModule;
- PyObject * pDict;
- PyObject * pFunc;
-
- if ( first_time )
- {
- /* At the first invocation, we add the value of the global
- * EXTRA_PYTHONPATH to the sys.path Python variable.
- */
- LIST * extra = 0;
- module_t * outer_module = frame->module;
- LISTITER iter, end;
-
- first_time = 0;
-
- extra = var_get( root_module(), constant_extra_pythonpath );
-
- iter = list_begin( extra ), end = list_end( extra );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- string buf[ 1 ];
- string_new( buf );
- string_append( buf, "import sys\nsys.path.append(\"" );
- string_append( buf, object_str( list_item( iter ) ) );
- string_append( buf, "\")\n" );
- PyRun_SimpleString( buf->value );
- string_free( buf );
- }
- }
-
- pName = PyString_FromString( python_module );
- pModule = PyImport_Import( pName );
- Py_DECREF( pName );
-
- if ( pModule != NULL )
- {
- pDict = PyModule_GetDict( pModule );
- pFunc = PyDict_GetItemString( pDict, python_function );
-
- if ( pFunc && PyCallable_Check( pFunc ) )
- {
- module_t * m = bindmodule( jam_module );
- new_rule_body( m, jam_rule, function_python( pFunc, 0 ), 0 );
- }
- else
- {
- if ( PyErr_Occurred() )
- PyErr_Print();
- fprintf( stderr, "Cannot find function \"%s\"\n", python_function );
- }
- Py_DECREF( pModule );
- }
- else
- {
- PyErr_Print();
- fprintf( stderr, "Failed to load \"%s\"\n", python_module );
- }
- return L0;
-
-}
-
-#endif
-
-void lol_build( LOL * lol, const char * * elements )
-{
- LIST * l = L0;
- lol_init( lol );
-
- while ( elements && *elements )
- {
- if ( !strcmp( *elements, ":" ) )
- {
- lol_add( lol, l );
- l = L0 ;
- }
- else
- {
- l = list_push_back( l, object_new( *elements ) );
- }
- ++elements;
- }
-
- if ( l != L0 )
- lol_add( lol, l );
-}
-
-
-#ifdef HAVE_PYTHON
-
-/*
- * Calls the bjam rule specified by name passed in 'args'. The name is looked up
- * in the context of bjam's 'python_interface' module. Returns the list of
- * strings returned by the rule.
- */
-
-PyObject* bjam_call( PyObject * self, PyObject * args )
-{
- FRAME inner[ 1 ];
- LIST * result;
- PARSE * p;
- OBJECT * rulename;
-
- /* Build up the list of arg lists. */
- frame_init( inner );
- inner->prev = 0;
- inner->prev_user = 0;
- inner->module = bindmodule( constant_python_interface );
-
- /* Extract the rule name and arguments from 'args'. */
-
- /* PyTuple_GetItem returns borrowed reference. */
- rulename = object_new( PyString_AsString( PyTuple_GetItem( args, 0 ) ) );
- {
- int i = 1;
- int size = PyTuple_Size( args );
- for ( ; i < size; ++i )
- {
- PyObject * a = PyTuple_GetItem( args, i );
- if ( PyString_Check( a ) )
- {
- lol_add( inner->args, list_new( object_new(
- PyString_AsString( a ) ) ) );
- }
- else if ( PySequence_Check( a ) )
- {
- LIST * l = 0;
- int s = PySequence_Size( a );
- int i = 0;
- for ( ; i < s; ++i )
- {
- /* PySequence_GetItem returns new reference. */
- PyObject * e = PySequence_GetItem( a, i );
- char * s = PyString_AsString( e );
- if ( !s )
- {
- printf( "Invalid parameter type passed from Python\n" );
- exit( 1 );
- }
- l = list_push_back( l, object_new( s ) );
- Py_DECREF( e );
- }
- lol_add( inner->args, l );
- }
- }
- }
-
- result = evaluate_rule( rulename, inner );
- object_free( rulename );
-
- frame_free( inner );
-
- /* Convert the bjam list into a Python list result. */
- {
- PyObject * pyResult = PyList_New( list_length( result ) );
- int i = 0;
- LISTITER iter = list_begin( result ), end = list_end( result );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- PyList_SetItem( pyResult, i, PyString_FromString( object_str( list_item( iter ) ) ) );
- i += 1;
- }
- list_free( result );
- return pyResult;
- }
-}
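bjam_call() is the C half of the Python-to-bjam bridge described above: argument 0 is the rule name, and each further argument is either a string or a sequence of strings. A minimal illustrative sketch of that convention driven directly from C follows; the rule name "some-rule" and the helper name are invented for the example, and error handling is omitted.

    #include <Python.h>

    /* Hypothetical helper, not part of the original sources; mirrors the
     * Python-level bjam.call( "some-rule", "arg1", [ "a", "b" ] ). */
    static PyObject * call_some_rule_sketch( void )
    {
        PyObject * args = Py_BuildValue( "(ss[ss])", "some-rule", "arg1", "a", "b" );
        PyObject * result = bjam_call( NULL, args );  /* list of result strings */
        Py_DECREF( args );
        return result;
    }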
-
-
-/*
- * Accepts four arguments:
- * - module name,
- * - rule name,
- * - Python callable,
- * - (optional) bjam language function signature.
- * Creates a bjam rule with the specified name in the specified module, which will
- * invoke the Python callable.
- */
-
-PyObject * bjam_import_rule( PyObject * self, PyObject * args )
-{
- char * module;
- char * rule;
- PyObject * func;
- PyObject * bjam_signature = NULL;
- module_t * m;
- RULE * r;
- OBJECT * module_name;
- OBJECT * rule_name;
-
- if ( !PyArg_ParseTuple( args, "ssO|O:import_rule",
- &module, &rule, &func, &bjam_signature ) )
- return NULL;
-
- if ( !PyCallable_Check( func ) )
- {
- PyErr_SetString( PyExc_RuntimeError,
- "Non-callable object passed to bjam.import_rule" );
- return NULL;
- }
-
- module_name = *module ? object_new( module ) : 0;
- m = bindmodule( module_name );
- if( module_name )
- {
- object_free( module_name );
- }
- rule_name = object_new( rule );
- new_rule_body( m, rule_name, function_python( func, bjam_signature ), 0 );
- object_free( rule_name );
-
- Py_INCREF( Py_None );
- return Py_None;
-}
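As the parsing code above shows, an empty module string binds the rule into the root module. A hedged sketch of registering a Python callable as a root-module rule named "hello" (all names invented for the example):

    #include <Python.h>

    /* Hypothetical helper, not part of the original sources. */
    static void import_hello_rule_sketch( PyObject * func )
    {
        PyObject * args = Py_BuildValue( "(ssO)", "", "hello", func );
        PyObject * r = bjam_import_rule( NULL, args );
        Py_XDECREF( r );
        Py_DECREF( args );
    }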
-
-
-/*
- * Accepts four arguments:
- * - an action name,
- * - an action body,
- * - a list of variables that will be bound inside the action,
- * - integer flags.
- * Defines an action on the bjam side.
- */
-
-PyObject * bjam_define_action( PyObject * self, PyObject * args )
-{
- char * name;
- char * body;
- module_t * m;
- PyObject * bindlist_python;
- int flags;
- LIST * bindlist = L0;
- int n;
- int i;
- OBJECT * name_str;
- FUNCTION * body_func;
-
- if ( !PyArg_ParseTuple( args, "ssO!i:define_action", &name, &body,
- &PyList_Type, &bindlist_python, &flags ) )
- return NULL;
-
- n = PyList_Size( bindlist_python );
- for ( i = 0; i < n; ++i )
- {
- PyObject * next = PyList_GetItem( bindlist_python, i );
- if ( !PyString_Check( next ) )
- {
- PyErr_SetString( PyExc_RuntimeError,
- "bind list has non-string type" );
- return NULL;
- }
- bindlist = list_push_back( bindlist, object_new( PyString_AsString( next ) ) );
- }
-
- name_str = object_new( name );
- body_func = function_compile_actions( body, constant_builtin, -1 );
- new_rule_actions( root_module(), name_str, body_func, bindlist, flags );
- function_free( body_func );
- object_free( name_str );
-
- Py_INCREF( Py_None );
- return Py_None;
-}
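To make the (name, body, bindlist, flags) convention concrete, here is a hedged sketch that defines a hypothetical "copy-file" action with an empty bind list and no flags; the action name and command body are illustrative only.

    #include <Python.h>

    /* Hypothetical helper, not part of the original sources. */
    static void define_copy_action_sketch( void )
    {
        PyObject * bindlist = PyList_New( 0 );
        PyObject * args = Py_BuildValue( "(ssOi)", "copy-file",
            "cp \"$(>)\" \"$(<)\"", bindlist, 0 );
        PyObject * r = bjam_define_action( NULL, args );
        Py_XDECREF( r );
        Py_DECREF( args );
        Py_DECREF( bindlist );
    }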
-
-
-/*
- * Returns the value of a variable in the root Jam module.
- */
-
-PyObject * bjam_variable( PyObject * self, PyObject * args )
-{
- char * name;
- LIST * value;
- PyObject * result;
- int i;
- OBJECT * varname;
- LISTITER iter, end;
-
- if ( !PyArg_ParseTuple( args, "s", &name ) )
- return NULL;
-
- varname = object_new( name );
- value = var_get( root_module(), varname );
- object_free( varname );
- iter = list_begin( value ), end = list_end( value );
-
- result = PyList_New( list_length( value ) );
- for ( i = 0; iter != end; iter = list_next( iter ), ++i )
- PyList_SetItem( result, i, PyString_FromString( object_str( list_item( iter ) ) ) );
-
- return result;
-}
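For illustration, a C caller could read a root-module variable the same way Python code would; the sketch below assumes the engine is already initialized and uses the OS variable purely as an example.

    #include <Python.h>
    #include <stdio.h>

    /* Hypothetical helper, not part of the original sources. */
    static void print_os_variable_sketch( void )
    {
        PyObject * args = Py_BuildValue( "(s)", "OS" );
        PyObject * values = bjam_variable( NULL, args );
        if ( values && PyList_Size( values ) > 0 )
            printf( "OS = %s\n", PyString_AsString( PyList_GetItem( values, 0 ) ) );
        Py_XDECREF( values );
        Py_DECREF( args );
    }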
-
-
-PyObject * bjam_backtrace( PyObject * self, PyObject * args )
-{
- PyObject * result = PyList_New( 0 );
- struct frame * f = frame_before_python_call;
-
- for ( ; f = f->prev; )
- {
- PyObject * tuple = PyTuple_New( 4 );
- const char * file;
- int line;
- char buf[ 32 ];
- string module_name[1];
-
- get_source_line( f, &file, &line );
- sprintf( buf, "%d", line );
- string_new( module_name );
- if ( f->module->name )
- {
- string_append( module_name, object_str( f->module->name ) );
- string_append( module_name, "." );
- }
-
- /* PyTuple_SetItem steals reference. */
- PyTuple_SetItem( tuple, 0, PyString_FromString( file ) );
- PyTuple_SetItem( tuple, 1, PyString_FromString( buf ) );
- PyTuple_SetItem( tuple, 2, PyString_FromString( module_name->value ) );
- PyTuple_SetItem( tuple, 3, PyString_FromString( f->rulename ) );
-
- string_free( module_name );
-
- PyList_Append( result, tuple );
- Py_DECREF( tuple );
- }
- return result;
-}
-
-PyObject * bjam_caller( PyObject * self, PyObject * args )
-{
- const char * s = frame_before_python_call->prev->module->name ?
- object_str( frame_before_python_call->prev->module->name ) :
- "";
- return PyString_FromString( s );
-}
-
-#endif /* #ifdef HAVE_PYTHON */
-
-
-#ifdef HAVE_POPEN
-
-#if defined(_MSC_VER) || defined(__BORLANDC__)
- #define popen windows_popen_wrapper
- #define pclose _pclose
-
- /*
- * This wrapper is a workaround for a funny _popen() feature on Windows
- * where it eats external quotes in some cases. The bug seems to be related
- * to the quote stripping functionality used by the Windows cmd.exe
- * interpreter when its /S is not specified.
- *
- * Cleaned up quote from the cmd.exe help screen as displayed on Windows XP
- * SP3:
- *
- * 1. If all of the following conditions are met, then quote characters on
- * the command line are preserved:
- *
- * - no /S switch
- * - exactly two quote characters
- * - no special characters between the two quote characters, where
- * special is one of: &<>()@^|
- * - there are one or more whitespace characters between the two quote
- * characters
- * - the string between the two quote characters is the name of an
- * executable file.
- *
- * 2. Otherwise, old behavior is to see if the first character is a quote
- * character and if so, strip the leading character and remove the last
- * quote character on the command line, preserving any text after the
- * last quote character.
- *
- * This causes some commands containing quotes not to be executed correctly.
- * For example:
- *
- * "\Long folder name\aaa.exe" --name="Jurko" --no-surname
- *
- * would get its outermost quotes stripped and would be executed as:
- *
- * \Long folder name\aaa.exe" --name="Jurko --no-surname
- *
- * which would report an error about '\Long' not being a valid command.
- *
- * cmd.exe help seems to indicate it would be enough to add an extra space
- * character in front of the command to avoid this but this does not work,
- * most likely due to the shell first stripping all leading whitespace
- * characters from the command.
- *
- * The solution implemented here is to quote the whole command in case it
- * contains any quote characters. Note though that this will not work
- * correctly should Windows ever 'fix' this feature.
- * (03.06.2008.) (Jurko)
- */
- static FILE * windows_popen_wrapper( const char * command, const char * mode )
- {
- int extra_command_quotes_needed = ( strchr( command, '"' ) != 0 );
- string quoted_command;
- FILE * result;
-
- if ( extra_command_quotes_needed )
- {
- string_new( &quoted_command );
- string_append( &quoted_command, "\"" );
- string_append( &quoted_command, command );
- string_append( &quoted_command, "\"" );
- command = quoted_command.value;
- }
-
- result = _popen( command, "r" );
-
- if ( extra_command_quotes_needed )
- string_free( &quoted_command );
-
- return result;
- }
-#endif
-
-
-static char * rtrim( char * s )
-{
- char * p = s;
- while ( *p ) ++p;
- for ( --p; p >= s && isspace( *p ); *p-- = 0 );
- return s;
-}
-
-LIST * builtin_shell( FRAME * frame, int flags )
-{
- LIST * command = lol_get( frame->args, 0 );
- LIST * result = L0;
- string s;
- int ret;
- char buffer[ 1024 ];
- FILE * p = NULL;
- int exit_status = -1;
- int exit_status_opt = 0;
- int no_output_opt = 0;
- int strip_eol_opt = 0;
-
- /* Process the variable args options. */
- {
- int a = 1;
- LIST * arg = lol_get( frame->args, a );
- while ( !list_empty( arg ) )
- {
- if ( strcmp( "exit-status", object_str( list_front( arg ) ) ) == 0 )
- {
- exit_status_opt = 1;
- }
- else if ( strcmp( "no-output", object_str( list_front( arg ) ) ) == 0 )
- {
- no_output_opt = 1;
- }
- else if ( strcmp("strip-eol", object_str( list_front( arg ) ) ) == 0 )
- {
- strip_eol_opt = 1;
- }
- arg = lol_get( frame->args, ++a );
- }
- }
-
- /* The following fflush() call seems to be indicated as a workaround for a
- * popen() bug on POSIX implementations related to synchronizing input
- * stream positions for the called and the calling process.
- */
- fflush( NULL );
-
- p = popen( object_str( list_front( command ) ), "r" );
- if ( p == NULL )
- return L0;
-
- string_new( &s );
-
- while ( ( ret = fread( buffer, sizeof( char ), sizeof( buffer ) - 1, p ) ) > 0 )
- {
- buffer[ret] = 0;
- if ( !no_output_opt )
- {
- if ( strip_eol_opt )
- rtrim(buffer);
- string_append( &s, buffer );
- }
- }
-
- exit_status = pclose( p );
-
- /* The command output is returned first. */
- result = list_new( object_new( s.value ) );
- string_free( &s );
-
- /* The command exit result next. */
- if ( exit_status_opt )
- {
- if ( WIFEXITED(exit_status) )
- exit_status = WEXITSTATUS(exit_status);
- else
- exit_status = -1;
- sprintf( buffer, "%d", exit_status );
- result = list_push_back( result, object_new( buffer ) );
- }
-
- return result;
-}
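builtin_shell() takes the command from argument list 0 and scans the remaining lists for the exit-status, no-output and strip-eol options. A hedged sketch of driving it from C, roughly equivalent to the Jam-level [ SHELL "uname" : exit-status ]; it assumes the engine's object/module machinery is initialized, and the helper name is invented.

    /* Hypothetical helper, not part of the original sources. */
    static LIST * shell_with_status_sketch( void )
    {
        FRAME frame[ 1 ];
        LIST * r;
        frame_init( frame );
        lol_add( frame->args, list_new( object_new( "uname" ) ) );        /* command */
        lol_add( frame->args, list_new( object_new( "exit-status" ) ) );  /* option  */
        r = builtin_shell( frame, 0 );
        frame_free( frame );  /* frees the argument lists */
        return r;  /* output string first, then the exit status */
    }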
-
-#else /* #ifdef HAVE_POPEN */
-
-LIST * builtin_shell( FRAME * frame, int flags )
-{
- return L0;
-}
-
-#endif /* #ifdef HAVE_POPEN */
diff --git a/tools/build/v2/engine/builtins.h b/tools/build/v2/engine/builtins.h
deleted file mode 100644
index b8c086fb4f..0000000000
--- a/tools/build/v2/engine/builtins.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#ifndef JAM_BUILTINS_H
-# define JAM_BUILTINS_H
-
-# include "frames.h"
-
-/*
- * builtins.h - compile parsed jam statements
- */
-
-void load_builtins();
-void init_set();
-void init_path();
-void init_regex();
-void init_property_set();
-void init_sequence();
-void init_order();
-
-LIST *builtin_calc( FRAME * frame, int flags );
-LIST *builtin_depends( FRAME * frame, int flags );
-LIST *builtin_rebuilds( FRAME * frame, int flags );
-LIST *builtin_echo( FRAME * frame, int flags );
-LIST *builtin_exit( FRAME * frame, int flags );
-LIST *builtin_flags( FRAME * frame, int flags );
-LIST *builtin_glob( FRAME * frame, int flags );
-LIST *builtin_glob_recursive( FRAME * frame, int flags );
-LIST *builtin_subst( FRAME * frame, int flags );
-LIST *builtin_match( FRAME * frame, int flags );
-LIST *builtin_split_by_characters( FRAME * frame, int flags );
-LIST *builtin_hdrmacro( FRAME * frame, int flags );
-LIST *builtin_rulenames( FRAME * frame, int flags );
-LIST *builtin_varnames( FRAME * frame, int flags );
-LIST *builtin_delete_module( FRAME * frame, int flags );
-LIST *builtin_import( FRAME * frame, int flags );
-LIST *builtin_export( FRAME * frame, int flags );
-LIST *builtin_caller_module( FRAME * frame, int flags );
-LIST *builtin_backtrace( FRAME * frame, int flags );
-LIST *builtin_pwd( FRAME * frame, int flags );
-LIST *builtin_update( FRAME * frame, int flags );
-LIST *builtin_update_now( FRAME * frame, int flags );
-LIST *builtin_import_module( FRAME * frame, int flags );
-LIST *builtin_imported_modules( FRAME * frame, int flags );
-LIST *builtin_instance( FRAME * frame, int flags );
-LIST *builtin_sort( FRAME * frame, int flags );
-LIST *builtin_normalize_path( FRAME * frame, int flags );
-LIST *builtin_native_rule( FRAME * frame, int flags );
-LIST *builtin_has_native_rule( FRAME * frame, int flags );
-LIST *builtin_user_module( FRAME * frame, int flags );
-LIST *builtin_nearest_user_location( FRAME * frame, int flags );
-LIST *builtin_check_if_file( FRAME * frame, int flags );
-LIST *builtin_python_import_rule( FRAME * frame, int flags );
-LIST *builtin_shell( FRAME * frame, int flags );
-LIST *builtin_md5( FRAME * frame, int flags );
-LIST *builtin_file_open( FRAME * frame, int flags );
-LIST *builtin_pad( FRAME * frame, int flags );
-LIST *builtin_precious( FRAME * frame, int flags );
-LIST *builtin_self_path( FRAME * frame, int flags );
-LIST *builtin_makedir( FRAME * frame, int flags );
-
-void backtrace( FRAME *frame );
-extern int last_update_now_status;
-
-#endif
diff --git a/tools/build/v2/engine/bump_version.py b/tools/build/v2/engine/bump_version.py
deleted file mode 100644
index 9423c4c774..0000000000
--- a/tools/build/v2/engine/bump_version.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python
-
-# This script is used to bump the version of bjam. It takes a single argument, e.g.
-#
-# ./bump_version.py 3.1.9
-#
-# and updates all necessary files. For the time being, it assumes the presence
-# of a 'perl' executable and the Debian-specific 'dch' executable.
-#
-
-
-import os
-import os.path
-import re
-import string
-import sys
-
-srcdir = os.path.abspath(os.path.dirname(__file__ ))
-docdir = os.path.abspath(os.path.join(srcdir,"..","doc"))
-
-def edit(file,replacements):
- print " '%s'..." %(file)
- text = open(file,'r').read()
- while len(replacements) > 0:
- #~ print " '%s' ==> '%s'" % (replacements[0],replacements[1])
- text = re.compile(replacements[0],re.M).subn(replacements[1],text)[0]
- replacements = replacements[2:]
- #~ print text
- open(file,'w').write(text)
-
-def make_edits(version):
- edit(os.path.join(srcdir,"boost-jam.spec"), [
- '^Version:.*$','Version: %s' % string.join(version, "."),
- ])
-
- edit(os.path.join(srcdir,"build.jam"), [
- '^_VERSION_ = .* ;$','_VERSION_ = %s %s %s ;' % (version[0], version[1], version[2]),
- ])
-
- edit(os.path.join(docdir,"bjam.qbk"), [
- '\[version.*\]','[version: %s]' % string.join(version, '.'),
- '\[def :version:.*\]','[def :version: %s]' % string.join(version, '.'),
- ])
-
- edit(os.path.join(srcdir,"patchlevel.h"), [
- '^#define VERSION_MAJOR .*$',
- '#define VERSION_MAJOR %s' % (version[0]),
- '^#define VERSION_MINOR .*$',
- '#define VERSION_MINOR %s' % (version[1]),
- '^#define VERSION_PATCH .*$',
- '#define VERSION_PATCH %s' % (version[2]),
- '^#define VERSION_MAJOR_SYM .*$',
- '#define VERSION_MAJOR_SYM "0%s"' % (version[0]),
- '^#define VERSION_MINOR_SYM .*$',
- '#define VERSION_MINOR_SYM "%s"' % (version[1]),
- '^#define VERSION_PATCH_SYM .*$',
- '#define VERSION_PATCH_SYM "%s"' % (version[2]),
- '^#define VERSION .*$',
- '#define VERSION "%s"' % string.join(version, '.'),
- '^#define JAMVERSYM .*$',
- '#define JAMVERSYM "JAMVERSION=%s.%s"' % (version[0],version[1]),
- ])
-
-def main():
-
- if len(sys.argv) < 2:
- print "Expect new version as argument"
- sys.exit(1)
-
- version = string.split(sys.argv[1], ".")
- print "Setting version to", version
- make_edits(version)
-
-if __name__ == '__main__':
- main()
-
-#~ Copyright 2006 Rene Rivera.
-#~ Copyright 2005-2006 Vladimir Prus.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
diff --git a/tools/build/v2/engine/class.c b/tools/build/v2/engine/class.c
deleted file mode 100644
index 8871d1113b..0000000000
--- a/tools/build/v2/engine/class.c
+++ /dev/null
@@ -1,167 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "class.h"
-#include "strings.h"
-#include "variable.h"
-#include "frames.h"
-#include "rules.h"
-#include "object.h"
-
-#include "hash.h"
-
-
-static struct hash * classes = 0;
-
-
-static void check_defined( LIST * class_names )
-{
- LISTITER iter = list_begin( class_names ), end = list_end( class_names );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- if ( !hash_find( classes, list_item( iter ) ) )
- {
- printf( "Class %s is not defined\n", object_str( list_item( iter ) ) );
- abort();
- }
- }
-}
-
-
-static OBJECT * class_module_name( OBJECT * declared_name )
-{
- string name[ 1 ];
- OBJECT * result;
-
- string_new( name );
- string_append( name, "class@" );
- string_append( name, object_str( declared_name ) );
-
- result = object_new( name->value );
- string_free( name );
-
- return result;
-}
-
-
-struct import_base_data
-{
- OBJECT * base_name;
- module_t * base_module;
- module_t * class_module;
-};
-
-
-static void import_base_rule( void * r_, void * d_ )
-{
- RULE * r = (RULE *)r_;
- RULE * ir1;
- RULE * ir2;
- struct import_base_data * d = (struct import_base_data *)d_;
- string qualified_name[ 1 ];
- OBJECT * qname;
-
- string_new ( qualified_name );
- string_append ( qualified_name, object_str( d->base_name ) );
- string_push_back( qualified_name, '.' );
- string_append ( qualified_name, object_str( r->name ) );
-
- qname = object_new( qualified_name->value );
-
- ir1 = import_rule( r, d->class_module, r->name );
- ir2 = import_rule( r, d->class_module, qname );
-
- object_free( qname );
-
- /* Copy 'exported' flag. */
- ir1->exported = ir2->exported = r->exported;
-
- /* If we are importing a class method, localize it. */
- if ( ( r->module == d->base_module ) || ( r->module->class_module &&
- ( r->module->class_module == d->base_module ) ) )
- {
- rule_localize( ir1, d->class_module );
- rule_localize( ir2, d->class_module );
- }
-
- string_free( qualified_name );
-}
-
-
-/*
- * For each exported rule 'n' declared in the class module for 'base', imports
- * that rule into 'class' as 'n' and as 'base.n'. Imported rules are localized
- * and marked as exported.
- */
-
-static void import_base_rules( module_t * class_, OBJECT * base )
-{
- OBJECT * module_name = class_module_name( base );
- module_t * base_module = bindmodule( module_name );
- LIST * imported;
- struct import_base_data d;
- d.base_name = base;
- d.base_module = base_module;
- d.class_module = class_;
- object_free( module_name );
-
- if ( base_module->rules )
- hashenumerate( base_module->rules, import_base_rule, &d );
-
- imported = imported_modules( base_module );
- import_module( imported, class_ );
- list_free( imported );
-}
-
-
-OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame )
-{
- OBJECT * name = class_module_name( list_front( xname ) );
- OBJECT * * pp;
- module_t * class_module = 0;
- module_t * outer_module = frame->module;
- int found;
- LISTITER iter, end;
-
- if ( !classes )
- classes = hashinit( sizeof( OBJECT * ), "classes" );
-
- pp = (OBJECT * *)hash_insert( classes, list_front( xname ), &found );
- if ( !found )
- {
- *pp = object_copy( list_front( xname ) );
- }
- else
- {
- printf( "Class %s already defined\n", object_str( list_front( xname ) ) );
- abort();
- }
- check_defined( bases );
-
- class_module = bindmodule( name );
-
- var_set( class_module, constant_name, xname, VAR_SET );
- var_set( class_module, constant_bases, bases, VAR_SET );
-
- iter = list_begin( bases ), end = list_end( bases );
- for ( ; iter != end; iter = list_next( iter ) )
- import_base_rules( class_module, list_item( iter ) );
-
- return name;
-}
-
-static void free_class( void * xclass, void * data )
-{
- object_free( *(OBJECT * *)xclass );
-}
-
-void class_done( void )
-{
- if( classes )
- {
- hashenumerate( classes, free_class, (void *)0 );
- hashdone( classes );
- classes = 0;
- }
-}
diff --git a/tools/build/v2/engine/command.c b/tools/build/v2/engine/command.c
deleted file mode 100644
index 8161014c05..0000000000
--- a/tools/build/v2/engine/command.c
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * command.c - maintain lists of commands
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "rules.h"
-
-#include "command.h"
-#include <limits.h>
-#include <string.h>
-
-
-/*
- * cmd_new() - return a new CMD or 0 if too many args
- */
-
-CMD * cmd_new( RULE * rule, LIST * targets, LIST * sources, LIST * shell )
-{
- CMD * cmd = (CMD *)BJAM_MALLOC( sizeof( CMD ) );
- LISTITER iter = list_begin( shell ), end = list_end( shell );
- /* Lift line-length limitation entirely when JAMSHELL is just "%". */
- int no_limit = ( iter != end && !strcmp( object_str( list_item( iter ) ), "%") && list_next( iter ) == end );
- int max_line = MAXLINE;
- FRAME frame[1];
-
- cmd->rule = rule;
- cmd->shell = shell;
- cmd->next = 0;
-
- lol_init( &cmd->args );
- lol_add( &cmd->args, targets );
- lol_add( &cmd->args, sources );
- string_new( cmd->buf );
-
- frame_init( frame );
- frame->module = rule->module;
- lol_init( frame->args );
- lol_add( frame->args, list_copy( targets ) );
- lol_add( frame->args, list_copy( sources ) );
- function_run_actions( rule->actions->command, frame, stack_global(), cmd->buf );
- frame_free( frame );
-
- if ( !no_limit )
- {
- /* Bail if the result will not fit in MAXLINE. */
- char * s = cmd->buf->value;
- while ( *s )
- {
- size_t l = strcspn( s, "\n" );
-
- if ( l > MAXLINE )
- {
- /* We do not free targets/sources/shell if bailing. */
- cmd_free( cmd );
- return 0;
- }
-
- s += l;
- if ( *s )
- ++s;
- }
- }
-
- return cmd;
-}
-
-
-/*
- * cmd_free() - free a CMD
- */
-
-void cmd_free( CMD * cmd )
-{
- lol_free( &cmd->args );
- list_free( cmd->shell );
- string_free( cmd->buf );
- BJAM_FREE( (void *)cmd );
-}
diff --git a/tools/build/v2/engine/command.h b/tools/build/v2/engine/command.h
deleted file mode 100644
index 4dad9e254c..0000000000
--- a/tools/build/v2/engine/command.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 1994 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * command.h - the CMD structure and routines to manipulate them
- *
- * Both ACTION and CMD contain a rule, targets, and sources. An
- * ACTION describes a rule to be applied to the given targets and
- * sources; a CMD is what actually gets executed by the shell. The
- * differences are due to:
- *
- * ACTIONS must be combined if 'actions together' is given.
- * ACTIONS must be split if 'actions piecemeal' is given.
- * ACTIONS must have current sources omitted for 'actions updated'.
- *
- * The CMD datatype holds a single command that is to be executed
- * against a target, and CMDs can chain together to represent the
- * full collection of commands used to update a target.
- *
- * Structures:
- *
- * CMD - an action, ready to be formatted into a buffer and executed.
- *
- * External routines:
- *
- * cmd_new() - return a new CMD or 0 if too many args.
- * cmd_free() - delete CMD and its parts.
- * cmd_next() - walk the CMD chain.
- */
-
-
-/*
- * CMD - an action, ready to be formatted into a buffer and executed.
- */
-
-#ifndef COMMAND_SW20111118_H
-#define COMMAND_SW20111118_H
-
-#include "lists.h"
-#include "rules.h"
-#include "strings.h"
-
-typedef struct _cmd CMD;
-
-struct _cmd
-{
- CMD * next;
- CMD * tail; /* valid only in the head */
- RULE * rule; /* rule->actions contains shell script */
- LIST * shell; /* $(SHELL) value */
- LOL args; /* LISTs for $(<), $(>) */
- string buf[1]; /* actual commands */
-};
-
-CMD * cmd_new
-(
- RULE * rule, /* rule (referenced) */
- LIST * targets, /* $(<) (freed) */
- LIST * sources, /* $(>) (freed) */
- LIST * shell /* $(SHELL) (freed) */
-);
-
-void cmd_free( CMD * );
-
-#define cmd_next( c ) ( ( c )->next )
-
-#endif
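The header describes CMDs chaining together and being walked via cmd_next(). A minimal hedged traversal sketch (helper name invented; the real consumers live elsewhere in the engine):

    #include <stdio.h>

    /* Hypothetical helper, not part of the original sources: print every
     * formatted command buffer in a CMD chain. */
    static void print_cmd_chain_sketch( CMD * head )
    {
        CMD * c;
        for ( c = head; c; c = cmd_next( c ) )
            printf( "%s\n", c->buf->value );
    }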
diff --git a/tools/build/v2/engine/compile.c b/tools/build/v2/engine/compile.c
deleted file mode 100644
index cb08e24c95..0000000000
--- a/tools/build/v2/engine/compile.c
+++ /dev/null
@@ -1,347 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-
-# include "lists.h"
-# include "parse.h"
-# include "compile.h"
-# include "variable.h"
-# include "rules.h"
-# include "object.h"
-# include "make.h"
-# include "search.h"
-# include "hdrmacro.h"
-# include "hash.h"
-# include "modules.h"
-# include "strings.h"
-# include "builtins.h"
-# include "class.h"
-# include "constants.h"
-
-# include <assert.h>
-# include <string.h>
-# include <stdarg.h>
-
-/*
- * compile.c - compile parsed jam statements
- *
- * External routines:
- *
- * compile_append() - append list results of two statements
- * compile_eval() - evaluate if to determine which leg to compile
- * compile_foreach() - compile the "for x in y" statement
- * compile_if() - compile 'if' rule
- * compile_while() - compile 'while' rule
- * compile_include() - support for 'include' - call include() on file
- * compile_list() - expand and return a list
- * compile_local() - declare (and set) local variables
- * compile_null() - do nothing -- a stub for parsing
- * compile_on() - run rule under influence of on-target variables
- * compile_rule() - compile a single user defined rule
- * compile_rules() - compile a chain of rules
- * compile_set() - compile the "set variable" statement
- * compile_setcomp() - support for `rule` - save parse tree
- * compile_setexec() - support for `actions` - save execution string
- * compile_settings() - compile the "on =" (set variable on exec) statement
- * compile_switch() - compile 'switch' rule
- *
- * Internal routines:
- *
- * debug_compile() - printf with indent to show rule expansion.
- * evaluate_rule() - execute a rule invocation
- *
- * builtin_depends() - DEPENDS/INCLUDES rule
- * builtin_echo() - ECHO rule
- * builtin_exit() - EXIT rule
- * builtin_flags() - NOCARE, NOTFILE, TEMPORARY rule
- *
- * 02/03/94 (seiwald) - Changed trace output to read "setting" instead of
- * the awkward sounding "settings".
- * 04/12/94 (seiwald) - Combined build_depends() with build_includes().
- * 04/12/94 (seiwald) - actionlist() now just appends a single action.
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 05/13/94 (seiwald) - include files are now bound as targets, and thus
- * can make use of $(SEARCH)
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 01/22/95 (seiwald) - Exit rule.
- * 02/02/95 (seiwald) - Always rule; LEAVES rule.
- * 02/14/95 (seiwald) - NoUpdate rule.
- * 09/11/00 (seiwald) - new evaluate_rule() for headers().
- * 09/11/00 (seiwald) - compile_xxx() now return LIST *.
- * New compile_append() and compile_list() in
- * support of building lists here, rather than
- * in jamgram.yy.
- * 01/10/00 (seiwald) - built-ins split out to builtin.c.
- */
-
-static void debug_compile( int which, const char * s, FRAME * frame );
-int glob( const char * s, const char * c );
-/* Internal functions from builtins.c */
-void backtrace( FRAME * frame );
-void backtrace_line( FRAME * frame );
-void print_source_line( FRAME * frame );
-
-struct frame * frame_before_python_call;
-
-static OBJECT * module_scope;
-
-void frame_init( FRAME* frame )
-{
- frame->prev = 0;
- frame->prev_user = 0;
- lol_init(frame->args);
- frame->module = root_module();
- frame->rulename = "module scope";
- frame->file = 0;
- frame->line = -1;
-}
-
-
-void frame_free( FRAME* frame )
-{
- lol_free( frame->args );
-}
-
-
-/*
- * evaluate_rule() - execute a rule invocation.
- */
-
-LIST *
-evaluate_rule(
- OBJECT * rulename,
- FRAME * frame )
-{
- LIST * result = L0;
- RULE * rule;
- profile_frame prof[1];
- module_t * prev_module = frame->module;
-
- rule = bindrule( rulename, frame->module );
-
- if ( DEBUG_COMPILE )
- {
- /* Try hard to indicate in which module the rule is going to execute. */
- if ( rule->module != frame->module
- && rule->procedure != 0 && !object_equal( rulename, function_rulename( rule->procedure ) ) )
- {
- char buf[256] = "";
- if ( rule->module->name )
- {
- strncat( buf, object_str( rule->module->name ), sizeof( buf ) - 1 );
- strncat( buf, ".", sizeof( buf ) - 1 );
- }
- strncat( buf, object_str( rule->name ), sizeof( buf ) - 1 );
- debug_compile( 1, buf, frame );
- }
- else
- {
- debug_compile( 1, object_str( rulename ), frame );
- }
-
- lol_print( frame->args );
- printf( "\n" );
- }
-
- if ( rule->procedure && rule->module != prev_module )
- {
- /* Propagate current module to nested rule invocations. */
- frame->module = rule->module;
- }
-
- /* Record current rule name in frame. */
- if ( rule->procedure )
- {
- frame->rulename = object_str( rulename );
- /* And enter record profile info. */
- if ( DEBUG_PROFILE )
- profile_enter( function_rulename( rule->procedure ), prof );
- }
-
- /* Check traditional targets $(<) and sources $(>). */
- if ( !rule->actions && !rule->procedure )
- {
- backtrace_line( frame->prev );
- if ( frame->module->name )
- {
- printf( "rule %s unknown in module %s\n", object_str( rule->name ), object_str( frame->module->name ) );
- }
- else
- {
- printf( "rule %s unknown in module \n", object_str( rule->name ) );
- }
- backtrace( frame->prev );
- exit( 1 );
- }
-
- /* If this rule will be executed for updating the targets then construct the
- * action for make().
- */
- if ( rule->actions )
- {
- TARGETS * t;
- ACTION * action;
-
- /* The action is associated with this instance of this rule. */
- action = (ACTION *)BJAM_MALLOC( sizeof( ACTION ) );
- memset( (char *)action, '\0', sizeof( *action ) );
-
- action->rule = rule;
- action->targets = targetlist( (TARGETS *)0, lol_get( frame->args, 0 ) );
- action->sources = targetlist( (TARGETS *)0, lol_get( frame->args, 1 ) );
- action->refs = 1;
-
- /* If we have a group of targets all being built using the same action
- * then we must not allow any of them to be used as sources unless they
- * had all already been built in the first place or their joined action
- * has had a chance to finish its work and build all of them anew.
- *
- * Without this it might be possible, in case of a multi-process build,
- * for their action, triggered by building one of the targets, to still
- * be running when another target in the group reports as done (in order
- * to avoid triggering the same action again) and gets used prematurely.
- *
- * As a quick-fix to achieve this effect we make all the targets list
- * each other as 'included targets'. More precisely, we mark the first
- * listed target as including all the other targets in the list and vice
- * versa. This makes anyone depending on any of those targets implicitly
- * depend on all of them, thus making sure none of those targets can be
- * used as sources until all of them have been built. Note that direct
- * dependencies could not have been used due to the 'circular
- * dependency' issue.
- *
- * TODO: Although the current implementation solves the problem of one
- * of the targets getting used before its action completes its work it
- * also forces the action to run whenever any of the targets in the
- * group is not up to date even though some of them might not actually
- * be used by the targets being built. We should see how we can
- * correctly recognize such cases and use that to avoid running the
- * action if possible and not rebuild targets not actually depending on
- * targets that are not up to date.
- *
- * TODO: Using the 'include' feature might have side-effects due to
- * interaction with the actual 'inclusion scanning' system. This should
- * be checked.
- */
- if ( action->targets )
- {
- TARGET * t0 = action->targets->target;
- for ( t = action->targets->next; t; t = t->next )
- {
- target_include( t->target, t0 );
- target_include( t0, t->target );
- }
- }
-
- /* Append this action to the actions of each target. */
- for ( t = action->targets; t; t = t->next )
- t->target->actions = actionlist( t->target->actions, action );
-
- action_free( action );
- }
-
- /* Now recursively compile any parse tree associated with this rule.
- * function_refer()/function_free() call pair added to ensure rule not freed
- * during use.
- */
- if ( rule->procedure )
- {
- FUNCTION * function = rule->procedure;
-
- function_refer( function );
- result = function_run( function, frame, stack_global() );
- function_free( function );
- }
-
- if ( DEBUG_PROFILE && rule->procedure )
- profile_exit( prof );
-
- if ( DEBUG_COMPILE )
- debug_compile( -1, 0, frame);
-
- return result;
-}
-
-
-/*
- * Call the given rule with the specified parameters. The parameters should be
- * of type LIST* and end with a NULL pointer. This differs from 'evaluate_rule'
- * in that the frame for the called rule is prepared inside 'call_rule'.
- *
- * This function is useful when a builtin rule (in C) wants to call another rule
- * which might be implemented in Jam.
- */
-
-LIST * call_rule( OBJECT * rulename, FRAME * caller_frame, ... )
-{
- va_list va;
- LIST * result;
-
- FRAME inner[1];
- frame_init( inner );
- inner->prev = caller_frame;
- inner->prev_user = caller_frame->module->user_module ?
- caller_frame : caller_frame->prev_user;
- inner->module = caller_frame->module;
-
- va_start( va, caller_frame );
- for ( ; ; )
- {
- LIST * l = va_arg( va, LIST* );
- if ( !l )
- break;
- lol_add( inner->args, l );
- }
- va_end( va );
-
- result = evaluate_rule( rulename, inner );
-
- frame_free( inner );
-
- return result;
-}
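A hedged usage sketch of the NULL-terminated varargs convention described above; the rule name "my-rule" is hypothetical, and the freshly built lists are consumed by the callee's frame, as the code above shows.

    /* Hypothetical helper, not part of the original sources. */
    static LIST * call_my_rule_sketch( FRAME * caller )
    {
        OBJECT * rulename = object_new( "my-rule" );
        LIST * result = call_rule( rulename, caller,
            list_new( object_new( "a-target" ) ),   /* first argument list  */
            list_new( object_new( "a-source" ) ),   /* second argument list */
            NULL );
        object_free( rulename );
        return result;
    }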
-
-
-
-/*
- * debug_compile() - printf with indent to show rule expansion.
- */
-
-static void debug_compile( int which, const char * s, FRAME * frame )
-{
- static int level = 0;
- static char indent[36] = ">>>>|>>>>|>>>>|>>>>|>>>>|>>>>|>>>>|";
-
- if ( which >= 0 )
- {
- int i;
-
- print_source_line( frame );
-
- i = ( level + 1 ) * 2;
- while ( i > 35 )
- {
- fputs( indent, stdout );
- i -= 35;
- }
-
- printf( "%*.*s ", i, i, indent );
- }
-
- if ( s )
- printf( "%s ", s );
-
- level += which;
-}
diff --git a/tools/build/v2/engine/compile.h b/tools/build/v2/engine/compile.h
deleted file mode 100644
index 1c002d90fa..0000000000
--- a/tools/build/v2/engine/compile.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#ifndef COMPILE_DWA20011022_H
-# define COMPILE_DWA20011022_H
-
-# include "frames.h"
-# include "parse.h"
-# include "regexp.h"
-# include "object.h"
-
-/*
- * compile.h - compile parsed jam statements
- */
-
-void compile_builtins();
-
-LIST *evaluate_rule( OBJECT * rulename, FRAME * frame );
-LIST *call_rule( OBJECT * rulename, FRAME * caller_frame, ...);
-
-regexp* regex_compile( OBJECT * pattern );
-
-/* Flags for compile_set(), etc */
-
-# define ASSIGN_SET 0x00 /* = assign variable */
-# define ASSIGN_APPEND 0x01 /* += append variable */
-# define ASSIGN_DEFAULT 0x02 /* set only if unset */
-
-/* Flags for compile_setexec() */
-
-# define EXEC_UPDATED 0x01 /* executes updated */
-# define EXEC_TOGETHER 0x02 /* executes together */
-# define EXEC_IGNORE 0x04 /* executes ignore */
-# define EXEC_QUIETLY 0x08 /* executes quietly */
-# define EXEC_PIECEMEAL 0x10 /* executes piecemeal */
-# define EXEC_EXISTING 0x20 /* executes existing */
-
-/* Conditions for compile_if() */
-
-# define EXPR_NOT 0 /* ! cond */
-# define EXPR_AND 1 /* cond && cond */
-# define EXPR_OR 2 /* cond || cond */
-
-# define EXPR_EXISTS 3 /* arg */
-# define EXPR_EQUALS 4 /* arg = arg */
-# define EXPR_NOTEQ 5 /* arg != arg */
-# define EXPR_LESS 6 /* arg < arg */
-# define EXPR_LESSEQ 7 /* arg <= arg */
-# define EXPR_MORE 8 /* arg > arg */
-# define EXPR_MOREEQ 9 /* arg >= arg */
-# define EXPR_IN 10 /* arg in arg */
-
-#endif
-
diff --git a/tools/build/v2/engine/constants.c b/tools/build/v2/engine/constants.c
deleted file mode 100644
index 3ad534ed75..0000000000
--- a/tools/build/v2/engine/constants.c
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Copyright 2011 Steven Watanabe
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "constants.h"
-# include "object.h"
-
-/*
- * constants.c - constant objects
- *
- * External functions:
- *
- * constants_init() - initialize constants
- * constants_done() - free constants
- *
- */
-
-void constants_init( void )
-{
- constant_empty = object_new( "" );
- constant_dot = object_new( "." );
- constant_percent = object_new( "%" );
- constant_plus = object_new( "+" );
- constant_star = object_new( "*" );
- constant_question_mark = object_new( "?" );
- constant_ok = object_new( "ok" );
- constant_true = object_new( "true" );
- constant_name = object_new( "__name__" );
- constant_bases = object_new( "__bases__" );
- constant_typecheck = object_new( ".typecheck" );
- constant_builtin = object_new( "(builtin)" );
- constant_HCACHEFILE = object_new( "HCACHEFILE" );
- constant_HCACHEMAXAGE = object_new( "HCACHEMAXAGE" );
- constant_HDRSCAN = object_new( "HDRSCAN" );
- constant_HDRRULE = object_new( "HDRRULE" );
- constant_BINDRULE = object_new( "BINDRULE" );
- constant_LOCATE = object_new( "LOCATE" );
- constant_SEARCH = object_new( "SEARCH" );
- constant_JAM_SEMAPHORE = object_new( "JAM_SEMAPHORE" );
- constant_TIMING_RULE = object_new( "__TIMING_RULE__" );
- constant_ACTION_RULE = object_new( "__ACTION_RULE__" );
- constant_JAMSHELL = object_new( "JAMSHELL" );
- constant_TMPDIR = object_new( "TMPDIR" );
- constant_TMPNAME = object_new( "TMPNAME" );
- constant_TMPFILE = object_new( "TMPFILE" );
- constant_STDOUT = object_new( "STDOUT" );
- constant_STDERR = object_new( "STDERR" );
- constant_JAMDATE = object_new( "JAMDATE" );
- constant_JAM_VERSION = object_new( "JAM_VERSION" );
- constant_JAMUNAME = object_new( "JAMUNAME" );
- constant_ENVIRON = object_new( ".ENVIRON" );
- constant_ARGV = object_new( "ARGV" );
- constant_all = object_new( "all" );
- constant_PARALLELISM = object_new( "PARALLELISM" );
- constant_KEEP_GOING = object_new( "KEEP_GOING" );
- constant_other = object_new( "[OTHER]" );
- constant_total = object_new( "[TOTAL]" );
- constant_FILE_DIRSCAN = object_new( "FILE_DIRSCAN" );
- constant_MAIN = object_new( "MAIN" );
- constant_MAIN_MAKE = object_new( "MAIN_MAKE" );
- constant_MAKE_MAKE0 = object_new( "MAKE_MAKE0" );
- constant_MAKE_MAKE1 = object_new( "MAKE_MAKE1" );
- constant_MAKE_MAKE0SORT = object_new( "MAKE_MAKE0SORT" );
- constant_BINDMODULE = object_new( "BINDMODULE" );
- constant_IMPORT_MODULE = object_new( "IMPORT_MODULE" );
- constant_BUILTIN_GLOB_BACK = object_new( "BUILTIN_GLOB_BACK" );
- constant_timestamp = object_new( "timestamp" );
- constant_python = object_new("__python__");
- constant_python_interface = object_new( "python_interface" );
- constant_extra_pythonpath = object_new( "EXTRA_PYTHONPATH" );
- constant_MAIN_PYTHON = object_new( "MAIN_PYTHON" );
-}
-
-void constants_done( void )
-{
- object_free( constant_empty );
- object_free( constant_dot );
- object_free( constant_percent );
- object_free( constant_plus );
- object_free( constant_star );
- object_free( constant_question_mark );
- object_free( constant_ok );
- object_free( constant_true );
- object_free( constant_name );
- object_free( constant_bases );
- object_free( constant_typecheck );
- object_free( constant_builtin );
- object_free( constant_HCACHEFILE );
- object_free( constant_HCACHEMAXAGE );
- object_free( constant_HDRSCAN );
- object_free( constant_HDRRULE );
- object_free( constant_BINDRULE );
- object_free( constant_LOCATE );
- object_free( constant_SEARCH );
- object_free( constant_JAM_SEMAPHORE );
- object_free( constant_TIMING_RULE );
- object_free( constant_ACTION_RULE );
- object_free( constant_JAMSHELL );
- object_free( constant_TMPDIR );
- object_free( constant_TMPNAME );
- object_free( constant_TMPFILE );
- object_free( constant_STDOUT );
- object_free( constant_STDERR );
- object_free( constant_JAMDATE );
- object_free( constant_JAM_VERSION );
- object_free( constant_JAMUNAME );
- object_free( constant_ENVIRON );
- object_free( constant_ARGV );
- object_free( constant_all );
- object_free( constant_PARALLELISM );
- object_free( constant_KEEP_GOING );
- object_free( constant_other );
- object_free( constant_total );
- object_free( constant_FILE_DIRSCAN );
- object_free( constant_MAIN );
- object_free( constant_MAIN_MAKE );
- object_free( constant_MAKE_MAKE0 );
- object_free( constant_MAKE_MAKE1 );
- object_free( constant_MAKE_MAKE0SORT );
- object_free( constant_BINDMODULE );
- object_free( constant_IMPORT_MODULE );
- object_free( constant_BUILTIN_GLOB_BACK );
- object_free( constant_timestamp );
- object_free( constant_python );
- object_free( constant_python_interface );
- object_free( constant_extra_pythonpath );
- object_free( constant_MAIN_PYTHON );
-}
-
-OBJECT * constant_empty;
-OBJECT * constant_dot;
-OBJECT * constant_percent;
-OBJECT * constant_plus;
-OBJECT * constant_star;
-OBJECT * constant_question_mark;
-OBJECT * constant_ok;
-OBJECT * constant_true;
-OBJECT * constant_name;
-OBJECT * constant_bases;
-OBJECT * constant_typecheck;
-OBJECT * constant_builtin;
-OBJECT * constant_HCACHEFILE;
-OBJECT * constant_HCACHEMAXAGE;
-OBJECT * constant_HDRSCAN;
-OBJECT * constant_HDRRULE;
-OBJECT * constant_BINDRULE;
-OBJECT * constant_LOCATE;
-OBJECT * constant_SEARCH;
-OBJECT * constant_JAM_SEMAPHORE;
-OBJECT * constant_TIMING_RULE;
-OBJECT * constant_ACTION_RULE;
-OBJECT * constant_JAMSHELL;
-OBJECT * constant_TMPDIR;
-OBJECT * constant_TMPNAME;
-OBJECT * constant_TMPFILE;
-OBJECT * constant_STDOUT;
-OBJECT * constant_STDERR;
-OBJECT * constant_JAMDATE;
-OBJECT * constant_JAM_VERSION;
-OBJECT * constant_JAMUNAME;
-OBJECT * constant_ENVIRON;
-OBJECT * constant_ARGV;
-OBJECT * constant_all;
-OBJECT * constant_PARALLELISM;
-OBJECT * constant_KEEP_GOING;
-OBJECT * constant_other;
-OBJECT * constant_total;
-OBJECT * constant_FILE_DIRSCAN;
-OBJECT * constant_MAIN;
-OBJECT * constant_MAIN_MAKE;
-OBJECT * constant_MAKE_MAKE0;
-OBJECT * constant_MAKE_MAKE1;
-OBJECT * constant_MAKE_MAKE0SORT;
-OBJECT * constant_BINDMODULE;
-OBJECT * constant_IMPORT_MODULE;
-OBJECT * constant_BUILTIN_GLOB_BACK;
-OBJECT * constant_timestamp;
-OBJECT * constant_python;
-OBJECT * constant_python_interface;
-OBJECT * constant_extra_pythonpath;
-OBJECT * constant_MAIN_PYTHON;
diff --git a/tools/build/v2/engine/constants.h b/tools/build/v2/engine/constants.h
deleted file mode 100644
index 2ec27e800f..0000000000
--- a/tools/build/v2/engine/constants.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2011 Steven Watanabe
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * constants.h - constant objects
- */
-
-#ifndef BOOST_JAM_CONSTANTS_H
-#define BOOST_JAM_CONSTANTS_H
-
-#include "object.h"
-
-void constants_init( void );
-void constants_done( void );
-
-extern OBJECT * constant_empty; /* "" */
-extern OBJECT * constant_dot; /* "." */
-extern OBJECT * constant_percent; /* "%" */
-extern OBJECT * constant_plus; /* "+" */
-extern OBJECT * constant_star; /* "*" */
-extern OBJECT * constant_question_mark; /* "?" */
-extern OBJECT * constant_ok; /* "ok" */
-extern OBJECT * constant_true; /* "true" */
-extern OBJECT * constant_name; /* "__name__" */
-extern OBJECT * constant_bases; /* "__bases__" */
-extern OBJECT * constant_typecheck; /* ".typecheck" */
-extern OBJECT * constant_builtin; /* "(builtin)" */
-extern OBJECT * constant_HCACHEFILE; /* "HCACHEFILE" */
-extern OBJECT * constant_HCACHEMAXAGE; /* "HCACHEMAXAGE" */
-extern OBJECT * constant_HDRSCAN; /* "HDRSCAN" */
-extern OBJECT * constant_HDRRULE; /* "HDRRULE" */
-extern OBJECT * constant_BINDRULE; /* "BINDRULE" */
-extern OBJECT * constant_LOCATE; /* "LOCATE" */
-extern OBJECT * constant_SEARCH; /* "SEARCH" */
-extern OBJECT * constant_JAM_SEMAPHORE; /* "JAM_SEMAPHORE" */
-extern OBJECT * constant_TIMING_RULE; /* "__TIMING_RULE__" */
-extern OBJECT * constant_ACTION_RULE; /* "__ACTION_RULE__" */
-extern OBJECT * constant_JAMSHELL; /* "JAMSHELL" */
-extern OBJECT * constant_TMPDIR; /* "TMPDIR" */
-extern OBJECT * constant_TMPNAME; /* "TMPNAME" */
-extern OBJECT * constant_TMPFILE; /* "TMPFILE" */
-extern OBJECT * constant_STDOUT; /* "STDOUT" */
-extern OBJECT * constant_STDERR; /* "STDERR" */
-extern OBJECT * constant_JAMDATE; /* "JAMDATE" */
-extern OBJECT * constant_JAM_VERSION; /* "JAM_VERSION" */
-extern OBJECT * constant_JAMUNAME; /* "JAMUNAME" */
-extern OBJECT * constant_ENVIRON; /* ".ENVIRON" */
-extern OBJECT * constant_ARGV; /* "ARGV" */
-extern OBJECT * constant_all; /* "all" */
-extern OBJECT * constant_PARALLELISM; /* "PARALLELISM" */
-extern OBJECT * constant_KEEP_GOING; /* "KEEP_GOING" */
-extern OBJECT * constant_other; /* "[OTHER]" */
-extern OBJECT * constant_total; /* "[TOTAL]" */
-extern OBJECT * constant_FILE_DIRSCAN; /* "FILE_DIRSCAN" */
-extern OBJECT * constant_MAIN; /* "MAIN" */
-extern OBJECT * constant_MAIN_MAKE; /* "MAIN_MAKE" */
-extern OBJECT * constant_MAKE_MAKE0; /* "MAKE_MAKE0" */
-extern OBJECT * constant_MAKE_MAKE1; /* "MAKE_MAKE1" */
-extern OBJECT * constant_MAKE_MAKE0SORT; /* "MAKE_MAKE0SORT" */
-extern OBJECT * constant_BINDMODULE; /* "BINDMODULE" */
-extern OBJECT * constant_IMPORT_MODULE; /* "IMPORT_MODULE" */
-extern OBJECT * constant_BUILTIN_GLOB_BACK; /* "BUILTIN_GLOB_BACK" */
-extern OBJECT * constant_timestamp; /* "timestamp" */
-extern OBJECT * constant_python; /* "__python__" */
-extern OBJECT * constant_python_interface; /* "python_interface" */
-extern OBJECT * constant_extra_pythonpath; /* "EXTRA_PYTHONPATH" */
-extern OBJECT * constant_MAIN_PYTHON; /* "MAIN_PYTHON" */
-
-#endif
diff --git a/tools/build/v2/engine/debug.c b/tools/build/v2/engine/debug.c
deleted file mode 100644
index 827356bb83..0000000000
--- a/tools/build/v2/engine/debug.c
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- Copyright Rene Rivera 2005.
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#include "jam.h"
-
-#include "hash.h"
-
-#include <time.h>
-#include <assert.h>
-
-
-static profile_frame * profile_stack = 0;
-static struct hash * profile_hash = 0;
-static profile_info profile_other = { 0, 0, 0, 0, 0, 0 };
-static profile_info profile_total = { 0, 0, 0, 0, 0, 0 };
-
-
-profile_frame * profile_init( OBJECT * rulename, profile_frame * frame )
-{
- if ( DEBUG_PROFILE ) profile_enter( rulename, frame );
- return frame;
-}
-
-
-void profile_enter( OBJECT * rulename, profile_frame * frame )
-{
- if ( DEBUG_PROFILE )
- {
- clock_t start = clock();
- profile_info * p;
-
- if ( !profile_hash && rulename )
- profile_hash = hashinit( sizeof( profile_info ), "profile" );
-
- if ( rulename )
- {
- int found;
- p = (profile_info *)hash_insert( profile_hash, rulename, &found );
- if ( !found )
- {
- p->name = rulename;
- p->cumulative = p->net = p->num_entries = p->stack_count = p->memory = 0;
- }
- }
- else
- {
- p = &profile_other;
- }
-
- ++p->num_entries;
- ++p->stack_count;
-
- frame->info = p;
-
- frame->caller = profile_stack;
- profile_stack = frame;
-
- frame->entry_time = clock();
- frame->overhead = 0;
- frame->subrules = 0;
-
- /* caller pays for the time it takes to play with the hash table */
- if ( frame->caller )
- frame->caller->overhead += frame->entry_time - start;
- }
-}
-
-
-void profile_memory( long mem )
-{
- if ( DEBUG_PROFILE )
- if ( profile_stack && profile_stack->info )
- profile_stack->info->memory += mem;
-}
-
-
-void profile_exit( profile_frame * frame )
-{
- if ( DEBUG_PROFILE )
- {
- /* Cumulative time for this call. */
- clock_t t = clock() - frame->entry_time - frame->overhead;
- /* If this rule is already present on the stack, don't add the time for
- * this instance.
- */
- if ( frame->info->stack_count == 1 )
- frame->info->cumulative += t;
- /* Net time does not depend on the presence of the same rule in the call stack.
- */
- frame->info->net += t - frame->subrules;
-
- if ( frame->caller )
- {
- /* Caller's cumulative time must account for this overhead. */
- frame->caller->overhead += frame->overhead;
- frame->caller->subrules += t;
- }
- /* Pop this stack frame. */
- --frame->info->stack_count;
- profile_stack = frame->caller;
- }
-}
-
-
-static void dump_profile_entry( void * p_, void * ignored )
-{
- profile_info * p = (profile_info *)p_;
- unsigned long mem_each = ( p->memory / ( p->num_entries ? p->num_entries : 1 ) );
- double cumulative = p->cumulative;
- double net = p->net;
- double q = p->net;
- q /= ( p->num_entries ? p->num_entries : 1 );
- cumulative /= CLOCKS_PER_SEC;
- net /= CLOCKS_PER_SEC;
- q /= CLOCKS_PER_SEC;
- if ( !ignored )
- {
- profile_total.cumulative += p->net;
- profile_total.memory += p->memory;
- }
- printf( "%10ld %12.6f %12.6f %12.8f %10ld %10ld %s\n", p->num_entries,
- cumulative, net, q, p->memory, mem_each, object_str( p->name ) );
-}
-
-
-void profile_dump()
-{
- if ( profile_hash )
- {
- printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--",
- "--net--", "--each--", "--mem--", "--each--", "--name--" );
- hashenumerate( profile_hash, dump_profile_entry, 0 );
- profile_other.name = constant_other;
- dump_profile_entry( &profile_other, 0 );
- profile_total.name = constant_total;
- dump_profile_entry( &profile_total, (void *)1 );
- }
-}
diff --git a/tools/build/v2/engine/debug.h b/tools/build/v2/engine/debug.h
deleted file mode 100644
index baad262a00..0000000000
--- a/tools/build/v2/engine/debug.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- Copyright Rene Rivera 2005.
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-#ifndef BJAM_DEBUG_H
-#define BJAM_DEBUG_H
-
-#include "constants.h"
-#include "jam.h"
-#include <time.h>
-
-
-struct profile_info
-{
- /* name of rule being called */
- OBJECT * name;
- /* cumulative time spent in rule */
- clock_t cumulative;
- /* time spent in rule proper */
- clock_t net;
- /* number of times the rule was entered */
- unsigned long num_entries;
- /* number of times this rule is present on the call stack */
- unsigned long stack_count;
- /* bytes of memory allocated by the call */
- unsigned long memory;
-};
-typedef struct profile_info profile_info;
-
-struct profile_frame
-{
- /* permanent storage where data accumulates */
- profile_info* info;
- /* overhead for profiling in this call */
- clock_t overhead;
- /* time of last entry to rule */
- clock_t entry_time;
- /* stack frame of caller */
- struct profile_frame* caller;
- /* time spent in subrules */
- clock_t subrules;
-};
-typedef struct profile_frame profile_frame;
-
-profile_frame * profile_init( OBJECT * rulename, profile_frame * frame );
-void profile_enter( OBJECT * rulename, profile_frame * frame );
-void profile_memory( long mem );
-void profile_exit( profile_frame * frame );
-void profile_dump();
-
-#define PROFILE_ENTER( scope ) profile_frame PROF_ ## scope, *PROF_ ## scope ## _p = profile_init( constant_ ## scope, &PROF_ ## scope )
-#define PROFILE_EXIT( scope ) profile_exit( PROF_ ## scope ## _p )
-
-#endif
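The PROFILE_ENTER/PROFILE_EXIT pair charges elapsed time to one of the predefined constant_* buckets when profiling is enabled. A minimal hedged sketch using the existing FILE_DIRSCAN constant as an example (helper name invented):

    /* Hypothetical helper, not part of the original sources. */
    static void timed_section_sketch( void )
    {
        PROFILE_ENTER( FILE_DIRSCAN );
        /* ... work whose time is charged to the FILE_DIRSCAN bucket ... */
        PROFILE_EXIT( FILE_DIRSCAN );
    }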
diff --git a/tools/build/v2/engine/execcmd.h b/tools/build/v2/engine/execcmd.h
deleted file mode 100644
index 9d3cff35b2..0000000000
--- a/tools/build/v2/engine/execcmd.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * execcmd.h - execute a shell script.
- *
- * Defines the interface to be implemented in platform specific implementation
- * modules.
- *
- * 05/04/94 (seiwald) - async multiprocess interface
- */
-
-#ifndef EXECCMD_H
-#define EXECCMD_H
-
-#include <time.h>
-
-typedef struct timing_info
-{
- double system;
- double user;
- time_t start;
- time_t end;
-} timing_info;
-
-void exec_cmd
-(
- const char * string,
- void (* func)( void * closure, int status, timing_info *, const char *, const char * ),
- void * closure,
- LIST * shell,
- const char * action,
- const char * target
-);
-
-int exec_wait();
-
-void exec_done( void );
-
-#define EXEC_CMD_OK 0
-#define EXEC_CMD_FAIL 1
-#define EXEC_CMD_INTR 2
-
-#endif
diff --git a/tools/build/v2/engine/execnt.c b/tools/build/v2/engine/execnt.c
deleted file mode 100644
index f34b378549..0000000000
--- a/tools/build/v2/engine/execnt.c
+++ /dev/null
@@ -1,1303 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2007 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "execcmd.h"
-#include "pathsys.h"
-#include "string.h"
-#include "output.h"
-#include <errno.h>
-#include <assert.h>
-#include <ctype.h>
-#include <time.h>
-#include <math.h>
-
-#ifdef USE_EXECNT
-
-#define WIN32_LEAN_AND_MEAN
-#include <windows.h>
-#include <process.h>
-#include <tlhelp32.h>
-
-/*
- * execnt.c - execute a shell command on Windows NT
- *
- * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp().
- * The default is:
- *
- * /bin/sh -c % [ on UNIX/AmigaOS ]
- * cmd.exe /c % [ on Windows NT ]
- *
- * Each word must be an individual element in a jam variable value.
- *
- * In $(JAMSHELL), % expands to the command string and ! expands to
- * the slot number (starting at 1) for multiprocess (-j) invocations.
- * If $(JAMSHELL) doesn't include a %, it is tacked on as the last
- * argument.
- *
- * Don't just set JAMSHELL to /bin/sh or cmd.exe - it won't work!
- *
- * External routines:
- * exec_cmd() - launch an async command execution.
- * exec_wait() - wait and drive at most one execution completion.
- *
- * Internal routines:
- * onintr() - bump intr to note command interruption.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 05/04/94 (seiwald) - async multiprocess interface
- * 01/22/95 (seiwald) - $(JAMSHELL) support
- * 06/02/97 (gsar) - full async multiprocess support for Win32
- */
-
-/* get the maximum command line length according to the OS */
-int maxline();
-
-/* delete an argv list */
-static void free_argv(const char * *);
-/* Convert a command string into arguments for spawnvp. */
-static const char** string_to_args(const char*);
-/* bump intr to note command interruption */
-static void onintr(int);
-/* If the command is suitable for execution via spawnvp */
-long can_spawn(const char*);
-/* Add two 64-bit unsigned numbers, h1l1 and h2l2 */
-static FILETIME add_64(
- unsigned long h1, unsigned long l1,
- unsigned long h2, unsigned long l2);
-static FILETIME add_FILETIME(FILETIME t1, FILETIME t2);
-static FILETIME negate_FILETIME(FILETIME t);
-/* Convert a FILETIME to a number of seconds */
-static double filetime_seconds(FILETIME t);
-/* record the timing info for the process */
-static void record_times(HANDLE, timing_info*);
-/* calc the current running time of an *active* process */
-static double running_time(HANDLE);
-/* */
-DWORD get_process_id(HANDLE);
-/* terminate the given process, after terminating all its children */
-static void kill_process_tree(DWORD, HANDLE);
-/* waits for a command to complete or for the given timeout, whichever is first */
-static int try_wait(int timeoutMillis);
-/* reads any pending output for running commands */
-static void read_output();
-/* checks if a command ran out of time, and kills it */
-static int try_kill_one();
-/* */
-static double creation_time(HANDLE);
-/* Recursively check if the first process is a parent (directly or indirectly)
-of the second one. */
-static int is_parent_child(DWORD, DWORD);
-/* */
-static void close_alert(HANDLE);
-/* close any alerts hanging around */
-static void close_alerts();
-
-/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
-
-static int intr = 0;
-static int cmdsrunning = 0;
-static void (* istat)( int );
-
-
-/* The list of commands we run. */
-static struct
-{
- string action; /* buffer to hold action */
- string target; /* buffer to hold target */
- string command; /* buffer to hold command being invoked */
-
- /* Temporary batch file used to execute the action when needed. */
- char * tempfile_bat;
-
- /* Pipes for communicating with the child process. Parent reads from (0),
- * child writes to (1).
- */
- HANDLE pipe_out[ 2 ];
- HANDLE pipe_err[ 2 ];
-
- string buffer_out; /* buffer to hold stdout, if any */
- string buffer_err; /* buffer to hold stderr, if any */
-
- PROCESS_INFORMATION pi; /* running process information */
- DWORD exit_code; /* executed command's exit code */
- int exit_reason; /* reason why a command completed */
-
- /* Function called when the command completes. */
- void (* func)( void * closure, int status, timing_info *, const char *, const char * );
-
- /* Opaque data passed back to the 'func' callback called when the command
- * completes.
- */
- void * closure;
-}
-cmdtab[ MAXJOBS ] = { { 0 } };
-
-
-/*
- * Execution unit tests.
- */
-
-void execnt_unit_test()
-{
-#if !defined( NDEBUG )
-    /* The vc6 preprocessor is broken, so assert() gets confused by these
-     * strings. Use a table instead.
-     */
- typedef struct test { char * command; int result; } test;
- test tests[] = {
- { "x", 0 },
- { "x\n ", 0 },
- { "x\ny", 1 },
- { "x\n\n y", 1 },
- { "echo x > foo.bar", 1 },
- { "echo x < foo.bar", 1 },
- { "echo x \">\" foo.bar", 0 },
- { "echo x \"<\" foo.bar", 0 },
- { "echo x \\\">\\\" foo.bar", 1 },
- { "echo x \\\"<\\\" foo.bar", 1 } };
- int i;
- for ( i = 0; i < sizeof( tests ) / sizeof( *tests ); ++i )
- assert( !can_spawn( tests[ i ].command ) == tests[ i ].result );
-
- {
- char * long_command = BJAM_MALLOC_ATOMIC( MAXLINE + 10 );
- assert( long_command != 0 );
- memset( long_command, 'x', MAXLINE + 9 );
- long_command[ MAXLINE + 9 ] = 0;
- assert( can_spawn( long_command ) == MAXLINE + 9 );
- BJAM_FREE( long_command );
- }
-
- {
- /* Work around vc6 bug; it doesn't like escaped string
- * literals inside assert
- */
- const char * * argv = string_to_args(" \"g++\" -c -I\"Foobar\"" );
- char const expected[] = "-c -I\"Foobar\"";
-
- assert( !strcmp( argv[ 0 ], "g++" ) );
- assert( !strcmp( argv[ 1 ], expected ) );
- free_argv( argv );
- }
-#endif
-}
-
-
-/*
- * exec_cmd() - launch an async command execution.
- */
-
-void exec_cmd
-(
- const char * command,
- void (* func)( void * closure, int status, timing_info *, const char * invoked_command, const char * command_output ),
- void * closure,
- LIST * shell,
- const char * action,
- const char * target
-)
-{
- int slot;
- int raw_cmd = 0 ;
- const char * argv_static[ MAXARGC + 1 ]; /* +1 for NULL */
- const char * * argv = argv_static;
- char * p;
- const char * command_orig = command;
-
- /* Check to see if we need to hack around the line-length limitation. Look
- * for a JAMSHELL setting of "%", indicating that the command should be
- * invoked directly.
- */
- if ( !list_empty( shell ) && !strcmp( object_str( list_front( shell ) ), "%" ) && list_next( list_begin( shell ) ) == list_end( shell ) )
- {
- raw_cmd = 1;
- shell = 0;
- }
-
- /* Find a slot in the running commands table for this one. */
- for ( slot = 0; slot < MAXJOBS; ++slot )
- if ( !cmdtab[ slot ].pi.hProcess )
- break;
- if ( slot == MAXJOBS )
- {
- printf( "no slots for child!\n" );
- exit( EXITBAD );
- }
-
- /* Compute the name of a temp batch file, for possible use. */
- if ( !cmdtab[ slot ].tempfile_bat )
- {
- char const * tempdir = path_tmpdir();
- DWORD procID = GetCurrentProcessId();
-
- /* SVA - allocate 64 bytes extra just to be safe. */
- cmdtab[ slot ].tempfile_bat = BJAM_MALLOC_ATOMIC( strlen( tempdir ) + 64 );
-
- sprintf( cmdtab[ slot ].tempfile_bat, "%s\\jam%d-%02d.bat",
- tempdir, procID, slot );
- }
-
-    /* Trim leading (but not trailing) white space. */
- while ( *( command + 1 ) && isspace( *command ) )
- ++command;
-
- /* Write to .BAT file unless the line would be too long and it meets the
- * other spawnability criteria.
- */
- if ( raw_cmd && ( can_spawn( command ) >= MAXLINE ) )
- {
- if ( DEBUG_EXECCMD )
- printf("Executing raw command directly\n");
- }
- else
- {
- FILE * f = 0;
- int tries = 0;
- raw_cmd = 0;
-
-        /* Write the command to the bat file. For some reason this open can
-         * fail intermittently, but a few retries work around it. Most likely this is due
- * to a previously existing file of the same name that happens to be
- * opened by an active virus scanner. Pointed out and fixed by Bronek
- * Kozicki.
- */
- for ( ; !f && ( tries < 4 ); ++tries )
- {
- f = fopen( cmdtab[ slot ].tempfile_bat, "w" );
- if ( !f && ( tries < 4 ) ) Sleep( 250 );
- }
- if ( !f )
- {
- printf( "failed to write command file!\n" );
- exit( EXITBAD );
- }
- fputs( command, f );
- fclose( f );
-
- command = cmdtab[ slot ].tempfile_bat;
-
- if ( DEBUG_EXECCMD )
- {
- if ( !list_empty( shell ) )
- printf( "using user-specified shell: %s", object_str( list_front( shell ) ) );
- else
- printf( "Executing through .bat file\n" );
- }
- }
-
-    /* Formulate argv. If a shell was defined, be prepared for % and ! substitutions.
- * Otherwise, use stock cmd.exe.
- */
- if ( shell )
- {
- int i;
- char jobno[ 4 ];
- int gotpercent = 0;
- LISTITER shell_iter = list_begin( shell ), shell_end = list_end( shell );
-
- sprintf( jobno, "%d", slot + 1 );
-
- for ( i = 0; shell_iter != shell_end && ( i < MAXARGC ); ++i, shell_iter = list_next( shell_iter ) )
- {
- switch ( object_str( list_item( shell_iter ) )[ 0 ] )
- {
- case '%': argv[ i ] = command; ++gotpercent; break;
- case '!': argv[ i ] = jobno; break;
- default : argv[ i ] = object_str( list_item( shell_iter ) );
- }
- if ( DEBUG_EXECCMD )
- printf( "argv[%d] = '%s'\n", i, argv[ i ] );
- }
-
- if ( !gotpercent )
- argv[ i++ ] = command;
-
- argv[ i ] = 0;
- }
- else if ( raw_cmd )
- {
- argv = string_to_args( command );
- }
- else
- {
- argv[ 0 ] = "cmd.exe";
- argv[ 1 ] = "/Q/C"; /* anything more is non-portable */
- argv[ 2 ] = command;
- argv[ 3 ] = 0;
- }
-
- /* Catch interrupts whenever commands are running. */
- if ( !cmdsrunning++ )
- istat = signal( SIGINT, onintr );
-
- /* Start the command. */
- {
- SECURITY_ATTRIBUTES sa
- = { sizeof( SECURITY_ATTRIBUTES ), 0, 0 };
- SECURITY_DESCRIPTOR sd;
- STARTUPINFO si
- = { sizeof( STARTUPINFO ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
- string cmd;
-
- /* Init the security data. */
- InitializeSecurityDescriptor( &sd, SECURITY_DESCRIPTOR_REVISION );
- SetSecurityDescriptorDacl( &sd, TRUE, NULL, FALSE );
- sa.lpSecurityDescriptor = &sd;
- sa.bInheritHandle = TRUE;
-
- /* Create the stdout, which is also the merged out + err, pipe. */
- if ( !CreatePipe( &cmdtab[ slot ].pipe_out[ 0 ],
- &cmdtab[ slot ].pipe_out[ 1 ], &sa, 0 ) )
- {
- perror( "CreatePipe" );
- exit( EXITBAD );
- }
-
-        /* Create the stderr pipe, used when action stderr is piped separately. */
- if ( globs.pipe_action == 2 )
- {
- if ( !CreatePipe( &cmdtab[ slot ].pipe_err[ 0 ],
- &cmdtab[ slot ].pipe_err[ 1 ], &sa, 0 ) )
- {
- perror( "CreatePipe" );
- exit( EXITBAD );
- }
- }
-
- /* Set handle inheritance off for the pipe ends the parent reads from. */
- SetHandleInformation( cmdtab[ slot ].pipe_out[ 0 ], HANDLE_FLAG_INHERIT, 0 );
- if ( globs.pipe_action == 2 )
- SetHandleInformation( cmdtab[ slot ].pipe_err[ 0 ], HANDLE_FLAG_INHERIT, 0 );
-
- /* Hide the child window, if any. */
- si.dwFlags |= STARTF_USESHOWWINDOW;
- si.wShowWindow = SW_HIDE;
-
- /* Set the child outputs to the pipes. */
- si.dwFlags |= STARTF_USESTDHANDLES;
- si.hStdOutput = cmdtab[ slot ].pipe_out[ 1 ];
- if ( globs.pipe_action == 2 )
- {
- /* Pipe stderr to the action error output. */
- si.hStdError = cmdtab[ slot ].pipe_err[ 1 ];
- }
- else if ( globs.pipe_action == 1 )
- {
- /* Pipe stderr to the console error output. */
- si.hStdError = GetStdHandle( STD_ERROR_HANDLE );
- }
- else
- {
- /* Pipe stderr to the action merged output. */
- si.hStdError = cmdtab[ slot ].pipe_out[ 1 ];
- }
-
- /* Let the child inherit stdin, as some commands assume it's available. */
- si.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
-
- /* Save the operation for exec_wait() to find. */
- cmdtab[ slot ].func = func;
- cmdtab[ slot ].closure = closure;
- if ( action && target )
- {
- string_copy( &cmdtab[ slot ].action, action );
- string_copy( &cmdtab[ slot ].target, target );
- }
- else
- {
- string_free( &cmdtab[ slot ].action );
- string_new ( &cmdtab[ slot ].action );
- string_free( &cmdtab[ slot ].target );
- string_new ( &cmdtab[ slot ].target );
- }
- string_copy( &cmdtab[ slot ].command, command_orig );
-
- /* Put together the command we run. */
- {
- const char * * argp = argv;
- string_new( &cmd );
- string_copy( &cmd, *(argp++) );
- while ( *argp )
- {
- string_push_back( &cmd, ' ' );
- string_append( &cmd, *(argp++) );
- }
- }
-
- /* Create output buffers. */
- string_new( &cmdtab[ slot ].buffer_out );
- string_new( &cmdtab[ slot ].buffer_err );
-
- /* Run the command by creating a sub-process for it. */
- if (
- ! CreateProcess(
- NULL , /* application name */
- cmd.value , /* command line */
- NULL , /* process attributes */
- NULL , /* thread attributes */
- TRUE , /* inherit handles */
- CREATE_NEW_PROCESS_GROUP, /* create flags */
- NULL , /* env vars, null inherits env */
- NULL , /* current dir, null is our */
- /* current dir */
- &si , /* startup info */
- &cmdtab[ slot ].pi /* child process info, if created */
- )
- )
- {
- perror( "CreateProcess" );
- exit( EXITBAD );
- }
-
- /* Clean up temporary stuff. */
- string_free( &cmd );
- }
-
- /* Wait until we are under the limit of concurrent commands. Do not trust
- * globs.jobs alone.
- */
- while ( ( cmdsrunning >= MAXJOBS ) || ( cmdsrunning >= globs.jobs ) )
- if ( !exec_wait() )
- break;
-
- if ( argv != argv_static )
- free_argv( argv );
-}
-
-
-/*
- * exec_wait()
- *  - waits for one command to complete, while processing the i/o for all
- *    ongoing commands.
- *  - drives at most one execution completion per call.
- *
- * Returns 0 if called when there were no more commands being executed or 1
- * otherwise.
- */
-
-int exec_wait()
-{
- int i = -1;
-
- /* Handle naive make1() which does not know if cmds are running. */
- if ( !cmdsrunning )
- return 0;
-
- /* Wait for a command to complete, while snarfing up any output. */
- do
- {
- /* Check for a complete command, briefly. */
- i = try_wait(500);
- /* Read in the output of all running commands. */
- read_output();
- /* Close out pending debug style dialogs. */
- close_alerts();
- /* Check if a command ran out of time. */
- if ( i < 0 ) i = try_kill_one();
- }
- while ( i < 0 );
-
- /* We have a command... process it. */
- --cmdsrunning;
- {
- timing_info time;
- int rstat;
-
- /* The time data for the command. */
- record_times( cmdtab[ i ].pi.hProcess, &time );
-
- /* Clear the temp file. */
- if ( cmdtab[ i ].tempfile_bat )
- {
- unlink( cmdtab[ i ].tempfile_bat );
- BJAM_FREE( cmdtab[ i ].tempfile_bat );
- cmdtab[ i ].tempfile_bat = NULL;
- }
-
- /* Find out the process exit code. */
- GetExitCodeProcess( cmdtab[ i ].pi.hProcess, &cmdtab[ i ].exit_code );
-
-        /* The disposition of the command. */
- if ( intr )
- rstat = EXEC_CMD_INTR;
- else if ( cmdtab[ i ].exit_code != 0 )
- rstat = EXEC_CMD_FAIL;
- else
- rstat = EXEC_CMD_OK;
-
- /* Output the action block. */
- out_action(
- cmdtab[ i ].action.size > 0 ? cmdtab[ i ].action.value : 0,
- cmdtab[ i ].target.size > 0 ? cmdtab[ i ].target.value : 0,
- cmdtab[ i ].command.size > 0 ? cmdtab[ i ].command.value : 0,
- cmdtab[ i ].buffer_out.size > 0 ? cmdtab[ i ].buffer_out.value : 0,
- cmdtab[ i ].buffer_err.size > 0 ? cmdtab[ i ].buffer_err.value : 0,
- cmdtab[ i ].exit_reason );
-
-        /* Call the callback; this may call back into jam rule land. Assume -p0
-         * is in effect, so only pass the buffer containing the merged output.
- */
- (*cmdtab[ i ].func)(
- cmdtab[ i ].closure,
- rstat,
- &time,
- cmdtab[ i ].command.value,
- cmdtab[ i ].buffer_out.value );
-
- /* Clean up the command data, process, etc. */
- string_free( &cmdtab[ i ].action ); string_new( &cmdtab[ i ].action );
- string_free( &cmdtab[ i ].target ); string_new( &cmdtab[ i ].target );
- string_free( &cmdtab[ i ].command ); string_new( &cmdtab[ i ].command );
- if ( cmdtab[ i ].pi.hProcess ) { CloseHandle( cmdtab[ i ].pi.hProcess ); cmdtab[ i ].pi.hProcess = 0; }
- if ( cmdtab[ i ].pi.hThread ) { CloseHandle( cmdtab[ i ].pi.hThread ); cmdtab[ i ].pi.hThread = 0; }
- if ( cmdtab[ i ].pipe_out[ 0 ] ) { CloseHandle( cmdtab[ i ].pipe_out[ 0 ] ); cmdtab[ i ].pipe_out[ 0 ] = 0; }
- if ( cmdtab[ i ].pipe_out[ 1 ] ) { CloseHandle( cmdtab[ i ].pipe_out[ 1 ] ); cmdtab[ i ].pipe_out[ 1 ] = 0; }
- if ( cmdtab[ i ].pipe_err[ 0 ] ) { CloseHandle( cmdtab[ i ].pipe_err[ 0 ] ); cmdtab[ i ].pipe_err[ 0 ] = 0; }
- if ( cmdtab[ i ].pipe_err[ 1 ] ) { CloseHandle( cmdtab[ i ].pipe_err[ 1 ] ); cmdtab[ i ].pipe_err[ 1 ] = 0; }
- string_free( &cmdtab[ i ].buffer_out ); string_new( &cmdtab[ i ].buffer_out );
- string_free( &cmdtab[ i ].buffer_err ); string_new( &cmdtab[ i ].buffer_err );
- cmdtab[ i ].exit_code = 0;
- cmdtab[ i ].exit_reason = EXIT_OK;
- }
-
- return 1;
-}
-
-
-/* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
-
-static void free_argv( const char * * args )
-{
- BJAM_FREE( (void *)args[ 0 ] );
- BJAM_FREE( (void *)args );
-}
-
-
-/*
- * For more details on Windows cmd.exe shell command-line length limitations see
- * the following MSDN article:
- * http://support.microsoft.com/default.aspx?scid=kb;en-us;830473
- */
-
-int maxline()
-{
- OSVERSIONINFO os_info;
- os_info.dwOSVersionInfoSize = sizeof( os_info );
- GetVersionEx( &os_info );
-
- if ( os_info.dwMajorVersion >= 5 ) return 8191; /* XP > */
- if ( os_info.dwMajorVersion == 4 ) return 2047; /* NT 4.x */
- return 996; /* NT 3.5.1 */
-}
-
-
-/*
- * Convert a command string into arguments for spawnvp(). The original code,
- * inherited from ftjam, tried to break up every argument on the command-line,
- * dealing with quotes, but that is really a waste of time on Win32, at least.
- * It turns out that all you need to do is get the raw path to the executable in
- * the first argument to spawnvp(), and you can pass all the rest of the
- * command-line arguments to spawnvp() in one, un-processed string.
- *
- * New strategy: break the string in at most one place.
- */
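-
-/* Illustrative example (mirroring the unit test above, not a new guarantee):
- * the input
- *      "g++" -c -I"Foobar"
- * is split into argv[ 0 ] = g++ (quotes stripped) and
- * argv[ 1 ] = -c -I"Foobar" (passed through verbatim).
- */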
-
-static const char * * string_to_args( char const * string )
-{
- int src_len;
- int in_quote;
- char * line;
- char const * src;
- char * dst;
- const char * * argv;
-
- /* Drop leading and trailing whitespace if any. */
- while ( isspace( *string ) )
- ++string;
-
- src_len = strlen( string );
- while ( ( src_len > 0 ) && isspace( string[ src_len - 1 ] ) )
- --src_len;
-
- /* Copy the input string into a buffer we can modify. */
- line = (char *)BJAM_MALLOC_ATOMIC( src_len + 1 );
- if ( !line )
- return 0;
-
- /* Allocate the argv array.
- * element 0: stores the path to the executable
- * element 1: stores the command-line arguments to the executable
- * element 2: NULL terminator
- */
- argv = (const char * *)BJAM_MALLOC( 3 * sizeof( const char * ) );
- if ( !argv )
- {
- BJAM_FREE( line );
- return 0;
- }
-
- /* Strip quotes from the first command-line argument and find where it ends.
- * Quotes are illegal in Win32 pathnames, so we do not need to worry about
- * preserving escaped quotes here. Spaces can not be escaped in Win32, only
- * enclosed in quotes, so removing backslash escapes is also a non-issue.
- */
- in_quote = 0;
- for ( src = string, dst = line ; *src; ++src )
- {
- if ( *src == '"' )
- in_quote = !in_quote;
- else if ( !in_quote && isspace( *src ) )
- break;
- else
- *dst++ = *src;
- }
- *dst++ = 0;
- argv[ 0 ] = line;
-
- /* Skip whitespace in src. */
- while ( isspace( *src ) )
- ++src;
-
- argv[ 1 ] = dst;
-
- /* Copy the rest of the arguments verbatim. */
- src_len -= src - string;
-
- /* Use strncat() because it appends a trailing nul. */
- *dst = 0;
- strncat( dst, src, src_len );
-
- argv[ 2 ] = 0;
-
- return argv;
-}
-
-
-static void onintr( int disp )
-{
- ++intr;
- printf( "...interrupted\n" );
-}
-
-
-/*
- * can_spawn() - If the command is suitable for execution via spawnvp(), return
- * a number >= the number of characters it would occupy on the command-line.
- * Otherwise, return zero.
- */
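-
-/* Examples (mirroring the unit test table above): can_spawn( "echo x > foo.bar" )
- * returns 0 because of the unquoted redirection, while
- * can_spawn( "echo x \">\" foo.bar" ) returns the command's length because the
- * redirection character is quoted.
- */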
-
-long can_spawn( const char * command )
-{
- const char * p;
- char inquote = 0;
-
- /* Move to the first non-whitespace. */
- command += strspn( command, " \t" );
-
- p = command;
-
- /* Look for newlines and unquoted i/o redirection. */
- do
- {
- p += strcspn( p, "'\n\"<>|" );
-
- switch ( *p )
- {
- case '\n':
- /* Skip over any following spaces. */
- while ( isspace( *p ) )
- ++p;
- /* Must use a .bat file if there is anything significant following
- * the newline.
- */
- if ( *p )
- return 0;
- break;
-
- case '"':
- case '\'':
- if ( ( p > command ) && ( p[ -1 ] != '\\' ) )
- {
- if ( inquote == *p )
- inquote = 0;
- else if ( inquote == 0 )
- inquote = *p;
- }
- ++p;
- break;
-
- case '<':
- case '>':
- case '|':
- if ( !inquote )
- return 0;
- ++p;
- break;
- }
- }
- while ( *p );
-
- /* Return the number of characters the command will occupy. */
- return p - command;
-}
-
-
-/* 64-bit arithmetic helpers. */
-
-/* Compute the carry bit from the addition of two 32-bit unsigned numbers. */
-#define add_carry_bit( a, b ) ( (((a) | (b)) >> 31) & (~((a) + (b)) >> 31) & 0x1 )
-
-/* Compute the high 32 bits of the addition of two 64-bit unsigned numbers, h1l1 and h2l2. */
-#define add_64_hi( h1, l1, h2, l2 ) ((h1) + (h2) + add_carry_bit(l1, l2))
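-
-/* Worked example (assuming a 32-bit unsigned long, as on Win32): for
- * l1 = 0xFFFFFFFF and l2 = 0x1 the low-word sum wraps to 0 and
- * add_carry_bit( l1, l2 ) evaluates to 1, so add_64_hi() carries that bit into
- * the high word.
- */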
-
-
-/*
- * Add two 64-bit unsigned numbers, h1l1 and h2l2.
- */
-
-static FILETIME add_64
-(
- unsigned long h1, unsigned long l1,
- unsigned long h2, unsigned long l2
-)
-{
- FILETIME result;
- result.dwLowDateTime = l1 + l2;
- result.dwHighDateTime = add_64_hi( h1, l1, h2, l2 );
- return result;
-}
-
-
-static FILETIME add_FILETIME( FILETIME t1, FILETIME t2 )
-{
- return add_64( t1.dwHighDateTime, t1.dwLowDateTime, t2.dwHighDateTime,
- t2.dwLowDateTime );
-}
-
-
-static FILETIME negate_FILETIME( FILETIME t )
-{
-    /* Two's complement negation. */
- return add_64( ~t.dwHighDateTime, ~t.dwLowDateTime, 0, 1 );
-}
-
-
-/*
- * Convert a FILETIME to a number of seconds.
- */
-
-static double filetime_seconds( FILETIME t )
-{
- return t.dwHighDateTime * ( (double)( 1UL << 31 ) * 2.0 * 1.0e-7 ) + t.dwLowDateTime * 1.0e-7;
-}
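-
-/* Sanity check (a FILETIME counts 100-nanosecond intervals): 10,000,000 ticks
- * in dwLowDateTime yield 1.0 second, while each unit of dwHighDateTime
- * contributes 2^32 * 1.0e-7, i.e. roughly 429.5 seconds.
- */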
-
-
-/*
- * What should be a simple conversion turns out to be horribly complicated by
- * the deficiencies of MSVC and the Win32 API.
- */
-
-static time_t filetime_dt( FILETIME t_utc )
-{
- static int calc_time_diff = 1;
- static double time_diff;
- if ( calc_time_diff )
- {
- struct tm t0_;
- FILETIME f0_local;
- FILETIME f0_;
- SYSTEMTIME s0_;
- GetSystemTime( &s0_ );
- t0_.tm_year = s0_.wYear-1900;
- t0_.tm_mon = s0_.wMonth-1;
- t0_.tm_wday = s0_.wDayOfWeek;
- t0_.tm_mday = s0_.wDay;
- t0_.tm_hour = s0_.wHour;
- t0_.tm_min = s0_.wMinute;
- t0_.tm_sec = s0_.wSecond;
- t0_.tm_isdst = 0;
- SystemTimeToFileTime( &s0_, &f0_local );
- LocalFileTimeToFileTime( &f0_local, &f0_ );
- time_diff = filetime_seconds( f0_ ) - (double)mktime( &t0_ );
- calc_time_diff = 0;
- }
- return ceil( filetime_seconds( t_utc ) - time_diff );
-}
-
-
-static void record_times( HANDLE process, timing_info * time )
-{
- FILETIME creation;
- FILETIME exit;
- FILETIME kernel;
- FILETIME user;
- if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
- {
- time->system = filetime_seconds( kernel );
- time->user = filetime_seconds( user );
- time->start = filetime_dt ( creation );
- time->end = filetime_dt ( exit );
- }
-}
-
-
-#define IO_BUFFER_SIZE ( 16 * 1024 )
-
-static char ioBuffer[ IO_BUFFER_SIZE + 1 ];
-
-
-static void read_pipe
-(
- HANDLE in, /* the pipe to read from */
- string * out
-)
-{
- DWORD bytesInBuffer = 0;
- DWORD bytesAvailable = 0;
-
- do
- {
- /* check if we have any data to read */
- if ( !PeekNamedPipe( in, ioBuffer, IO_BUFFER_SIZE, &bytesInBuffer, &bytesAvailable, NULL ) )
- bytesAvailable = 0;
-
- /* read in the available data */
- if ( bytesAvailable > 0 )
- {
- /* we only read in the available bytes, to avoid blocking */
- if ( ReadFile( in, ioBuffer,
- bytesAvailable <= IO_BUFFER_SIZE ? bytesAvailable : IO_BUFFER_SIZE,
- &bytesInBuffer, NULL ) )
- {
- if ( bytesInBuffer > 0 )
- {
- /* Clean up some illegal chars. */
- int i;
- for ( i = 0; i < bytesInBuffer; ++i )
- {
- if ( ( (unsigned char)ioBuffer[ i ] < 1 ) )
- ioBuffer[ i ] = '?';
- }
-                    /* Null-terminate. */
- ioBuffer[ bytesInBuffer ] = '\0';
- /* Append to the output. */
- string_append( out, ioBuffer );
- /* Subtract what we read in. */
- bytesAvailable -= bytesInBuffer;
- }
- else
- {
-                    /* Likely a read error; bail out. */
- bytesAvailable = 0;
- }
- }
- else
- {
-                /* Definitely a read error; bail out. */
- bytesAvailable = 0;
- }
- }
- }
- while ( bytesAvailable > 0 );
-}
-
-
-static void read_output()
-{
- int i;
- for ( i = 0; i < globs.jobs && i < MAXJOBS; ++i )
- {
- /* Read stdout data. */
- if ( cmdtab[ i ].pipe_out[ 0 ] )
- read_pipe( cmdtab[ i ].pipe_out[ 0 ], & cmdtab[ i ].buffer_out );
- /* Read stderr data. */
- if ( cmdtab[ i ].pipe_err[ 0 ] )
- read_pipe( cmdtab[ i ].pipe_err[ 0 ], & cmdtab[ i ].buffer_err );
- }
-}
-
-
-/*
- * Waits for a single child process command to complete, or the timeout,
- * whichever comes first. Returns the index of the completed command in the
- * cmdtab array, or -1.
- */
-
-static int try_wait( int timeoutMillis )
-{
- int i;
- int num_active;
- int wait_api_result;
- HANDLE active_handles[ MAXJOBS ];
- int active_procs[ MAXJOBS ];
-
- /* Prepare a list of all active processes to wait for. */
- for ( num_active = 0, i = 0; i < globs.jobs; ++i )
- {
- if ( cmdtab[ i ].pi.hProcess )
- {
- active_handles[ num_active ] = cmdtab[ i ].pi.hProcess;
- active_procs[ num_active ] = i;
- ++num_active;
- }
- }
-
- /* Wait for a child to complete, or for our timeout window to expire. */
- wait_api_result = WaitForMultipleObjects( num_active, active_handles,
- FALSE, timeoutMillis );
- if ( ( WAIT_OBJECT_0 <= wait_api_result ) &&
- ( wait_api_result < WAIT_OBJECT_0 + num_active ) )
- {
-        /* Terminated process detected - return its index. */
- return active_procs[ wait_api_result - WAIT_OBJECT_0 ];
- }
-
- /* Timeout. */
- return -1;
-}
-
-
-static int try_kill_one()
-{
- /* Only need to check if a timeout was specified with the -l option. */
- if ( globs.timeout > 0 )
- {
- int i;
- for ( i = 0; i < globs.jobs; ++i )
- {
- double t = running_time( cmdtab[ i ].pi.hProcess );
- if ( t > (double)globs.timeout )
- {
-                /* The job may have left an alert dialog around; try to get rid
-                 * of it before killing.
-                 */
- close_alert( cmdtab[ i ].pi.hProcess );
- /* We have a "runaway" job, kill it. */
- kill_process_tree( 0, cmdtab[ i ].pi.hProcess );
- /* And return it marked as a timeout. */
- cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
- return i;
- }
- }
- }
- return -1;
-}
-
-
-static void close_alerts()
-{
-    /* We only attempt this every 5 seconds or so, because it is not a cheap
- * operation, and we will catch the alerts eventually. This check uses
- * floats as some compilers define CLOCKS_PER_SEC as a float or double.
- */
- if ( ( (float)clock() / (float)( CLOCKS_PER_SEC * 5 ) ) < ( 1.0 / 5.0 ) )
- {
- int i;
- for ( i = 0; i < globs.jobs; ++i )
- close_alert( cmdtab[ i ].pi.hProcess );
- }
-}
-
-
-/*
- * Calc the current running time of an *active* process.
- */
-
-static double running_time( HANDLE process )
-{
- FILETIME creation;
- FILETIME exit;
- FILETIME kernel;
- FILETIME user;
- FILETIME current;
- if ( GetProcessTimes( process, &creation, &exit, &kernel, &user ) )
- {
- /* Compute the elapsed time. */
- GetSystemTimeAsFileTime( &current );
- return filetime_seconds( add_FILETIME( current,
- negate_FILETIME( creation ) ) );
- }
- return 0.0;
-}
-
-
-/* It is just stupidly silly that one has to do this. */
-typedef struct PROCESS_BASIC_INFORMATION__
-{
- LONG ExitStatus;
- PVOID PebBaseAddress;
- ULONG AffinityMask;
- LONG BasePriority;
- ULONG UniqueProcessId;
- ULONG InheritedFromUniqueProcessId;
-} PROCESS_BASIC_INFORMATION_;
-typedef LONG (__stdcall * NtQueryInformationProcess__)(
- HANDLE ProcessHandle,
- LONG ProcessInformationClass,
- PVOID ProcessInformation,
- ULONG ProcessInformationLength,
- PULONG ReturnLength);
-static NtQueryInformationProcess__ NtQueryInformationProcess_ = NULL;
-static HMODULE NTDLL_ = NULL;
-DWORD get_process_id( HANDLE process )
-{
- PROCESS_BASIC_INFORMATION_ pinfo;
- if ( !NtQueryInformationProcess_ )
- {
- if ( ! NTDLL_ )
- NTDLL_ = GetModuleHandleA( "ntdll" );
- if ( NTDLL_ )
- NtQueryInformationProcess_
- = (NtQueryInformationProcess__)GetProcAddress( NTDLL_, "NtQueryInformationProcess" );
- }
- if ( NtQueryInformationProcess_ )
- {
- LONG r = (*NtQueryInformationProcess_)( process,
- /* ProcessBasicInformation == */ 0, &pinfo,
- sizeof( PROCESS_BASIC_INFORMATION_ ), NULL );
- return pinfo.UniqueProcessId;
- }
- return 0;
-}
-
-
-/*
- * Not really optimal or efficient, but it is easier this way, and it is not
- * like we are going to be killing thousands, or even tens, of processes.
- */
-
-static void kill_process_tree( DWORD pid, HANDLE process )
-{
- HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
- if ( !pid )
- pid = get_process_id( process );
- process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
-
- if ( INVALID_HANDLE_VALUE != process_snapshot_h )
- {
- BOOL ok = TRUE;
- PROCESSENTRY32 pinfo;
- pinfo.dwSize = sizeof( PROCESSENTRY32 );
- for (
- ok = Process32First( process_snapshot_h, &pinfo );
- ok == TRUE;
- ok = Process32Next( process_snapshot_h, &pinfo ) )
- {
- if ( pinfo.th32ParentProcessID == pid )
- {
- /* Found a child, recurse to kill it and anything else below it.
- */
- HANDLE ph = OpenProcess( PROCESS_ALL_ACCESS, FALSE,
- pinfo.th32ProcessID );
- if ( NULL != ph )
- {
- kill_process_tree( pinfo.th32ProcessID, ph );
- CloseHandle( ph );
- }
- }
- }
- CloseHandle( process_snapshot_h );
- }
- /* Now that the children are all dead, kill the root. */
- TerminateProcess( process, -2 );
-}
-
-
-static double creation_time( HANDLE process )
-{
- FILETIME creation;
- FILETIME exit;
- FILETIME kernel;
- FILETIME user;
- FILETIME current;
- return GetProcessTimes( process, &creation, &exit, &kernel, &user )
- ? filetime_seconds( creation )
- : 0.0;
-}
-
-
-/*
- * Recursive check if first process is parent (directly or indirectly) of the
- * second one. Both processes are passed as process ids, not handles. Special
- * return value 2 means that the second process is smss.exe and its parent
- * process is System (first argument is ignored).
- */
-
-static int is_parent_child( DWORD parent, DWORD child )
-{
- HANDLE process_snapshot_h = INVALID_HANDLE_VALUE;
-
- if ( !child )
- return 0;
- if ( parent == child )
- return 1;
-
- process_snapshot_h = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS, 0 );
- if ( INVALID_HANDLE_VALUE != process_snapshot_h )
- {
- BOOL ok = TRUE;
- PROCESSENTRY32 pinfo;
- pinfo.dwSize = sizeof( PROCESSENTRY32 );
- for (
- ok = Process32First( process_snapshot_h, &pinfo );
- ok == TRUE;
- ok = Process32Next( process_snapshot_h, &pinfo ) )
- {
- if ( pinfo.th32ProcessID == child )
- {
-                /* Unfortunately, process ids are not really unique. There
-                 * might be a spurious "parent and child" relationship match
-                 * between two unrelated processes if the real parent of a given
-                 * process has exited (while the child kept running as an
-                 * "orphan") and the parent's process id has been reused by the
-                 * operating system when creating another process.
-                 *
-                 * Thus an additional check is needed: process creation time.
-                 * This check may fail (i.e. return 0) for system processes due
-                 * to insufficient privileges, and that is OK.
- */
- double tchild = 0.0;
- double tparent = 0.0;
- HANDLE hchild = OpenProcess( PROCESS_QUERY_INFORMATION, FALSE, pinfo.th32ProcessID );
- CloseHandle( process_snapshot_h );
-
-                /* csrss.exe may display a message box like the following:
- * xyz.exe - Unable To Locate Component
- * This application has failed to start because
- * boost_foo-bar.dll was not found. Re-installing the
- * application may fix the problem
-                 * This actually happens when starting a test process that
-                 * depends on a dynamic library which failed to build. We want
-                 * to automatically close these message boxes even though
-                 * csrss.exe is not our child process. We may rely on the fact
-                 * that (in all current versions of Windows) csrss.exe is a
-                 * direct child of the smss.exe process, which in turn is a
-                 * direct child of the System process, which always has process
-                 * id == 4. This check must be performed before the comparison
-                 * of process creation times.
- */
- if ( !stricmp( pinfo.szExeFile, "csrss.exe" ) &&
- ( is_parent_child( parent, pinfo.th32ParentProcessID ) == 2 ) )
- return 1;
- if ( !stricmp( pinfo.szExeFile, "smss.exe" ) &&
- ( pinfo.th32ParentProcessID == 4 ) )
- return 2;
-
- if ( hchild )
- {
- HANDLE hparent = OpenProcess( PROCESS_QUERY_INFORMATION,
- FALSE, pinfo.th32ParentProcessID );
- if ( hparent )
- {
- tchild = creation_time( hchild );
- tparent = creation_time( hparent );
- CloseHandle( hparent );
- }
- CloseHandle( hchild );
- }
-
- /* Return 0 if one of the following is true:
- * 1. we failed to read process creation time
- * 2. child was created before alleged parent
- */
- if ( ( tchild == 0.0 ) || ( tparent == 0.0 ) ||
- ( tchild < tparent ) )
- return 0;
-
- return is_parent_child( parent, pinfo.th32ParentProcessID ) & 1;
- }
- }
-
- CloseHandle( process_snapshot_h );
- }
-
- return 0;
-}
-
-typedef struct PROCESS_HANDLE_ID { HANDLE h; DWORD pid; } PROCESS_HANDLE_ID;
-
-
-/*
- * This function is called by the operating system for each topmost window.
- */
-
-BOOL CALLBACK close_alert_window_enum( HWND hwnd, LPARAM lParam )
-{
- char buf[ 7 ] = { 0 };
- PROCESS_HANDLE_ID p = *( (PROCESS_HANDLE_ID *)lParam );
- DWORD pid = 0;
- DWORD tid = 0;
-
- /* We want to find and close any window that:
- * 1. is visible and
- * 2. is a dialog and
- * 3. is displayed by any of our child processes
- */
- if ( !IsWindowVisible( hwnd ) )
- return TRUE;
-
- if ( !GetClassNameA( hwnd, buf, sizeof( buf ) ) )
- return TRUE; /* Failed to read class name; presume it is not a dialog. */
-
- if ( strcmp( buf, "#32770" ) )
- return TRUE; /* Not a dialog */
-
- /* GetWindowThreadProcessId() returns 0 on error, otherwise thread id of
- * window message pump thread.
- */
- tid = GetWindowThreadProcessId( hwnd, &pid );
-
- if ( tid && is_parent_child( p.pid, pid ) )
- {
-        /* Ask really nicely. */
- PostMessageA( hwnd, WM_CLOSE, 0, 0 );
- /* Now wait and see if it worked. If not, insist. */
- if ( WaitForSingleObject( p.h, 200 ) == WAIT_TIMEOUT )
- {
- PostThreadMessageA( tid, WM_QUIT, 0, 0 );
- WaitForSingleObject( p.h, 300 );
- }
-
- /* Done, we do not want to check any other window now. */
- return FALSE;
- }
-
- return TRUE;
-}
-
-
-static void close_alert( HANDLE process )
-{
- DWORD pid = get_process_id( process );
-    /* If the process has already exited or we just cannot get its process id,
-     * do not go any further.
- */
- if ( pid )
- {
- PROCESS_HANDLE_ID p;
- p.h = process;
- p.pid = pid;
- EnumWindows( &close_alert_window_enum, (LPARAM)&p );
- }
-}
-
-
-void exec_done( void )
-{
-}
-
-
-#endif /* USE_EXECNT */
diff --git a/tools/build/v2/engine/execunix.c b/tools/build/v2/engine/execunix.c
deleted file mode 100644
index 147d051cd5..0000000000
--- a/tools/build/v2/engine/execunix.c
+++ /dev/null
@@ -1,581 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- * Copyright 2007 Noel Belcourt.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "execcmd.h"
-#include "output.h"
-#include <errno.h>
-#include <signal.h>
-#include <stdio.h>
-#include <time.h>
-#include <unistd.h> /* needed for vfork(), _exit() prototypes */
-#include <sys/resource.h>
-#include <sys/times.h>
-#include <sys/wait.h>
-
-#if defined(sun) || defined(__sun) || defined(linux)
- #include <wait.h>
-#endif
-
-#ifdef USE_EXECUNIX
-
-#include <sys/times.h>
-
-#if defined(__APPLE__)
- #define NO_VFORK
-#endif
-
-#ifdef NO_VFORK
- #define vfork() fork()
-#endif
-
-
-/*
- * execunix.c - execute a shell script on UNIX/WinNT/OS2/AmigaOS
- *
- * If $(JAMSHELL) is defined, uses that to formulate execvp()/spawnvp().
- * The default is:
- *
- * /bin/sh -c % [ on UNIX/AmigaOS ]
- * cmd.exe /c % [ on OS2/WinNT ]
- *
- * Each word must be an individual element in a jam variable value.
- *
- * In $(JAMSHELL), % expands to the command string and ! expands to the slot
- * number (starting at 1) for multiprocess (-j) invocations. If $(JAMSHELL) does
- * not include a %, it is tacked on as the last argument.
- *
- * Do not just set JAMSHELL to /bin/sh or cmd.exe - it will not work!
- *
- * External routines:
- * exec_cmd() - launch an async command execution.
- * exec_wait() - wait and drive at most one execution completion.
- *
- * Internal routines:
- * onintr() - bump intr to note command interruption.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 05/04/94 (seiwald) - async multiprocess interface
- * 01/22/95 (seiwald) - $(JAMSHELL) support
- * 06/02/97 (gsar) - full async multiprocess support for Win32
- */
-
-static clock_t tps = 0;
-static struct timeval tv;
-static int select_timeout = 0;
-static int intr = 0;
-static int cmdsrunning = 0;
-static struct tms old_time;
-
-#define OUT 0
-#define ERR 1
-
-static struct
-{
- int pid; /* on win32, a real process handle */
- int fd[2]; /* file descriptors for stdout and stderr */
- FILE *stream[2]; /* child's stdout (0) and stderr (1) file stream */
- clock_t start_time; /* start time of child process */
- int exit_reason; /* termination status */
- int action_length; /* length of action string */
- int target_length; /* length of target string */
- char *action; /* buffer to hold action and target invoked */
- char *target; /* buffer to hold action and target invoked */
- char *command; /* buffer to hold command being invoked */
- char *buffer[2]; /* buffer to hold stdout and stderr, if any */
- void (*func)( void *closure, int status, timing_info*, const char *, const char * );
- void *closure;
- time_t start_dt; /* start of command timestamp */
-} cmdtab[ MAXJOBS ] = {{0}};
-
-/*
- * onintr() - bump intr to note command interruption
- */
-
-void onintr( int disp )
-{
- ++intr;
- printf( "...interrupted\n" );
-}
-
-
-/*
- * exec_cmd() - launch an async command execution.
- */
-
-void exec_cmd
-(
- const char * string,
- void (*func)( void *closure, int status, timing_info*, const char *, const char * ),
- void * closure,
- LIST * shell,
- const char * action,
- const char * target
-)
-{
- static int initialized = 0;
- int out[2];
- int err[2];
- int slot;
- int len;
- const char * argv[ MAXARGC + 1 ]; /* +1 for NULL */
-
- /* Find a slot in the running commands table for this one. */
- for ( slot = 0; slot < MAXJOBS; ++slot )
- if ( !cmdtab[ slot ].pid )
- break;
-
- if ( slot == MAXJOBS )
- {
- printf( "no slots for child!\n" );
- exit( EXITBAD );
- }
-
-    /* Formulate argv. If a shell was defined, be prepared for % and ! substitutions.
- * Otherwise, use stock /bin/sh on unix or cmd.exe on NT.
- */
- if ( !list_empty( shell ) )
- {
- int i;
- char jobno[4];
- int gotpercent = 0;
- LISTITER iter = list_begin( shell ), end = list_end( shell );
-
- sprintf( jobno, "%d", slot + 1 );
-
- for ( i = 0; iter != end && i < MAXARGC; ++i, iter = list_next( iter ) )
- {
- switch ( object_str( list_item( iter ) )[0] )
- {
- case '%': argv[ i ] = string; ++gotpercent; break;
- case '!': argv[ i ] = jobno; break;
- default : argv[ i ] = object_str( list_item( iter ) );
- }
- if ( DEBUG_EXECCMD )
- printf( "argv[%d] = '%s'\n", i, argv[ i ] );
- }
-
- if ( !gotpercent )
- argv[ i++ ] = string;
-
- argv[ i ] = 0;
- }
- else
- {
- argv[ 0 ] = "/bin/sh";
- argv[ 1 ] = "-c";
- argv[ 2 ] = string;
- argv[ 3 ] = 0;
- }
-
- /* Increment jobs running. */
- ++cmdsrunning;
-
- /* Save off actual command string. */
- cmdtab[ slot ].command = BJAM_MALLOC_ATOMIC( strlen( string ) + 1 );
- strcpy( cmdtab[ slot ].command, string );
-
- /* Initialize only once. */
- if ( !initialized )
- {
- times( &old_time );
- initialized = 1;
- }
-
- /* Create pipes from child to parent. */
- {
- if ( pipe( out ) < 0 )
- exit( EXITBAD );
-
- if ( pipe( err ) < 0 )
- exit( EXITBAD );
- }
-
- /* Start the command */
-
- cmdtab[ slot ].start_dt = time(0);
-
- if ( 0 < globs.timeout )
- {
- /*
-         * Handle hung processes by manually tracking elapsed time and
-         * signaling the process when the time limit expires.
- */
- struct tms buf;
- cmdtab[ slot ].start_time = times( &buf );
-
-        /* tps is global; only compute it once. */
- if ( tps == 0 ) tps = sysconf( _SC_CLK_TCK );
- }
-
- if ( ( cmdtab[ slot ].pid = vfork() ) == 0 )
- {
- int pid = getpid();
-
- close( out[0] );
- close( err[0] );
-
- dup2( out[1], STDOUT_FILENO );
-
- if ( globs.pipe_action == 0 )
- dup2( out[1], STDERR_FILENO );
- else
- dup2( err[1], STDERR_FILENO );
-
- close( out[1] );
- close( err[1] );
-
- /* Make this process a process group leader so that when we kill it, all
- * child processes of this process are terminated as well. We use
- * killpg(pid, SIGKILL) to kill the process group leader and all its
- * children.
- */
- if ( 0 < globs.timeout )
- {
- struct rlimit r_limit;
- r_limit.rlim_cur = globs.timeout;
- r_limit.rlim_max = globs.timeout;
- setrlimit( RLIMIT_CPU, &r_limit );
- }
- setpgid( pid,pid );
- execvp( argv[0], (char * *)argv );
- perror( "execvp" );
- _exit( 127 );
- }
- else if ( cmdtab[ slot ].pid == -1 )
- {
- perror( "vfork" );
- exit( EXITBAD );
- }
-
- setpgid( cmdtab[ slot ].pid, cmdtab[ slot ].pid );
-
- /* close write end of pipes */
- close( out[1] );
- close( err[1] );
-
- /* set both file descriptors to non-blocking */
- fcntl(out[0], F_SETFL, O_NONBLOCK);
- fcntl(err[0], F_SETFL, O_NONBLOCK);
-
- /* child writes stdout to out[1], parent reads from out[0] */
- cmdtab[ slot ].fd[ OUT ] = out[0];
- cmdtab[ slot ].stream[ OUT ] = fdopen( cmdtab[ slot ].fd[ OUT ], "rb" );
- if ( cmdtab[ slot ].stream[ OUT ] == NULL )
- {
- perror( "fdopen" );
- exit( EXITBAD );
- }
-
- /* child writes stderr to err[1], parent reads from err[0] */
- if (globs.pipe_action == 0)
- {
- close(err[0]);
- }
- else
- {
- cmdtab[ slot ].fd[ ERR ] = err[0];
- cmdtab[ slot ].stream[ ERR ] = fdopen( cmdtab[ slot ].fd[ ERR ], "rb" );
- if ( cmdtab[ slot ].stream[ ERR ] == NULL )
- {
- perror( "fdopen" );
- exit( EXITBAD );
- }
- }
-
- /* Ensure enough room for rule and target name. */
- if ( action && target )
- {
- len = strlen( action ) + 1;
- if ( cmdtab[ slot ].action_length < len )
- {
- BJAM_FREE( cmdtab[ slot ].action );
- cmdtab[ slot ].action = BJAM_MALLOC_ATOMIC( len );
- cmdtab[ slot ].action_length = len;
- }
- strcpy( cmdtab[ slot ].action, action );
- len = strlen( target ) + 1;
- if ( cmdtab[ slot ].target_length < len )
- {
- BJAM_FREE( cmdtab[ slot ].target );
- cmdtab[ slot ].target = BJAM_MALLOC_ATOMIC( len );
- cmdtab[ slot ].target_length = len;
- }
- strcpy( cmdtab[ slot ].target, target );
- }
- else
- {
- BJAM_FREE( cmdtab[ slot ].action );
- BJAM_FREE( cmdtab[ slot ].target );
- cmdtab[ slot ].action = 0;
- cmdtab[ slot ].target = 0;
- cmdtab[ slot ].action_length = 0;
- cmdtab[ slot ].target_length = 0;
- }
-
- /* Save the operation for exec_wait() to find. */
- cmdtab[ slot ].func = func;
- cmdtab[ slot ].closure = closure;
-
- /* Wait until we are under the limit of concurrent commands. Do not trust
- * globs.jobs alone.
- */
- while ( ( cmdsrunning >= MAXJOBS ) || ( cmdsrunning >= globs.jobs ) )
- if ( !exec_wait() )
- break;
-}
-
-
-/* Returns 1 if the file is closed, 0 if the descriptor is still live.
- *
- * i is index into cmdtab
- *
- * s (stream) indexes:
- * - cmdtab[ i ].stream[ s ]
- * - cmdtab[ i ].buffer[ s ]
- * - cmdtab[ i ].fd [ s ]
- */
-
-int read_descriptor( int i, int s )
-{
- int ret;
- int len;
- char buffer[BUFSIZ];
-
- while ( 0 < ( ret = fread( buffer, sizeof(char), BUFSIZ-1, cmdtab[ i ].stream[ s ] ) ) )
- {
- buffer[ret] = 0;
- if ( !cmdtab[ i ].buffer[ s ] )
- {
- /* Never been allocated. */
- cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( ret + 1 );
- memcpy( cmdtab[ i ].buffer[ s ], buffer, ret + 1 );
- }
- else
- {
- /* Previously allocated. */
- char * tmp = cmdtab[ i ].buffer[ s ];
- len = strlen( tmp );
- cmdtab[ i ].buffer[ s ] = (char*)BJAM_MALLOC_ATOMIC( len + ret + 1 );
- memcpy( cmdtab[ i ].buffer[ s ], tmp, len );
- memcpy( cmdtab[ i ].buffer[ s ] + len, buffer, ret + 1 );
- BJAM_FREE( tmp );
- }
- }
-
- return feof(cmdtab[ i ].stream[ s ]);
-}
-
-
-void close_streams( int i, int s )
-{
- /* Close the stream and pipe descriptor. */
- fclose(cmdtab[ i ].stream[ s ]);
- cmdtab[ i ].stream[ s ] = 0;
-
- close(cmdtab[ i ].fd[ s ]);
- cmdtab[ i ].fd[ s ] = 0;
-}
-
-
-void populate_file_descriptors( int * fmax, fd_set * fds)
-{
- int i, fd_max = 0;
- struct tms buf;
- clock_t current = times( &buf );
- select_timeout = globs.timeout;
-
- /* Compute max read file descriptor for use in select. */
- FD_ZERO(fds);
- for ( i = 0; i < globs.jobs; ++i )
- {
- if ( 0 < cmdtab[ i ].fd[ OUT ] )
- {
- fd_max = fd_max < cmdtab[ i ].fd[ OUT ] ? cmdtab[ i ].fd[ OUT ] : fd_max;
- FD_SET(cmdtab[ i ].fd[ OUT ], fds);
- }
- if ( globs.pipe_action != 0 )
- {
- if (0 < cmdtab[ i ].fd[ ERR ])
- {
- fd_max = fd_max < cmdtab[ i ].fd[ ERR ] ? cmdtab[ i ].fd[ ERR ] : fd_max;
- FD_SET(cmdtab[ i ].fd[ ERR ], fds);
- }
- }
-
- if (globs.timeout && cmdtab[ i ].pid) {
- clock_t consumed = (current - cmdtab[ i ].start_time) / tps;
- clock_t process_timesout = globs.timeout - consumed;
- if (0 < process_timesout && process_timesout < select_timeout) {
- select_timeout = process_timesout;
- }
- if ( globs.timeout <= consumed )
- {
- killpg( cmdtab[ i ].pid, SIGKILL );
- cmdtab[ i ].exit_reason = EXIT_TIMEOUT;
- }
- }
- }
- *fmax = fd_max;
-}
-
-
-/*
- * exec_wait() - wait and drive at most one execution completion.
- */
-
-int exec_wait()
-{
- int i;
- int ret;
- int fd_max;
- int pid;
- int status;
- int finished;
- int rstat;
- timing_info time_info;
- fd_set fds;
- struct tms new_time;
-
- /* Handle naive make1() which does not know if commands are running. */
- if ( !cmdsrunning )
- return 0;
-
- /* Process children that signaled. */
- finished = 0;
- while ( !finished && cmdsrunning )
- {
- /* Compute max read file descriptor for use in select(). */
- populate_file_descriptors( &fd_max, &fds );
-
- if ( 0 < globs.timeout )
- {
-            /* Force select() to time out so we can terminate expired processes.
- */
- tv.tv_sec = select_timeout;
- tv.tv_usec = 0;
-
- /* select() will wait until: i/o on a descriptor, a signal, or we
- * time out.
- */
- ret = select( fd_max + 1, &fds, 0, 0, &tv );
- }
- else
- {
- /* select() will wait until i/o on a descriptor or a signal. */
- ret = select( fd_max + 1, &fds, 0, 0, 0 );
- }
-
- if ( 0 < ret )
- {
- for ( i = 0; i < globs.jobs; ++i )
- {
- int out = 0;
- int err = 0;
- if ( FD_ISSET( cmdtab[ i ].fd[ OUT ], &fds ) )
- out = read_descriptor( i, OUT );
-
- if ( ( globs.pipe_action != 0 ) &&
- ( FD_ISSET( cmdtab[ i ].fd[ ERR ], &fds ) ) )
- err = read_descriptor( i, ERR );
-
- /* If feof on either descriptor, then we are done. */
- if ( out || err )
- {
- /* Close the stream and pipe descriptors. */
- close_streams( i, OUT );
- if ( globs.pipe_action != 0 )
- close_streams( i, ERR );
-
- /* Reap the child and release resources. */
- pid = waitpid( cmdtab[ i ].pid, &status, 0 );
-
- if ( pid == cmdtab[ i ].pid )
- {
- finished = 1;
- pid = 0;
- cmdtab[ i ].pid = 0;
-
- /* Set reason for exit if not timed out. */
- if ( WIFEXITED( status ) )
- {
- cmdtab[ i ].exit_reason = 0 == WEXITSTATUS( status )
- ? EXIT_OK
- : EXIT_FAIL;
- }
-
- /* Print out the rule and target name. */
- out_action( cmdtab[ i ].action, cmdtab[ i ].target,
- cmdtab[ i ].command, cmdtab[ i ].buffer[ OUT ],
- cmdtab[ i ].buffer[ ERR ], cmdtab[ i ].exit_reason
- );
-
- times( &new_time );
-
- time_info.system = (double)( new_time.tms_cstime - old_time.tms_cstime ) / CLOCKS_PER_SEC;
- time_info.user = (double)( new_time.tms_cutime - old_time.tms_cutime ) / CLOCKS_PER_SEC;
- time_info.start = cmdtab[ i ].start_dt;
- time_info.end = time( 0 );
-
- old_time = new_time;
-
- /* Drive the completion. */
- --cmdsrunning;
-
- if ( intr )
- rstat = EXEC_CMD_INTR;
- else if ( status != 0 )
- rstat = EXEC_CMD_FAIL;
- else
- rstat = EXEC_CMD_OK;
-
- /* Assume -p0 in effect so only pass buffer[ 0 ]
- * containing merged output.
- */
- (*cmdtab[ i ].func)( cmdtab[ i ].closure, rstat,
- &time_info, cmdtab[ i ].command,
- cmdtab[ i ].buffer[ 0 ] );
-
- BJAM_FREE( cmdtab[ i ].buffer[ OUT ] );
- cmdtab[ i ].buffer[ OUT ] = 0;
-
- BJAM_FREE( cmdtab[ i ].buffer[ ERR ] );
- cmdtab[ i ].buffer[ ERR ] = 0;
-
- BJAM_FREE( cmdtab[ i ].command );
- cmdtab[ i ].command = 0;
-
- cmdtab[ i ].func = 0;
- cmdtab[ i ].closure = 0;
- cmdtab[ i ].start_time = 0;
- }
- else
- {
- printf( "unknown pid %d with errno = %d\n", pid, errno );
- exit( EXITBAD );
- }
- }
- }
- }
- }
-
- return 1;
-}
-
-void exec_done( void )
-{
- int i;
- for( i = 0; i < MAXJOBS; ++i )
- {
- if( ! cmdtab[i].action ) break;
- BJAM_FREE( cmdtab[i].action );
- BJAM_FREE( cmdtab[i].target );
- }
-}
-
-# endif /* USE_EXECUNIX */
diff --git a/tools/build/v2/engine/filent.c b/tools/build/v2/engine/filent.c
deleted file mode 100644
index b448cd03f7..0000000000
--- a/tools/build/v2/engine/filent.c
+++ /dev/null
@@ -1,411 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-
-# include "filesys.h"
-# include "pathsys.h"
-# include "strings.h"
-# include "object.h"
-
-# ifdef OS_NT
-
-# ifdef __BORLANDC__
-# if __BORLANDC__ < 0x550
-# include <dir.h>
-# include <dos.h>
-# endif
-# undef FILENAME /* cpp namespace collision */
-# define _finddata_t ffblk
-# endif
-
-# include <io.h>
-# include <sys/stat.h>
-# include <ctype.h>
-# include <direct.h>
-
-/*
- * filent.c - scan directories and archives on NT
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * file_dirscan() and file_archscan() call back a caller-provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 07/10/95 (taylor) Findfirst() returns the first file on NT.
- * 05/03/96 (seiwald) split apart into pathnt.c
- */
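-
-/* A minimal caller sketch (hypothetical, for illustration only): a scanback
- * callback receives each file name plus an optional timestamp.
- *
- *     static void note_file( void * closure, OBJECT * file, int found, time_t t )
- *     {
- *         if ( found )
- *             printf( "%s modified at %ld\n", object_str( file ), (long)t );
- *     }
- *
- *     file_dirscan( dir, note_file, 0 );
- */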
-
-/*
- * file_dirscan() - scan a directory for files
- */
-
-void file_dirscan( OBJECT * dir, scanback func, void * closure )
-{
- PROFILE_ENTER( FILE_DIRSCAN );
-
- file_info_t * d = 0;
-
- /* First enter directory itself */
-
- d = file_query( dir );
-
- if ( !d || !d->is_dir )
- {
- object_free( dir );
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- if ( !d->files )
- {
- PATHNAME f;
- string filespec[ 1 ];
- string filename[ 1 ];
- long handle;
- int ret;
- struct _finddata_t finfo[ 1 ];
- LIST * files = L0;
- int d_length;
-
- dir = short_path_to_long_path( dir );
-
- d_length = strlen( object_str( dir ) );
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_dir.ptr = object_str( dir );
- f.f_dir.len = d_length;
-
- /* Now enter contents of directory */
-
- /* Prepare file search specification for the findfirst() API. */
- if ( d_length == 0 )
- string_copy( filespec, ".\\*" );
- else
- {
- /*
-             * We cannot simply assume the given folder name never includes a
-             * trailing path separator; otherwise we would not support the
-             * Windows root folder specified without its drive letter, i.e. '\'.
- */
- char trailingChar = object_str( dir )[ d_length - 1 ] ;
- string_copy( filespec, object_str( dir ) );
- if ( ( trailingChar != '\\' ) && ( trailingChar != '/' ) )
- string_append( filespec, "\\" );
- string_append( filespec, "*" );
- }
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", dir );
-
- #if defined(__BORLANDC__) && __BORLANDC__ < 0x550
- if ( ret = findfirst( filespec->value, finfo, FA_NORMAL | FA_DIREC ) )
- {
- string_free( filespec );
- object_free( dir );
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- string_new ( filename );
- while ( !ret )
- {
- file_info_t * ff = 0;
-
- f.f_base.ptr = finfo->ff_name;
- f.f_base.len = strlen( finfo->ff_name );
-
- string_truncate( filename, 0 );
- path_build( &f, filename );
-
- files = list_push_back( files, object_new(filename->value) );
- ff = file_info( filename->value );
- ff->is_file = finfo->ff_attrib & FA_DIREC ? 0 : 1;
- ff->is_dir = finfo->ff_attrib & FA_DIREC ? 1 : 0;
- ff->size = finfo->ff_fsize;
- ff->time = (finfo->ff_ftime << 16) | finfo->ff_ftime;
-
- ret = findnext( finfo );
- }
- # else
- handle = _findfirst( filespec->value, finfo );
-
- if ( ret = ( handle < 0L ) )
- {
- string_free( filespec );
- object_free( dir );
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- string_new( filename );
- while ( !ret )
- {
- OBJECT * filename_obj;
- file_info_t * ff = 0;
-
- f.f_base.ptr = finfo->name;
- f.f_base.len = strlen( finfo->name );
-
- string_truncate( filename, 0 );
- path_build( &f, filename, 0 );
-
- filename_obj = object_new( filename->value );
- path_add_key( filename_obj );
- files = list_push_back( files, filename_obj );
- ff = file_info( filename_obj );
- ff->is_file = finfo->attrib & _A_SUBDIR ? 0 : 1;
- ff->is_dir = finfo->attrib & _A_SUBDIR ? 1 : 0;
- ff->size = finfo->size;
- ff->time = finfo->time_write;
-
- ret = _findnext( handle, finfo );
- }
-
- _findclose( handle );
- # endif
- string_free( filename );
- string_free( filespec );
- object_free( dir );
-
- d->files = files;
- }
-
- /* Special case \ or d:\ : enter it */
- {
- unsigned long len = strlen( object_str( d->name ) );
- if ( len == 1 && object_str( d->name )[0] == '\\' )
- {
- OBJECT * dir = short_path_to_long_path( d->name );
- (*func)( closure, dir, 1 /* stat()'ed */, d->time );
- object_free( dir );
- }
- else if ( len == 3 && object_str( d->name )[1] == ':' )
- {
- char buf[4];
- OBJECT * dir1 = short_path_to_long_path( d->name );
- OBJECT * dir2;
- (*func)( closure, dir1, 1 /* stat()'ed */, d->time );
-            /* We've just entered the 3-letter drive name spelling (with a
-               trailing slash) into the hash table. Now enter the two-letter
-               variant, without the trailing slash, so that if we try to check
-               whether "c:" exists, we hit it.
-
- Jam core has workarounds for that. Given:
- x = c:\whatever\foo ;
- p = $(x:D) ;
- p2 = $(p:D) ;
- There will be no trailing slash in $(p), but there will be one
- in $(p2). But, that seems rather fragile.
- */
- strcpy( buf, object_str( dir1 ) );
- buf[2] = 0;
- dir2 = object_new( buf );
- (*func)( closure, dir2, 1 /* stat()'ed */, d->time );
- object_free( dir2 );
- object_free( dir1 );
- }
- }
-
- /* Now enter contents of directory */
- if ( !list_empty( d->files ) )
- {
- LIST * files = d->files;
- LISTITER iter = list_begin( files ), end = list_end( files );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- file_info_t * ff = file_info( list_item( iter ) );
- (*func)( closure, list_item( iter ), 1 /* stat()'ed */, ff->time );
- }
- }
-
- PROFILE_EXIT( FILE_DIRSCAN );
-}
-
-file_info_t * file_query( OBJECT * filename )
-{
- file_info_t * ff = file_info( filename );
- if ( ! ff->time )
- {
- struct stat statbuf;
-
- if ( stat( *object_str( filename ) ? object_str( filename ) : ".", &statbuf ) < 0 )
- return 0;
-
- ff->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
- ff->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
- ff->size = statbuf.st_size;
- ff->time = statbuf.st_mtime ? statbuf.st_mtime : 1;
- }
- return ff;
-}
-
-/*
- * file_time() - get timestamp of file, if not done by file_dirscan()
- */
-
-int
-file_time(
- OBJECT * filename,
- time_t * time )
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- *time = ff->time;
- return 0;
-}
-
-int file_is_file( OBJECT * filename )
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- return ff->is_file;
-}
-
-int file_mkdir( const char * pathname )
-{
- return _mkdir(pathname);
-}
-
-/*
- * file_archscan() - scan an archive for files
- */
-
-/* Straight from SunOS */
-
-#define ARMAG "!<arch>\n"
-#define SARMAG 8
-
-#define ARFMAG "`\n"
-
-struct ar_hdr {
- char ar_name[16];
- char ar_date[12];
- char ar_uid[6];
- char ar_gid[6];
- char ar_mode[8];
- char ar_size[10];
- char ar_fmag[2];
-};
-
-# define SARFMAG 2
-# define SARHDR sizeof( struct ar_hdr )
-
-void
-file_archscan(
- const char * archive,
- scanback func,
- void * closure )
-{
- struct ar_hdr ar_hdr;
- char *string_table = 0;
- char buf[ MAXJPATH ];
- long offset;
- int fd;
-
- if ( ( fd = open( archive, O_RDONLY | O_BINARY, 0 ) ) < 0 )
- return;
-
- if ( read( fd, buf, SARMAG ) != SARMAG ||
- strncmp( ARMAG, buf, SARMAG ) )
- {
- close( fd );
- return;
- }
-
- offset = SARMAG;
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) &&
- !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) )
- {
- long lar_date;
- long lar_size;
- char * name = 0;
- char * endname;
- char * c;
- OBJECT * member;
-
- sscanf( ar_hdr.ar_date, "%ld", &lar_date );
- sscanf( ar_hdr.ar_size, "%ld", &lar_size );
-
- lar_size = ( lar_size + 1 ) & ~1;
-
- if (ar_hdr.ar_name[0] == '/' && ar_hdr.ar_name[1] == '/' )
- {
- /* this is the "string table" entry of the symbol table,
- ** which holds strings of filenames that are longer than
- ** 15 characters (ie. don't fit into a ar_name
- */
-
- string_table = BJAM_MALLOC_ATOMIC(lar_size+1);
- if (read(fd, string_table, lar_size) != lar_size)
- printf("error reading string table\n");
- string_table[lar_size] = '\0';
- offset += SARHDR + lar_size;
- continue;
- }
- else if (ar_hdr.ar_name[0] == '/' && ar_hdr.ar_name[1] != ' ')
- {
- /* Long filenames are recognized by "/nnnn" where nnnn is
- ** the offset of the string in the string table represented
- ** in ASCII decimals.
- */
-
- name = string_table + atoi( ar_hdr.ar_name + 1 );
- for ( endname = name; *endname && *endname != '\n'; ++endname) {}
- }
- else
- {
- /* normal name */
- name = ar_hdr.ar_name;
- endname = name + sizeof( ar_hdr.ar_name );
- }
-
- /* strip trailing white-space, slashes, and backslashes */
-
- while ( endname-- > name )
- if ( !isspace(*endname) && ( *endname != '\\' ) && ( *endname != '/' ) )
- break;
- *++endname = 0;
-
- /* strip leading directory names, an NT specialty */
-
- if ( c = strrchr( name, '/' ) )
- name = c + 1;
- if ( c = strrchr( name, '\\' ) )
- name = c + 1;
-
- sprintf( buf, "%s(%.*s)", archive, endname - name, name );
- member = object_new( buf );
- (*func)( closure, member, 1 /* time valid */, (time_t)lar_date );
- object_free( member );
-
- offset += SARHDR + lar_size;
- lseek( fd, offset, 0 );
- }
-
- close( fd );
-}
-
-# endif /* NT */
diff --git a/tools/build/v2/engine/filesys.c b/tools/build/v2/engine/filesys.c
deleted file mode 100644
index 8d174cfd8b..0000000000
--- a/tools/build/v2/engine/filesys.c
+++ /dev/null
@@ -1,99 +0,0 @@
-# include "jam.h"
-# include "pathsys.h"
-# include "strings.h"
-# include "object.h"
-# include "filesys.h"
-# include "lists.h"
-
-void file_build1( PATHNAME * f, string * file )
-{
- if ( DEBUG_SEARCH )
- {
- printf("build file: ");
- if ( f->f_root.len )
- printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr );
- if ( f->f_dir.len )
- printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr );
- if ( f->f_base.len )
- printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr );
- printf( "\n" );
- }
-
- /* Start with the grist. If the current grist isn't */
- /* surrounded by <>'s, add them. */
-
- if ( f->f_grist.len )
- {
- if ( f->f_grist.ptr[0] != '<' )
- string_push_back( file, '<' );
- string_append_range(
- file, f->f_grist.ptr, f->f_grist.ptr + f->f_grist.len );
- if ( file->value[file->size - 1] != '>' )
- string_push_back( file, '>' );
- }
-}
-
-static struct hash * filecache_hash = 0;
-static file_info_t filecache_finfo;
-
-file_info_t * file_info( OBJECT * filename )
-{
- file_info_t *finfo = &filecache_finfo;
- int found;
-
- if ( !filecache_hash )
- filecache_hash = hashinit( sizeof( file_info_t ), "file_info" );
-
- filename = path_as_key( filename );
-
- finfo = (file_info_t *)hash_insert( filecache_hash, filename, &found );
- if ( !found )
- {
- /* printf( "file_info: %s\n", filename ); */
- finfo->name = object_copy( filename );
- finfo->is_file = 0;
- finfo->is_dir = 0;
- finfo->size = 0;
- finfo->time = 0;
- finfo->files = L0;
- }
-
- object_free( filename );
-
- return finfo;
-}
-
-static LIST * files_to_remove = L0;
-
-static void remove_files_atexit(void)
-{
- LISTITER iter = list_begin( files_to_remove ), end = list_end( files_to_remove );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- remove( object_str( list_item( iter ) ) );
- }
- list_free( files_to_remove );
- files_to_remove = L0;
-}
-
-static void free_file_info ( void * xfile, void * data )
-{
- file_info_t * file = (file_info_t *)xfile;
- object_free( file->name );
- list_free( file->files );
-}
-
-void file_done()
-{
- remove_files_atexit();
- if ( filecache_hash )
- {
- hashenumerate( filecache_hash, free_file_info, (void *)0 );
- hashdone( filecache_hash );
- }
-}
-
-void file_remove_atexit( OBJECT * path )
-{
- files_to_remove = list_push_back( files_to_remove, object_copy( path ) );
-}
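
file_info() above interns each queried path once and keeps a file_info_t for it in a process-wide hash, so later lookups for the same file are answered from the cache rather than the OS. The same idea, reduced to a self-contained POSIX sketch that uses a plain linked list in place of the engine's hash API (all names here are illustrative):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/stat.h>

    typedef struct cached_info
    {
        char * name;
        int    is_file;
        int    is_dir;
        long   size;
        long   mtime;
        struct cached_info * next;
    } cached_info;

    static cached_info * cache = NULL;

    /* Return cached information about 'path', calling stat() only on a miss. */
    static cached_info * query( const char * path )
    {
        cached_info * ci;
        struct stat st;

        for ( ci = cache; ci; ci = ci->next )
            if ( strcmp( ci->name, path ) == 0 )
                return ci;                     /* cache hit: no syscall */

        if ( stat( path, &st ) < 0 )
            return NULL;

        ci = malloc( sizeof( *ci ) );
        ci->name    = strdup( path );
        ci->is_file = S_ISREG( st.st_mode );
        ci->is_dir  = S_ISDIR( st.st_mode );
        ci->size    = (long)st.st_size;
        ci->mtime   = (long)st.st_mtime;
        ci->next    = cache;
        cache       = ci;
        return ci;
    }

    int main( void )
    {
        cached_info * ci = query( "." );
        if ( ci )
            printf( "is_dir=%d mtime=%ld\n", ci->is_dir, ci->mtime );
        return 0;
    }
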
diff --git a/tools/build/v2/engine/filesys.h b/tools/build/v2/engine/filesys.h
deleted file mode 100644
index d32805126e..0000000000
--- a/tools/build/v2/engine/filesys.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * filesys.h - OS specific file routines
- */
-
-#ifndef FILESYS_DWA20011025_H
-# define FILESYS_DWA20011025_H
-
-# include "pathsys.h"
-#include "hash.h"
-#include "lists.h"
-#include "object.h"
-
-typedef void (*scanback)( void *closure, OBJECT * file, int found, time_t t );
-
-void file_dirscan( OBJECT * dir, scanback func, void * closure );
-void file_archscan( const char * arch, scanback func, void * closure );
-
-int file_time( OBJECT * filename, time_t * time );
-
-void file_build1(PATHNAME *f, string* file) ;
-int file_is_file( OBJECT * filename );
-int file_mkdir( const char * pathname );
-
-typedef struct file_info_t file_info_t ;
-struct file_info_t
-{
- OBJECT * name;
- short is_file;
- short is_dir;
- unsigned long size;
- time_t time;
- LIST * files;
-};
-
-
-/* Creates a pointer to information about file 'filename', creating it as
- * necessary. If created, the structure will be default initialized.
- */
-file_info_t * file_info( OBJECT * filename );
-
-/* Returns information about a file, queries the OS if needed. */
-file_info_t * file_query( OBJECT * filename );
-
-void file_done();
-
-/* Marks a path/file to be removed when jam exits. */
-void file_remove_atexit( OBJECT * path );
-
-#endif
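
The header above documents the callback convention used by file_dirscan() and file_archscan(): each discovered entry is handed to a caller-supplied scanback function together with a flag saying whether the timestamp argument is already valid. A minimal caller written against that convention might look as follows; the OBJECT type is replaced by a plain string and the scanning function is a stand-in, not the engine's:

    #include <stdio.h>
    #include <time.h>

    /* In the engine the file argument is an OBJECT * (an interned string);
     * a plain C string stands in for it here. */
    typedef void ( * scanback )( void * closure, const char * file, int found,
                                 time_t t );

    static void note_file( void * closure, const char * file, int time_valid,
                           time_t t )
    {
        int * count = (int *)closure;
        ++*count;
        if ( time_valid )
            printf( "%s (mtime %ld)\n", file, (long)t );
        else
            printf( "%s (timestamp not yet known)\n", file );
    }

    /* Stand-in for file_dirscan(): reports two fixed entries to the callback. */
    static void fake_dirscan( const char * dir, scanback func, void * closure )
    {
        (void)dir;
        ( *func )( closure, "Jamfile", 1, (time_t)1418288156 );
        ( *func )( closure, "bin",     0, (time_t)0 );
    }

    int main( void )
    {
        int count = 0;
        fake_dirscan( ".", note_file, &count );
        printf( "%d entries reported\n", count );
        return 0;
    }
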
diff --git a/tools/build/v2/engine/fileunix.c b/tools/build/v2/engine/fileunix.c
deleted file mode 100644
index d8b458c9ba..0000000000
--- a/tools/build/v2/engine/fileunix.c
+++ /dev/null
@@ -1,513 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "filesys.h"
-# include "strings.h"
-# include "pathsys.h"
-# include "object.h"
-# include <stdio.h>
-# include <sys/stat.h>
-
-#if defined(sun) || defined(__sun) || defined(linux)
-# include <unistd.h> /* needed for read and close prototype */
-#endif
-
-# ifdef USE_FILEUNIX
-
-#if defined(sun) || defined(__sun)
-# include <unistd.h> /* needed for read and close prototype */
-#endif
-
-# if defined( OS_SEQUENT ) || \
- defined( OS_DGUX ) || \
- defined( OS_SCO ) || \
- defined( OS_ISC )
-# define PORTAR 1
-# endif
-
-# ifdef __EMX__
-# include <sys/types.h>
-# include <sys/stat.h>
-# endif
-
-# if defined( OS_RHAPSODY ) || \
- defined( OS_MACOSX ) || \
- defined( OS_NEXT )
-/* need unistd for rhapsody's proper lseek */
-# include <sys/dir.h>
-# include <unistd.h>
-# define STRUCT_DIRENT struct direct
-# else
-# include <dirent.h>
-# define STRUCT_DIRENT struct dirent
-# endif
-
-# ifdef OS_COHERENT
-# include <arcoff.h>
-# define HAVE_AR
-# endif
-
-# if defined( OS_MVS ) || \
- defined( OS_INTERIX )
-
-#define ARMAG "!<arch>\n"
-#define SARMAG 8
-#define ARFMAG "`\n"
-
-struct ar_hdr /* archive file member header - printable ascii */
-{
- char ar_name[16]; /* file member name - `/' terminated */
- char ar_date[12]; /* file member date - decimal */
- char ar_uid[6]; /* file member user id - decimal */
- char ar_gid[6]; /* file member group id - decimal */
- char ar_mode[8]; /* file member mode - octal */
- char ar_size[10]; /* file member size - decimal */
- char ar_fmag[2]; /* ARFMAG - string to end header */
-};
-
-# define HAVE_AR
-# endif
-
-# if defined( OS_QNX ) || \
- defined( OS_BEOS ) || \
- defined( OS_MPEIX )
-# define NO_AR
-# define HAVE_AR
-# endif
-
-# ifndef HAVE_AR
-
-# ifdef OS_AIX
-/* Define those for AIX to get the definitions for both the small and the
- * big variant of the archive file format. */
-# define __AR_SMALL__
-# define __AR_BIG__
-# endif
-
-# include <ar.h>
-# endif
-
-/*
- * fileunix.c - manipulate file names and scan directories on UNIX/AmigaOS
- *
- * External routines:
- *
- * file_dirscan() - scan a directory for files
- * file_time() - get timestamp of file, if not done by file_dirscan()
- * file_archscan() - scan an archive for files
- *
- * File_dirscan() and file_archscan() call back a caller provided function
- * for each file found. A flag to this callback function lets file_dirscan()
- * and file_archscan() indicate that a timestamp is being provided with the
- * file. If file_dirscan() or file_archscan() do not provide the file's
- * timestamp, interested parties may later call file_time().
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 12/19/94 (mikem) - solaris string table insanity support
- * 02/14/95 (seiwald) - parse and build /xxx properly
- * 05/03/96 (seiwald) - split into pathunix.c
- * 11/21/96 (peterk) - BEOS does not have Unix-style archives
- */
-
-
-/*
- * file_dirscan() - scan a directory for files.
- */
-
-void file_dirscan( OBJECT * dir, scanback func, void * closure )
-{
- PROFILE_ENTER( FILE_DIRSCAN );
-
- file_info_t * d = 0;
-
- d = file_query( dir );
-
- if ( !d || !d->is_dir )
- {
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- if ( list_empty( d->files ) )
- {
- LIST* files = L0;
- PATHNAME f;
- DIR *dd;
- STRUCT_DIRENT *dirent;
- string filename[1];
- const char * dirstr = object_str( dir );
-
- /* First enter directory itself */
-
- memset( (char *)&f, '\0', sizeof( f ) );
-
- f.f_dir.ptr = dirstr;
- f.f_dir.len = strlen( dirstr );
-
- dirstr = *dirstr ? dirstr : ".";
-
- /* Now enter contents of directory. */
-
- if ( !( dd = opendir( dirstr ) ) )
- {
- PROFILE_EXIT( FILE_DIRSCAN );
- return;
- }
-
- if ( DEBUG_BINDSCAN )
- printf( "scan directory %s\n", dirstr );
-
- string_new( filename );
- while ( ( dirent = readdir( dd ) ) )
- {
- OBJECT * filename_obj;
- # ifdef old_sinix
- /* Broken structure definition on sinix. */
- f.f_base.ptr = dirent->d_name - 2;
- # else
- f.f_base.ptr = dirent->d_name;
- # endif
- f.f_base.len = strlen( f.f_base.ptr );
-
- string_truncate( filename, 0 );
- path_build( &f, filename, 0 );
-
- filename_obj = object_new( filename->value );
- files = list_push_back( files, filename_obj );
- file_query( filename_obj );
- }
- string_free( filename );
-
- closedir( dd );
-
- d->files = files;
- }
-
- /* Special case / : enter it */
- {
- if ( strcmp( object_str( d->name ), "/" ) == 0 )
- (*func)( closure, d->name, 1 /* stat()'ed */, d->time );
- }
-
- /* Now enter contents of directory */
- if ( !list_empty( d->files ) )
- {
- LIST * files = d->files;
- LISTITER iter = list_begin( files ), end = list_end( files );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- file_info_t * ff = file_info( list_item( iter ) );
- (*func)( closure, ff->name, 1 /* stat()'ed */, ff->time );
- files = list_next( files );
- }
- }
-
- PROFILE_EXIT( FILE_DIRSCAN );
-}
-
-
-file_info_t * file_query( OBJECT * filename )
-{
- file_info_t * ff = file_info( filename );
- if ( ! ff->time )
- {
- struct stat statbuf;
-
- if ( stat( *object_str( filename ) ? object_str( filename ) : ".", &statbuf ) < 0 )
- return 0;
-
- ff->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
- ff->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
- ff->size = statbuf.st_size;
- ff->time = statbuf.st_mtime ? statbuf.st_mtime : 1;
- }
- return ff;
-}
-
-/*
- * file_time() - get timestamp of file, if not done by file_dirscan()
- */
-
-int
-file_time(
- OBJECT * filename,
- time_t * time )
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- *time = ff->time;
- return 0;
-}
-
-int file_is_file( OBJECT * filename )
-{
- file_info_t * ff = file_query( filename );
- if ( !ff ) return -1;
- return ff->is_file;
-}
-
-int file_mkdir( const char * pathname )
-{
- return mkdir( pathname, 0766 );
-}
-
-/*
- * file_archscan() - scan an archive for files
- */
-
-# ifndef AIAMAG /* God-fearing UNIX */
-
-# define SARFMAG 2
-# define SARHDR sizeof( struct ar_hdr )
-
-void
-file_archscan(
- const char * archive,
- scanback func,
- void * closure )
-{
-# ifndef NO_AR
- struct ar_hdr ar_hdr;
- char buf[ MAXJPATH ];
- long offset;
- char *string_table = 0;
- int fd;
-
- if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 )
- return;
-
- if ( read( fd, buf, SARMAG ) != SARMAG ||
- strncmp( ARMAG, buf, SARMAG ) )
- {
- close( fd );
- return;
- }
-
- offset = SARMAG;
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR )
- && !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG )
-#ifdef ARFZMAG
- /* OSF also has a compressed format */
- && memcmp( ar_hdr.ar_fmag, ARFZMAG, SARFMAG )
-#endif
- ) )
- {
- char lar_name_[257];
- char * lar_name = lar_name_ + 1;
- long lar_date;
- long lar_size;
- long lar_offset;
- char * c;
- char * src;
- char * dest;
- OBJECT * member;
-
- strncpy( lar_name, ar_hdr.ar_name, sizeof(ar_hdr.ar_name) );
-
- sscanf( ar_hdr.ar_date, "%ld", &lar_date );
- sscanf( ar_hdr.ar_size, "%ld", &lar_size );
-
- if (ar_hdr.ar_name[0] == '/')
- {
- if (ar_hdr.ar_name[1] == '/')
- {
- /* this is the "string table" entry of the symbol table,
- ** which holds strings of filenames that are longer than
- ** 15 characters (ie. don't fit into a ar_name
- */
-
- string_table = (char *)BJAM_MALLOC_ATOMIC(lar_size);
- lseek(fd, offset + SARHDR, 0);
- if (read(fd, string_table, lar_size) != lar_size)
- printf("error reading string table\n");
- }
- else if (string_table && ar_hdr.ar_name[1] != ' ')
- {
- /* Long filenames are recognized by "/nnnn" where nnnn is
- ** the offset of the string in the string table represented
- ** in ASCII decimals.
- */
- dest = lar_name;
- lar_offset = atoi(lar_name + 1);
- src = &string_table[lar_offset];
- while (*src != '/')
- *dest++ = *src++;
- *dest = '/';
- }
- }
-
- c = lar_name - 1;
- while ( ( *++c != ' ' ) && ( *c != '/' ) ) ;
- *c = '\0';
-
- if ( DEBUG_BINDSCAN )
- printf( "archive name %s found\n", lar_name );
-
- sprintf( buf, "%s(%s)", archive, lar_name );
-
- member = object_new( buf );
- (*func)( closure, member, 1 /* time valid */, (time_t)lar_date );
- object_free( member );
-
- offset += SARHDR + ( ( lar_size + 1 ) & ~1 );
- lseek( fd, offset, 0 );
- }
-
- if ( string_table )
- BJAM_FREE( string_table );
-
- close( fd );
-
-# endif /* NO_AR */
-
-}
-
-# else /* AIAMAG - RS6000 AIX */
-
-static void file_archscan_small(
- int fd, char const *archive, scanback func, void *closure)
-{
- struct fl_hdr fl_hdr;
-
- struct {
- struct ar_hdr hdr;
- char pad[ 256 ];
- } ar_hdr ;
-
- char buf[ MAXJPATH ];
- long offset;
-
- if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ)
- return;
-
- sscanf( fl_hdr.fl_fstmoff, "%ld", &offset );
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( offset > 0 )
- && ( lseek( fd, offset, 0 ) >= 0 )
- && ( read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= (int)sizeof( ar_hdr.hdr ) ) )
- {
- long lar_date;
- int lar_namlen;
- OBJECT * member;
-
- sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
- sscanf( ar_hdr.hdr.ar_date , "%ld", &lar_date );
- sscanf( ar_hdr.hdr.ar_nxtmem, "%ld", &offset );
-
- if ( !lar_namlen )
- continue;
-
- ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
-
- sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
-
- member = object_new( buf );
- (*func)( closure, member, 1 /* time valid */, (time_t)lar_date );
- object_free( member );
- }
-}
-
-/* Check for OS version which supports the big variant. */
-#ifdef AR_HSZ_BIG
-
-static void file_archscan_big(
- int fd, char const *archive, scanback func, void *closure)
-{
- struct fl_hdr_big fl_hdr;
-
- struct {
- struct ar_hdr_big hdr;
- char pad[ 256 ];
- } ar_hdr ;
-
- char buf[ MAXJPATH ];
- long long offset;
-
- if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG) != FL_HSZ_BIG)
- return;
-
- sscanf( fl_hdr.fl_fstmoff, "%lld", &offset );
-
- if ( DEBUG_BINDSCAN )
- printf( "scan archive %s\n", archive );
-
- while ( ( offset > 0 )
- && ( lseek( fd, offset, 0 ) >= 0 )
- && ( read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) )
- {
- long lar_date;
- int lar_namlen;
- OBJECT * member;
-
- sscanf( ar_hdr.hdr.ar_namlen, "%d" , &lar_namlen );
- sscanf( ar_hdr.hdr.ar_date , "%ld" , &lar_date );
- sscanf( ar_hdr.hdr.ar_nxtmem, "%lld", &offset );
-
- if ( !lar_namlen )
- continue;
-
- ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0';
-
- sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name );
-
- member = object_new( buf );
- (*func)( closure, member, 1 /* time valid */, (time_t)lar_date );
- object_free( member );
- }
-
-}
-
-#endif /* AR_HSZ_BIG */
-
-void file_archscan( const char * archive, scanback func, void *closure)
-{
- int fd;
- char fl_magic[SAIAMAG];
-
- if (( fd = open( archive, O_RDONLY, 0)) < 0)
- return;
-
- if (read( fd, fl_magic, SAIAMAG) != SAIAMAG
- || lseek(fd, 0, SEEK_SET) == -1)
- {
- close(fd);
- return;
- }
-
- if ( strncmp( AIAMAG, fl_magic, SAIAMAG ) == 0 )
- {
- /* read small variant */
- file_archscan_small( fd, archive, func, closure );
- }
-#ifdef AR_HSZ_BIG
- else if ( strncmp( AIAMAGBIG, fl_magic, SAIAMAG ) == 0 )
- {
- /* read big variant */
- file_archscan_big( fd, archive, func, closure );
- }
-#endif
-
- close( fd );
-}
-
-# endif /* AIAMAG - RS6000 AIX */
-
-# endif /* USE_FILEUNIX */
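
In the UNIX scan above, every numeric field of the common ar header is stored as fixed-width ASCII decimal with no terminator, and each member found is registered under the composite target name "archive(member)". A small sketch of those two steps, independent of the engine's OBJECT machinery (the field contents and names are made up):

    #include <stdio.h>
    #include <string.h>

    /* A fixed-width decimal field from an ar header is not NUL-terminated,
     * so copy it into a bounded buffer before converting. */
    static long field_to_long( const char * field, size_t width )
    {
        char buf[ 32 ];
        long value = 0;
        size_t n = width < sizeof( buf ) - 1 ? width : sizeof( buf ) - 1;
        memcpy( buf, field, n );
        buf[ n ] = '\0';
        sscanf( buf, "%ld", &value );
        return value;
    }

    int main( void )
    {
        /* Exactly 12 bytes, space padded, no terminating NUL - as on disk. */
        const char ar_date[ 12 ] = "1418288156  ";
        char target[ 256 ];

        long date = field_to_long( ar_date, sizeof( ar_date ) );
        snprintf( target, sizeof( target ), "%s(%s)", "libfoo.a", "bar.o" );
        printf( "member %s, date %ld\n", target, date );
        return 0;
    }
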
diff --git a/tools/build/v2/engine/frames.c b/tools/build/v2/engine/frames.c
deleted file mode 100644
index 29f7f03cde..0000000000
--- a/tools/build/v2/engine/frames.c
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "frames.h"
-# include "lists.h"
-
-void frame_init( FRAME* frame )
-{
- frame->prev = 0;
- lol_init(frame->args);
- frame->module = root_module();
- frame->rulename = "module scope";
- frame->file = 0;
- frame->line = -1;
-}
-
-void frame_free( FRAME* frame )
-{
- lol_free( frame->args );
-}
diff --git a/tools/build/v2/engine/frames.h b/tools/build/v2/engine/frames.h
deleted file mode 100644
index 1e2040d14f..0000000000
--- a/tools/build/v2/engine/frames.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-#ifndef FRAMES_DWA20011021_H
-#define FRAMES_DWA20011021_H
-
-#include "lists.h"
-#include "object.h"
-#include "modules.h"
-
-typedef struct _PARSE PARSE;
-typedef struct frame FRAME;
-
-struct frame
-{
- FRAME * prev;
- /* The nearest enclosing frame for which module->user_module is true. */
- FRAME * prev_user;
- LOL args[ 1 ];
- module_t * module;
- OBJECT * file;
- int line;
- const char * rulename;
-};
-
-
-/* When call into Python is in progress, this variable points to the bjam frame
- * that was current at the moment of call. When the call completes, the variable
- * is not defined. Further, if Jam calls Python which calls Jam and so on, this
- * variable only keeps the most recent Jam frame.
- */
-extern struct frame * frame_before_python_call;
-
-void frame_init( FRAME * ); /* implemented in compile.c */
-void frame_free( FRAME * ); /* implemented in compile.c */
-
-#endif
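
The FRAME structure above chains each rule invocation to its caller through prev (and to the nearest user-module caller through prev_user), which is what the engine walks when it prints a backtrace. A stripped-down illustration of walking such a chain; the struct here is a simplification, not the engine's full FRAME:

    #include <stdio.h>

    /* Simplified stand-in for FRAME: only the chaining field, the rule name
     * and the line number, enough to show how a backtrace walks the stack. */
    typedef struct frame
    {
        struct frame * prev;
        const char *   rulename;
        int            line;
    } frame;

    static void print_backtrace( const frame * f )
    {
        for ( ; f; f = f->prev )
            printf( "  in %s (line %d)\n", f->rulename, f->line );
    }

    int main( void )
    {
        frame bottom = { NULL,    "module scope", -1 };
        frame middle = { &bottom, "exe",          12 };
        frame top    = { &middle, "objects",      57 };
        print_backtrace( &top );
        return 0;
    }
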
diff --git a/tools/build/v2/engine/function.c b/tools/build/v2/engine/function.c
deleted file mode 100644
index 1688eba4e8..0000000000
--- a/tools/build/v2/engine/function.c
+++ /dev/null
@@ -1,4553 +0,0 @@
-/*
- * Copyright 2011 Steven Watanabe
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "lists.h"
-#include "pathsys.h"
-#include "mem.h"
-#include "constants.h"
-#include "jam.h"
-#include "frames.h"
-#include "function.h"
-#include "rules.h"
-#include "variable.h"
-#include "compile.h"
-#include "search.h"
-#include "class.h"
-#include "pathsys.h"
-#include "filesys.h"
-#include <string.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <assert.h>
-
-# ifdef OS_CYGWIN
-# include <sys/cygwin.h>
-# include <windows.h>
-# endif
-
-int glob( const char * s, const char * c );
-void backtrace( FRAME * frame );
-void backtrace_line( FRAME * frame );
-
-#define INSTR_PUSH_EMPTY 0
-#define INSTR_PUSH_CONSTANT 1
-#define INSTR_PUSH_ARG 2
-#define INSTR_PUSH_VAR 3
-#define INSTR_PUSH_VAR_FIXED 57
-#define INSTR_PUSH_GROUP 4
-#define INSTR_PUSH_RESULT 5
-#define INSTR_PUSH_APPEND 6
-#define INSTR_SWAP 7
-
-#define INSTR_JUMP_EMPTY 8
-#define INSTR_JUMP_NOT_EMPTY 9
-
-#define INSTR_JUMP 10
-#define INSTR_JUMP_LT 11
-#define INSTR_JUMP_LE 12
-#define INSTR_JUMP_GT 13
-#define INSTR_JUMP_GE 14
-#define INSTR_JUMP_EQ 15
-#define INSTR_JUMP_NE 16
-#define INSTR_JUMP_IN 17
-#define INSTR_JUMP_NOT_IN 18
-
-#define INSTR_JUMP_NOT_GLOB 19
-
-#define INSTR_FOR_INIT 56
-#define INSTR_FOR_LOOP 20
-
-#define INSTR_SET_RESULT 21
-#define INSTR_RETURN 22
-#define INSTR_POP 23
-
-#define INSTR_PUSH_LOCAL 24
-#define INSTR_POP_LOCAL 25
-#define INSTR_SET 26
-#define INSTR_APPEND 27
-#define INSTR_DEFAULT 28
-
-#define INSTR_PUSH_LOCAL_FIXED 58
-#define INSTR_POP_LOCAL_FIXED 59
-#define INSTR_SET_FIXED 60
-#define INSTR_APPEND_FIXED 61
-#define INSTR_DEFAULT_FIXED 62
-
-#define INSTR_PUSH_LOCAL_GROUP 29
-#define INSTR_POP_LOCAL_GROUP 30
-#define INSTR_SET_GROUP 31
-#define INSTR_APPEND_GROUP 32
-#define INSTR_DEFAULT_GROUP 33
-
-#define INSTR_PUSH_ON 34
-#define INSTR_POP_ON 35
-#define INSTR_SET_ON 36
-#define INSTR_APPEND_ON 37
-#define INSTR_DEFAULT_ON 38
-
-#define INSTR_CALL_RULE 39
-
-#define INSTR_APPLY_MODIFIERS 40
-#define INSTR_APPLY_INDEX 41
-#define INSTR_APPLY_INDEX_MODIFIERS 42
-#define INSTR_APPLY_MODIFIERS_GROUP 43
-#define INSTR_APPLY_INDEX_GROUP 44
-#define INSTR_APPLY_INDEX_MODIFIERS_GROUP 45
-#define INSTR_COMBINE_STRINGS 46
-
-#define INSTR_INCLUDE 47
-#define INSTR_RULE 48
-#define INSTR_ACTIONS 49
-#define INSTR_PUSH_MODULE 50
-#define INSTR_POP_MODULE 51
-#define INSTR_CLASS 52
-#define INSTR_BIND_MODULE_VARIABLES 63
-
-#define INSTR_APPEND_STRINGS 53
-#define INSTR_WRITE_FILE 54
-#define INSTR_OUTPUT_STRINGS 55
-
-typedef struct instruction
-{
- unsigned int op_code;
- int arg;
-} instruction;
-
-typedef struct _subfunction
-{
- OBJECT * name;
- FUNCTION * code;
- int local;
-} SUBFUNCTION;
-
-typedef struct _subaction
-{
- OBJECT * name;
- FUNCTION * command;
- int flags;
-} SUBACTION;
-
-#define FUNCTION_BUILTIN 0
-#define FUNCTION_JAM 1
-
-struct argument {
- int flags;
-#define ARG_ONE 0
-#define ARG_OPTIONAL 1
-#define ARG_PLUS 2
-#define ARG_STAR 3
-#define ARG_VARIADIC 4
- OBJECT * type_name;
- OBJECT * arg_name;
- int index;
-};
-
-struct arg_list {
- int size;
- struct argument * args;
-};
-
-struct _function
-{
- int type;
- int reference_count;
- OBJECT * rulename;
- struct arg_list * formal_arguments;
- int num_formal_arguments;
-};
-
-typedef struct _builtin_function
-{
- FUNCTION base;
- LIST * ( * func )( FRAME *, int flags );
- int flags;
-} BUILTIN_FUNCTION;
-
-typedef struct _jam_function
-{
- FUNCTION base;
- int code_size;
- instruction * code;
- int num_constants;
- OBJECT * * constants;
- int num_subfunctions;
- SUBFUNCTION * functions;
- int num_subactions;
- SUBACTION * actions;
- FUNCTION * generic;
- OBJECT * file;
- int line;
-} JAM_FUNCTION;
-
-
-#ifdef HAVE_PYTHON
-
-#define FUNCTION_PYTHON 2
-
-typedef struct _python_function
-{
- FUNCTION base;
- PyObject * python_function;
-} PYTHON_FUNCTION;
-
-static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame );
-
-#endif
-
-
-struct _stack
-{
- void * data;
-};
-
-static void * stack;
-
-STACK * stack_global()
-{
- static STACK result;
- if ( !stack )
- {
- int size = 1 << 21;
- stack = BJAM_MALLOC( size );
- result.data = (char *)stack + size;
- }
- return &result;
-}
-
-static void check_alignment( STACK * s )
-{
- assert( (unsigned long)s->data % sizeof( LIST * ) == 0 );
-}
-
-void * stack_allocate( STACK * s, int size )
-{
- check_alignment( s );
- s->data = (char *)s->data - size;
- check_alignment( s );
- return s->data;
-}
-
-void stack_deallocate( STACK * s, int size )
-{
- check_alignment( s );
- s->data = (char *)s->data + size;
- check_alignment( s );
-}
-
-void stack_push( STACK * s, LIST * l )
-{
- *(LIST * *)stack_allocate( s, sizeof( LIST * ) ) = l;
-}
-
-LIST * stack_pop( STACK * s )
-{
- LIST * result = *(LIST * *)s->data;
- stack_deallocate( s, sizeof( LIST * ) );
- return result;
-}
-
-LIST * stack_top(STACK * s)
-{
- check_alignment( s );
- return *(LIST * *)s->data;
-}
-
-LIST * stack_at( STACK * s, int n )
-{
- check_alignment( s );
- return *((LIST * *)s->data + n);
-}
-
-void stack_set( STACK * s, int n, LIST * value )
-{
- check_alignment( s );
- *((LIST * *)s->data + n) = value;
-}
-
-void * stack_get( STACK * s )
-{
- check_alignment( s );
- return (LIST * *)s->data;
-}
-
-LIST * frame_get_local( FRAME * frame, int idx )
-{
- /* The only local variables are the arguments */
- return list_copy( lol_get( frame->args, idx ) );
-}
-
-static OBJECT * function_get_constant( JAM_FUNCTION * function, int idx )
-{
- return function->constants[ idx ];
-}
-
-static LIST * function_get_variable( JAM_FUNCTION * function, FRAME * frame, int idx )
-{
- return list_copy( var_get( frame->module, function->constants[idx] ) );
-}
-
-static void function_set_variable( JAM_FUNCTION * function, FRAME * frame, int idx, LIST * value )
-{
- var_set( frame->module, function->constants[idx], value, VAR_SET );
-}
-
-static LIST * function_swap_variable( JAM_FUNCTION * function, FRAME * frame, int idx, LIST * value )
-{
- return var_swap( frame->module, function->constants[idx], value );
-}
-
-static void function_append_variable( JAM_FUNCTION * function, FRAME * frame, int idx, LIST * value )
-{
- var_set( frame->module, function->constants[idx], value, VAR_APPEND );
-}
-
-static void function_default_variable( JAM_FUNCTION * function, FRAME * frame, int idx, LIST * value )
-{
- var_set( frame->module, function->constants[idx], value, VAR_DEFAULT );
-}
-
-static void function_set_rule( JAM_FUNCTION * function, FRAME * frame, STACK * s, int idx )
-{
- SUBFUNCTION * sub = function->functions + idx;
- new_rule_body( frame->module, sub->name, sub->code, !sub->local );
-}
-
-static void function_set_actions( JAM_FUNCTION * function, FRAME * frame, STACK * s, int idx )
-{
- SUBACTION * sub = function->actions + idx;
- LIST * bindlist = stack_pop( s );
-
- new_rule_actions( frame->module, sub->name, sub->command, bindlist, sub->flags );
-}
-
-/*
- * returns the index if name is "<", ">", "1", "2", ... or "19"
- * otherwise returns -1.
- */
-
-static int get_argument_index( const char * s )
-{
- if( s[ 0 ] != '\0')
- {
- if( s[ 1 ] == '\0' )
- {
- switch ( s[ 0 ] )
- {
- case '<': return 0;
- case '>': return 1;
-
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- return s[ 0 ] - '1';
- }
- }
- else if ( s[ 0 ] == '1' && s[ 2 ] == '\0' )
- {
- switch( s[ 1 ] )
- {
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- return s[ 1 ] - '0' + 10 - 1;
- }
- }
- }
- return -1;
-}
-
-static LIST * function_get_named_variable( JAM_FUNCTION * function, FRAME * frame, OBJECT * name )
-{
- int idx = get_argument_index( object_str( name ) );
- if( idx != -1 )
- {
- return list_copy( lol_get( frame->args, idx ) );
- }
- else
- {
- return list_copy( var_get( frame->module, name ) );
- }
-}
-
-static void function_set_named_variable( JAM_FUNCTION * function, FRAME * frame, OBJECT * name, LIST * value)
-{
- var_set( frame->module, name, value, VAR_SET );
-}
-
-static LIST * function_swap_named_variable( JAM_FUNCTION * function, FRAME * frame, OBJECT * name, LIST * value )
-{
- return var_swap( frame->module, name, value );
-}
-
-static void function_append_named_variable( JAM_FUNCTION * function, FRAME * frame, OBJECT * name, LIST * value)
-{
- var_set( frame->module, name, value, VAR_APPEND );
-}
-
-static void function_default_named_variable( JAM_FUNCTION * function, FRAME * frame, OBJECT * name, LIST * value )
-{
- var_set( frame->module, name, value, VAR_DEFAULT );
-}
-
-static LIST * function_call_rule( JAM_FUNCTION * function, FRAME * frame, STACK * s, int n_args, const char * unexpanded, OBJECT * file, int line )
-{
- FRAME inner[ 1 ];
- int i;
- LIST * first = stack_pop( s );
- LIST * result = L0;
- OBJECT * rulename;
- LIST * trailing;
-
- frame->file = file;
- frame->line = line;
-
- if ( list_empty( first ) )
- {
- backtrace_line( frame );
- printf( "warning: rulename %s expands to empty string\n", unexpanded );
- backtrace( frame );
-
- list_free( first );
-
- for( i = 0; i < n_args; ++i )
- {
- list_free( stack_pop( s ) );
- }
-
- return result;
- }
-
- rulename = object_copy( list_front( first ) );
-
- frame_init( inner );
-
- inner->prev = frame;
- inner->prev_user = frame->module->user_module ? frame : frame->prev_user;
- inner->module = frame->module; /* This gets fixed up in evaluate_rule(), below. */
-
- for( i = 0; i < n_args; ++i )
- {
- lol_add( inner->args, stack_at( s, n_args - i - 1 ) );
- }
-
- for( i = 0; i < n_args; ++i )
- {
- stack_pop( s );
- }
-
- trailing = list_pop_front( first );
- if ( trailing )
- {
- if ( inner->args->count == 0 )
- {
- lol_add( inner->args, trailing );
- }
- else
- {
- LIST * * l = &inner->args->list[0];
- *l = list_append( trailing, *l );
- }
- }
-
- result = evaluate_rule( rulename, inner );
- frame_free( inner );
- object_free( rulename );
- return result;
-}
-
-/* Variable expansion */
-
-typedef struct
-{
- int sub1;
- int sub2;
-} subscript_t;
-
-typedef struct
-{
- PATHNAME f; /* :GDBSMR -- pieces */
- char parent; /* :P -- go to parent directory */
- char filemods; /* one of the above applied */
- char downshift; /* :L -- downshift result */
- char upshift; /* :U -- upshift result */
- char to_slashes; /* :T -- convert "\" to "/" */
- char to_windows; /* :W -- convert cygwin to native paths */
- PATHPART empty; /* :E -- default for empties */
- PATHPART join; /* :J -- join list with char */
-} VAR_EDITS;
-
-static LIST * apply_modifiers_impl( LIST * result, string * buf, VAR_EDITS * edits, int n, LISTITER iter, LISTITER end );
-static void get_iters( subscript_t subscript, LISTITER * first, LISTITER * last, int length );
-static void var_edit_file( const char * in, string * out, VAR_EDITS * edits );
-static void var_edit_shift( string * out, size_t pos, VAR_EDITS * edits );
-static int var_edit_parse( const char * mods, VAR_EDITS * edits, int havezeroed );
-
-
-/*
- * var_edit_parse() - parse : modifiers into PATHNAME structure
- *
- * The : modifiers in a $(varname:modifier) currently support replacing or
- * omitting elements of a filename, and so they are parsed into a PATHNAME
- * structure (which contains pointers into the original string).
- *
- * Modifiers of the form "X=value" replace the component X with the given value.
- * Modifiers without the "=value" cause everything but the component X to be
- * omitted. X is one of:
- *
- * G <grist>
- * D directory name
- * B base name
- * S .suffix
- * M (member)
- * R root directory - prepended to whole path
- *
- * This routine sets:
- *
- * f->f_xxx.ptr = 0
- * f->f_xxx.len = 0
- * -> leave the original component xxx
- *
- * f->f_xxx.ptr = string
- * f->f_xxx.len = strlen( string )
- * -> replace component xxx with string
- *
- * f->f_xxx.ptr = ""
- * f->f_xxx.len = 0
- * -> omit component xxx
- *
- * var_edit_file() below and path_build() obligingly follow this convention.
- */
-
-static int var_edit_parse( const char * mods, VAR_EDITS * edits, int havezeroed )
-{
- while ( *mods )
- {
- PATHPART * fp;
-
- switch ( *mods++ )
- {
- case 'L': edits->downshift = 1; continue;
- case 'U': edits->upshift = 1; continue;
- case 'P': edits->parent = edits->filemods = 1; continue;
- case 'E': fp = &edits->empty; goto strval;
- case 'J': fp = &edits->join; goto strval;
- case 'G': fp = &edits->f.f_grist; goto fileval;
- case 'R': fp = &edits->f.f_root; goto fileval;
- case 'D': fp = &edits->f.f_dir; goto fileval;
- case 'B': fp = &edits->f.f_base; goto fileval;
- case 'S': fp = &edits->f.f_suffix; goto fileval;
- case 'M': fp = &edits->f.f_member; goto fileval;
- case 'T': edits->to_slashes = 1; continue;
- case 'W': edits->to_windows = 1; continue;
- default:
- continue; /* Should complain, but so what... */
- }
-
- fileval:
- /* Handle :CHARS, where each char (without a following =) selects a
- * particular file path element. On the first such char, we deselect all
- * others (by setting ptr = "", len = 0) and for each char we select
- * that element (by setting ptr = 0).
- */
- edits->filemods = 1;
-
- if ( *mods != '=' )
- {
- if ( !havezeroed++ )
- {
- int i;
- for ( i = 0; i < 6; ++i )
- {
- edits->f.part[ i ].len = 0;
- edits->f.part[ i ].ptr = "";
- }
- }
-
- fp->ptr = 0;
- continue;
- }
-
- strval:
- /* Handle :X=value, or :X */
- if ( *mods != '=' )
- {
- fp->ptr = "";
- fp->len = 0;
- }
- else
- {
- fp->ptr = ++mods;
- fp->len = strlen( mods );
- mods += fp->len;
- }
- }
-
- return havezeroed;
-}
-
-/*
- * var_edit_file() - copy input target name to output, modifying filename.
- */
-
-static void var_edit_file( const char * in, string * out, VAR_EDITS * edits )
-{
- if ( edits->filemods )
- {
- PATHNAME pathname;
-
- /* Parse apart original filename, putting parts into "pathname". */
- path_parse( in, &pathname );
-
- /* Replace any pathname with edits->f */
- if ( edits->f.f_grist .ptr ) pathname.f_grist = edits->f.f_grist;
- if ( edits->f.f_root .ptr ) pathname.f_root = edits->f.f_root;
- if ( edits->f.f_dir .ptr ) pathname.f_dir = edits->f.f_dir;
- if ( edits->f.f_base .ptr ) pathname.f_base = edits->f.f_base;
- if ( edits->f.f_suffix.ptr ) pathname.f_suffix = edits->f.f_suffix;
- if ( edits->f.f_member.ptr ) pathname.f_member = edits->f.f_member;
-
- /* If requested, modify pathname to point to parent. */
- if ( edits->parent )
- path_parent( &pathname );
-
- /* Put filename back together. */
- path_build( &pathname, out, 0 );
- }
- else
- {
- string_append( out, in );
- }
-}
-
-/*
- * var_edit_shift() - do upshift/downshift mods.
- */
-
-static void var_edit_shift( string * out, size_t pos, VAR_EDITS * edits )
-{
- if ( edits->upshift || edits->downshift || edits->to_windows || edits->to_slashes )
- {
- /* Handle upshifting, downshifting and slash translation now. */
- char * p;
-# ifdef OS_CYGWIN
- if ( edits->to_windows )
- {
- /* FIXME: skip grist */
- char result[ MAX_PATH + 1 ];
- cygwin_conv_to_win32_path( out->value + pos, result );
- assert( strlen( result ) <= MAX_PATH );
- string_truncate( out, pos );
- string_append( out, result );
- edits->to_slashes = 0;
- }
-# endif
- for ( p = out->value + pos; *p; ++p)
- {
- if ( edits->upshift )
- *p = toupper( *p );
- else if ( edits->downshift )
- *p = tolower( *p );
- if ( edits->to_slashes && ( *p == '\\' ) )
- *p = '/';
- }
- }
-}
-
-/*
- * Reads n LISTs from the top of the STACK and
- * combines them to form VAR_EDITS.
- *
- * returns the number of VAR_EDITS pushed onto
- * the STACK.
- */
-
-static int expand_modifiers( STACK * s, int n )
-{
- int i;
- int total = 1;
- LIST * * args = stack_get( s );
- for( i = 0; i < n; ++i)
- total *= list_length( args[i] );
-
- if ( total != 0 )
- {
- VAR_EDITS * out = stack_allocate( s, total * sizeof(VAR_EDITS) );
- LISTITER * iter = stack_allocate( s, n * sizeof(LIST *) );
- for (i = 0; i < n; ++i )
- {
- iter[i] = list_begin( args[i] );
- }
- i = 0;
- {
- int havezeroed;
- loop:
- memset( out, 0, sizeof( *out ) );
- havezeroed = 0;
- for (i = 0; i < n; ++i )
- {
- havezeroed = var_edit_parse( object_str( list_item( iter[i] ) ), out, havezeroed );
- }
- ++out;
- while ( --i >= 0 )
- {
- if ( list_next( iter[i] ) != list_end( args[i] ) )
- {
- iter[i] = list_next( iter[i] );
- goto loop;
- }
- else
- {
- iter[i] = list_begin( args[i] );
- }
- }
- }
- stack_deallocate( s, n * sizeof( LIST * ) );
- }
- return total;
-}
-
-static LIST * apply_modifiers( STACK * s, int n )
-{
- LIST * value = stack_top( s );
- LIST * result = L0;
- VAR_EDITS * edits = (VAR_EDITS *)( (LIST * *)stack_get( s ) + 1 );
- string buf[1];
- string_new( buf );
- result = apply_modifiers_impl( result, buf, edits, n, list_begin( value ), list_end( value ) );
- string_free( buf );
- return result;
-}
-
-/*
- * Parse a string of the form "1-2", "-2--1", "2-"
- * and return the two subscripts.
- */
-
-subscript_t parse_subscript( const char * s )
-{
- subscript_t result;
- result.sub1 = 0;
- result.sub2 = 0;
- do /* so we can use "break" */
- {
- /* Allow negative subscripts. */
- if ( !isdigit( *s ) && ( *s != '-' ) )
- {
- result.sub2 = 0;
- break;
- }
- result.sub1 = atoi( s );
-
- /* Skip over the first symbol, which is either a digit or dash. */
- ++s;
- while ( isdigit( *s ) ) ++s;
-
- if ( *s == '\0' )
- {
- result.sub2 = result.sub1;
- break;
- }
-
- if ( *s != '-' )
- {
- result.sub2 = 0;
- break;
- }
-
- ++s;
-
- if ( *s == '\0' )
- {
- result.sub2 = -1;
- break;
- }
-
- if ( !isdigit( *s ) && ( *s != '-' ) )
- {
- result.sub2 = 0;
- break;
- }
-
- /* First, compute the index of the last element. */
- result.sub2 = atoi( s );
- while ( isdigit( *++s ) );
-
- if ( *s != '\0' )
- result.sub2 = 0;
-
- } while ( 0 );
- return result;
-}
-
-static LIST * apply_subscript( STACK * s )
-{
- LIST * value = stack_top( s );
- LIST * indices = stack_at( s, 1 );
- LIST * result = L0;
- int length = list_length( value );
- string buf[1];
- LISTITER indices_iter = list_begin( indices ), indices_end = list_end( indices );
- string_new( buf );
- for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter ) )
- {
- LISTITER iter = list_begin( value );
- LISTITER end = list_end( value );
- subscript_t subscript = parse_subscript( object_str( list_item( indices_iter ) ) );
- get_iters( subscript, &iter, &end, length );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- result = list_push_back( result, object_copy( list_item( iter ) ) );
- }
- }
- string_free( buf );
- return result;
-}
-
-/*
- * Reads the LIST from first and applies subscript to it.
- * The results are written to *first and *last.
- */
-
-static void get_iters( subscript_t subscript, LISTITER * first, LISTITER * last, int length )
-{
- int start;
- int size;
- LISTITER iter;
- LISTITER end;
- {
-
- if ( subscript.sub1 < 0 )
- start = length + subscript.sub1;
- else if( subscript.sub1 > length )
- start = length;
- else
- start = subscript.sub1 - 1;
-
- if ( subscript.sub2 < 0 )
- size = length + 1 + subscript.sub2 - start;
- else
- size = subscript.sub2 - start;
-
- /*
- * HACK: When the first subscript is before the start of the
- * list, it magically becomes the beginning of the list.
- * This is inconsistent, but needed for backwards
- * compatibility.
- */
- if ( start < 0 )
- start = 0;
-
- /* The "sub2 < 0" test handles the semantic error of sub2 <
- * sub1.
- */
- if ( size < 0 )
- size = 0;
-
- if ( start + size > length )
- size = length - start;
- }
-
- iter = *first;
- while ( start-- > 0 )
- iter = list_next( iter );
-
- end = iter;
- while ( size-- > 0 )
- end = list_next( end );
-
- *first = iter;
- *last = end;
-}
-
-static LIST * apply_modifiers_empty( LIST * result, string * buf, VAR_EDITS * edits, int n)
-{
- int i;
- for ( i = 0; i < n; ++i )
- {
- if ( edits[i].empty.ptr )
- {
- /** FIXME: is empty.ptr always null-terminated? */
- var_edit_file( edits[i].empty.ptr, buf, edits + i );
- var_edit_shift( buf, 0, edits + i );
- result = list_push_back( result, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
- }
- return result;
-}
-
-static LIST * apply_modifiers_non_empty( LIST * result, string * buf, VAR_EDITS * edits, int n, LISTITER begin, LISTITER end )
-{
- int i;
- LISTITER iter;
- for ( i = 0; i < n; ++i )
- {
- if ( edits[i].join.ptr )
- {
- var_edit_file( object_str( list_item( begin ) ), buf, edits + i );
- var_edit_shift( buf, 0, edits + i );
- for ( iter = list_next( begin ); iter != end; iter = list_next( iter ) )
- {
- size_t size;
- string_append( buf, edits[i].join.ptr );
- size = buf->size;
- var_edit_file( object_str( list_item( iter ) ), buf, edits + i );
- var_edit_shift( buf, size, edits + i );
- }
- result = list_push_back( result, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
- else
- {
- for ( iter = begin; iter != end; iter = list_next( iter ) )
- {
- var_edit_file( object_str( list_item( iter ) ), buf, edits + i );
- var_edit_shift( buf, 0, edits + i );
- result = list_push_back( result, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
- }
- }
- return result;
-}
-
-static LIST * apply_modifiers_impl( LIST * result, string * buf, VAR_EDITS * edits, int n, LISTITER iter, LISTITER end )
-{
- if ( iter != end )
- {
- return apply_modifiers_non_empty( result, buf, edits, n, iter, end );
- }
- else
- {
- return apply_modifiers_empty( result, buf, edits, n );
- }
-}
-
-static LIST * apply_subscript_and_modifiers( STACK * s, int n )
-{
- LIST * value = stack_top( s );
- LIST * indices = stack_at( s, 1 );
- LIST * result = L0;
- VAR_EDITS * edits = (VAR_EDITS *)((LIST * *)stack_get( s ) + 2);
- int length = list_length( value );
- string buf[1];
- LISTITER indices_iter = list_begin( indices ), indices_end = list_end( indices );
- string_new( buf );
- for ( ; indices_iter != indices_end; indices_iter = list_next( indices_iter ) )
- {
- LISTITER iter = list_begin( value );
- LISTITER end = list_end( value );
- subscript_t sub = parse_subscript( object_str( list_item( indices_iter ) ) );
- get_iters( sub, &iter, &end, length );
- result = apply_modifiers_impl( result, buf, edits, n, iter, end );
- }
- string_free( buf );
- return result;
-}
-
-typedef struct expansion_item
-{
- LISTITER elem;
- LIST * saved;
- int size;
-} expansion_item;
-
-static LIST * expand( expansion_item * elem, int length )
-{
- LIST * result = L0;
- string buf[1];
- int size = 0;
- int i;
- assert( length > 0 );
- for ( i = 0; i < length; ++i )
- {
- int max = 0;
- LISTITER iter = elem[i].elem, end = list_end( elem[i].saved );
- if ( iter == end ) return result;
- for ( ; iter != end; iter = list_next( iter ) )
- {
- int len = strlen( object_str( list_item( iter ) ) );
- if ( len > max ) max = len;
- }
- size += max;
- }
- string_new( buf );
- string_reserve( buf, size );
- i = 0;
- {
- loop:
- for ( ; i < length; ++i )
- {
- elem[i].size = buf->size;
- string_append( buf, object_str( list_item( elem[i].elem ) ) );
- }
- result = list_push_back( result, object_new( buf->value ) );
- while ( --i >= 0 )
- {
- if( list_next( elem[i].elem ) != list_end( elem[i].saved ) )
- {
- elem[i].elem = list_next( elem[i].elem );
- string_truncate( buf, elem[i].size );
- goto loop;
- }
- else
- {
- elem[i].elem = list_begin( elem[i].saved );
- }
- }
- }
- string_free( buf );
- return result;
-}
-
-static void combine_strings( STACK * s, int n, string * out )
-{
- int i;
- for ( i = 0; i < n; ++i )
- {
- LIST * values = stack_pop( s );
- LISTITER iter = list_begin( values ), end = list_end( values );
- if ( iter != end )
- {
- string_append( out, object_str( list_item( iter ) ) );
- for ( iter = list_next( iter ); iter != end; iter = list_next( iter ) )
- {
- string_push_back( out, ' ' );
- string_append( out, object_str( list_item( iter ) ) );
- }
- list_free( values );
- }
- }
-}
-
-struct dynamic_array
-{
- int size;
- int capacity;
- void * data;
-};
-
-static void dynamic_array_init( struct dynamic_array * array )
-{
- array->size = 0;
- array->capacity = 0;
- array->data = 0;
-}
-
-static void dynamic_array_free( struct dynamic_array * array )
-{
- BJAM_FREE( array->data );
-}
-
-static void dynamic_array_push_impl( struct dynamic_array * array, void * value, int unit_size )
-{
- if ( array->capacity == 0 )
- {
- array->capacity = 2;
- array->data = BJAM_MALLOC( array->capacity * unit_size );
- }
- else if ( array->capacity == array->size )
- {
- void * new_data;
- array->capacity *= 2;
- new_data = BJAM_MALLOC( array->capacity * unit_size );
- memcpy( new_data, array->data, array->size * unit_size );
- BJAM_FREE( array->data );
- array->data = new_data;
- }
- memcpy( (char *)array->data + array->size * unit_size, value, unit_size );
- ++array->size;
-}
-
-#define dynamic_array_push( array, value ) ( dynamic_array_push_impl( array, &value, sizeof(value) ) )
-#define dynamic_array_at( type, array, idx ) (((type *)(array)->data)[idx])
-
-/*
- * struct compiler
- */
-
-struct label_info
-{
- int absolute_position;
- struct dynamic_array uses[1];
-};
-
-struct stored_rule
-{
- OBJECT * name;
- PARSE * parse;
- int num_arguments;
- struct arg_list * arguments;
- int local;
-};
-
-typedef struct compiler
-{
- struct dynamic_array code[1];
- struct dynamic_array constants[1];
- struct dynamic_array labels[1];
- struct dynamic_array rules[1];
- struct dynamic_array actions[1];
-} compiler;
-
-static void compiler_init( compiler * c )
-{
- dynamic_array_init( c->code );
- dynamic_array_init( c->constants );
- dynamic_array_init( c->labels );
- dynamic_array_init( c->rules );
- dynamic_array_init( c->actions );
-}
-
-static void compiler_free( compiler * c )
-{
- int i;
- dynamic_array_free( c->actions );
- dynamic_array_free( c->rules );
- for ( i = 0; i < c->labels->size; ++i )
- {
- dynamic_array_free( dynamic_array_at( struct label_info, c->labels, i ).uses );
- }
- dynamic_array_free( c->labels );
- dynamic_array_free( c->constants );
- dynamic_array_free( c->code );
-}
-
-static void compile_emit_instruction( compiler * c, instruction instr )
-{
- dynamic_array_push( c->code, instr );
-}
-
-static int compile_new_label( compiler * c )
-{
- int result = c->labels->size;
- struct label_info info;
- info.absolute_position = -1;
- dynamic_array_init( info.uses );
- dynamic_array_push( c->labels, info );
- return result;
-}
-
-static void compile_set_label( compiler * c, int label )
-{
- struct label_info * l = &dynamic_array_at( struct label_info, c->labels, label );
- int pos = c->code->size;
- int i;
- assert( l->absolute_position == -1 );
- l->absolute_position = pos;
- for ( i = 0; i < l->uses->size; ++i )
- {
- int id = dynamic_array_at( int, l->uses, i );
- int offset = (int)(pos - id - 1);
- dynamic_array_at( instruction, c->code, id ).arg = offset;
- }
-}
-
-static void compile_emit( compiler * c, unsigned int op_code, int arg )
-{
- instruction instr;
- instr.op_code = op_code;
- instr.arg = arg;
- compile_emit_instruction( c, instr );
-}
-
-static void compile_emit_branch( compiler * c, unsigned int op_code, int label )
-{
- struct label_info * l = &dynamic_array_at( struct label_info, c->labels, label );
- int pos = c->code->size;
- instruction instr;
- instr.op_code = op_code;
- if ( l->absolute_position == -1 )
- {
- instr.arg = 0;
- dynamic_array_push( l->uses, pos );
- }
- else
- {
- instr.arg = (int)( l->absolute_position - pos - 1 );
- }
- compile_emit_instruction( c, instr );
-}
-
-static int compile_emit_constant( compiler * c, OBJECT * value )
-{
- OBJECT * copy = object_copy( value );
- dynamic_array_push( c->constants, copy );
- return c->constants->size - 1;
-}
-
-static int compile_emit_rule( compiler * c, OBJECT * name, PARSE * parse, int num_arguments, struct arg_list * arguments, int local )
-{
- struct stored_rule rule;
- rule.name = object_copy( name );
- rule.parse = parse;
- rule.num_arguments = num_arguments;
- rule.arguments = arguments;
- rule.local = local;
- dynamic_array_push( c->rules, rule );
- return (int)( c->rules->size - 1 );
-}
-
-static int compile_emit_actions( compiler * c, PARSE * parse )
-{
- SUBACTION a;
- a.name = object_copy( parse->string );
- a.command = function_compile_actions( object_str( parse->string1 ), parse->file, parse->line );
- a.flags = parse->num;
- dynamic_array_push( c->actions, a );
- return (int)( c->actions->size - 1 );
-}
-
-static JAM_FUNCTION * compile_to_function( compiler * c )
-{
- JAM_FUNCTION * result = BJAM_MALLOC( sizeof(JAM_FUNCTION) );
- int i;
- result->base.type = FUNCTION_JAM;
- result->base.reference_count = 1;
- result->base.formal_arguments = 0;
- result->base.num_formal_arguments = 0;
-
- result->base.rulename = 0;
-
- result->code_size = c->code->size;
- result->code = BJAM_MALLOC( c->code->size * sizeof(instruction) );
- memcpy( result->code, c->code->data, c->code->size * sizeof(instruction) );
-
- result->constants = BJAM_MALLOC( c->constants->size * sizeof(OBJECT *) );
- memcpy( result->constants, c->constants->data, c->constants->size * sizeof(OBJECT *) );
- result->num_constants = c->constants->size;
-
- result->num_subfunctions = c->rules->size;
- result->functions = BJAM_MALLOC( c->rules->size * sizeof(SUBFUNCTION) );
- for ( i = 0; i < c->rules->size; ++i )
- {
- struct stored_rule * rule = &dynamic_array_at( struct stored_rule, c->rules, i );
- result->functions[i].name = rule->name;
- result->functions[i].code = function_compile( rule->parse );
- result->functions[i].code->num_formal_arguments = rule->num_arguments;
- result->functions[i].code->formal_arguments = rule->arguments;
- result->functions[i].local = rule->local;
- }
-
- result->actions = BJAM_MALLOC( c->actions->size * sizeof(SUBACTION) );
- memcpy( result->actions, c->actions->data, c->actions->size * sizeof(SUBACTION) );
- result->num_subactions = c->actions->size;
-
- result->generic = 0;
-
- result->file = 0;
- result->line = -1;
-
- return result;
-}
-
-/*
- * Parsing of variable expansions
- */
-
-typedef struct VAR_PARSE_GROUP
-{
- struct dynamic_array elems[1];
-} VAR_PARSE_GROUP;
-
-typedef struct VAR_PARSE_ACTIONS
-{
- struct dynamic_array elems[1];
-} VAR_PARSE_ACTIONS;
-
-#define VAR_PARSE_TYPE_VAR 0
-#define VAR_PARSE_TYPE_STRING 1
-#define VAR_PARSE_TYPE_FILE 2
-
-typedef struct _var_parse
-{
- int type; /* string or variable */
-} VAR_PARSE;
-
-typedef struct
-{
- VAR_PARSE base;
- VAR_PARSE_GROUP * name;
- VAR_PARSE_GROUP * subscript;
- struct dynamic_array modifiers[1];
-} VAR_PARSE_VAR;
-
-typedef struct
-{
- VAR_PARSE base;
- OBJECT * s;
-} VAR_PARSE_STRING;
-
-typedef struct
-{
- VAR_PARSE base;
- struct dynamic_array filename[1];
- struct dynamic_array contents[1];
-} VAR_PARSE_FILE;
-
-static void var_parse_free( VAR_PARSE * );
-
-/*
- * VAR_PARSE_GROUP
- */
-
-static VAR_PARSE_GROUP * var_parse_group_new()
-{
- VAR_PARSE_GROUP * result = BJAM_MALLOC( sizeof( VAR_PARSE_GROUP ) );
- dynamic_array_init( result->elems );
- return result;
-}
-
-static void var_parse_group_free( VAR_PARSE_GROUP * group )
-{
- int i;
- for ( i = 0; i < group->elems->size; ++i )
- {
- var_parse_free( dynamic_array_at( VAR_PARSE *, group->elems, i ) );
- }
- dynamic_array_free( group->elems );
- BJAM_FREE( group );
-}
-
-static void var_parse_group_add( VAR_PARSE_GROUP * group, VAR_PARSE * elem )
-{
- dynamic_array_push( group->elems, elem );
-}
-
-static void var_parse_group_maybe_add_constant( VAR_PARSE_GROUP * group, const char * start, const char * end )
-{
- if ( start != end )
- {
- string buf[1];
- VAR_PARSE_STRING * value = (VAR_PARSE_STRING *)BJAM_MALLOC( sizeof(VAR_PARSE_STRING) );
- value->base.type = VAR_PARSE_TYPE_STRING;
- string_new( buf );
- string_append_range( buf, start, end );
- value->s = object_new( buf->value );
- string_free( buf );
- var_parse_group_add( group, (VAR_PARSE *)value );
- }
-}
-
-VAR_PARSE_STRING * var_parse_group_as_literal( VAR_PARSE_GROUP * group )
-{
- if ( group->elems->size == 1 )
- {
- VAR_PARSE * result = dynamic_array_at( VAR_PARSE *, group->elems, 0 );
- if ( result->type == VAR_PARSE_TYPE_STRING )
- {
- return (VAR_PARSE_STRING *)result;
- }
- }
- return 0;
-}
-
-/*
- * VAR_PARSE_ACTIONS
- */
-
-static VAR_PARSE_ACTIONS * var_parse_actions_new()
-{
- VAR_PARSE_ACTIONS * result = (VAR_PARSE_ACTIONS *)BJAM_MALLOC( sizeof(VAR_PARSE_ACTIONS) );
- dynamic_array_init( result->elems );
- return result;
-}
-
-static void var_parse_actions_free( VAR_PARSE_ACTIONS * actions )
-{
- int i;
- for ( i = 0; i < actions->elems->size; ++i )
- {
- var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *, actions->elems, i ) );
- }
- dynamic_array_free( actions->elems );
- BJAM_FREE( actions );
-}
-
-/*
- * VAR_PARSE_VAR
- */
-
-static VAR_PARSE_VAR * var_parse_var_new()
-{
- VAR_PARSE_VAR * result = BJAM_MALLOC( sizeof( VAR_PARSE_VAR ) );
- result->base.type = VAR_PARSE_TYPE_VAR;
- result->name = var_parse_group_new();
- result->subscript = 0;
- dynamic_array_init( result->modifiers );
- return result;
-}
-
-static void var_parse_var_free( VAR_PARSE_VAR * var )
-{
- int i;
- var_parse_group_free( var->name );
- if ( var->subscript )
- var_parse_group_free( var->subscript );
- for( i = 0; i < var->modifiers->size; ++i )
- var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *, var->modifiers, i ) );
- dynamic_array_free( var->modifiers );
- BJAM_FREE( var );
-}
-
-static VAR_PARSE_GROUP * var_parse_var_new_modifier( VAR_PARSE_VAR * var )
-{
- VAR_PARSE_GROUP * result = var_parse_group_new();
- dynamic_array_push( var->modifiers, result );
- return result;
-}
-
-/*
- * VAR_PARSE_STRING
- */
-
-static void var_parse_string_free( VAR_PARSE_STRING * string )
-{
- object_free( string->s );
- BJAM_FREE( string );
-}
-
-/*
- * VAR_PARSE_FILE
- */
-
-static VAR_PARSE_FILE * var_parse_file_new( void )
-{
- VAR_PARSE_FILE * result = (VAR_PARSE_FILE *)BJAM_MALLOC( sizeof( VAR_PARSE_FILE ) );
- result->base.type = VAR_PARSE_TYPE_FILE;
- dynamic_array_init( result->filename );
- dynamic_array_init( result->contents );
- return result;
-}
-
-static void var_parse_file_free( VAR_PARSE_FILE * file )
-{
- int i;
- for( i = 0; i < file->filename->size; ++i )
- var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *, file->filename, i ) );
- dynamic_array_free( file->filename );
- for( i = 0; i < file->contents->size; ++i )
- var_parse_group_free( dynamic_array_at( VAR_PARSE_GROUP *, file->contents, i ) );
- dynamic_array_free( file->contents );
- BJAM_FREE( file );
-}
-
-/*
- * VAR_PARSE
- */
-
-static void var_parse_free( VAR_PARSE * parse )
-{
- if ( parse->type == VAR_PARSE_TYPE_VAR )
- {
- var_parse_var_free( (VAR_PARSE_VAR *)parse );
- }
- else if ( parse->type == VAR_PARSE_TYPE_STRING )
- {
- var_parse_string_free( (VAR_PARSE_STRING *)parse );
- }
- else if ( parse->type == VAR_PARSE_TYPE_FILE )
- {
- var_parse_file_free( (VAR_PARSE_FILE *)parse );
- }
- else
- {
- assert(!"Invalid type");
- }
-}
-
-/*
- * Compile VAR_PARSE
- */
-
-static void var_parse_group_compile( const VAR_PARSE_GROUP * parse, compiler * c );
-
-static void var_parse_var_compile( const VAR_PARSE_VAR * parse, compiler * c )
-{
- int expand_name = 0;
- /* If there are modifiers, emit them in reverse order. */
- if ( parse->modifiers->size > 0 )
- {
- int i;
- for ( i = 0; i < parse->modifiers->size; ++i )
- {
- var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *, parse->modifiers, parse->modifiers->size - i - 1 ), c );
- }
- }
-
- /* If there's a subscript, emit it. */
- if ( parse->subscript )
- {
- var_parse_group_compile( parse->subscript, c );
- }
-
- /* If the variable name is empty, look it up. */
- if ( parse->name->elems->size == 0 )
- {
- compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c, constant_empty ) );
- }
- /* If the variable name doesn't need to be expanded, look it up. */
- else if ( parse->name->elems->size == 1 &&
- dynamic_array_at( VAR_PARSE *, parse->name->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
- {
- OBJECT * name = ( (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, parse->name->elems, 0 ) )->s;
- int idx = get_argument_index( object_str( name ) );
- if ( idx != -1 )
- {
- compile_emit( c, INSTR_PUSH_ARG, idx );
- }
- else
- {
- compile_emit( c, INSTR_PUSH_VAR, compile_emit_constant( c, name ) );
- }
- }
- /* Otherwise, push the var names and use the group instruction. */
- else
- {
- var_parse_group_compile( parse->name, c );
- expand_name = 1;
- }
-
- /** Select the instruction for expanding the variable. */
- if ( !parse->modifiers->size && !parse->subscript && !expand_name )
- {
- /* Nothing to do */
- }
- else if ( !parse->modifiers->size && !parse->subscript && expand_name )
- {
- compile_emit( c, INSTR_PUSH_GROUP, 0 );
- }
- else if ( !parse->modifiers->size && parse->subscript && !expand_name )
- {
- compile_emit( c, INSTR_APPLY_INDEX, 0 );
- }
- else if ( !parse->modifiers->size && parse->subscript && expand_name )
- {
- compile_emit( c, INSTR_APPLY_INDEX_GROUP, 0 );
- }
- if ( parse->modifiers->size && !parse->subscript && !expand_name )
- {
- compile_emit( c, INSTR_APPLY_MODIFIERS, parse->modifiers->size );
- }
- else if ( parse->modifiers->size && !parse->subscript && expand_name )
- {
- compile_emit( c, INSTR_APPLY_MODIFIERS_GROUP, parse->modifiers->size );
- }
- else if ( parse->modifiers->size && parse->subscript && !expand_name )
- {
- compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS, parse->modifiers->size );
- }
- else if ( parse->modifiers->size && parse->subscript && expand_name )
- {
- compile_emit( c, INSTR_APPLY_INDEX_MODIFIERS_GROUP, parse->modifiers->size );
- }
-}
-
-static void var_parse_string_compile( const VAR_PARSE_STRING * parse, compiler * c )
-{
- compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, parse->s ) );
-}
-
-static void var_parse_file_compile( const VAR_PARSE_FILE * parse, compiler * c )
-{
- int i;
- for ( i = 0; i < parse->filename->size; ++i )
- {
- var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *, parse->filename, parse->filename->size - i - 1 ), c );
- }
- compile_emit( c, INSTR_APPEND_STRINGS, parse->filename->size );
- for ( i = 0; i < parse->contents->size; ++i )
- {
- var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *, parse->contents, parse->contents->size - i - 1 ), c );
- }
- compile_emit( c, INSTR_WRITE_FILE, parse->contents->size );
-}
-
-static void var_parse_compile( const VAR_PARSE * parse, compiler * c )
-{
- if( parse->type == VAR_PARSE_TYPE_VAR )
- {
- var_parse_var_compile( (const VAR_PARSE_VAR *)parse, c );
- }
- else if( parse->type == VAR_PARSE_TYPE_STRING )
- {
- var_parse_string_compile( (const VAR_PARSE_STRING *)parse, c );
- }
- else if( parse->type == VAR_PARSE_TYPE_FILE )
- {
- var_parse_file_compile( (const VAR_PARSE_FILE *)parse, c );
- }
- else
- {
- assert( !"Unknown var parse type." );
- }
-}
-
-static void var_parse_group_compile( const VAR_PARSE_GROUP * parse, compiler * c )
-{
- /* Emit the elements in reverse order. */
- int i;
- for( i = 0; i < parse->elems->size; ++i)
- {
- var_parse_compile( dynamic_array_at( VAR_PARSE *, parse->elems, parse->elems->size - i - 1 ), c );
- }
- /* If there're no elements, emit an empty string. */
- if ( parse->elems->size == 0 )
- {
- compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, constant_empty ) );
- }
- /* If there's more than one element, combine them. */
- if ( parse->elems->size > 1 )
- {
- compile_emit( c, INSTR_COMBINE_STRINGS, parse->elems->size );
- }
-}
-
-static void var_parse_actions_compile( const VAR_PARSE_ACTIONS * actions, compiler * c )
-{
- int i;
- for ( i = 0; i < actions->elems->size; ++i )
- {
- var_parse_group_compile( dynamic_array_at( VAR_PARSE_GROUP *, actions->elems, actions->elems->size - i - 1 ), c );
- }
- compile_emit( c, INSTR_OUTPUT_STRINGS, actions->elems->size );
-}
-
-/*
- * Parse VAR_PARSE_VAR
- */
-
-static VAR_PARSE * parse_at_file( const char * start, const char * mid, const char * end );
-static VAR_PARSE * parse_variable( const char * * string );
-static int try_parse_variable( const char * * s_, const char * * string, VAR_PARSE_GROUP * out);
-static void balance_parentheses( const char * * s_, const char * * string, VAR_PARSE_GROUP * out);
-static void parse_var_string( const char * first, const char * last, struct dynamic_array * out );
-
-/*
- * Parses a string that can contain variables to expand.
- */
-
-static VAR_PARSE_GROUP * parse_expansion( const char * * string )
-{
- VAR_PARSE_GROUP * result = var_parse_group_new();
- const char * s = *string;
- for (;;)
- {
- if(try_parse_variable( &s, string, result )) {}
- else if(s[0] == '\0')
- {
- var_parse_group_maybe_add_constant( result, *string, s );
- return result;
- }
- else
- {
- ++s;
- }
- }
-}
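-
-/*
- * For illustration: given "lib$(NAME).a", parse_expansion returns a group
- * of three elements: the constant "lib", a variable reference for NAME and
- * the constant ".a".  A string containing no "$(" yields a group with a
- * single constant element.
- */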
-
-static VAR_PARSE_ACTIONS * parse_actions( const char * string )
-{
- VAR_PARSE_ACTIONS * result = var_parse_actions_new();
- parse_var_string( string, string + strlen( string ), result->elems );
- return result;
-}
-
-/*
- * Checks whether the string at *s_ starts with
- * a variable expansion "$(". *string should point
- * to the first unemitted character before *s_.
- * If *s_ starts with a variable expansion, appends
- * elements to out up to the closing ")", and
- * adjusts *s_ and *string to point to the next character.
- * Returns 1 if *s_ starts with a variable, 0 otherwise.
- */
-
-static int try_parse_variable( const char * * s_, const char * * string, VAR_PARSE_GROUP * out)
-{
- const char * s = *s_;
- if(s[0] == '$' && s[1] == '(')
- {
- var_parse_group_maybe_add_constant( out, *string, s );
- s += 2;
- var_parse_group_add( out, parse_variable( &s ) );
- *string = s;
- *s_ = s;
- return 1;
- }
- else if(s[0] == '@' && s[1] == '(')
- {
- int depth = 1;
- const char * ine;
- const char * split = 0;
- var_parse_group_maybe_add_constant( out, *string, s );
- s += 2;
- ine = s;
-
- /* Scan the content of the response file @() section. */
- while ( *ine && ( depth > 0 ) )
- {
- switch ( *ine )
- {
- case '(': ++depth; break;
- case ')': --depth; break;
- case ':':
- if ( ( depth == 1 ) && ( ine[ 1 ] == 'E' ) && ( ine[ 2 ] == '=' ) )
- split = ine;
- break;
- }
- ++ine;
- }
-
- if ( !split || depth != 0 )
- {
- return 0;
- }
-
- var_parse_group_add( out, parse_at_file( s, split, ine - 1 ) );
- *string = ine;
- *s_ = ine;
-
- return 1;
- }
- else
- {
- return 0;
- }
-}
-
-static const char * current_file = "";
-static int current_line;
-
-static void parse_error( const char * message )
-{
- printf( "%s:%d: %s\n", current_file, current_line, message );
-}
-
-/*
- * Parses a single variable up to the closing ")" and
- * adjusts *string to point to the next character. *string
- * should point to the character immediately after
- * the initial "$("
- */
-
-static VAR_PARSE * parse_variable( const char * * string )
-{
- VAR_PARSE_VAR * result = var_parse_var_new();
- VAR_PARSE_GROUP * name = result->name;
- const char * s = *string;
- for ( ; ; )
- {
- if ( try_parse_variable( &s, string, name ) ) {}
- else if ( s[0] == ':' )
- {
- VAR_PARSE_GROUP * mod;
- var_parse_group_maybe_add_constant( name, *string, s );
- ++s;
- *string = s;
- mod = var_parse_var_new_modifier( result );
- for ( ; ; )
- {
- if ( try_parse_variable( &s, string, mod ) ) {}
- else if ( s[0] == ')' )
- {
- var_parse_group_maybe_add_constant( mod, *string, s );
- ++s;
- *string = s;
- return (VAR_PARSE *)result;
- }
- else if ( s[0] == '(' )
- {
- ++s;
- balance_parentheses( &s, string, mod );
- }
- else if ( s[0] == ':' )
- {
- var_parse_group_maybe_add_constant( mod, *string, s );
- ++s;
- *string = s;
- mod = var_parse_var_new_modifier( result );
- }
- else if ( s[0] == '[' )
- {
- parse_error("unexpected subscript");
- ++s;
- }
- else if ( s[0] == '\0' )
- {
- parse_error( "unbalanced parentheses" );
- var_parse_group_maybe_add_constant( mod, *string, s );
- *string = s;
- return (VAR_PARSE *)result;
- }
- else
- {
- ++s;
- }
- }
- }
- else if ( s[0] == '[' )
- {
- VAR_PARSE_GROUP * subscript = var_parse_group_new();
- result->subscript = subscript;
- var_parse_group_maybe_add_constant( name, *string, s );
- ++s;
- *string = s;
- for ( ; ; )
- {
- if ( try_parse_variable( &s, string, subscript ) ) {}
- else if ( s[0] == ']' )
- {
- var_parse_group_maybe_add_constant( subscript, *string, s );
- ++s;
- *string = s;
- if ( s[0] == ')' || s[0] == ':' || s[0] == '\0')
- {
- break;
- }
- else
- {
- parse_error( "unexpected text following []" );
- break;
- }
- }
- else if ( isdigit( s[0] ) || s[0] == '-' )
- {
- ++s;
- }
- else if( s[0] == '\0' )
- {
- parse_error( "malformed subscript" );
- break;
- }
- else
- {
- parse_error( "malformed subscript" );
- ++s;
- }
- }
- }
- else if ( s[0] == ')' )
- {
- var_parse_group_maybe_add_constant( name, *string, s );
- ++s;
- *string = s;
- return (VAR_PARSE *)result;
- }
- else if ( s[0] == '(' )
- {
- ++s;
- balance_parentheses( &s, string, name );
- }
- else if ( s[0] == '\0' )
- {
- parse_error( "unbalanced parentheses" );
- var_parse_group_maybe_add_constant( name, *string, s );
- *string = s;
- return (VAR_PARSE *)result;
- }
- else
- {
- ++s;
- }
- }
-}
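-
-/*
- * For illustration: "$(SOURCES[2-]:S=.o:D=bin)" parses into a VAR_PARSE_VAR
- * whose name group is "SOURCES", whose subscript group is "2-" and which
- * carries the two modifier groups "S=.o" and "D=bin".  Each of these parts
- * may itself contain nested "$(" expansions.
- */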
-
-static void parse_var_string( const char * first, const char * last, struct dynamic_array * out )
-{
- const char * saved = first;
- for ( ; ; )
- {
- /* Handle whitespace */
- for ( ; first != last; ++first ) if ( !isspace(*first) ) break;
- if ( saved != first )
- {
- VAR_PARSE_GROUP * group = var_parse_group_new();
- var_parse_group_maybe_add_constant( group, saved, first );
- saved = first;
- dynamic_array_push( out, group );
- }
-
- if ( first == last ) break;
-
- /* Handle non-whitespace */
-
- {
- VAR_PARSE_GROUP * group = var_parse_group_new();
- for ( ; ; )
- {
-
- if( first == last || isspace( *first ) )
- {
- var_parse_group_maybe_add_constant( group, saved, first );
- saved = first;
- break;
- }
- else if ( try_parse_variable( &first, &saved, group ) )
- {
- assert( first <= last );
- }
- else
- {
- ++first;
- }
- }
- dynamic_array_push( out, group );
- }
- if ( first == last ) break;
- }
-}
-
-/*
- * start should point to the character immediately following the
- * opening "@(", mid should point to the ":E=", and end should
- * point to the closing ")".
- */
-
-static VAR_PARSE * parse_at_file( const char * start, const char * mid, const char * end )
-{
- VAR_PARSE_FILE * result = var_parse_file_new();
- parse_var_string( start, mid, result->filename );
- parse_var_string( mid + 3, end, result->contents );
- return (VAR_PARSE *)result;
-}
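-
-/*
- * For illustration: the expansion "@(response.rsp:E=$(OPTIONS))" is split
- * at the ":E=" marker by try_parse_variable, so parse_at_file sees
- * "response.rsp" as the file name part and "$(OPTIONS)" as the contents
- * part, each of which is then parsed with parse_var_string.
- */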
-
-/*
- * Given that *s_ points to the character after a "(",
- * parses up to the matching ")". *string should
- * point to the first unemitted character before *s_.
- *
- * When the function returns, *s_ will point to the character
- * after the ")", and *string will point to the first
- * unemitted character before *s_. The range from *string
- * to *s_ does not contain any variables that need to be
- * expanded.
- */
-
-static void balance_parentheses( const char * * s_, const char * * string, VAR_PARSE_GROUP * out)
-{
- int depth = 1;
- const char * s = *s_;
- for ( ; ; )
- {
- if ( try_parse_variable( &s, string, out ) ) { }
- else if(s[0] == ':' || s[0] == '[')
- {
- parse_error( "unbalanced parentheses" );
- ++s;
- }
- else if(s[0] == '\0')
- {
- parse_error( "unbalanced parentheses" );
- break;
- }
- else if(s[0] == ')')
- {
- ++s;
- if(--depth == 0) break;
- }
- else if(s[0] == '(')
- {
- ++depth;
- ++s;
- }
- else
- {
- ++s;
- }
- }
- *s_ = s;
-}
-
-/*
- * Main compile
- */
-
-#define RESULT_STACK 0
-#define RESULT_RETURN 1
-#define RESULT_NONE 2
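-
-/*
- * Result locations, in brief: RESULT_STACK leaves the value on the value
- * stack, RESULT_RETURN makes it the function's return value, and
- * RESULT_NONE discards it.  adjust_result() converts between the three.
- */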
-
-static void compile_parse( PARSE * parse, compiler * c, int result_location );
-static struct arg_list * arg_list_compile( PARSE * parse, int * num_arguments );
-
-static void compile_condition( PARSE * parse, compiler * c, int branch_true, int label )
-{
- assert( parse->type == PARSE_EVAL );
- switch ( parse->num )
- {
- case EXPR_EXISTS:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_NOT_EMPTY, label );
- else
- compile_emit_branch( c, INSTR_JUMP_EMPTY, label );
- break;
- }
- case EXPR_EQUALS:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_EQ, label );
- else
- compile_emit_branch( c, INSTR_JUMP_NE, label );
- break;
- }
- case EXPR_NOTEQ:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_NE, label );
- else
- compile_emit_branch( c, INSTR_JUMP_EQ, label );
- break;
- }
- case EXPR_LESS:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_LT, label );
- else
- compile_emit_branch( c, INSTR_JUMP_GE, label );
- break;
- }
- case EXPR_LESSEQ:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_LE, label );
- else
- compile_emit_branch( c, INSTR_JUMP_GT, label );
- break;
- }
- case EXPR_MORE:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_GT, label );
- else
- compile_emit_branch( c, INSTR_JUMP_LE, label );
- break;
- }
- case EXPR_MOREEQ:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_GE, label );
- else
- compile_emit_branch( c, INSTR_JUMP_LT, label );
- break;
- }
- case EXPR_IN:
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- if ( branch_true )
- compile_emit_branch( c, INSTR_JUMP_IN, label );
- else
- compile_emit_branch( c, INSTR_JUMP_NOT_IN, label );
- break;
- }
- case EXPR_AND:
- {
- if ( branch_true )
- {
- int f = compile_new_label( c );
- compile_condition( parse->left, c, 0, f );
- compile_condition( parse->right, c, 1, label );
- compile_set_label( c, f );
- }
- else
- {
- compile_condition( parse->left, c, 0, label );
- compile_condition( parse->right, c, 0, label );
- }
- break;
- }
- case EXPR_OR:
- {
- if ( branch_true )
- {
- compile_condition( parse->left, c, 1, label );
- compile_condition( parse->right, c, 1, label );
- }
- else
- {
- int t = compile_new_label( c );
- compile_condition( parse->left, c, 1, t );
- compile_condition( parse->right, c, 0, label );
- compile_set_label( c, t );
- }
- break;
- }
- case EXPR_NOT:
- {
- compile_condition( parse->left, c, !branch_true, label );
- break;
- }
- }
-}
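-
-/*
- * For illustration: compiling the condition of "if $(A) && ! $(B) { ... }"
- * emits roughly
- *
- *   ...push $(A)...
- *   INSTR_JUMP_EMPTY      else-label
- *   ...push $(B)...
- *   INSTR_JUMP_NOT_EMPTY  else-label
- *   ...if-body...
- *
- * Conditions never leave a value on the stack; they only branch, and
- * short-circuit evaluation falls out of the label placement.
- */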
-
-static void adjust_result( compiler * c, int actual_location, int desired_location )
-{
- if ( actual_location == desired_location )
- ;
- else if ( actual_location == RESULT_STACK && desired_location == RESULT_RETURN )
- compile_emit( c, INSTR_SET_RESULT, 0 );
- else if( actual_location == RESULT_STACK && desired_location == RESULT_NONE )
- compile_emit( c, INSTR_POP, 0 );
- else if( actual_location == RESULT_RETURN && desired_location == RESULT_STACK )
- compile_emit( c, INSTR_PUSH_RESULT, 0 );
- else if ( actual_location == RESULT_RETURN && desired_location == RESULT_NONE )
- ;
- else if ( actual_location == RESULT_NONE && desired_location == RESULT_STACK )
- compile_emit( c, INSTR_PUSH_EMPTY, 0 );
- else if ( actual_location == RESULT_NONE && desired_location == RESULT_RETURN )
- {
- compile_emit( c, INSTR_PUSH_EMPTY, 0 );
- compile_emit( c, INSTR_SET_RESULT, 0 );
- }
- else
- {
- assert( !"invalid result location" );
- }
-}
-
-static const char * parse_type( PARSE * parse )
-{
- switch ( parse->type )
- {
- case PARSE_APPEND: return "append";
- case PARSE_EVAL: return "eval";
- case PARSE_RULES: return "rules";
- default: return "unknown";
- }
-}
-
-static void compile_append_chain( PARSE * parse, compiler * c )
-{
- assert( parse->type == PARSE_APPEND );
- if ( parse->left->type == PARSE_NULL )
- {
- compile_parse( parse->right, c, RESULT_STACK );
- }
- else
- {
- if ( parse->left->type == PARSE_APPEND )
- compile_append_chain( parse->left, c );
- else
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, INSTR_PUSH_APPEND, 0 );
- }
-}
-
-static void compile_parse( PARSE * parse, compiler * c, int result_location )
-{
- if ( parse->type == PARSE_APPEND )
- {
- compile_append_chain( parse, c );
- adjust_result( c, RESULT_STACK, result_location );
- }
- else if ( parse->type == PARSE_EVAL )
- {
- /* FIXME: This is only needed because of the bizarre parsing of conditions. */
- if ( parse->num == EXPR_EXISTS )
- {
- compile_parse( parse->left, c, result_location );
- }
- else
- {
- int f = compile_new_label( c );
- int end = compile_new_label( c );
-
- printf( "%s:%d: Conditional used as list (check operator precedence).\n", object_str(parse->file), parse->line );
-
- /* Emit the condition */
- compile_condition( parse, c, 0, f );
- compile_emit( c, INSTR_PUSH_CONSTANT, compile_emit_constant( c, constant_true ) );
- compile_emit_branch( c, INSTR_JUMP, end );
- compile_set_label( c, f );
- compile_emit( c, INSTR_PUSH_EMPTY, 0 );
- compile_set_label( c, end );
- adjust_result( c, RESULT_STACK, result_location );
- }
- }
- else if ( parse->type == PARSE_FOREACH )
- {
- int var = compile_emit_constant( c, parse->string );
- int top = compile_new_label( c );
- int end = compile_new_label( c );
-
- /*
- * Evaluate the list.
- */
- compile_parse( parse->left, c, RESULT_STACK );
-
- /* Localize the loop variable */
- if ( parse->num )
- {
- compile_emit( c, INSTR_PUSH_EMPTY, 0 );
- compile_emit( c, INSTR_PUSH_LOCAL, var );
- compile_emit( c, INSTR_SWAP, 1 );
- }
-
- compile_emit( c, INSTR_FOR_INIT, 0 );
- compile_set_label( c, top );
- compile_emit_branch( c, INSTR_FOR_LOOP, end );
- compile_emit( c, INSTR_SET, var );
- compile_emit( c, INSTR_POP, 0 );
-
- /* Run the loop body */
- compile_parse( parse->right, c, RESULT_NONE );
-
- compile_emit_branch( c, INSTR_JUMP, top );
- compile_set_label( c, end );
-
- if ( parse->num )
- {
- compile_emit( c, INSTR_POP_LOCAL, var );
- }
-
- adjust_result( c, RESULT_NONE, result_location);
- }
- else if( parse->type == PARSE_IF )
- {
- int f = compile_new_label( c );
- /* Emit the condition */
- compile_condition( parse->left, c, 0, f );
- /* Emit the if block */
- compile_parse( parse->right, c, result_location );
- if ( parse->third->type != PARSE_NULL || result_location != RESULT_NONE )
- {
- /* Emit the else block */
- int end = compile_new_label( c );
- compile_emit_branch( c, INSTR_JUMP, end );
- compile_set_label( c, f );
- compile_parse( parse->third, c, result_location );
- compile_set_label( c, end );
- }
- else
- {
- compile_set_label( c, f );
- }
-
- }
- else if( parse->type == PARSE_WHILE )
- {
- int nested_result = result_location == RESULT_NONE? RESULT_NONE : RESULT_RETURN;
- int test = compile_new_label( c );
- int top = compile_new_label( c );
- /* Make sure that we return an empty list if the loop runs zero times. */
- adjust_result( c, RESULT_NONE, nested_result );
- /* Jump to the loop test */
- compile_emit_branch( c, INSTR_JUMP, test );
- compile_set_label( c, top );
- /* Emit the loop body */
- compile_parse( parse->right, c, nested_result );
- /* Emit the condition */
- compile_set_label( c, test );
- compile_condition( parse->left, c, 1, top );
-
- adjust_result( c, nested_result, result_location );
- }
- else if ( parse->type == PARSE_INCLUDE )
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_emit( c, INSTR_INCLUDE, 0 );
- compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 );
- adjust_result( c, RESULT_NONE, result_location );
- }
- else if ( parse->type == PARSE_MODULE )
- {
- int nested_result = result_location == RESULT_NONE? RESULT_NONE : RESULT_RETURN;
- compile_parse( parse->left, c, RESULT_STACK );
- compile_emit( c, INSTR_PUSH_MODULE, 0 );
- compile_parse( parse->right, c, nested_result );
- compile_emit( c, INSTR_POP_MODULE, 0 );
- adjust_result( c, nested_result, result_location );
- }
- else if ( parse->type == PARSE_CLASS )
- {
- /* Evaluate the class name. */
- compile_parse( parse->left->right, c, RESULT_STACK );
- /* Evaluate the base classes. */
- if ( parse->left->left )
- {
- compile_parse( parse->left->left->right, c, RESULT_STACK );
- }
- else
- {
- compile_emit( c, INSTR_PUSH_EMPTY, 0 );
- }
- compile_emit( c, INSTR_CLASS, 0 );
- compile_parse( parse->right, c, RESULT_NONE );
- compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 );
- compile_emit( c, INSTR_POP_MODULE, 0 );
-
- adjust_result( c, RESULT_NONE, result_location );
- }
- else if ( parse->type == PARSE_LIST )
- {
- OBJECT * o = parse->string;
- const char * s = object_str( o );
- VAR_PARSE_GROUP * group;
- current_file = object_str( parse->file );
- current_line = parse->line;
- group = parse_expansion( &s );
- var_parse_group_compile( group, c );
- var_parse_group_free( group );
- adjust_result( c, RESULT_STACK, result_location );
- }
- else if ( parse->type == PARSE_LOCAL )
- {
- int nested_result = result_location == RESULT_NONE? RESULT_NONE : RESULT_RETURN;
- /*
- * This should be a left-recursive group of compile_appends.
- */
- PARSE * vars = parse->left;
-
- /* Special case an empty list of vars */
- if ( vars->type == PARSE_NULL )
- {
- compile_parse( parse->right, c, RESULT_NONE );
- compile_parse( parse->third, c, result_location );
- nested_result = result_location;
- }
- /*
- * Check whether there is exactly one variable
- * with a constant name
- */
- else if ( vars->left->type == PARSE_NULL &&
- vars->right->type == PARSE_LIST )
- {
- const char * s = object_str( vars->right->string );
- VAR_PARSE_GROUP * group;
- current_file = object_str( parse->file );
- current_line = parse->line;
- group = parse_expansion( &s );
- if ( group->elems->size == 1 &&
- dynamic_array_at( VAR_PARSE *, group->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
- {
- int name = compile_emit_constant( c, ( (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 0 ) )->s );
- var_parse_group_free( group );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, INSTR_PUSH_LOCAL, name );
- compile_parse( parse->third, c, nested_result );
- compile_emit( c, INSTR_POP_LOCAL, name );
- }
- else
- {
- var_parse_group_compile( group, c );
- var_parse_group_free( group );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
- compile_parse( parse->third, c, nested_result );
- compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 );
- }
- }
- else
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
- compile_parse( parse->third, c, nested_result );
- compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 );
- }
- adjust_result( c, nested_result, result_location );
- }
- else if ( parse->type == PARSE_ON )
- {
- int end = compile_new_label( c );
- compile_parse( parse->left, c, RESULT_STACK );
- compile_emit_branch( c, INSTR_PUSH_ON, end );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, INSTR_POP_ON, 0 );
- compile_set_label( c, end );
- adjust_result( c, RESULT_STACK, result_location );
- }
- else if ( parse->type == PARSE_RULE )
- {
- PARSE * p;
- int n = 0;
- VAR_PARSE_GROUP * group;
- const char * s = object_str( parse->string );
-
- if ( parse->left->left == NULL && parse->left->right->type == PARSE_NULL )
- ;
- else
- for ( p = parse->left; p; p = p->left )
- {
- compile_parse( p->right, c, RESULT_STACK );
- ++n;
- }
-
- current_file = object_str( parse->file );
- current_line = parse->line;
- group = parse_expansion( &s );
- var_parse_group_compile( group, c );
- var_parse_group_free( group );
- compile_emit( c, INSTR_CALL_RULE, n );
- compile_emit( c, compile_emit_constant( c, parse->string ), parse->line );
- adjust_result( c, RESULT_STACK, result_location );
- }
- else if ( parse->type == PARSE_RULES )
- {
- do compile_parse( parse->left, c, RESULT_NONE );
- while ( ( parse = parse->right )->type == PARSE_RULES );
- compile_parse( parse, c, result_location );
- }
- else if ( parse->type == PARSE_SET )
- {
- PARSE * vars = parse->left;
- unsigned int op_code;
- unsigned int op_code_group;
-
- switch ( parse->num )
- {
- case ASSIGN_SET: default: op_code = INSTR_SET; op_code_group = INSTR_SET_GROUP; break;
- case ASSIGN_APPEND: op_code = INSTR_APPEND; op_code_group = INSTR_APPEND_GROUP; break;
- case ASSIGN_DEFAULT: op_code = INSTR_DEFAULT; op_code_group = INSTR_DEFAULT_GROUP; break;
- }
-
- /*
- * Check whether there is exactly one variable
- * with a constant name
- */
- if ( vars->type == PARSE_LIST )
- {
- const char * s = object_str( vars->string );
- VAR_PARSE_GROUP * group;
- current_file = object_str( parse->file );
- current_line = parse->line;
- group = parse_expansion( &s );
- if ( group->elems->size == 1 &&
- dynamic_array_at( VAR_PARSE *, group->elems, 0 )->type == VAR_PARSE_TYPE_STRING )
- {
- int name = compile_emit_constant( c, ( (VAR_PARSE_STRING *)dynamic_array_at( VAR_PARSE *, group->elems, 0 ) )->s );
- var_parse_group_free( group );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, op_code, name );
- }
- else
- {
- var_parse_group_compile( group, c );
- var_parse_group_free( group );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, op_code_group, 0 );
- }
- }
- else
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
- compile_emit( c, op_code_group, 0 );
- }
- adjust_result( c, RESULT_STACK, result_location );
- }
- else if ( parse->type == PARSE_SETCOMP )
- {
- int n_args;
- struct arg_list * args = arg_list_compile( parse->right, &n_args );
-
- int rule_id = compile_emit_rule( c, parse->string, parse->left, n_args, args, parse->num );
-
- compile_emit( c, INSTR_RULE, rule_id );
- adjust_result( c, RESULT_NONE, result_location );
- }
- else if ( parse->type == PARSE_SETEXEC )
- {
- int actions_id = compile_emit_actions( c, parse );
-
- compile_parse( parse->left, c, RESULT_STACK );
-
- compile_emit( c, INSTR_ACTIONS, actions_id );
- adjust_result( c, RESULT_NONE, result_location );
- }
- else if ( parse->type == PARSE_SETTINGS )
- {
- compile_parse( parse->left, c, RESULT_STACK );
- compile_parse( parse->third, c, RESULT_STACK );
- compile_parse( parse->right, c, RESULT_STACK );
-
- switch ( parse->num )
- {
- case ASSIGN_SET: default: compile_emit( c, INSTR_SET_ON, 0 ); break;
- case ASSIGN_APPEND: compile_emit( c, INSTR_APPEND_ON, 0 ); break;
- case ASSIGN_DEFAULT: compile_emit( c, INSTR_DEFAULT_ON, 0 ); break;
- }
-
- adjust_result( c, RESULT_STACK, result_location );
- }
- else if ( parse->type == PARSE_SWITCH )
- {
- int switch_end = compile_new_label( c );
- compile_parse( parse->left, c, RESULT_STACK );
-
- for ( parse = parse->right; parse; parse = parse->right )
- {
- int id = compile_emit_constant( c, parse->left->string );
- int next_case = compile_new_label( c );
- compile_emit( c, INSTR_PUSH_CONSTANT, id );
- compile_emit_branch( c, INSTR_JUMP_NOT_GLOB, next_case );
- compile_parse( parse->left->left, c, result_location );
- compile_emit_branch( c, INSTR_JUMP, switch_end );
- compile_set_label( c, next_case );
- }
- compile_emit( c, INSTR_POP, 0 );
- adjust_result( c, RESULT_NONE, result_location );
- compile_set_label( c, switch_end );
- }
- else if ( parse->type == PARSE_NULL )
- {
- adjust_result( c, RESULT_NONE, result_location );
- }
- else
- {
- assert( !"unknown PARSE type." );
- }
-}
-
-OBJECT * function_rulename( FUNCTION * function )
-{
- return function->rulename;
-}
-
-void function_set_rulename( FUNCTION * function, OBJECT * rulename )
-{
- function->rulename = rulename;
-}
-
-void function_location( FUNCTION * function_, OBJECT * * file, int * line )
-{
- if ( function_->type == FUNCTION_BUILTIN )
- {
- *file = constant_builtin;
- *line = -1;
- }
-#ifdef HAVE_PYTHON
- else if ( function_->type == FUNCTION_PYTHON )
- {
- *file = constant_builtin;
- *line = -1;
- }
-#endif
- else
- {
- JAM_FUNCTION * function = (JAM_FUNCTION *)function_;
- assert( function_->type == FUNCTION_JAM );
- *file = function->file;
- *line = function->line;
- }
-}
-
-static struct arg_list * arg_list_compile_builtin( const char * * args, int * num_arguments );
-
-FUNCTION * function_builtin( LIST * ( * func )( FRAME * frame, int flags ), int flags, const char * * args )
-{
- BUILTIN_FUNCTION * result = BJAM_MALLOC( sizeof( BUILTIN_FUNCTION ) );
- result->base.type = FUNCTION_BUILTIN;
- result->base.reference_count = 1;
- result->base.rulename = 0;
- result->base.formal_arguments = arg_list_compile_builtin( args, &result->base.num_formal_arguments );
- result->func = func;
- result->flags = flags;
- return (FUNCTION *)result;
-}
-
-FUNCTION * function_compile( PARSE * parse )
-{
- compiler c[1];
- JAM_FUNCTION * result;
- compiler_init( c );
- compile_parse( parse, c, RESULT_RETURN );
- compile_emit( c, INSTR_RETURN, 0 );
- result = compile_to_function( c );
- compiler_free( c );
- result->file = object_copy( parse->file );
- result->line = parse->line;
- return (FUNCTION *)result;
-}
-
-FUNCTION * function_compile_actions( const char * actions, OBJECT * file, int line )
-{
- compiler c[1];
- JAM_FUNCTION * result;
- VAR_PARSE_ACTIONS * parse;
- current_file = object_str( file );
- current_line = line;
- parse = parse_actions( actions );
- compiler_init( c );
- var_parse_actions_compile( parse, c );
- var_parse_actions_free( parse );
- compile_emit( c, INSTR_RETURN, 0 );
- result = compile_to_function( c );
- compiler_free( c );
- result->file = object_copy( file );
- result->line = line;
- return (FUNCTION *)result;
-}
-
-static void argument_list_print( struct arg_list * args, int num_args );
-
-
-/* Define delimiters for type check elements in argument lists (and return type
- * specifications, eventually).
- */
-# define TYPE_OPEN_DELIM '['
-# define TYPE_CLOSE_DELIM ']'
-
-/*
- * is_type_name() - true iff the given string represents a type check
- * specification.
- */
-
-int is_type_name( const char * s )
-{
- return ( s[ 0 ] == TYPE_OPEN_DELIM ) &&
- ( s[ strlen( s ) - 1 ] == TYPE_CLOSE_DELIM );
-}
-
-static void argument_error( const char * message, FUNCTION * procedure, FRAME * frame, OBJECT * arg )
-{ extern void print_source_line( FRAME * );
- LOL * actual = frame->args;
- backtrace_line( frame->prev );
- printf( "*** argument error\n* rule %s ( ", frame->rulename );
- argument_list_print( procedure->formal_arguments, procedure->num_formal_arguments );
- printf( " )\n* called with: ( " );
- lol_print( actual );
- printf( " )\n* %s %s\n", message, arg ? object_str ( arg ) : "" );
- function_location( procedure, &frame->file, &frame->line );
- print_source_line( frame );
- printf( "see definition of rule '%s' being called\n", frame->rulename );
- backtrace( frame->prev );
- exit( 1 );
-}
-
-static void type_check_range
-(
- OBJECT * type_name,
- LISTITER iter,
- LISTITER end,
- FRAME * caller,
- FUNCTION * called,
- OBJECT * arg_name
-)
-{
- static module_t * typecheck = 0;
-
- /* If nothing to check, bail now. */
- if ( iter == end || !type_name )
- return;
-
- if ( !typecheck )
- {
- typecheck = bindmodule( constant_typecheck );
- }
-
- /* If the checking rule cannot be found, also bail. */
- if ( !typecheck->rules || !hash_find( typecheck->rules, type_name ) )
- return;
-
- for ( ; iter != end; iter = list_next( iter ) )
- {
- LIST *error;
- FRAME frame[1];
- frame_init( frame );
- frame->module = typecheck;
- frame->prev = caller;
- frame->prev_user = caller->module->user_module ? caller : caller->prev_user;
-
- /* Prepare the argument list */
- lol_add( frame->args, list_new( object_copy( list_item( iter ) ) ) );
- error = evaluate_rule( type_name, frame );
-
- if ( !list_empty( error ) )
- argument_error( object_str( list_front( error ) ), called, caller, arg_name );
-
- frame_free( frame );
- }
-}
-
-static void type_check
-(
- OBJECT * type_name,
- LIST * values,
- FRAME * caller,
- FUNCTION * called,
- OBJECT * arg_name
-)
-{
- type_check_range( type_name, list_begin( values ), list_end( values ), caller, called, arg_name );
-}
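-
-/*
- * For illustration: a formal argument written as "[X] value" in a rule
- * signature stores "[X]" as the argument's type name.  Each actual value
- * bound to that argument is passed to the rule named "[X]" in the
- * typecheck module, if such a rule exists; a non-empty result is reported
- * via argument_error().
- */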
-
-void argument_list_check( struct arg_list * formal, int formal_count, FUNCTION * function, FRAME * frame )
-{
- LOL * all_actual = frame->args;
- int i, j;
-
- for ( i = 0; i < formal_count; ++i )
- {
- LIST *actual = lol_get( all_actual, i );
- LISTITER actual_iter = list_begin( actual ), actual_end = list_end( actual );
- for ( j = 0; j < formal[i].size; ++j )
- {
- struct argument * formal_arg = &formal[i].args[j];
- LIST * value;
-
- switch ( formal_arg->flags )
- {
- case ARG_ONE:
- if ( actual_iter == actual_end )
- argument_error( "missing argument", function, frame, formal_arg->arg_name );
- type_check_range( formal_arg->type_name, actual_iter, list_next( actual_iter ), frame, function, formal_arg->arg_name );
- actual_iter = list_next( actual_iter );
- break;
- case ARG_OPTIONAL:
- if ( actual_iter == actual_end )
- value = L0;
- else
- {
- type_check_range( formal_arg->type_name, actual_iter, list_next( actual_iter ), frame, function, formal_arg->arg_name );
- actual_iter = list_next( actual_iter );
- }
- break;
- case ARG_PLUS:
- if ( actual_iter == actual_end )
- argument_error( "missing argument", function, frame, formal_arg->arg_name );
- /* fallthrough */
- case ARG_STAR:
- type_check_range( formal_arg->type_name, actual_iter, actual_end, frame, function, formal_arg->arg_name );
- actual_iter = actual_end;
- break;
- case ARG_VARIADIC:
- return;
- }
- }
-
- if ( actual_iter != actual_end )
- {
- argument_error( "extra argument", function, frame, list_item( actual_iter ) );
- }
- }
-
- for ( ; i < all_actual->count; ++i )
- {
- LIST * actual = lol_get( all_actual, i );
- if ( !list_empty( actual ) )
- {
- argument_error( "extra argument", function, frame, list_front( actual ) );
- }
- }
-}
-
-void argument_list_push( struct arg_list * formal, int formal_count, FUNCTION * function, FRAME * frame, STACK * s )
-{
- LOL * all_actual = frame->args;
- int i, j;
-
- for ( i = 0; i < formal_count; ++i )
- {
- LIST *actual = lol_get( all_actual, i );
- LISTITER actual_iter = list_begin( actual ), actual_end = list_end( actual );
- for ( j = 0; j < formal[i].size; ++j )
- {
- struct argument * formal_arg = &formal[i].args[j];
- LIST * value;
-
- switch ( formal_arg->flags )
- {
- case ARG_ONE:
- if ( actual_iter == actual_end )
- argument_error( "missing argument", function, frame, formal_arg->arg_name );
- value = list_new( object_copy( list_item( actual_iter ) ) );
- actual_iter = list_next( actual_iter );
- break;
- case ARG_OPTIONAL:
- if ( actual_iter == actual_end )
- value = L0;
- else
- {
- value = list_new( object_copy( list_item( actual_iter ) ) );
- actual_iter = list_next( actual_iter );
- }
- break;
- case ARG_PLUS:
- if ( actual_iter == actual_end )
- argument_error( "missing argument", function, frame, formal_arg->arg_name );
- /* fallthrough */
- case ARG_STAR:
- value = list_copy_range( actual, actual_iter, actual_end );
- actual_iter = actual_end;
- break;
- case ARG_VARIADIC:
- return;
- }
-
- type_check( formal_arg->type_name, value, frame, function, formal_arg->arg_name );
-
- if ( formal_arg->index != -1 )
- {
- LIST * * old = &frame->module->fixed_variables[ formal_arg->index ];
- stack_push( s, *old );
- *old = value;
- }
- else
- {
- stack_push( s, var_swap( frame->module, formal_arg->arg_name, value ) );
- }
- }
-
- if ( actual_iter != actual_end )
- {
- argument_error( "extra argument", function, frame, list_item( actual_iter ) );
- }
- }
-
- for ( ; i < all_actual->count; ++i )
- {
- LIST * actual = lol_get( all_actual, i );
- if ( !list_empty( actual ) )
- {
- argument_error( "extra argument", function, frame, list_front( actual ) );
- }
- }
-}
-
-void argument_list_pop( struct arg_list * formal, int formal_count, FRAME * frame, STACK * s )
-{
- int i, j;
-
- for ( i = formal_count - 1; i >= 0; --i )
- {
- for ( j = formal[i].size - 1; j >= 0 ; --j )
- {
- struct argument * formal_arg = &formal[i].args[j];
-
- if ( formal_arg->flags == ARG_VARIADIC )
- {
- continue;
- }
- else if ( formal_arg->index != -1 )
- {
- LIST * old = stack_pop( s );
- LIST * * pos = &frame->module->fixed_variables[ formal_arg->index ];
- list_free( *pos );
- *pos = old;
- }
- else
- {
- var_set( frame->module, formal_arg->arg_name, stack_pop( s ), VAR_SET );
- }
- }
- }
-}
-
-
-struct argument_compiler
-{
- struct dynamic_array args[ 1 ];
- struct argument arg;
- int state;
-#define ARGUMENT_COMPILER_START 0
-#define ARGUMENT_COMPILER_FOUND_TYPE 1
-#define ARGUMENT_COMPILER_FOUND_OBJECT 2
-#define ARGUMENT_COMPILER_DONE 3
-};
-
-
-static void argument_compiler_init( struct argument_compiler * c )
-{
- dynamic_array_init( c->args );
- c->state = ARGUMENT_COMPILER_START;
-}
-
-static void argument_compiler_free( struct argument_compiler * c )
-{
- dynamic_array_free( c->args );
-}
-
-static void argument_compiler_add( struct argument_compiler * c, OBJECT * arg, OBJECT * file, int line )
-{
- switch ( c->state )
- {
- case ARGUMENT_COMPILER_FOUND_OBJECT:
-
- if ( object_equal( arg, constant_question_mark ) )
- {
- c->arg.flags = ARG_OPTIONAL;
- }
- else if ( object_equal( arg, constant_plus ) )
- {
- c->arg.flags = ARG_PLUS;
- }
- else if ( object_equal( arg, constant_star ) )
- {
- c->arg.flags = ARG_STAR;
- }
-
- dynamic_array_push( c->args, c->arg );
- c->state = ARGUMENT_COMPILER_START;
-
- if ( c->arg.flags != ARG_ONE )
- break;
- /* fall-through */
-
- case ARGUMENT_COMPILER_START:
-
- c->arg.type_name = 0;
- c->arg.index = -1;
- c->arg.flags = ARG_ONE;
-
- if ( is_type_name( object_str( arg ) ) )
- {
- c->arg.type_name = object_copy( arg );
- c->state = ARGUMENT_COMPILER_FOUND_TYPE;
- break;
- }
- /* fall-through */
-
- case ARGUMENT_COMPILER_FOUND_TYPE:
-
- if ( is_type_name( object_str( arg ) ) )
- {
- printf( "%s:%d: missing argument name before type name: %s\n", object_str( file ), line, object_str( arg ) );
- exit( 1 );
- }
-
- c->arg.arg_name = object_copy( arg );
- if ( object_equal( arg, constant_star ) )
- {
- c->arg.flags = ARG_VARIADIC;
- dynamic_array_push( c->args, c->arg );
- c->state = ARGUMENT_COMPILER_DONE;
- }
- else
- {
- c->state = ARGUMENT_COMPILER_FOUND_OBJECT;
- }
- break;
-
- case ARGUMENT_COMPILER_DONE:
- break;
- }
-}
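-
-/*
- * For illustration: the formal argument list "a ? : b + : rest *" compiles
- * into three arg_lists: "a" marked ARG_OPTIONAL, "b" marked ARG_PLUS and
- * "rest" marked ARG_STAR.  A lone "*" used as an argument name marks the
- * rule as variadic (ARG_VARIADIC) and ends processing of that arg_list.
- */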
-
-static void argument_compiler_recurse( struct argument_compiler * c, PARSE * parse )
-{
- if ( parse->type == PARSE_APPEND )
- {
- argument_compiler_recurse( c, parse->left );
- argument_compiler_recurse( c, parse->right );
- }
- else if ( parse->type != PARSE_NULL )
- {
- assert( parse->type == PARSE_LIST );
- argument_compiler_add( c, parse->string, parse->file, parse->line );
- }
-}
-
-static struct arg_list arg_compile_impl( struct argument_compiler * c, OBJECT * file, int line )
-{
- struct arg_list result;
- switch ( c->state )
- {
- case ARGUMENT_COMPILER_START:
- case ARGUMENT_COMPILER_DONE:
- break;
- case ARGUMENT_COMPILER_FOUND_TYPE:
- printf( "%s:%d: missing argument name after type name: %s\n", object_str( file ), line, object_str( c->arg.type_name ) );
- exit( 1 );
- case ARGUMENT_COMPILER_FOUND_OBJECT:
- dynamic_array_push( c->args, c->arg );
- break;
- }
- result.size = c->args->size;
- result.args = BJAM_MALLOC( c->args->size * sizeof( struct argument ) );
- memcpy( result.args, c->args->data, c->args->size * sizeof( struct argument ) );
- return result;
-}
-
-static struct arg_list arg_compile( PARSE * parse )
-{
- struct argument_compiler c[ 1 ];
- struct arg_list result;
- argument_compiler_init( c );
- argument_compiler_recurse( c, parse );
- result = arg_compile_impl( c, parse->file, parse->line );
- argument_compiler_free( c );
- return result;
-}
-
-struct argument_list_compiler
-{
- struct dynamic_array args[ 1 ];
-};
-
-static void argument_list_compiler_init( struct argument_list_compiler * c )
-{
- dynamic_array_init( c->args );
-}
-
-static void argument_list_compiler_free( struct argument_list_compiler * c )
-{
- dynamic_array_free( c->args );
-}
-
-static void argument_list_compiler_add( struct argument_list_compiler * c, PARSE * parse )
-{
- struct arg_list args = arg_compile( parse );
- dynamic_array_push( c->args, args );
-}
-
-static void argument_list_compiler_recurse( struct argument_list_compiler * c, PARSE * parse )
-{
- if ( parse )
- {
- argument_list_compiler_add( c, parse->right );
- argument_list_compiler_recurse( c, parse->left );
- }
-}
-
-static struct arg_list * arg_list_compile( PARSE * parse, int * num_arguments )
-{
- if ( parse )
- {
- struct argument_list_compiler c[ 1 ];
- struct arg_list * result;
- argument_list_compiler_init( c );
- argument_list_compiler_recurse( c, parse );
- *num_arguments = c->args->size;
- result = BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
- memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list ) );
- argument_list_compiler_free( c );
- return result;
- }
- else
- {
- *num_arguments = 0;
- return 0;
- }
-}
-
-static struct arg_list * arg_list_compile_builtin( const char * * args, int * num_arguments )
-{
- if ( args )
- {
- struct argument_list_compiler c[ 1 ];
- struct arg_list * result;
- argument_list_compiler_init( c );
- while ( *args )
- {
- struct argument_compiler arg_comp[ 1 ];
- struct arg_list arg;
- argument_compiler_init( arg_comp );
- for ( ; *args; ++args )
- {
- OBJECT * token;
- if ( strcmp( *args, ":" ) == 0 )
- {
- ++args;
- break;
- }
- token = object_new( *args );
- argument_compiler_add( arg_comp, token, constant_builtin, -1 );
- object_free( token );
- }
- arg = arg_compile_impl( arg_comp, constant_builtin, -1 );
- dynamic_array_push( c->args, arg );
- argument_compiler_free( arg_comp );
- }
- *num_arguments = c->args->size;
- result = BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
- memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list ) );
- argument_list_compiler_free( c );
- return result;
- }
- else
- {
- *num_arguments = 0;
- return 0;
- }
-}
-
-static void argument_list_print( struct arg_list * args, int num_args )
-{
- if ( args )
- {
- int i, j;
- for ( i = 0; i < num_args; ++i )
- {
- if ( i ) printf(" : ");
- for ( j = 0; j < args[ i ].size; ++j )
- {
- struct argument * formal_arg = &args[ i ].args[ j ];
- if ( j ) printf( " " );
- if ( formal_arg->type_name ) printf( "%s ", object_str( formal_arg->type_name ) );
- printf( "%s", formal_arg->arg_name );
- switch( formal_arg->flags )
- {
- case ARG_OPTIONAL: printf( " ?" ); break;
- case ARG_PLUS: printf( " +" ); break;
- case ARG_STAR: printf( " *" ); break;
- }
- }
- }
- }
-}
-
-
-struct arg_list * argument_list_bind_variables( struct arg_list * formal, int formal_count, module_t * module, int * counter )
-{
- if ( formal )
- {
- struct arg_list * result = (struct arg_list *)BJAM_MALLOC( sizeof( struct arg_list ) * formal_count );
- int i, j;
-
- for ( i = 0; i < formal_count; ++i )
- {
- struct argument * args = (struct argument *)BJAM_MALLOC( sizeof( struct argument ) * formal[ i ].size );
- for ( j = 0; j < formal[ i ].size; ++j )
- {
- args[ j ] = formal[ i ].args[ j ];
- if ( args[ j ].type_name )
- args[ j ].type_name = object_copy( args[ j ].type_name );
- args[ j ].arg_name = object_copy( args[ j ].arg_name );
- if ( args[ j ].flags != ARG_VARIADIC )
- {
- args[ j ].index = module_add_fixed_var( module, args[ j ].arg_name, counter );
- }
- }
- result[ i ].args = args;
- result[ i ].size = formal[ i ].size;
- }
-
- return result;
- }
- else
- {
- return 0;
- }
-}
-
-
-void argument_list_free( struct arg_list * args, int args_count )
-{
- int i, j;
- for ( i = 0; i < args_count; ++i )
- {
- for ( j = 0; j < args[ i ].size; ++j )
- {
- if ( args[ i ].args[ j ].type_name )
- object_free( args[ i ].args[ j ].type_name );
- object_free( args[ i ].args[ j ].arg_name );
- }
- BJAM_FREE( args[ i ].args );
- }
- BJAM_FREE( args );
-}
-
-
-FUNCTION * function_unbind_variables( FUNCTION * f )
-{
- if ( f->type == FUNCTION_JAM )
- {
- JAM_FUNCTION * func = (JAM_FUNCTION *)f;
- if ( func->generic )
- return func->generic;
- else
- return (FUNCTION *)func;
- }
-#ifdef HAVE_PYTHON
- else if ( f->type == FUNCTION_PYTHON )
- {
- return f;
- }
-#endif
- else
- {
- assert( f->type == FUNCTION_BUILTIN );
- return f;
- }
-}
-
-FUNCTION * function_bind_variables( FUNCTION * f, module_t * module, int * counter )
-{
- if ( f->type == FUNCTION_BUILTIN )
- {
- return f;
- }
-#ifdef HAVE_PYTHON
- else if ( f->type == FUNCTION_PYTHON )
- {
- return f;
- }
-#endif
- else
- {
- JAM_FUNCTION * func = (JAM_FUNCTION *)f;
- JAM_FUNCTION * new_func = BJAM_MALLOC( sizeof( JAM_FUNCTION ) );
- instruction * code;
- int i;
- assert( f->type == FUNCTION_JAM );
- memcpy( new_func, func, sizeof( JAM_FUNCTION ) );
- new_func->base.reference_count = 1;
- new_func->base.formal_arguments = argument_list_bind_variables( f->formal_arguments, f->num_formal_arguments, module, counter );
- new_func->code = BJAM_MALLOC( func->code_size * sizeof( instruction ) );
- memcpy( new_func->code, func->code, func->code_size * sizeof( instruction ) );
- new_func->generic = (FUNCTION *)func;
- func = new_func;
- for ( i = 0; ; ++i )
- {
- OBJECT * key;
- int op_code;
- code = func->code + i;
- switch ( code->op_code )
- {
- case INSTR_PUSH_VAR: op_code = INSTR_PUSH_VAR_FIXED; break;
- case INSTR_PUSH_LOCAL: op_code = INSTR_PUSH_LOCAL_FIXED; break;
- case INSTR_POP_LOCAL: op_code = INSTR_POP_LOCAL_FIXED; break;
- case INSTR_SET: op_code = INSTR_SET_FIXED; break;
- case INSTR_APPEND: op_code = INSTR_APPEND_FIXED; break;
- case INSTR_DEFAULT: op_code = INSTR_DEFAULT_FIXED; break;
- case INSTR_RETURN: return (FUNCTION *)new_func;
- case INSTR_CALL_RULE: ++i; continue;
- case INSTR_PUSH_MODULE:
- {
- int depth = 1;
- ++i;
- while ( depth > 0 )
- {
- code = func->code + i;
- switch ( code->op_code )
- {
- case INSTR_PUSH_MODULE:
- case INSTR_CLASS:
- ++depth;
- break;
- case INSTR_POP_MODULE:
- --depth;
- break;
- case INSTR_CALL_RULE:
- ++i;
- break;
- }
- ++i;
- }
- --i;
- }
- default: continue;
- }
- key = func->constants[ code->arg ];
- if ( !( object_equal( key, constant_TMPDIR ) ||
- object_equal( key, constant_TMPNAME ) ||
- object_equal( key, constant_TMPFILE ) ||
- object_equal( key, constant_STDOUT ) ||
- object_equal( key, constant_STDERR ) ) )
- {
- code->op_code = op_code;
- code->arg = module_add_fixed_var( module, key, counter );
- }
- }
- }
-}
-
-void function_refer( FUNCTION * func )
-{
- ++func->reference_count;
-}
-
-void function_free( FUNCTION * function_ )
-{
- int i;
-
- if ( --function_->reference_count != 0 ) return;
-
- if ( function_->formal_arguments ) argument_list_free( function_->formal_arguments, function_->num_formal_arguments );
-
- if ( function_->type == FUNCTION_JAM )
- {
- JAM_FUNCTION * func = (JAM_FUNCTION *)function_;
-
- BJAM_FREE( func->code );
-
- if ( func->generic )
- function_free( func->generic );
- else
- {
- if ( function_->rulename ) object_free( function_->rulename );
-
- for ( i = 0; i < func->num_constants; ++i )
- {
- object_free( func->constants[i] );
- }
- BJAM_FREE( func->constants );
-
- for ( i = 0; i < func->num_subfunctions; ++i )
- {
- object_free( func->functions[i].name );
- function_free( func->functions[i].code );
- }
- BJAM_FREE( func->functions );
-
- for ( i = 0; i < func->num_subactions; ++i )
- {
- object_free( func->actions[i].name );
- function_free( func->actions[i].command );
- }
- BJAM_FREE( func->actions );
-
- object_free( func->file );
- }
- }
-#ifdef HAVE_PYTHON
- else if ( function_->type == FUNCTION_PYTHON )
- {
- PYTHON_FUNCTION * func = (PYTHON_FUNCTION *)function_;
- Py_DECREF( func->python_function );
- if ( function_->rulename ) object_free( function_->rulename );
- }
-#endif
- else
- {
- assert( function_->type == FUNCTION_BUILTIN );
- if ( function_->rulename ) object_free( function_->rulename );
- }
-
- BJAM_FREE( function_ );
-}
-
-
-/* Alignment check for stack */
-
-struct align_var_edits
-{
- char ch;
- VAR_EDITS e;
-};
-
-struct align_expansion_item
-{
- char ch;
- expansion_item e;
-};
-
-static char check_align_var_edits[ sizeof(struct align_var_edits) <= sizeof(VAR_EDITS) + sizeof(void *) ? 1 : -1 ];
-static char check_align_expansion_item[ sizeof(struct align_expansion_item) <= sizeof(expansion_item) + sizeof(void *) ? 1 : -1 ];
-
-static char check_ptr_size1[ sizeof(LIST *) <= sizeof(void *) ? 1 : -1 ];
-static char check_ptr_size2[ sizeof(char *) <= sizeof(void *) ? 1 : -1 ];
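-
-/*
- * The arrays above are C89-style compile-time assertions: if a size
- * assumption does not hold, the array size evaluates to -1 and compilation
- * fails.  They guard the stack_allocate() calls below, which place
- * VAR_EDITS, expansion_item and pointer values directly on the stack.
- */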
-
-void function_run_actions( FUNCTION * function, FRAME * frame, STACK * s, string * out )
-{
- *(string * *)stack_allocate( s, sizeof( string * ) ) = out;
- list_free( function_run( function, frame, s ) );
- stack_deallocate( s, sizeof( string * ) );
-}
-
-/*
- * WARNING: The instruction set is tuned for Jam and
- * is not really generic. Be especially careful about
- * stack push/pop.
- */
-
-LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s )
-{
- JAM_FUNCTION * function;
- instruction * code;
- LIST * l;
- LIST * r;
- LIST * result = L0;
- void * saved_stack = s->data;
-
- if ( function_->type == FUNCTION_BUILTIN )
- {
- BUILTIN_FUNCTION * f = (BUILTIN_FUNCTION *)function_;
- if ( function_->formal_arguments )
- argument_list_check( function_->formal_arguments, function_->num_formal_arguments, function_, frame );
- return f->func( frame, f->flags );
- }
-
-#ifdef HAVE_PYTHON
-
- else if ( function_->type == FUNCTION_PYTHON )
- {
- PYTHON_FUNCTION * f = (PYTHON_FUNCTION *)function_;
- return call_python_function( f, frame );
- }
-
-#endif
-
- assert( function_->type == FUNCTION_JAM );
-
- if ( function_->formal_arguments )
- argument_list_push( function_->formal_arguments, function_->num_formal_arguments, function_, frame, s );
-
- function = (JAM_FUNCTION *)function_;
- code = function->code;
- for ( ; ; )
- {
- switch ( code->op_code )
- {
-
- /*
- * Basic stack manipulation
- */
-
- case INSTR_PUSH_EMPTY:
- {
- stack_push( s, L0 );
- break;
- }
-
- case INSTR_PUSH_CONSTANT:
- {
- OBJECT * value = function_get_constant( function, code->arg );
- stack_push( s, list_new( object_copy( value ) ) );
- break;
- }
-
- case INSTR_PUSH_ARG:
- {
- stack_push( s, frame_get_local( frame, code->arg ) );
- break;
- }
-
- case INSTR_PUSH_VAR:
- {
- stack_push( s, function_get_variable( function, frame, code->arg ) );
- break;
- }
-
- case INSTR_PUSH_VAR_FIXED:
- {
- stack_push( s, list_copy( frame->module->fixed_variables[ code->arg ] ) );
- break;
- }
-
- case INSTR_PUSH_GROUP:
- {
- LIST * value = L0;
- LISTITER iter, end;
- l = stack_pop( s );
- for ( iter = list_begin( l ), end = list_end( l ); iter != end; iter = list_next( iter ) )
- {
- LIST * one = function_get_named_variable( function, frame, list_item( iter ) );
- value = list_append( value, one );
- }
- list_free( l );
- stack_push( s, value );
- break;
- }
-
- case INSTR_PUSH_APPEND:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- stack_push( s, list_append( l, r ) );
- break;
- }
-
- case INSTR_SWAP:
- {
- l = stack_top( s );
- stack_set( s, 0, stack_at( s, code->arg ) );
- stack_set( s, code->arg, l );
- break;
- }
-
- case INSTR_POP:
- {
- list_free( stack_pop( s ) );
- break;
- }
-
- /*
- * Branch instructions
- */
-
- case INSTR_JUMP:
- {
- code += code->arg;
- break;
- }
-
- case INSTR_JUMP_EMPTY:
- {
- l = stack_pop( s );
- if ( !list_cmp( l, L0 ) ) { code += code->arg; }
- list_free( l );
- break;
- }
-
- case INSTR_JUMP_NOT_EMPTY:
- {
- l = stack_pop( s );
- if( list_cmp( l, L0 ) ) { code += code->arg; }
- list_free( l );
- break;
- }
-
- case INSTR_JUMP_LT:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- if ( list_cmp( l, r ) < 0 ) { code += code->arg; }
- list_free( l );
- list_free( r );
- break;
- }
-
- case INSTR_JUMP_LE:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- if ( list_cmp( l, r ) <= 0 ) { code += code->arg; }
- list_free( l );
- list_free( r );
- break;
- }
-
- case INSTR_JUMP_GT:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- if ( list_cmp( l, r ) > 0 ) { code += code->arg; }
- list_free( l );
- list_free( r );
- break;
- }
-
- case INSTR_JUMP_GE:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- if ( list_cmp( l, r ) >= 0 ) { code += code->arg; }
- list_free( l );
- list_free( r );
- break;
- }
-
- case INSTR_JUMP_EQ:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- if( list_cmp( l, r ) == 0 ) { code += code->arg; }
- list_free( l );
- list_free( r );
- break;
- }
-
- case INSTR_JUMP_NE:
- {
- r = stack_pop(s);
- l = stack_pop(s);
- if( list_cmp(l, r) != 0 ) { code += code->arg; }
- list_free(l);
- list_free(r);
- break;
- }
-
- case INSTR_JUMP_IN:
- {
- r = stack_pop(s);
- l = stack_pop(s);
- if ( list_is_sublist( l, r ) ) { code += code->arg; }
- list_free(l);
- list_free(r);
- break;
- }
-
- case INSTR_JUMP_NOT_IN:
- {
- r = stack_pop( s );
- l = stack_pop( s );
- if( !list_is_sublist( l, r ) ) { code += code->arg; }
- list_free( l );
- list_free( r );
- break;
- }
-
- /*
- * For
- */
-
- case INSTR_FOR_INIT:
- {
- l = stack_top( s );
- *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) =
- list_begin( l );
- break;
- }
-
- case INSTR_FOR_LOOP:
- {
- LISTITER iter = *(LISTITER *)stack_get( s );
- stack_deallocate( s, sizeof( LISTITER ) );
- l = stack_top( s );
- if( iter == list_end( l ) )
- {
- list_free( stack_pop( s ) );
- code += code->arg;
- }
- else
- {
- r = list_new( object_copy( list_item( iter ) ) );
- iter = list_next( iter );
- *(LISTITER *)stack_allocate( s, sizeof( LISTITER ) ) = iter;
- stack_push( s, r );
- }
- break;
- }
-
- /*
- * Switch
- */
-
- case INSTR_JUMP_NOT_GLOB:
- {
- const char * pattern;
- const char * match;
- l = stack_pop( s );
- r = stack_top( s );
- pattern = !list_empty( l ) ? object_str( list_front( l ) ) : "";
- match = !list_empty( r ) ? object_str( list_front( r ) ) : "";
- if( glob( pattern, match ) )
- {
- code += code->arg;
- }
- else
- {
- list_free( stack_pop( s ) );
- }
- list_free( l );
- break;
- }
-
- /*
- * Return
- */
-
- case INSTR_SET_RESULT:
- {
- list_free( result );
- result = stack_pop( s );
- break;
- }
-
- case INSTR_PUSH_RESULT:
- {
- stack_push( s, result );
- result = L0;
- break;
- }
-
- case INSTR_RETURN:
- {
- if ( function_->formal_arguments )
- argument_list_pop( function_->formal_arguments, function_->num_formal_arguments, frame, s );
-#ifndef NDEBUG
-
- if ( !( saved_stack == s->data ) )
- {
- frame->file = function->file;
- frame->line = function->line;
- backtrace_line( frame );
- printf( "error: stack check failed.\n" );
- backtrace( frame );
- assert( saved_stack == s->data );
- }
-#endif
- assert( saved_stack == s->data );
- return result;
- }
-
- /*
- * Local variables
- */
-
- case INSTR_PUSH_LOCAL:
- {
- LIST * value = stack_pop( s );
- stack_push( s, function_swap_variable( function, frame, code->arg, value ) );
- break;
- }
-
- case INSTR_POP_LOCAL:
- {
- function_set_variable( function, frame, code->arg, stack_pop( s ) );
- break;
- }
-
- case INSTR_PUSH_LOCAL_FIXED:
- {
- LIST * value = stack_pop( s );
- LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
- assert( code->arg < frame->module->num_fixed_variables );
- stack_push( s, *ptr );
- *ptr = value;
- break;
- }
-
- case INSTR_POP_LOCAL_FIXED:
- {
- LIST * value = stack_pop( s );
- LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
- assert( code->arg < frame->module->num_fixed_variables );
- list_free( *ptr );
- *ptr = value;
- break;
- }
-
- case INSTR_PUSH_LOCAL_GROUP:
- {
- LIST * value = stack_pop( s );
- LISTITER iter, end;
- l = stack_pop( s );
- for( iter = list_begin( l ), end = list_end( l ); iter != end; iter = list_next( iter ) )
- {
- LIST * saved = function_swap_named_variable( function, frame, list_item( iter ), list_copy( value ) );
- stack_push( s, saved );
- }
- list_free( value );
- stack_push( s, l );
- break;
- }
-
- case INSTR_POP_LOCAL_GROUP:
- {
- LISTITER iter, end;
- r = stack_pop( s );
- l = list_reverse( r );
- list_free( r );
- for( iter = list_begin( l ), end = list_end( l ); iter != end; iter = list_next( iter ) )
- {
- function_set_named_variable( function, frame, list_item( iter ), stack_pop( s ) );
- }
- list_free( l );
- break;
- }
-
- /*
- * on $(TARGET) variables
- */
-
- case INSTR_PUSH_ON:
- {
- LIST * targets = stack_top( s );
- if ( !list_empty( targets ) )
- {
- /*
- * FIXME: push the state onto the stack instead of
- * using pushsettings.
- */
- TARGET * t = bindtarget( list_front( targets ) );
- pushsettings( frame->module, t->settings );
- }
- else
- {
- /*
- * [ on $(TARGET) ... ] is ignored if $(TARGET) is empty.
- */
- list_free( stack_pop( s ) );
- stack_push( s, L0 );
- code += code->arg;
- }
- break;
- }
-
- case INSTR_POP_ON:
- {
- LIST * result = stack_pop( s );
- LIST * targets = stack_pop( s );
- if ( !list_empty( targets ) )
- {
- TARGET * t = bindtarget( list_front( targets ) );
- popsettings( frame->module, t->settings );
- }
- list_free( targets );
- stack_push( s, result );
- break;
- }
-
- case INSTR_SET_ON:
- {
- LIST * targets = stack_pop( s );
- LIST * value = stack_pop( s );
- LIST * vars = stack_pop( s );
- LISTITER iter = list_begin( targets ), end = list_end( targets );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
- LISTITER vars_iter = list_begin( vars ), vars_end = list_end( vars );
-
- for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) )
- t->settings = addsettings( t->settings, VAR_SET, list_item( vars_iter ),
- list_copy( value ) );
- }
- list_free( vars );
- list_free( targets );
- stack_push( s, value );
- break;
- }
-
- case INSTR_APPEND_ON:
- {
- LIST * targets = stack_pop( s );
- LIST * value = stack_pop( s );
- LIST * vars = stack_pop( s );
- LISTITER iter = list_begin( targets ), end = list_end( targets );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
- LISTITER vars_iter = list_begin( vars ), vars_end = list_end( vars );
-
- for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) )
- t->settings = addsettings( t->settings, VAR_APPEND, list_item( vars_iter ),
- list_copy( value ) );
- }
- list_free( vars );
- list_free( targets );
- stack_push( s, value );
- break;
- }
-
- case INSTR_DEFAULT_ON:
- {
- LIST * targets = stack_pop( s );
- LIST * value = stack_pop( s );
- LIST * vars = stack_pop( s );
- LISTITER iter = list_begin( targets ), end = list_end( targets );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
- LISTITER vars_iter = list_begin( vars ), vars_end = list_end( vars );
-
- for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) )
- t->settings = addsettings( t->settings, VAR_DEFAULT, list_item( vars_iter ),
- list_copy( value ) );
- }
- list_free( vars );
- list_free( targets );
- stack_push( s, value );
- break;
- }
-
- /*
- * Variable setting
- */
-
- case INSTR_SET:
- {
- function_set_variable( function, frame, code->arg, list_copy( stack_top( s ) ) );
- break;
- }
-
- case INSTR_APPEND:
- {
- function_append_variable( function, frame, code->arg, list_copy( stack_top( s ) ) );
- break;
- }
-
- case INSTR_DEFAULT:
- {
- function_default_variable( function, frame, code->arg, list_copy( stack_top( s ) ) );
- break;
- }
-
- case INSTR_SET_FIXED:
- {
- LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
- assert( code->arg < frame->module->num_fixed_variables );
- list_free( *ptr );
- *ptr = list_copy( stack_top( s ) );
- break;
- }
-
- case INSTR_APPEND_FIXED:
- {
- LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
- assert( code->arg < frame->module->num_fixed_variables );
- *ptr = list_append( *ptr, list_copy( stack_top( s ) ) );
- break;
- }
-
- case INSTR_DEFAULT_FIXED:
- {
- LIST * * ptr = &frame->module->fixed_variables[ code->arg ];
- assert( code->arg < frame->module->num_fixed_variables );
- if ( list_empty( *ptr ) )
- *ptr = list_copy( stack_top( s ) );
- break;
- }
-
- case INSTR_SET_GROUP:
- {
- LIST * value = stack_pop( s );
- LIST * vars = stack_pop( s );
- LISTITER iter = list_begin( vars ), end = list_end( vars );
- for( ; iter != end; iter = list_next( iter ) )
- function_set_named_variable( function, frame, list_item( iter ), list_copy( value ) );
- list_free( vars );
- stack_push( s, value );
- break;
- }
-
- case INSTR_APPEND_GROUP:
- {
- LIST * value = stack_pop( s );
- LIST * vars = stack_pop( s );
- LISTITER iter = list_begin( vars ), end = list_end( vars );
- for( ; iter != end; iter = list_next( iter ) )
- function_append_named_variable( function, frame, list_item( iter ), list_copy( value ) );
- list_free( vars );
- stack_push( s, value );
- break;
- }
-
- case INSTR_DEFAULT_GROUP:
- {
- LIST * value = stack_pop( s );
- LIST * vars = stack_pop( s );
- LISTITER iter = list_begin( vars ), end = list_end( vars );
- for( ; iter != end; iter = list_next( iter ) )
- function_default_named_variable( function, frame, list_item( iter ), list_copy( value ) );
- list_free( vars );
- stack_push( s, value );
- break;
- }
-
- /*
- * Rules
- */
-
- case INSTR_CALL_RULE:
- {
- const char * unexpanded =
- object_str( function_get_constant( function, code[1].op_code ) );
- LIST * result = function_call_rule( function, frame, s, code->arg, unexpanded, function->file, code[1].arg );
- stack_push( s, result );
- ++code;
- break;
- }
-
- case INSTR_RULE:
- {
- function_set_rule( function, frame, s, code->arg );
- break;
- }
-
- case INSTR_ACTIONS:
- {
- function_set_actions( function, frame, s, code->arg );
- break;
- }
-
- /*
- * Variable expansion
- */
-
- case INSTR_APPLY_MODIFIERS:
- {
- int n;
- int i;
- l = stack_pop( s );
- n = expand_modifiers( s, code->arg );
- stack_push( s, l );
- l = apply_modifiers( s, n );
- list_free( stack_pop( s ) );
- stack_deallocate( s, n * sizeof( VAR_EDITS ) );
- for ( i = 0; i < code->arg; ++i )
- list_free( stack_pop( s ) ); /* pop modifiers */
- stack_push( s, l );
- break;
- }
-
- case INSTR_APPLY_INDEX:
- {
- l = apply_subscript( s );
- list_free( stack_pop( s ) );
- list_free( stack_pop( s ) );
- stack_push( s, l );
- break;
- }
-
- case INSTR_APPLY_INDEX_MODIFIERS:
- {
- int i;
- int n;
- l = stack_pop( s );
- r = stack_pop( s );
- n = expand_modifiers( s, code->arg );
- stack_push( s, r );
- stack_push( s, l );
- l = apply_subscript_and_modifiers( s, n );
- list_free( stack_pop( s ) );
- list_free( stack_pop( s ) );
- stack_deallocate( s, n * sizeof( VAR_EDITS ) );
- for ( i = 0; i < code->arg; ++i )
- list_free( stack_pop( s ) ); /* pop modifiers */
- stack_push( s, l );
- break;
- }
-
- case INSTR_APPLY_MODIFIERS_GROUP:
- {
- int i;
- LIST * vars = stack_pop( s );
- int n = expand_modifiers( s, code->arg );
- LIST * result = L0;
- LISTITER iter = list_begin( vars ), end = list_end( vars );
- for( ; iter != end; iter = list_next( iter ) )
- {
- stack_push( s, function_get_named_variable( function, frame, list_item( iter ) ) );
- result = list_append( result, apply_modifiers( s, n ) );
- list_free( stack_pop( s ) );
- }
- list_free( vars );
- stack_deallocate( s, n * sizeof( VAR_EDITS ) );
- for ( i = 0; i < code->arg; ++i )
- list_free( stack_pop( s ) ); /* pop modifiers */
- stack_push( s, result );
- break;
- }
-
- case INSTR_APPLY_INDEX_GROUP:
- {
- LIST * vars = stack_pop( s );
- LIST * result = L0;
- LISTITER iter = list_begin( vars ), end = list_end( vars );
- for( ; iter != end; iter = list_next( iter ) )
- {
- stack_push( s, function_get_named_variable( function, frame, list_item( iter ) ) );
- result = list_append( result, apply_subscript( s ) );
- list_free( stack_pop( s ) );
- }
- list_free( vars );
- list_free( stack_pop( s ) );
- stack_push( s, result );
- break;
- }
-
- case INSTR_APPLY_INDEX_MODIFIERS_GROUP:
- {
- int i;
- LIST * vars = stack_pop( s );
- LIST * r = stack_pop( s );
- int n = expand_modifiers( s, code->arg );
- LIST * result = L0;
- LISTITER iter = list_begin( vars ), end = list_end( vars );
- stack_push( s, r );
- for( ; iter != end; iter = list_next( iter ) )
- {
- stack_push( s, function_get_named_variable( function, frame, list_item( iter ) ) );
- result = list_append( result, apply_subscript_and_modifiers( s, n ) );
- list_free( stack_pop( s ) );
- }
- list_free( stack_pop( s ) );
- list_free( vars );
- stack_deallocate( s, n * sizeof( VAR_EDITS ) );
- for ( i = 0; i < code->arg; ++i )
- list_free( stack_pop( s ) ); /* pop modifiers */
- stack_push( s, result );
- break;
- }
-
- case INSTR_COMBINE_STRINGS:
- {
- LIST * result;
- size_t buffer_size = code->arg * sizeof( expansion_item );
- LIST * * stack_pos = stack_get( s );
- expansion_item * items = stack_allocate( s, buffer_size );
- int i;
- for( i = 0; i < code->arg; ++i )
- {
- items[i].saved = stack_pos[i];
- items[i].elem = list_begin( items[i].saved );
- }
- result = expand( items, code->arg );
- stack_deallocate( s, buffer_size );
- for( i = 0; i < code->arg; ++i )
- {
- list_free( stack_pop( s ) );
- }
- stack_push( s, result );
- break;
- }
-
- case INSTR_INCLUDE:
- {
- LIST * nt = stack_pop( s );
-
- if ( !list_empty( nt ) )
- {
- TARGET * t = bindtarget( list_front( nt ) );
- list_free( nt );
-
- /* DWA 2001/10/22 - Perforce Jam cleared the arguments here, which
- * prevents an included file from being treated as part of the body of a
- * rule. I did not see any reason to do that, so I lifted the
- * restriction.
- */
-
- /* Bind the include file under the influence of */
- /* "on-target" variables. Though they are targets, */
- /* include files are not built with make(). */
-
- pushsettings( root_module(), t->settings );
-            /* We do not expect the file to be included to be generated by
-               some action. Therefore, pass 0 as the third argument. If the
-               name resolves to a directory, let it error out. */
- object_free( t->boundname );
- t->boundname = search( t->name, &t->time, 0, 0 );
- popsettings( root_module(), t->settings );
-
- parse_file( t->boundname, frame );
- }
-
- break;
- }
-
- /*
- * Classes and modules
- */
-
- case INSTR_PUSH_MODULE:
- {
- LIST * module_name = stack_pop( s );
-
- module_t * outer_module = frame->module;
- frame->module = !list_empty( module_name ) ? bindmodule( list_front( module_name ) ) : root_module();
-
- list_free( module_name );
-
- *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) = outer_module;
-
- break;
- }
-
- case INSTR_POP_MODULE:
- {
- module_t * outer_module = *(module_t * *)stack_get( s );
- stack_deallocate( s, sizeof( module_t * ) );
- frame->module = outer_module;
- break;
- }
-
- case INSTR_CLASS:
- {
- LIST * bases = stack_pop( s );
- LIST * name = stack_pop( s );
- OBJECT * class_module = make_class_module( name, bases, frame );
-
- module_t * outer_module = frame->module;
- frame->module = bindmodule( class_module );
- object_free( class_module );
-
- *(module_t * *)stack_allocate( s, sizeof( module_t * ) ) = outer_module;
-
- break;
- }
-
- case INSTR_BIND_MODULE_VARIABLES:
- {
- module_bind_variables( frame->module );
- break;
- }
-
- case INSTR_APPEND_STRINGS:
- {
- string buf[1];
- string_new( buf );
- combine_strings( s, code->arg, buf );
- stack_push( s, list_new( object_new( buf->value ) ) );
- string_free( buf );
- break;
- }
-
- case INSTR_WRITE_FILE:
- {
- string buf[1];
- const char * out;
- OBJECT * tmp_filename = 0;
- int out_debug = DEBUG_EXEC ? 1 : 0;
- FILE * out_file = 0;
- string_new( buf );
- combine_strings( s, code->arg, buf );
- out = object_str( list_front( stack_top( s ) ) );
-
- /* For stdout/stderr we will create a temp file and generate
- * a command that outputs the content as needed.
- */
- if ( ( strcmp( "STDOUT", out ) == 0 ) ||
- ( strcmp( "STDERR", out ) == 0 ) )
- {
- int err_redir = strcmp( "STDERR", out ) == 0;
- string result[1];
- tmp_filename = path_tmpfile();
- string_new( result );
-
- #ifdef OS_NT
- string_append( result, "type \"" );
- #else
- string_append( result, "cat \"" );
- #endif
- string_append( result, object_str( tmp_filename ) );
- string_push_back( result, '\"' );
- if ( err_redir )
- string_append( result, " 1>&2" );
-
- /* Replace STDXXX with the temporary file. */
- list_free( stack_pop( s ) );
- stack_push( s, list_new( object_new( result->value ) ) );
- out = object_str( tmp_filename );
-
- string_free( result );
-
- /* We also make sure that the temp files created by this
- * get nuked eventually.
- */
- file_remove_atexit( tmp_filename );
- }
-
- if ( !globs.noexec )
- {
- string out_name[1];
- /* Handle "path to file" filenames. */
- if ( ( out[ 0 ] == '"' ) && ( out[ strlen( out ) - 1 ] == '"' ) )
- {
- string_copy( out_name, out + 1 );
- string_truncate( out_name, out_name->size - 1 );
- }
- else
- {
- string_copy( out_name, out );
- }
- out_file = fopen( out_name->value, "w" );
-
- if ( !out_file )
- {
- printf( "failed to write output file '%s'!\n", out_name->value );
- exit( EXITBAD );
- }
- string_free( out_name );
- }
-
- if ( out_debug ) printf( "\nfile %s\n", out );
-
- if ( out_file ) fputs( buf->value, out_file );
- if ( out_debug ) fputs( buf->value, stdout );
-
- if ( out_file )
- {
- fflush( out_file );
- fclose( out_file );
- }
- string_free( buf );
- if ( tmp_filename )
- object_free( tmp_filename );
-
- if ( out_debug ) fputc( '\n', stdout );
-
- break;
- }
-
- case INSTR_OUTPUT_STRINGS:
- {
- string * buf = *(string * *)( (char *)stack_get( s ) + ( code->arg * sizeof( LIST * ) ) );
- combine_strings( s, code->arg, buf );
- break;
- }
-
- }
- ++code;
- }
-}
-
-
-#ifdef HAVE_PYTHON
-
-static struct arg_list * arg_list_compile_python( PyObject * bjam_signature, int * num_arguments )
-{
- if ( bjam_signature )
- {
- struct argument_list_compiler c[ 1 ];
- struct arg_list * result;
- Py_ssize_t s, i, j, inner;
- argument_list_compiler_init( c );
-
- s = PySequence_Size( bjam_signature );
- for ( i = 0; i < s; ++i )
- {
- struct argument_compiler arg_comp[ 1 ];
- struct arg_list arg;
- PyObject * v = PySequence_GetItem( bjam_signature, i );
- argument_compiler_init( arg_comp );
-
- inner = PySequence_Size( v );
- for ( j = 0; j < inner; ++j )
- {
- PyObject * x = PySequence_GetItem( v, j );
- argument_compiler_add( arg_comp, object_new( PyString_AsString( x ) ), constant_builtin, -1 );
- }
-
- arg = arg_compile_impl( arg_comp, constant_builtin, -1 );
- dynamic_array_push( c->args, arg );
- argument_compiler_free( arg_comp );
- Py_DECREF( v );
- }
-
- *num_arguments = c->args->size;
- result = BJAM_MALLOC( c->args->size * sizeof( struct arg_list ) );
- memcpy( result, c->args->data, c->args->size * sizeof( struct arg_list ) );
- argument_list_compiler_free( c );
- return result;
- }
- else
- {
- *num_arguments = 0;
- return 0;
- }
-}
-
-FUNCTION * function_python( PyObject * function, PyObject * bjam_signature )
-{
- PYTHON_FUNCTION * result = BJAM_MALLOC( sizeof( PYTHON_FUNCTION ) );
-
- result->base.type = FUNCTION_PYTHON;
- result->base.reference_count = 1;
- result->base.rulename = 0;
- result->base.formal_arguments = arg_list_compile_python( bjam_signature, &result->base.num_formal_arguments );
- Py_INCREF( function );
- result->python_function = function;
-
- return (FUNCTION *)result;
-}
-
-static void argument_list_to_python( struct arg_list * formal, int formal_count, FUNCTION * function, FRAME * frame, PyObject * kw )
-{
- LOL * all_actual = frame->args;
- int i, j;
-
- for ( i = 0; i < formal_count; ++i )
- {
- LIST *actual = lol_get( all_actual, i );
- LISTITER actual_iter = list_begin( actual ), actual_end = list_end( actual );
- for ( j = 0; j < formal[i].size; ++j )
- {
- struct argument * formal_arg = &formal[i].args[j];
- PyObject * value;
- LIST * l;
-
- switch ( formal_arg->flags )
- {
- case ARG_ONE:
- if ( actual_iter == actual_end )
- argument_error( "missing argument", function, frame, formal_arg->arg_name );
- type_check_range( formal_arg->type_name, actual_iter, list_next( actual_iter ), frame, function, formal_arg->arg_name );
- value = PyString_FromString( object_str( list_item( actual_iter) ) );
- actual_iter = list_next( actual_iter );
- break;
- case ARG_OPTIONAL:
- if ( actual_iter == actual_end )
- value = 0;
- else
- {
- type_check_range( formal_arg->type_name, actual_iter, list_next( actual_iter ), frame, function, formal_arg->arg_name );
- value = PyString_FromString( object_str( list_item( actual_iter) ) );
- actual_iter = list_next( actual_iter );
- }
- break;
- case ARG_PLUS:
- if ( actual_iter == actual_end )
- argument_error( "missing argument", function, frame, formal_arg->arg_name );
- /* fallthrough */
- case ARG_STAR:
- type_check_range( formal_arg->type_name, actual_iter, actual_end, frame, function, formal_arg->arg_name );
- l = list_copy_range( actual, actual_iter, actual_end );
- value = list_to_python( l );
- list_free( l );
- actual_iter = actual_end;
- break;
- case ARG_VARIADIC:
- return;
- }
-
- if (value)
- {
- PyObject * key = PyString_FromString( object_str( formal_arg->arg_name ) );
- PyDict_SetItem( kw, key, value );
- Py_DECREF( key );
- Py_DECREF( value );
- }
- }
-
- if ( actual_iter != actual_end )
- {
- argument_error( "extra argument", function, frame, list_item( actual_iter ) );
- }
- }
-
- for ( ; i < all_actual->count; ++i )
- {
- LIST * actual = lol_get( all_actual, i );
- if ( !list_empty( actual ) )
- {
- argument_error( "extra argument", function, frame, list_front( actual ) );
- }
- }
-}
-
-/* Given a Python object, return a string to use in Jam
-   code instead of said object.
-   If the object is a string, use the string value.
-   If the object implements the __jam_repr__ method, use that.
-   Otherwise return 0. */
-OBJECT * python_to_string( PyObject * value )
-{
- if ( PyString_Check( value ) )
- {
- return object_new( PyString_AsString( value ) );
- }
- else
- {
-        /* See if this is an instance that defines the special __jam_repr__
-           method. */
- if ( PyInstance_Check( value )
- && PyObject_HasAttrString( value, "__jam_repr__" ) )
- {
- PyObject* repr = PyObject_GetAttrString( value, "__jam_repr__" );
- if ( repr )
- {
- PyObject * arguments2 = PyTuple_New( 0 );
- PyObject * value2 = PyObject_Call( repr, arguments2, 0 );
- Py_DECREF( repr );
- Py_DECREF( arguments2 );
- if ( PyString_Check( value2 ) )
- {
- return object_new( PyString_AsString( value2 ) );
- }
- Py_DECREF( value2 );
- }
- }
- return 0;
- }
-}
-
-static module_t * python_module()
-{
- static module_t * python = 0;
- if ( !python )
- python = bindmodule(constant_python);
- return python;
-}
-
-static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame )
-{
- LIST * result = 0;
- PyObject * arguments = 0;
- PyObject * kw = NULL;
- int i ;
- PyObject * py_result;
- FRAME * prev_frame_before_python_call;
-
- if ( function->base.formal_arguments )
- {
- arguments = PyTuple_New(0);
- kw = PyDict_New();
-
- argument_list_to_python( function->base.formal_arguments, function->base.num_formal_arguments, &function->base, frame, kw );
- }
- else
- {
- arguments = PyTuple_New( frame->args->count );
- for ( i = 0; i < frame->args->count; ++i )
- {
- PyTuple_SetItem( arguments, i, list_to_python( lol_get( frame->args, i ) ) );
- }
- }
-
- frame->module = python_module();
-
- prev_frame_before_python_call = frame_before_python_call;
- frame_before_python_call = frame;
- py_result = PyObject_Call( function->python_function, arguments, kw );
- frame_before_python_call = prev_frame_before_python_call;
- Py_DECREF( arguments );
- Py_XDECREF( kw );
- if ( py_result != NULL )
- {
- if ( PyList_Check( py_result ) )
- {
- int size = PyList_Size( py_result );
- int i;
- for ( i = 0; i < size; ++i )
- {
- PyObject * item = PyList_GetItem( py_result, i );
- OBJECT *s = python_to_string( item );
- if ( !s ) {
- fprintf( stderr, "Non-string object returned by Python call.\n" );
- } else {
- result = list_push_back( result, s );
- }
- }
- }
- else if ( py_result == Py_None )
- {
- result = L0;
- }
- else
- {
- OBJECT *s = python_to_string( py_result );
- if (s)
- result = list_new( s );
- else
-                /* We have tried all we could. Return an empty list. There
-                   are cases, e.g. the feature.feature function, that should
-                   return a value for the benefit of Python code and that can
-                   also be called by Jam code, where no sensible value can be
-                   returned. We cannot even emit a warning, since there would
-                   be a pile of them. */
- result = L0;
- }
-
- Py_DECREF( py_result );
- }
- else
- {
- PyErr_Print();
- fprintf( stderr,"Call failed\n" );
- }
-
- return result;
-}
-
-#endif
-
-
-void function_done( void )
-{
- BJAM_FREE( stack );
-}
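
For orientation, a small sketch of the conversion contract implemented by python_to_string() above, assuming an initialized Python 2 interpreter and the engine's object.h; the calling code itself is invented and not part of the deleted file.

    #include <Python.h>
    #include <stdio.h>
    #include "object.h"

    OBJECT * python_to_string( PyObject * value );  /* defined above */

    static void convert_demo( void )
    {
        /* A plain Python string converts directly into a Jam OBJECT. */
        PyObject * str = PyString_FromString( "value-from-python" );
        OBJECT * o = python_to_string( str );
        if ( o )
            object_free( o );
        Py_DECREF( str );

        /* Anything else must be an old-style instance whose __jam_repr__()
         * returns a string; otherwise python_to_string() yields 0 and the
         * caller treats the result as a non-string value.
         */
        PyObject * num = PyInt_FromLong( 42 );
        if ( !python_to_string( num ) )
            fprintf( stderr, "no Jam representation\n" );
        Py_DECREF( num );
    }
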
diff --git a/tools/build/v2/engine/hash.c b/tools/build/v2/engine/hash.c
deleted file mode 100644
index 6e748ab4d0..0000000000
--- a/tools/build/v2/engine/hash.c
+++ /dev/null
@@ -1,396 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "hash.h"
-# include "compile.h"
-# include "object.h"
-# include <assert.h>
-
-/*
- * hash.c - simple in-memory hashing routines
- *
- * External routines:
- *
- * hashinit() - initialize a hash table, returning a handle
- * hashitem() - find a record in the table, and optionally enter a new one
- * hashdone() - free a hash table, given its handle
- *
- * Internal routines:
- *
- * hashrehash() - resize and rebuild hp->tab, the hash table
- *
- * 4/29/93 - ensure ITEM's are aligned
- */
-
-/* */
-#define HASH_DEBUG_PROFILE 1
-/* */
-
-/* Header attached to all data items entered into a hash table. */
-
-struct hashhdr
-{
- struct item * next;
-};
-
-typedef struct item
-{
- struct hashhdr hdr;
-} ITEM ;
-
-# define MAX_LISTS 32
-
-struct hash
-{
- /*
- * the hash table, just an array of item pointers
- */
- struct {
- int nel;
- ITEM **base;
- } tab;
-
- int bloat; /* tab.nel / items.nel */
- int inel; /* initial number of elements */
-
- /*
- * the array of records, maintained by these routines
- * essentially a microallocator
- */
- struct {
- int more; /* how many more ITEMs fit in lists[ list ] */
- ITEM *free; /* free list of items */
- char *next; /* where to put more ITEMs in lists[ list ] */
- int size; /* sizeof( ITEM ) + aligned datalen */
- int nel; /* total ITEMs held by all lists[] */
- int list; /* index into lists[] */
-
- struct {
- int nel; /* total ITEMs held by this list */
- char *base; /* base of ITEMs array */
- } lists[ MAX_LISTS ];
- } items;
-
- const char * name; /* just for hashstats() */
-};
-
-static void hashrehash( struct hash *hp );
-static void hashstat( struct hash *hp );
-
-static unsigned int hash_keyval( OBJECT * key )
-{
- return object_hash( key );
-}
-
-#define hash_bucket(hp,keyval) ((hp)->tab.base + ( (keyval) % (hp)->tab.nel ))
-
-#define hash_data_key(data) (*(OBJECT * *)(data))
-#define hash_item_data(item) ((HASHDATA *)((char *)item + sizeof(struct hashhdr)))
-#define hash_item_key(item) (hash_data_key(hash_item_data(item)))
-
-/* Find the hash item for the given key. Returns a pointer to the item and,
-   if 'previous' is given, also stores a pointer to the item that precedes
-   the found item in its bucket. If the found item is the first one in its
-   bucket, there is no previous item and zero is stored instead.
-*/
-static ITEM * hash_search(
- struct hash *hp,
- unsigned int keyval,
- OBJECT * keydata,
- ITEM * * previous )
-{
- ITEM * i = *hash_bucket(hp,keyval);
- ITEM * p = 0;
-
- for ( ; i; i = i->hdr.next )
- {
- if ( object_equal( hash_item_key( i ), keydata ) )
- {
- if (previous)
- {
- *previous = p;
- }
- return i;
- }
- p = i;
- }
-
- return 0;
-}
-
-/*
- * hash_insert() - insert a record in the table or return the existing one
- */
-
-HASHDATA * hash_insert( struct hash * hp, OBJECT * key, int * found )
-{
- ITEM * i;
- unsigned int keyval = hash_keyval( key );
-
- #ifdef HASH_DEBUG_PROFILE
- profile_frame prof[1];
- if ( DEBUG_PROFILE )
- profile_enter( 0, prof );
- #endif
-
- if ( !hp->items.more )
- hashrehash( hp );
-
- i = hash_search( hp, keyval, key, 0 );
- if ( i )
- {
- *found = 1;
- }
- else
- {
- ITEM * * base = hash_bucket( hp, keyval );
-
- /* try to grab one from the free list */
- if ( hp->items.free )
- {
- i = hp->items.free;
- hp->items.free = i->hdr.next;
- assert( hash_item_key( i ) == 0 );
- }
- else
- {
- i = (ITEM *)hp->items.next;
- hp->items.next += hp->items.size;
- }
- hp->items.more--;
- i->hdr.next = *base;
- *base = i;
- *found = 0;
- }
-
- #ifdef HASH_DEBUG_PROFILE
- if ( DEBUG_PROFILE )
- profile_exit( prof );
- #endif
-
- return hash_item_data( i );
-}
-
-/*
- * hash_find() - find a record in the table or NULL if none exists
- */
-
-HASHDATA * hash_find( struct hash *hp, OBJECT *key )
-{
- ITEM *i;
- unsigned int keyval = hash_keyval(key);
-
- #ifdef HASH_DEBUG_PROFILE
- profile_frame prof[1];
- if ( DEBUG_PROFILE )
- profile_enter( 0, prof );
- #endif
-
- if ( !hp->items.nel )
- {
- #ifdef HASH_DEBUG_PROFILE
- if ( DEBUG_PROFILE )
- profile_exit( prof );
- #endif
- return 0;
- }
-
- i = hash_search( hp, keyval, key, 0 );
-
- #ifdef HASH_DEBUG_PROFILE
- if ( DEBUG_PROFILE )
- profile_exit( prof );
- #endif
-
- if (i)
- {
- return hash_item_data( i );
- }
- else
- {
- return 0;
- }
-}
-
-/*
- * hashrehash() - resize and rebuild hp->tab, the hash table
- */
-
-static void hashrehash( register struct hash *hp )
-{
- int i = ++hp->items.list;
- hp->items.more = i ? 2 * hp->items.nel : hp->inel;
- hp->items.next = (char *)BJAM_MALLOC( hp->items.more * hp->items.size );
- hp->items.free = 0;
-
- hp->items.lists[i].nel = hp->items.more;
- hp->items.lists[i].base = hp->items.next;
- hp->items.nel += hp->items.more;
-
- if ( hp->tab.base )
- BJAM_FREE( (char *)hp->tab.base );
-
- hp->tab.nel = hp->items.nel * hp->bloat;
- hp->tab.base = (ITEM **)BJAM_MALLOC( hp->tab.nel * sizeof(ITEM **) );
-
- memset( (char *)hp->tab.base, '\0', hp->tab.nel * sizeof( ITEM * ) );
-
- for ( i = 0; i < hp->items.list; ++i )
- {
- int nel = hp->items.lists[i].nel;
- char *next = hp->items.lists[i].base;
-
- for ( ; nel--; next += hp->items.size )
- {
- register ITEM *i = (ITEM *)next;
- ITEM **ip = hp->tab.base + object_hash( hash_item_key( i ) ) % hp->tab.nel;
- /* code currently assumes rehashing only when there are no free items */
- assert( hash_item_key( i ) != 0 );
-
- i->hdr.next = *ip;
- *ip = i;
- }
- }
-}
-
-void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data )
-{
- int i;
- for ( i = 0; i <= hp->items.list; ++i )
- {
- char * next = hp->items.lists[i].base;
- int nel = hp->items.lists[i].nel;
- if ( i == hp->items.list )
- nel -= hp->items.more;
-
- for ( ; nel--; next += hp->items.size )
- {
- ITEM * i = (ITEM *)next;
-            if ( hash_item_key( i ) != 0 ) /* Do not enumerate freed items. */
- f( hash_item_data( i ), data );
- }
- }
-}
-
-/* --- */
-
-# define ALIGNED(x) ( ( x + sizeof( ITEM ) - 1 ) & ~( sizeof( ITEM ) - 1 ) )
-
-/*
- * hashinit() - initialize a hash table, returning a handle
- */
-
-struct hash *
-hashinit(
- int datalen,
- const char *name )
-{
- struct hash *hp = (struct hash *)BJAM_MALLOC( sizeof( *hp ) );
-
- hp->bloat = 3;
- hp->tab.nel = 0;
- hp->tab.base = (ITEM **)0;
- hp->items.more = 0;
- hp->items.free = 0;
- hp->items.size = sizeof( struct hashhdr ) + ALIGNED( datalen );
- hp->items.list = -1;
- hp->items.nel = 0;
- hp->inel = 11 /* 47 */;
- hp->name = name;
-
- return hp;
-}
-
-void hashdone( struct hash * hp )
-{
- if ( !hp )
- return;
- if ( DEBUG_MEM || DEBUG_PROFILE )
- hashstat( hp );
- hash_free( hp );
-}
-
-/*
- * hash_free() - free a hash table, given its handle
- */
-
-void
-hash_free( struct hash * hp )
-{
- int i;
-
- if ( !hp )
- return;
-
- if ( hp->tab.base )
- BJAM_FREE( (char *)hp->tab.base );
- for ( i = 0; i <= hp->items.list; ++i )
- BJAM_FREE( hp->items.lists[i].base );
- BJAM_FREE( (char *)hp );
-}
-
-
-/* ---- */
-
-static void hashstat( struct hash * hp )
-{
- struct hashstats stats[ 1 ];
- hashstats_init( stats );
- hashstats_add( stats, hp );
- hashstats_print( stats, hp->name );
-}
-
-void hashstats_init( struct hashstats * stats )
-{
- stats->count = 0;
- stats->num_items = 0;
- stats->tab_size = 0;
- stats->item_size = 0;
- stats->sets = 0;
-}
-
-void hashstats_add( struct hashstats * stats, struct hash * hp )
-{
- if ( hp )
- {
- ITEM * * tab = hp->tab.base;
- int nel = hp->tab.nel;
- int count = 0;
- int sets = 0;
- int i;
-
- for ( i = 0; i < nel; ++i )
- {
- ITEM * item;
- int here = 0;
- for ( item = tab[ i ]; item != 0; item = item->hdr.next )
- ++here;
-
- count += here;
- if ( here > 0 )
- ++sets;
- }
-
- stats->count += count;
- stats->sets += sets;
- stats->num_items += hp->items.nel;
- stats->tab_size += hp->tab.nel;
- stats->item_size = hp->items.size;
- }
-}
-
-void hashstats_print( struct hashstats * stats, const char * name )
-{
- printf( "%s table: %d+%d+%d (%dK+%luK) items+table+hash, %f density\n",
- name,
- stats->count,
- stats->num_items,
- stats->tab_size,
- stats->num_items * stats->item_size / 1024,
- (long unsigned)stats->tab_size * sizeof( ITEM ** ) / 1024,
- (float)stats->count / (float)stats->sets );
-}
diff --git a/tools/build/v2/engine/hash.h b/tools/build/v2/engine/hash.h
deleted file mode 100644
index 65db56c1c5..0000000000
--- a/tools/build/v2/engine/hash.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * hash.h - simple in-memory hashing routines
- */
-
-#ifndef BOOST_JAM_HASH_H
-#define BOOST_JAM_HASH_H
-
-/*
- * An opaque struct representing an item in the
- * hash table. The first element of every struct
- * stored in the table must be an OBJECT * which
- * is treated as the key.
- */
-typedef struct hashdata HASHDATA;
-
-/*
- * hashinit() - initialize a hash table, returning a handle.
- * datalen is the size of the items. name is used for debugging.
- */
-struct hash * hashinit ( int datalen, const char * name );
-
-/*
- * hash_free() - free a hash table, given its handle
- */
-void hash_free( struct hash * hp );
-void hashdone( struct hash * hp );
-
-/*
- * hashenumerate() - call f(i, data) on each item, i in the hash
- * table. The order of the items is unspecified.
- */
-void hashenumerate( struct hash * hp, void (* f)( void *, void * ), void * data );
-
-/*
- * hash_insert() - insert a new item in a hash table, or return an
- * existing one.
- *
- * Preconditions:
- * - hp must be a hash table created by hashinit
- * - key must be an object created by object_new
- *
- * Postconditions:
- * - if the key does not already exist in the hash
- * table, *found == 0 and the result will be a
- * pointer to an uninitialized item. The key
- * of the new item must be set to a value equal to
- * key before any further operations on the
- * hash table except hashdone.
- * - if the key is present then *found == 1 and
- * the result is a pointer to the existing
- * record.
- */
-HASHDATA * hash_insert ( struct hash * hp, OBJECT * key, int * found );
-
-/*
- * hash_find() - find a record in the table or NULL if none exists
- */
-HASHDATA * hash_find ( struct hash * hp, OBJECT * key );
-
-struct hashstats {
- int count;
- int num_items;
- int tab_size;
- int item_size;
- int sets;
-};
-
-void hashstats_init( struct hashstats * stats );
-void hashstats_add( struct hashstats * stats, struct hash * hp );
-void hashstats_print( struct hashstats * stats, const char * name );
-
-#endif
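
A minimal usage sketch of the hash API declared above, following the documented hash_insert() contract; the RECORD type, the counting helper and the table name are invented for illustration.

    #include "jam.h"
    #include "object.h"
    #include "hash.h"

    /* Per the contract above, the first member of every stored struct must be
     * the OBJECT * key; anything after it is the caller's payload.
     */
    typedef struct
    {
        OBJECT * name;   /* key */
        int      hits;   /* payload (invented for this sketch) */
    } RECORD;

    static struct hash * records;

    static void record_hit( const char * s )
    {
        int found;
        OBJECT * key = object_new( s );
        RECORD * r;
        if ( !records )
            records = hashinit( sizeof( RECORD ), "records" );
        r = (RECORD *)hash_insert( records, key, &found );
        if ( !found )
        {
            /* New, uninitialized slot: we must store the key ourselves. */
            r->name = key;
            r->hits = 0;
        }
        else
        {
            /* Existing slot: the table already holds an equal key. */
            object_free( key );
        }
        ++r->hits;
    }

A later hash_find( records, key ) returns the same RECORD, and hashdone( records ) releases the table; the keys stored in it are freed by the caller, typically via hashenumerate().
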
diff --git a/tools/build/v2/engine/hcache.c b/tools/build/v2/engine/hcache.c
deleted file mode 100644
index 9e7a0343ed..0000000000
--- a/tools/build/v2/engine/hcache.c
+++ /dev/null
@@ -1,506 +0,0 @@
-/*
- * This file has been donated to Jam.
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "rules.h"
-# include "regexp.h"
-# include "headers.h"
-# include "object.h"
-# include "hash.h"
-# include "hcache.h"
-# include "variable.h"
-# include "search.h"
-# include "modules.h"
-
-#ifdef OPT_HEADER_CACHE_EXT
-
-/*
- * Craig W. McPheeters, Alias|Wavefront.
- *
- * hcache.c hcache.h - handle caching of #includes in source files.
- *
- * Create a cache of files scanned for headers. When starting jam, look for the
- * cache file and load it if present. When the binding phase is finished, write
- * a new header cache. The cache contains files, their timestamps and the
- * header files found in their scan. During the binding phase of jam, look in
- * the header cache first for the headers contained in a file. If the cache is
- * present and valid, use its contents. This results in dramatic speedups with
- * large projects (e.g. 3 min -> 1 min startup for one project).
- *
- * External routines:
- * hcache_init() - read and parse the local .jamdeps file.
- * hcache_done() - write a new .jamdeps file.
- * hcache() - return list of headers on target. Use cache or do a scan.
- *
- * The dependency file is an ASCII file with one line per target. Each line
- * has the following fields:
- * @boundname@ timestamp @file@ @file@ @file@ ... \n
- */
-
-typedef struct hcachedata HCACHEDATA ;
-
-struct hcachedata
-{
- OBJECT * boundname;
- time_t time;
- LIST * includes;
- LIST * hdrscan; /* the HDRSCAN value for this target */
- int age; /* if too old, we'll remove it from cache */
- HCACHEDATA * next;
-};
-
-
-static struct hash * hcachehash = 0;
-static HCACHEDATA * hcachelist = 0;
-
-static int queries = 0;
-static int hits = 0;
-
-#define CACHE_FILE_VERSION "version 4"
-#define CACHE_RECORD_HEADER "header"
-#define CACHE_RECORD_END "end"
-
-
-/*
- * Return the name of the header cache file. May return NULL.
- *
- * The user sets this by setting the HCACHEFILE variable in a Jamfile. We cache
- * the result so the user cannot change the cache file during header scanning.
- */
-
-static const char * cache_name( void )
-{
- static OBJECT * name = 0;
- if ( !name )
- {
- LIST * hcachevar = var_get( root_module(), constant_HCACHEFILE );
-
- if ( !list_empty( hcachevar ) )
- {
- TARGET * t = bindtarget( list_front( hcachevar ) );
-
- pushsettings( root_module(), t->settings );
- /* Do not expect the cache file to be generated, so pass 0 as the
- * third argument to search. Expect the location to be specified via
-         * LOCATE, so pass 0 as the fourth argument.
- */
- object_free( t->boundname );
- t->boundname = search( t->name, &t->time, 0, 0 );
- popsettings( root_module(), t->settings );
-
- name = object_copy( t->boundname );
- }
- }
- return name ? object_str( name ) : 0;
-}
-
-
-/*
- * Return the maximum age a cache entry can have before it is purged from the
- * cache.
- */
-
-static int cache_maxage( void )
-{
- int age = 100;
- LIST * var = var_get( root_module(), constant_HCACHEMAXAGE );
- if ( !list_empty( var ) )
- {
- age = atoi( object_str( list_front( var ) ) );
- if ( age < 0 )
- age = 0;
- }
- return age;
-}
-
-
-/*
- * Read a netstring. The caveat is that the string can not contain ASCII 0. The
- * returned value is as returned by object_new().
- */
-
-OBJECT * read_netstring( FILE * f )
-{
- unsigned long len;
- static char * buf = NULL;
- static unsigned long buf_len = 0;
-
- if ( fscanf( f, " %9lu", &len ) != 1 )
- return NULL;
- if ( fgetc( f ) != (int)'\t' )
- return NULL;
-
- if ( len > 1024 * 64 )
- return NULL; /* sanity check */
-
- if ( len > buf_len )
- {
- unsigned long new_len = buf_len * 2;
- if ( new_len < len )
- new_len = len;
- buf = (char *)BJAM_REALLOC( buf, new_len + 1 );
- if ( buf )
- buf_len = new_len;
- }
-
- if ( !buf )
- return NULL;
-
- if ( fread( buf, 1, len, f ) != len )
- return NULL;
- if ( fgetc( f ) != (int)'\n' )
- return NULL;
-
- buf[ len ] = 0;
- return object_new( buf );
-}
-
-
-/*
- * Write a netstring.
- */
-
-void write_netstring( FILE * f, char const * s )
-{
- if ( !s )
- s = "";
- fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
-}
-
-
-void hcache_init()
-{
- FILE * f;
- OBJECT * version = 0;
- int header_count = 0;
- const char * hcachename;
-
- if ( hcachehash )
- return;
-
- hcachehash = hashinit( sizeof( HCACHEDATA ), "hcache" );
-
- if ( !( hcachename = cache_name() ) )
- return;
-
- if ( !( f = fopen( hcachename, "rb" ) ) )
- return;
-
- version = read_netstring( f );
-
- if ( !version || strcmp( object_str( version ), CACHE_FILE_VERSION ) )
- goto bail;
-
- while ( 1 )
- {
- HCACHEDATA cachedata;
- HCACHEDATA * c;
- OBJECT * record_type = 0;
- OBJECT * time_str = 0;
- OBJECT * age_str = 0;
- OBJECT * includes_count_str = 0;
- OBJECT * hdrscan_count_str = 0;
- int i;
- int count;
- LIST * l;
- int found;
-
- cachedata.boundname = 0;
- cachedata.includes = 0;
- cachedata.hdrscan = 0;
-
- record_type = read_netstring( f );
- if ( !record_type )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto cleanup;
- }
- if ( !strcmp( object_str( record_type ), CACHE_RECORD_END ) )
- {
- object_free( record_type );
- break;
- }
- if ( strcmp( object_str( record_type ), CACHE_RECORD_HEADER ) )
- {
- fprintf( stderr, "invalid %s with record separator <%s>\n",
- hcachename, record_type ? object_str( record_type ) : "<null>" );
- goto cleanup;
- }
-
- cachedata.boundname = read_netstring( f );
- time_str = read_netstring( f );
- age_str = read_netstring( f );
- includes_count_str = read_netstring( f );
-
- if ( !cachedata.boundname || !time_str || !age_str || !includes_count_str )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto cleanup;
- }
-
- cachedata.time = atoi( object_str( time_str ) );
- cachedata.age = atoi( object_str( age_str ) ) + 1;
-
- count = atoi( object_str( includes_count_str ) );
- for ( l = L0, i = 0; i < count; ++i )
- {
- OBJECT * s = read_netstring( f );
- if ( !s )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- list_free( l );
- goto cleanup;
- }
- l = list_push_back( l, s );
- }
- cachedata.includes = l;
-
- hdrscan_count_str = read_netstring( f );
- if ( !hdrscan_count_str )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- goto cleanup;
- }
-
- count = atoi( object_str( hdrscan_count_str ) );
- for ( l = L0, i = 0; i < count; ++i )
- {
- OBJECT * s = read_netstring( f );
- if ( !s )
- {
- fprintf( stderr, "invalid %s\n", hcachename );
- list_free( l );
- goto cleanup;
- }
- l = list_push_back( l, s );
- }
- cachedata.hdrscan = l;
-
- c = (HCACHEDATA *)hash_insert( hcachehash, cachedata.boundname, &found );
- if ( !found )
- {
- c->boundname = cachedata.boundname;
- c->time = cachedata.time;
- c->includes = cachedata.includes;
- c->hdrscan = cachedata.hdrscan;
- c->age = cachedata.age;
- }
- else
- {
- fprintf( stderr, "can't insert header cache item, bailing on %s\n",
- hcachename );
- goto cleanup;
- }
-
- c->next = hcachelist;
- hcachelist = c;
-
- ++header_count;
-
- object_free( record_type );
- object_free( time_str );
- object_free( age_str );
- object_free( includes_count_str );
- object_free( hdrscan_count_str );
- continue;
-
-cleanup:
-
- if ( record_type ) object_free( record_type );
- if ( time_str ) object_free( time_str );
- if ( age_str ) object_free( age_str );
- if ( includes_count_str ) object_free( includes_count_str );
- if ( hdrscan_count_str ) object_free( hdrscan_count_str );
-
- if ( cachedata.boundname ) object_free( cachedata.boundname );
- if ( cachedata.includes ) list_free( cachedata.includes );
- if ( cachedata.hdrscan ) list_free( cachedata.hdrscan );
-
- goto bail;
- }
-
- if ( DEBUG_HEADER )
- printf( "hcache read from file %s\n", hcachename );
-
-bail:
- if ( version )
- object_free( version );
- fclose( f );
-}
-
-
-void hcache_done()
-{
- FILE * f;
- HCACHEDATA * c;
- int header_count = 0;
- const char * hcachename;
- int maxage;
-
- if ( !hcachehash )
- return;
-
- if ( !( hcachename = cache_name() ) )
- goto cleanup;
-
- if ( !( f = fopen( hcachename, "wb" ) ) )
- goto cleanup;
-
- maxage = cache_maxage();
-
- /* Print out the version. */
- write_netstring( f, CACHE_FILE_VERSION );
-
- c = hcachelist;
- for ( c = hcachelist; c; c = c->next )
- {
- LISTITER iter, end;
- char time_str[ 30 ];
- char age_str[ 30 ];
- char includes_count_str[ 30 ];
- char hdrscan_count_str[ 30 ];
-
- if ( maxage == 0 )
- c->age = 0;
- else if ( c->age > maxage )
- continue;
-
- sprintf( includes_count_str, "%lu", (long unsigned) list_length( c->includes ) );
- sprintf( hdrscan_count_str, "%lu", (long unsigned) list_length( c->hdrscan ) );
- sprintf( time_str, "%lu", (long unsigned) c->time );
- sprintf( age_str, "%lu", (long unsigned) c->age );
-
- write_netstring( f, CACHE_RECORD_HEADER );
- write_netstring( f, object_str( c->boundname ) );
- write_netstring( f, time_str );
- write_netstring( f, age_str );
- write_netstring( f, includes_count_str );
- for ( iter = list_begin( c->includes ), end = list_end( c->includes );
- iter != end; iter = list_next( iter ) )
- write_netstring( f, object_str( list_item( iter ) ) );
- write_netstring( f, hdrscan_count_str );
- for ( iter = list_begin( c->hdrscan ), end = list_end( c->hdrscan );
- iter != end; iter = list_next( iter ) )
- write_netstring( f, object_str( list_item( iter ) ) );
- fputs( "\n", f );
- ++header_count;
- }
- write_netstring( f, CACHE_RECORD_END );
-
- if ( DEBUG_HEADER )
- printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n",
- hcachename, header_count, queries ? 100.0 * hits / queries : 0 );
-
- fclose ( f );
-
-cleanup:
- for ( c = hcachelist; c; c = c->next )
- {
- list_free( c->includes );
- list_free( c->hdrscan );
- object_free( c->boundname );
- }
-
- hcachelist = 0;
- if ( hcachehash )
- hashdone( hcachehash );
- hcachehash = 0;
-}
-
-
-LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan )
-{
- HCACHEDATA * c;
-
- LIST * l = 0;
-
- ++queries;
-
- if ( ( c = (HCACHEDATA *)hash_find( hcachehash, t->boundname ) ) )
- {
- if ( c->time == t->time )
- {
- LIST *l1 = hdrscan, *l2 = c->hdrscan;
- LISTITER iter1 = list_begin( l1 ), end1 = list_end( l1 ),
- iter2 = list_begin( l2 ), end2 = list_end( l2 );
- while ( iter1 != end1 && iter2 != end2 )
- {
- if ( !object_equal( list_item( iter1 ), list_item( iter2 ) ) )
- {
- iter1 = end1;
- }
- else
- {
- iter1 = list_next( iter1 );
- iter2 = list_next( iter2 );
- }
- }
- if ( iter1 != end1 || iter2 != end2 )
- {
- if (DEBUG_HEADER)
- printf( "HDRSCAN out of date in cache for %s\n",
- object_str( t->boundname ) );
-
- printf( "HDRSCAN out of date for %s\n",
- object_str( t->boundname ) );
- printf(" real : ");
- list_print( hdrscan );
- printf( "\n cached: " );
- list_print( c->hdrscan );
- printf( "\n" );
-
- list_free( c->includes );
- list_free( c->hdrscan );
- c->includes = L0;
- c->hdrscan = L0;
- }
- else
- {
- if (DEBUG_HEADER)
- printf( "using header cache for %s\n",
- object_str( t->boundname ) );
- c->age = 0;
- ++hits;
- l = list_copy( c->includes );
- return l;
- }
- }
- else
- {
- if (DEBUG_HEADER)
- printf ("header cache out of date for %s\n",
- object_str( t->boundname ) );
- list_free( c->includes );
- list_free( c->hdrscan );
- c->includes = L0;
- c->hdrscan = L0;
- }
- }
- else
- {
- int found;
- c = (HCACHEDATA *)hash_insert( hcachehash, t->boundname, &found );
- if ( !found )
- {
- c->boundname = object_copy( t->boundname );
- c->next = hcachelist;
- hcachelist = c;
- }
- }
-
-    /* 'c' points at the cache entry. It's out of date. */
-
- l = headers1( L0, t->boundname, rec, re );
-
- c->time = t->time;
- c->age = 0;
- c->includes = list_copy( l );
- c->hdrscan = list_copy( hdrscan );
-
- return l;
-}
-
-#endif
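
A standalone sketch of the on-disk record layout produced by hcache_done() above, using the same netstring framing as write_netstring(); the output file name, timestamp, include and HDRSCAN values are invented.

    #include <stdio.h>
    #include <string.h>

    /* Same "<length>\t<string>\n" framing as write_netstring() above. */
    static void put_netstring( FILE * f, char const * s )
    {
        fprintf( f, "%lu\t%s\n", (long unsigned)strlen( s ), s );
    }

    int main()
    {
        FILE * f = fopen( "example.jamdeps", "wb" );
        if ( !f ) return 1;
        put_netstring( f, "version 4" );      /* CACHE_FILE_VERSION         */
        put_netstring( f, "header" );         /* CACHE_RECORD_HEADER        */
        put_netstring( f, "src/main.cpp" );   /* boundname                  */
        put_netstring( f, "1418275200" );     /* timestamp                  */
        put_netstring( f, "0" );              /* age                        */
        put_netstring( f, "1" );              /* number of includes         */
        put_netstring( f, "util.h" );
        put_netstring( f, "1" );              /* number of HDRSCAN patterns */
        put_netstring( f, "#include[ ]*\"([^\"]*)\"" );
        fputs( "\n", f );
        put_netstring( f, "end" );            /* CACHE_RECORD_END           */
        fclose( f );
        return 0;
    }
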
diff --git a/tools/build/v2/engine/hcache.h b/tools/build/v2/engine/hcache.h
deleted file mode 100644
index 2aa2394939..0000000000
--- a/tools/build/v2/engine/hcache.h
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * This file is not part of Jam
- */
-
-/*
- * hcache.h - handle #includes in source files
- */
-#ifndef HCACHE_H
-# define HCACHE_H
-
-# include "regexp.h"
-# include "lists.h"
-
-void hcache_init( void );
-void hcache_done( void );
-LIST * hcache( TARGET *t, int rec, regexp * re[], LIST * hdrscan );
-
-#endif
diff --git a/tools/build/v2/engine/hdrmacro.c b/tools/build/v2/engine/hdrmacro.c
deleted file mode 100644
index 6ef2a131c9..0000000000
--- a/tools/build/v2/engine/hdrmacro.c
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "compile.h"
-# include "rules.h"
-# include "variable.h"
-# include "regexp.h"
-# include "hdrmacro.h"
-# include "hash.h"
-# include "object.h"
-# include "strings.h"
-
-/*
- * hdrmacro.c - handle header files that define macros used in
- * #include statements.
- *
- *  We look for lines like "#define MACRO <....>" or '#define MACRO "...."'
- *  in the target file. When a definition is found, we record the macro and
- *  the file it names, so that a later "#include MACRO" line can be resolved
- *  and a rule invocation can be phonied up like:
- *
- *      $(HDRRULE) <target> : <resolved included files> ;
- *
- * External routines:
- *    macro_headers()    - scan a target for "#define MACRO <file>" lines
- *                         and record the macro definitions found
- *    macro_header_get() - return the file name a recorded macro resolves
- *                         to, or 0 if it is unknown
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 09/10/00 (seiwald) - replaced call to compile_rule with evaluate_rule,
- * so that headers() doesn't have to mock up a parse structure
- * just to invoke a rule.
- */
-
-/* this type is used to store a dictionary of file header macros */
-typedef struct header_macro
-{
- OBJECT * symbol;
- OBJECT * filename; /* we could maybe use a LIST here ?? */
-} HEADER_MACRO;
-
-static struct hash * header_macros_hash = 0;
-
-
-/*
- * macro_headers() - scan a target for "#define MACRO <file>" definitions
- */
-
-# define MAXINC 10
-
-void
-macro_headers( TARGET * t )
-{
- static regexp *re = 0;
- FILE *f;
- char buf[ 1024 ];
-
- if ( DEBUG_HEADER )
- printf( "macro header scan for %s\n", object_str( t->name ) );
-
- /* this regexp is used to detect lines of the form */
- /* "#define MACRO <....>" or "#define MACRO "....." */
- /* in the header macro files.. */
- if ( re == 0 )
- {
- OBJECT * re_str = object_new(
- "^[ ]*#[ ]*define[ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*"
- "[<\"]([^\">]*)[\">].*$" );
- re = regex_compile( re_str );
- object_free( re_str );
- }
-
- if ( !( f = fopen( object_str( t->boundname ), "r" ) ) )
- return;
-
- while ( fgets( buf, sizeof( buf ), f ) )
- {
- HEADER_MACRO var;
- HEADER_MACRO *v = &var;
-
- if ( regexec( re, buf ) && re->startp[1] )
- {
- OBJECT * symbol;
- int found;
-            /* We detected a line that looks like "#define MACRO filename". */
- ((char *)re->endp[1])[0] = '\0';
- ((char *)re->endp[2])[0] = '\0';
-
- if ( DEBUG_HEADER )
- printf( "macro '%s' used to define filename '%s' in '%s'\n",
- re->startp[1], re->startp[2], object_str( t->boundname ) );
-
- /* add macro definition to hash table */
- if ( !header_macros_hash )
- header_macros_hash = hashinit( sizeof( HEADER_MACRO ), "hdrmacros" );
-
- symbol = object_new( re->startp[1] );
- v = (HEADER_MACRO *)hash_insert( header_macros_hash, symbol, &found );
- if ( !found )
- {
- v->symbol = symbol;
- v->filename = object_new( re->startp[2] ); /* never freed */
- }
- else
- {
- object_free( symbol );
- }
- /* XXXX: FOR NOW, WE IGNORE MULTIPLE MACRO DEFINITIONS !! */
- /* WE MIGHT AS WELL USE A LIST TO STORE THEM.. */
- }
- }
-
- fclose( f );
-}
-
-
-OBJECT * macro_header_get( OBJECT * macro_name )
-{
- HEADER_MACRO * v;
-
- if ( header_macros_hash && ( v = (HEADER_MACRO *)hash_find( header_macros_hash, macro_name ) ) )
- {
- if ( DEBUG_HEADER )
- printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name ), object_str( v->filename ) );
- return v->filename;
- }
- return 0;
-}
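
To make the regexp above concrete: a hypothetical header that macro_headers() would scan, and the kind of "#include MACRO" line that macro_header_get() then resolves; the names and paths are invented.

    /* project_config.h -- scanned by macro_headers(); both forms match the
     * "#define MACRO <...>" / "#define MACRO \"...\"" expression above.
     */
    #define SYSTEM_CONFIG_H  "config/unix.h"
    #define PLATFORM_API_H   <platform/api.h>

    /* some_source.c -- the header scanner sees "#include SYSTEM_CONFIG_H" and
     * asks macro_header_get() for the recorded file name, "config/unix.h".
     */
    #include SYSTEM_CONFIG_H
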
diff --git a/tools/build/v2/engine/hdrmacro.h b/tools/build/v2/engine/hdrmacro.h
deleted file mode 100644
index ddfa6eeac2..0000000000
--- a/tools/build/v2/engine/hdrmacro.h
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * hdrmacro.h - parses header files for #define MACRO <filename> or
- * #define MACRO "filename" definitions
- */
-
-#ifndef HDRMACRO_SW20111118_H
-#define HDRMACRO_SW20111118_H
-
-void macro_headers( TARGET *t );
-
-OBJECT * macro_header_get( OBJECT * macro_name );
-
-#endif
diff --git a/tools/build/v2/engine/headers.c b/tools/build/v2/engine/headers.c
deleted file mode 100644
index 7e01440169..0000000000
--- a/tools/build/v2/engine/headers.c
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "compile.h"
-# include "rules.h"
-# include "modules.h"
-# include "variable.h"
-# include "regexp.h"
-# include "headers.h"
-# include "hdrmacro.h"
-# include "object.h"
-
-#ifdef OPT_HEADER_CACHE_EXT
-# include "hcache.h"
-#endif
-
-/*
- * headers.c - handle #includes in source files
- *
- * Using regular expressions provided as the variable $(HDRSCAN),
- * headers() searches a file for #include files and phonies up a
- * rule invocation:
- *
- * $(HDRRULE) <target> : <include files> ;
- *
- * External routines:
- * headers() - scan a target for include files and call HDRRULE
- *
- * Internal routines:
- * headers1() - using regexp, scan a file and build include LIST
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 09/10/00 (seiwald) - replaced call to compile_rule with evaluate_rule,
- * so that headers() doesn't have to mock up a parse structure
- * just to invoke a rule.
- */
-
-#ifndef OPT_HEADER_CACHE_EXT
-static LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[]);
-#endif
-
-/*
- * headers() - scan a target for include files and call HDRRULE
- */
-
-# define MAXINC 10
-
-void
-headers( TARGET * t )
-{
- LIST * hdrscan;
- LIST * hdrrule;
- #ifndef OPT_HEADER_CACHE_EXT
- LIST * headlist = L0;
- #endif
- regexp * re[ MAXINC ];
- int rec = 0;
- LISTITER iter, end;
-
- hdrscan = var_get( root_module(), constant_HDRSCAN );
- if ( list_empty( hdrscan ) )
- return;
-
- hdrrule = var_get( root_module(), constant_HDRRULE );
- if ( list_empty( hdrrule ) )
- return;
-
- if ( DEBUG_HEADER )
- printf( "header scan %s\n", object_str( t->name ) );
-
- /* Compile all regular expressions in HDRSCAN */
- iter = list_begin( hdrscan ), end = list_end( hdrscan );
- for ( ; ( rec < MAXINC ) && iter != end; iter = list_next( iter ) )
- {
- re[ rec++ ] = regex_compile( list_item( iter ) );
- }
-
- /* Doctor up call to HDRRULE rule */
- /* Call headers1() to get LIST of included files. */
- {
- FRAME frame[1];
- frame_init( frame );
- lol_add( frame->args, list_new( object_copy( t->name ) ) );
-#ifdef OPT_HEADER_CACHE_EXT
- lol_add( frame->args, hcache( t, rec, re, hdrscan ) );
-#else
- lol_add( frame->args, headers1( headlist, t->boundname, rec, re ) );
-#endif
-
- if ( lol_get( frame->args, 1 ) )
- {
- /* The third argument to HDRRULE is the bound name of
- * $(<) */
- lol_add( frame->args, list_new( object_copy( t->boundname ) ) );
-
- list_free( evaluate_rule( list_front( hdrrule ), frame ) );
- }
-
- /* Clean up. */
- frame_free( frame );
- }
-}
-
-
-/*
- * headers1() - using regexp, scan a file and build include LIST.
- */
-
-#ifdef OPT_HEADER_CACHE_EXT
-LIST *
-#else
-static LIST *
-#endif
-headers1(
- LIST * l,
- OBJECT * file,
- int rec,
- regexp * re[] )
-{
- FILE * f;
- char buf[ 1024 ];
- int i;
- static regexp * re_macros = 0;
-
-#ifdef OPT_IMPROVED_PATIENCE_EXT
- static int count = 0;
- ++count;
- if ( ((count == 100) || !( count % 1000 )) && DEBUG_MAKE )
- printf("...patience...\n");
-#endif
-
- /* the following regexp is used to detect cases where a */
-    /* file is included through a line like "#include MACRO" */
- if ( re_macros == 0 )
- {
- OBJECT * re_str = object_new(
- "^[ ]*#[ ]*include[ ]*([A-Za-z][A-Za-z0-9_]*).*$" );
- re_macros = regex_compile( re_str );
- object_free( re_str );
- }
-
- if ( !( f = fopen( object_str( file ), "r" ) ) )
- return l;
-
- while ( fgets( buf, sizeof( buf ), f ) )
- {
- int size = strlen( buf );
- /* Remove trailing \r and \n, if any. */
-        while ( ( size > 0 ) &&
-            ( ( buf[ size - 1 ] == '\n' ) ||
-              ( buf[ size - 1 ] == '\r' ) ) )
- {
- buf[ size - 1 ] = '\0';
- --size;
- }
-
- for ( i = 0; i < rec; ++i )
- if ( regexec( re[i], buf ) && re[i]->startp[1] )
- {
- ((char *)re[i]->endp[1])[0] = '\0';
-
- if ( DEBUG_HEADER )
- printf( "header found: %s\n", re[i]->startp[1] );
-
- l = list_push_back( l, object_new( re[i]->startp[1] ) );
- }
-
- /* special treatment for #include MACRO */
- if ( regexec( re_macros, buf ) && re_macros->startp[1] )
- {
- OBJECT * header_filename;
- OBJECT * macro_name;
-
- ((char *)re_macros->endp[1])[0] = '\0';
-
- if ( DEBUG_HEADER )
- printf( "macro header found: %s", re_macros->startp[1] );
-
- macro_name = object_new( re_macros->startp[1] );
- header_filename = macro_header_get( macro_name );
- object_free( macro_name );
- if ( header_filename )
- {
- if ( DEBUG_HEADER )
- printf( " resolved to '%s'\n", object_str( header_filename ) );
- l = list_push_back( l, object_copy( header_filename ) );
- }
- else
- {
- if ( DEBUG_HEADER )
- printf( " ignored !!\n" );
- }
- }
- }
-
- fclose( f );
-
- return l;
-}
-
-
-void regerror( const char * s )
-{
- printf( "re error %s\n", s );
-}
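
A minimal sketch of the extraction step headers1() performs on each line, using the engine's bundled regexp API seen above; the HDRSCAN-style pattern and the helper function are invented for illustration.

    #include <stdio.h>
    #include "jam.h"
    #include "object.h"
    #include "regexp.h"

    /* Run one HDRSCAN-style expression over a single (writable) source line
     * and print the captured include name, the way headers1() collects them.
     */
    static void scan_line( char * line )
    {
        OBJECT * pattern = object_new(
            "^[ ]*#[ ]*include[ ]*\"([^\"]*)\".*$" );
        regexp * re = regex_compile( pattern );
        object_free( pattern );

        if ( regexec( re, line ) && re->startp[ 1 ] )
        {
            /* NUL-terminate the first capture group in place, as above. */
            ( (char *)re->endp[ 1 ] )[ 0 ] = '\0';
            printf( "header found: %s\n", re->startp[ 1 ] );
        }
    }
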
diff --git a/tools/build/v2/engine/jam.c b/tools/build/v2/engine/jam.c
deleted file mode 100644
index 09faf85225..0000000000
--- a/tools/build/v2/engine/jam.c
+++ /dev/null
@@ -1,662 +0,0 @@
-/*
- * /+\
- * +\ Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- * \+/
- *
- * This file is part of jam.
- *
- * License is hereby granted to use this software and distribute it
- * freely, as long as this copyright notice is retained and modifications
- * are clearly marked.
- *
- * ALL WARRANTIES ARE HEREBY DISCLAIMED.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * jam.c - make redux
- *
- * See Jam.html for usage information.
- *
- * These comments document the code.
- *
- * The top half of the code is structured such:
- *
- * jam
- * / | \
- * +---+ | \
- * / | \
- * jamgram option \
- * / | \ \
- * / | \ \
- * / | \ |
- * scan | compile make
- * | | / | \ / | \
- * | | / | \ / | \
- * | | / | \ / | \
- * jambase parse | rules search make1
- * | | | \
- * | | | \
- * | | | \
- * builtins timestamp command execute
- * |
- * |
- * |
- * filesys
- *
- *
- * The support routines are called by all of the above, but themselves
- * are layered thus:
- *
- * variable|expand
- * / | |
- * / | |
- * / | |
- * lists | pathsys
- * \ |
- * \ hash
- * \ |
- * \ |
- * \ |
- * \ |
- * \ |
- * object
- *
- * Roughly, the modules are:
- *
- * builtins.c - jam's built-in rules
- * command.c - maintain lists of commands
- * compile.c - compile parsed jam statements
- * execunix.c - execute a shell script on UNIX
- * file*.c - scan directories and archives on *
- * hash.c - simple in-memory hashing routines
- * hdrmacro.c - handle header file parsing for filename macro definitions
- * headers.c - handle #includes in source files
- * jambase.c - compilable copy of Jambase
- * jamgram.y - jam grammar
- * lists.c - maintain lists of strings
- * make.c - bring a target up to date, once rules are in place
- * make1.c - execute command to bring targets up to date
- * object.c - string manipulation routines
- * option.c - command line option processing
- * parse.c - make and destroy parse trees as driven by the parser
- * path*.c - manipulate file names on *
- * hash.c - simple in-memory hashing routines
- * regexp.c - Henry Spencer's regexp
- * rules.c - access to RULEs, TARGETs, and ACTIONs
- * scan.c - the jam yacc scanner
- * search.c - find a target along $(SEARCH) or $(LOCATE)
- * timestamp.c - get the timestamp of a file or archive member
- * variable.c - handle jam multi-element variables
- *
- * 05/04/94 (seiwald) - async multiprocess (-j) support
- * 02/08/95 (seiwald) - -n implies -d2.
- * 02/22/95 (seiwald) - -v for version info.
- * 09/11/00 (seiwald) - PATCHLEVEL folded into VERSION.
- * 01/10/01 (seiwald) - pathsys.h split from filesys.h
- */
-
-
-#include "jam.h"
-#include "option.h"
-#include "patchlevel.h"
-
-/* These get various function declarations. */
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "compile.h"
-#include "builtins.h"
-#include "rules.h"
-#include "object.h"
-#include "scan.h"
-#include "timestamp.h"
-#include "make.h"
-#include "strings.h"
-#include "filesys.h"
-#include "output.h"
-#include "search.h"
-#include "class.h"
-#include "execcmd.h"
-#include "constants.h"
-#include "function.h"
-#include "pwd.h"
-#include "hcache.h"
-
-/* Macintosh is "special" */
-#ifdef OS_MAC
- #include <QuickDraw.h>
-#endif
-
-/* And UNIX for this. */
-#ifdef unix
- #include <sys/utsname.h>
- #include <signal.h>
-#endif
-
-struct globs globs =
-{
- 0, /* noexec */
- 1, /* jobs */
- 0, /* quitquick */
- 0, /* newestfirst */
- 0, /* pipes action stdout and stderr merged to action output */
-#ifdef OS_MAC
- { 0, 0 }, /* debug - suppress tracing output */
-#else
- { 0, 1 }, /* debug ... */
-#endif
- 0, /* output commands, not run them */
- 0 /* action timeout */
-};
-
-/* Symbols to be defined as true for use in Jambase. */
-static char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 };
-
-
-/* Known for sure:
- *  mac needs arg_environ
- * OS2 needs extern environ
- */
-
-#ifdef OS_MAC
- #define use_environ arg_environ
- #ifdef MPW
- QDGlobals qd;
- #endif
-#endif
-
-/* on Win32-LCC */
-#if defined( OS_NT ) && defined( __LCC__ )
- #define use_environ _environ
-#endif
-
-# if defined( __MWERKS__)
- #define use_environ _environ
- extern char * * _environ;
-#endif
-
-#ifndef use_environ
- #define use_environ environ
- #if !defined( __WATCOM__ ) && !defined( OS_OS2 ) && !defined( OS_NT )
- extern char **environ;
- #endif
-#endif
-
-#if YYDEBUG != 0
- extern int yydebug;
-#endif
-
-#ifndef NDEBUG
-static void run_unit_tests()
-{
-#if defined( USE_EXECNT )
- extern void execnt_unit_test();
- execnt_unit_test();
-#endif
- string_unit_test();
-}
-#endif
-
-int anyhow = 0;
-
-#ifdef HAVE_PYTHON
- extern PyObject * bjam_call ( PyObject * self, PyObject * args );
- extern PyObject * bjam_import_rule ( PyObject * self, PyObject * args );
- extern PyObject * bjam_define_action( PyObject * self, PyObject * args );
- extern PyObject * bjam_variable ( PyObject * self, PyObject * args );
- extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args );
- extern PyObject * bjam_caller ( PyObject * self, PyObject * args );
-#endif
-
-void regex_done();
-
-const char *saved_argv0;
-
-int main( int argc, char * * argv, char * * arg_environ )
-{
- int n;
- char * s;
- struct bjam_option optv[N_OPTS];
- char const * all = "all";
- int status;
- int arg_c = argc;
- char * * arg_v = argv;
- char const * progname = argv[0];
- module_t * environ_module;
-
- saved_argv0 = argv[0];
-
- BJAM_MEM_INIT();
-
-# ifdef OS_MAC
- InitGraf(&qd.thePort);
-# endif
-
- --argc;
- ++argv;
-
- if ( getoptions( argc, argv, "-:l:d:j:p:f:gs:t:ano:qv", optv ) < 0 )
- {
- printf( "\nusage: %s [ options ] targets...\n\n", progname );
-
- printf( "-a Build all targets, even if they are current.\n" );
- printf( "-dx Set the debug level to x (0-9).\n" );
- printf( "-fx Read x instead of Jambase.\n" );
- /* printf( "-g Build from newest sources first.\n" ); */
- printf( "-jx Run up to x shell commands concurrently.\n" );
- printf( "-lx Limit actions to x number of seconds after which they are stopped.\n" );
- printf( "-n Don't actually execute the updating actions.\n" );
- printf( "-ox Write the updating actions to file x.\n" );
- printf( "-px x=0, pipes action stdout and stderr merged into action output.\n" );
- printf( "-q Quit quickly as soon as a target fails.\n" );
- printf( "-sx=y Set variable x=y, overriding environment.\n" );
- printf( "-tx Rebuild x, even if it is up-to-date.\n" );
- printf( "-v Print the version of jam and exit.\n" );
- printf( "--x Option is ignored.\n\n" );
-
- exit( EXITBAD );
- }
-
- /* Version info. */
- if ( ( s = getoptval( optv, 'v', 0 ) ) )
- {
- printf( "Boost.Jam " );
- printf( "Version %s. %s.\n", VERSION, OSMINOR );
- printf( " Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. \n" );
- printf( " Copyright 2001 David Turner.\n" );
- printf( " Copyright 2001-2004 David Abrahams.\n" );
- printf( " Copyright 2002-2008 Rene Rivera.\n" );
- printf( " Copyright 2003-2008 Vladimir Prus.\n" );
-
- return EXITOK;
- }
-
- /* Pick up interesting options. */
- if ( ( s = getoptval( optv, 'n', 0 ) ) )
- globs.noexec++, globs.debug[2] = 1;
-
- if ( ( s = getoptval( optv, 'p', 0 ) ) )
- {
- /* Undocumented -p3 (acts like both -p1 -p2) means separate pipe action
- * stdout and stderr.
- */
- globs.pipe_action = atoi( s );
- if ( ( 3 < globs.pipe_action ) || ( globs.pipe_action < 0 ) )
- {
- printf(
- "Invalid pipe descriptor '%d', valid values are -p[0..3].\n",
- globs.pipe_action );
- exit( EXITBAD );
- }
- }
-
- if ( ( s = getoptval( optv, 'q', 0 ) ) )
- globs.quitquick = 1;
-
- if ( ( s = getoptval( optv, 'a', 0 ) ) )
- anyhow++;
-
- if ( ( s = getoptval( optv, 'j', 0 ) ) )
- {
- globs.jobs = atoi( s );
- if (globs.jobs == 0)
- {
- printf("Invalid value for the '-j' option.\n");
- exit(EXITBAD);
- }
- }
-
- if ( ( s = getoptval( optv, 'g', 0 ) ) )
- globs.newestfirst = 1;
-
- if ( ( s = getoptval( optv, 'l', 0 ) ) )
- globs.timeout = atoi( s );
-
- /* Turn on/off debugging */
- for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n )
- {
- int i;
-
- /* First -d, turn off defaults. */
- if ( !n )
- for ( i = 0; i < DEBUG_MAX; ++i )
- globs.debug[i] = 0;
-
- i = atoi( s );
-
- if ( ( i < 0 ) || ( i >= DEBUG_MAX ) )
- {
- printf( "Invalid debug level '%s'.\n", s );
- continue;
- }
-
- /* n turns on levels 1-n. */
- /* +n turns on level n. */
- if ( *s == '+' )
- globs.debug[i] = 1;
- else while ( i )
- globs.debug[i--] = 1;
- }
-
- constants_init();
-
- {
- PROFILE_ENTER( MAIN );
-
-#ifdef HAVE_PYTHON
- {
- PROFILE_ENTER( MAIN_PYTHON );
- Py_Initialize();
- {
- static PyMethodDef BjamMethods[] = {
- {"call", bjam_call, METH_VARARGS,
- "Call the specified bjam rule."},
- {"import_rule", bjam_import_rule, METH_VARARGS,
- "Imports Python callable to bjam."},
- {"define_action", bjam_define_action, METH_VARARGS,
- "Defines a command line action."},
- {"variable", bjam_variable, METH_VARARGS,
- "Obtains a variable from bjam's global module."},
- {"backtrace", bjam_backtrace, METH_VARARGS,
- "Returns bjam backtrace from the last call into Python."},
- {"caller", bjam_caller, METH_VARARGS,
- "Returns the module from which the last call into Python is made."},
- {NULL, NULL, 0, NULL}
- };
-
- Py_InitModule( "bjam", BjamMethods );
- }
- PROFILE_EXIT( MAIN_PYTHON );
- }
-#endif
-
-#ifndef NDEBUG
- run_unit_tests();
-#endif
-#if YYDEBUG != 0
- if ( DEBUG_PARSE )
- yydebug = 1;
-#endif
-
- /* Set JAMDATE. */
- var_set( root_module(), constant_JAMDATE, list_new( outf_time(time(0)) ), VAR_SET );
-
- /* Set JAM_VERSION. */
- var_set( root_module(), constant_JAM_VERSION,
- list_push_back( list_push_back( list_new(
- object_new( VERSION_MAJOR_SYM ) ),
- object_new( VERSION_MINOR_SYM ) ),
- object_new( VERSION_PATCH_SYM ) ),
- VAR_SET );
-
- /* Set JAMUNAME. */
-#ifdef unix
- {
- struct utsname u;
-
- if ( uname( &u ) >= 0 )
- {
- var_set( root_module(), constant_JAMUNAME,
- list_push_back(
- list_push_back(
- list_push_back(
- list_push_back(
- list_new(
- object_new( u.sysname ) ),
- object_new( u.nodename ) ),
- object_new( u.release ) ),
- object_new( u.version ) ),
- object_new( u.machine ) ), VAR_SET );
- }
- }
-#endif /* unix */
-
- /* Load up environment variables. */
-
- /* First into the global module, with splitting, for backward
- * compatibility.
- */
- var_defines( root_module(), use_environ, 1 );
-
- environ_module = bindmodule( constant_ENVIRON );
- /* Then into .ENVIRON, without splitting. */
- var_defines( environ_module, use_environ, 0 );
-
- /*
- * Jam defines the variables OS & OSPLAT. We load them after the environment,
- * so that setting OS in the environment does not change Jam's notion of the
- * current platform.
- */
- var_defines( root_module(), othersyms, 1 );
-
- /* Load up variables set on command line. */
- for ( n = 0; ( s = getoptval( optv, 's', n ) ); ++n )
- {
- char *symv[2];
- symv[ 0 ] = s;
- symv[ 1 ] = 0;
- var_defines( root_module(), symv, 1 );
- var_defines( environ_module, symv, 0 );
- }
-
- /* Set the ARGV to reflect the complete list of arguments of invocation.
- */
- for ( n = 0; n < arg_c; ++n )
- {
- var_set( root_module(), constant_ARGV, list_new( object_new( arg_v[n] ) ), VAR_APPEND );
- }
-
- /* Initialize built-in rules. */
- load_builtins();
-
- /* Add the targets in the command line to the update list. */
- for ( n = 1; n < arg_c; ++n )
- {
- if ( arg_v[ n ][ 0 ] == '-' )
- {
- char * f = "-:l:d:j:f:gs:t:ano:qv";
- for ( ; *f; ++f ) if ( *f == arg_v[ n ][ 1 ] ) break;
- if ( ( f[ 1 ] == ':' ) && ( arg_v[ n ][ 2 ] == '\0' ) ) ++n;
- }
- else
- {
- OBJECT * target = object_new( arg_v[ n ] );
- mark_target_for_updating( target );
- object_free( target );
- }
- }
-
- if ( list_empty( targets_to_update() ) )
- {
- mark_target_for_updating( constant_all );
- }
-
- /* Parse ruleset. */
- {
- FRAME frame[ 1 ];
- frame_init( frame );
- for ( n = 0; ( s = getoptval( optv, 'f', n ) ); ++n )
- {
- OBJECT * filename = object_new( s );
- parse_file( filename, frame );
- object_free( filename );
- }
-
- if ( !n )
- {
- parse_file( constant_plus, frame );
- }
- }
-
- status = yyanyerrors();
-
- /* Manually touch -t targets. */
- for ( n = 0; ( s = getoptval( optv, 't', n ) ); ++n )
- {
- OBJECT * target = object_new( s );
- touch_target( target );
- object_free( target );
- }
-
- /* If an output file is specified, set globs.cmdout to that. */
- if ( ( s = getoptval( optv, 'o', 0 ) ) )
- {
- if ( !( globs.cmdout = fopen( s, "w" ) ) )
- {
- printf( "Failed to write to '%s'\n", s );
- exit( EXITBAD );
- }
- ++globs.noexec;
- }
-
- /* The build system may set the PARALLELISM variable to override -j
- options. */
- {
- LIST *p = L0;
- p = var_get ( root_module(), constant_PARALLELISM );
- if ( !list_empty( p ) )
- {
- int j = atoi( object_str( list_front( p ) ) );
- if ( j == -1 )
- {
- printf( "Invalid value of PARALLELISM: %s\n", object_str( list_front( p ) ) );
- }
- else
- {
- globs.jobs = j;
- }
- }
- }
-
- /* KEEP_GOING overrides -q option. */
- {
- LIST *p = L0;
- p = var_get( root_module(), constant_KEEP_GOING );
- if ( !list_empty( p ) )
- {
- int v = atoi( object_str( list_front( p ) ) );
- if ( v == 0 )
- globs.quitquick = 1;
- else
- globs.quitquick = 0;
- }
- }
-
- /* Now make target. */
- {
- PROFILE_ENTER( MAIN_MAKE );
-
- LIST * targets = targets_to_update();
- if ( !list_empty( targets ) )
- {
- status |= make( targets, anyhow );
- }
- else
- {
- status = last_update_now_status;
- }
-
- PROFILE_EXIT( MAIN_MAKE );
- }
-
- PROFILE_EXIT( MAIN );
- }
-
- if ( DEBUG_PROFILE )
- profile_dump();
-
-
-#ifdef OPT_HEADER_CACHE_EXT
- hcache_done();
-#endif
-
- clear_targets_to_update();
-
- /* Widely scattered cleanup. */
- file_done();
- rules_done();
- stamps_done();
- search_done();
- class_done();
- modules_done();
- regex_done();
- exec_done();
- pwd_done();
- path_done();
- function_done();
- list_done();
- constants_done();
- object_done();
-
- /* Close cmdout. */
- if ( globs.cmdout )
- fclose( globs.cmdout );
-
-#ifdef HAVE_PYTHON
- Py_Finalize();
-#endif
-
- BJAM_MEM_CLOSE();
-
- return status ? EXITBAD : EXITOK;
-}
-
-#if defined(_WIN32)
-#include <windows.h>
-char *executable_path(const char *argv0) {
- char buf[1024];
- DWORD ret = GetModuleFileName(NULL, buf, sizeof(buf));
- if (ret == 0 || ret == sizeof(buf)) return NULL;
- return strdup (buf);
-}
-#elif defined(__APPLE__) /* Not tested */
-#include <mach-o/dyld.h>
-char *executable_path(const char *argv0) {
- char buf[1024];
- uint32_t size = sizeof(buf);
- int ret = _NSGetExecutablePath(buf, &size);
- if (ret != 0) return NULL;
- return strdup(buf);
-}
-#elif defined(sun) || defined(__sun) /* Not tested */
-#include <stdlib.h>
-
-char *executable_path(const char *argv0) {
- return strdup(getexecname());
-}
-#elif defined(__FreeBSD__)
-#include <sys/sysctl.h>
-char *executable_path(const char *argv0) {
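- /* Ask the kernel for the path of the running binary: KERN_PROC_PATHNAME
- * with a pid of -1 refers to the calling process.
- */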
- int mib[4];
- mib[0] = CTL_KERN;
- mib[1] = KERN_PROC;
- mib[2] = KERN_PROC_PATHNAME;
- mib[3] = -1;
- char buf[1024];
- size_t size = sizeof(buf);
- sysctl(mib, 4, buf, &size, NULL, 0);
- if (size == 0 || size == sizeof(buf)) return NULL;
- return strndup(buf, size);
-}
-#elif defined(__linux__)
-#include <unistd.h>
-char *executable_path(const char *argv0) {
- char buf[1024];
- ssize_t ret = readlink("/proc/self/exe", buf, sizeof(buf));
- /* readlink() returns -1 on error, so treat any non-positive result as failure. */
- if (ret <= 0 || ret == (ssize_t) sizeof(buf)) return NULL;
- return strndup(buf, ret);
-}
-#else
-char *executable_path(const char *argv0) {
- /* If argv0 is an absolute path, assume it's the right absolute path. */
- if (argv0[0] == '/')
- return strdup(argv0);
- return NULL;
-}
-#endif
diff --git a/tools/build/v2/engine/jam.h b/tools/build/v2/engine/jam.h
deleted file mode 100644
index 26b94a7f7e..0000000000
--- a/tools/build/v2/engine/jam.h
+++ /dev/null
@@ -1,496 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * jam.h - includes and globals for jam
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 04/21/94 (seiwald) - DGUX is __DGUX__, not just __DGUX.
- * 05/04/94 (seiwald) - new globs.jobs (-j jobs)
- * 11/01/94 (wingerd) - let us define path of Jambase at compile time.
- * 12/30/94 (wingerd) - changed command buffer size for NT (MS-DOS shell).
- * 02/22/95 (seiwald) - Jambase now in /usr/local/lib.
- * 04/30/95 (seiwald) - FreeBSD added. Live Free or Die.
- * 05/10/95 (seiwald) - SPLITPATH character set up here.
- * 08/20/95 (seiwald) - added LINUX.
- * 08/21/95 (seiwald) - added NCR.
- * 10/23/95 (seiwald) - added SCO.
- * 01/03/96 (seiwald) - SINIX (nixdorf) added.
- * 03/13/96 (seiwald) - Jambase now compiled in; remove JAMBASE variable.
- * 04/29/96 (seiwald) - AIX now has 31 and 42 OSVERs.
- * 11/21/96 (peterk) - added BeOS with MW CW mwcc
- * 12/21/96 (seiwald) - OSPLAT now defined for NT.
- * 07/19/99 (sickel) - Mac OS X Server and Client support added
- * 02/18/00 (belmonte)- Support for Cygwin.
- * 09/12/00 (seiwald) - OSSYMS split to OSMAJOR/OSMINOR/OSPLAT
- * 12/29/00 (seiwald) - OSVER dropped.
- */
-
-#ifndef JAM_H_VP_2003_08_01
-#define JAM_H_VP_2003_08_01
-
-#ifdef HAVE_PYTHON
-#include <Python.h>
-#endif
-
-/* Assume popen support is available unless known otherwise. */
-#define HAVE_POPEN 1
-
-/*
- * Windows NT
- */
-
-#ifdef NT
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <malloc.h>
-#ifndef __MWERKS__
- #include <memory.h>
-#endif
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-
-#define OSMAJOR "NT=true"
-#define OSMINOR "OS=NT"
-#define OS_NT
-#define SPLITPATH ';'
-/* Windows NT 3.51 only allows 996 chars per line, but we deal with the problem
- * in "execnt.c".
- */
-#define MAXLINE (maxline()) /* longest 'together' actions */
-#define USE_EXECNT
-#define USE_PATHUNIX
-#define PATH_DELIM '\\'
-#define DOWNSHIFT_PATHS
-
-/* AS400 cross-compile from NT. */
-
-#ifdef AS400
- #undef OSMINOR
- #undef OSMAJOR
- #define OSMAJOR "AS400=true"
- #define OSMINOR "OS=AS400"
- #define OS_AS400
-#endif
-
-/* Metrowerks Standard Library on Windows. */
-
-#ifdef __MSL__
- #undef HAVE_POPEN
-#endif
-
-# endif
-
-/*
- * Windows MinGW32
- */
-
-#ifdef MINGW
-
-#include <fcntl.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <malloc.h>
-#include <memory.h>
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-
-#define OSMAJOR "MINGW=true"
-#define OSMINOR "OS=MINGW"
-#define OS_NT
-#define SPLITPATH ';'
-#define MAXLINE 996 /* longest 'together' actions */
-#define USE_EXECUNIX
-#define USE_PATHUNIX
-#define PATH_DELIM '\\'
-#define DOWNSHIFT_PATHS
-
-#endif
-
-/*
- * God fearing UNIX.
- */
-
-#ifndef OSMINOR
-
-#define OSMAJOR "UNIX=true"
-#define USE_EXECUNIX
-#define USE_FILEUNIX
-#define USE_PATHUNIX
-#define PATH_DELIM '/'
-
-#ifdef _AIX
- #define unix
- #define MAXLINE 23552 /* 24k - 1k, longest 'together' actions */
- #define OSMINOR "OS=AIX"
- #define OS_AIX
- #define NO_VFORK
-#endif
-#ifdef AMIGA
- #define OSMINOR "OS=AMIGA"
- #define OS_AMIGA
-#endif
-#ifdef __BEOS__
- #define unix
- #define OSMINOR "OS=BEOS"
- #define OS_BEOS
- #define NO_VFORK
-#endif
-#ifdef __bsdi__
- #define OSMINOR "OS=BSDI"
- #define OS_BSDI
-#endif
-#if defined (COHERENT) && defined (_I386)
- #define OSMINOR "OS=COHERENT"
- #define OS_COHERENT
- #define NO_VFORK
-#endif
-#if defined(__cygwin__) || defined(__CYGWIN__)
- #define OSMINOR "OS=CYGWIN"
- #define OS_CYGWIN
-#endif
-#if defined(__FreeBSD__) && !defined(__DragonFly__)
- #define OSMINOR "OS=FREEBSD"
- #define OS_FREEBSD
-#endif
-#ifdef __DragonFly__
- #define OSMINOR "OS=DRAGONFLYBSD"
- #define OS_DRAGONFLYBSD
-#endif
-#ifdef __DGUX__
- #define OSMINOR "OS=DGUX"
- #define OS_DGUX
-#endif
-#ifdef __hpux
- #define OSMINOR "OS=HPUX"
- #define OS_HPUX
-#endif
-#ifdef __OPENNT
- #define unix
- #define OSMINOR "OS=INTERIX"
- #define OS_INTERIX
- #define NO_VFORK
-#endif
-#ifdef __sgi
- #define OSMINOR "OS=IRIX"
- #define OS_IRIX
- #define NO_VFORK
-#endif
-#ifdef __ISC
- #define OSMINOR "OS=ISC"
- #define OS_ISC
- #define NO_VFORK
-#endif
-#ifdef linux
- #define OSMINOR "OS=LINUX"
- #define OS_LINUX
-#endif
-#ifdef __Lynx__
- #define OSMINOR "OS=LYNX"
- #define OS_LYNX
- #define NO_VFORK
- #define unix
-#endif
-#ifdef __MACHTEN__
- #define OSMINOR "OS=MACHTEN"
- #define OS_MACHTEN
-#endif
-#ifdef mpeix
- #define unix
- #define OSMINOR "OS=MPEIX"
- #define OS_MPEIX
- #define NO_VFORK
-#endif
-#ifdef __MVS__
- #define unix
- #define OSMINOR "OS=MVS"
- #define OS_MVS
-#endif
-#ifdef _ATT4
- #define OSMINOR "OS=NCR"
- #define OS_NCR
-#endif
-#ifdef __NetBSD__
- #define unix
- #define OSMINOR "OS=NETBSD"
- #define OS_NETBSD
- #define NO_VFORK
-#endif
-#ifdef __QNX__
- #define unix
- #ifdef __QNXNTO__
- #define OSMINOR "OS=QNXNTO"
- #define OS_QNXNTO
- #else
- #define OSMINOR "OS=QNX"
- #define OS_QNX
- #define NO_VFORK
- #define MAXLINE 996
- #endif
-#endif
-#ifdef NeXT
- #ifdef __APPLE__
- #define OSMINOR "OS=RHAPSODY"
- #define OS_RHAPSODY
- #else
- #define OSMINOR "OS=NEXT"
- #define OS_NEXT
- #endif
-#endif
-#ifdef __APPLE__
- #define unix
- #define OSMINOR "OS=MACOSX"
- #define OS_MACOSX
-#endif
-#ifdef __osf__
- #ifndef unix
- #define unix
- #endif
- #define OSMINOR "OS=OSF"
- #define OS_OSF
-#endif
-#ifdef _SEQUENT_
- #define OSMINOR "OS=PTX"
- #define OS_PTX
-#endif
-#ifdef M_XENIX
- #define OSMINOR "OS=SCO"
- #define OS_SCO
- #define NO_VFORK
-#endif
-#ifdef sinix
- #define unix
- #define OSMINOR "OS=SINIX"
- #define OS_SINIX
-#endif
-#ifdef sun
- #if defined(__svr4__) || defined(__SVR4)
- #define OSMINOR "OS=SOLARIS"
- #define OS_SOLARIS
- #else
- #define OSMINOR "OS=SUNOS"
- #define OS_SUNOS
- #endif
-#endif
-#ifdef ultrix
- #define OSMINOR "OS=ULTRIX"
- #define OS_ULTRIX
-#endif
-#ifdef _UNICOS
- #define OSMINOR "OS=UNICOS"
- #define OS_UNICOS
-#endif
-#if defined(__USLC__) && !defined(M_XENIX)
- #define OSMINOR "OS=UNIXWARE"
- #define OS_UNIXWARE
-#endif
-#ifdef __OpenBSD__
- #define OSMINOR "OS=OPENBSD"
- #define OS_OPENBSD
- #define unix
-#endif
-#if defined (__FreeBSD_kernel__) && !defined(__FreeBSD__)
- #define OSMINOR "OS=KFREEBSD"
- #define OS_KFREEBSD
-#endif
-#ifndef OSMINOR
- #define OSMINOR "OS=UNKNOWN"
-#endif
-
-/* All the UNIX includes */
-
-#include <sys/types.h>
-#include <sys/stat.h>
-
-#ifndef OS_MPEIX
- #include <sys/file.h>
-#endif
-
-#include <fcntl.h>
-#include <stdio.h>
-#include <ctype.h>
-#include <signal.h>
-#include <string.h>
-#include <time.h>
-#include <unistd.h>
-
-#ifndef OS_QNX
- #include <memory.h>
-#endif
-
-#ifndef OS_ULTRIX
- #include <stdlib.h>
-#endif
-
-#if !defined( OS_BSDI ) && \
- !defined( OS_FREEBSD ) && \
- !defined( OS_DRAGONFLYBSD ) && \
- !defined( OS_NEXT ) && \
- !defined( OS_MACHTEN ) && \
- !defined( OS_MACOSX ) && \
- !defined( OS_RHAPSODY ) && \
- !defined( OS_MVS ) && \
- !defined( OS_OPENBSD )
- #include <malloc.h>
-#endif
-
-#endif
-
-/*
- * OSPLAT definitions - suppressed when it is a one-of-a-kind.
- */
-
-#if defined( _M_PPC ) || \
- defined( PPC ) || \
- defined( ppc ) || \
- defined( __powerpc__ ) || \
- defined( __ppc__ )
- #define OSPLAT "OSPLAT=PPC"
-#endif
-
-#if defined( _ALPHA_ ) || \
- defined( __alpha__ )
- #define OSPLAT "OSPLAT=AXP"
-#endif
-
-#if defined( _i386_ ) || \
- defined( __i386__ ) || \
- defined( __i386 ) || \
- defined( _M_IX86 )
- #define OSPLAT "OSPLAT=X86"
-#endif
-
-#if defined( __ia64__ ) || \
- defined( __IA64__ ) || \
- defined( __ia64 )
- #define OSPLAT "OSPLAT=IA64"
-#endif
-
-#if defined( __x86_64__ ) || \
- defined( __amd64__ ) || \
- defined( _M_AMD64 )
- #define OSPLAT "OSPLAT=X86_64"
-#endif
-
-
-#if defined( __sparc__ ) || \
- defined( __sparc )
- #define OSPLAT "OSPLAT=SPARC"
-#endif
-
-#ifdef __mips__
- #define OSPLAT "OSPLAT=MIPS"
-#endif
-
-#ifdef __arm__
- #define OSPLAT "OSPLAT=ARM"
-#endif
-
-#ifdef __s390__
- #define OSPLAT "OSPLAT=390"
-#endif
-
-#ifdef __hppa
- #define OSPLAT "OSPLAT=PARISC"
-#endif
-
-#ifndef OSPLAT
- #define OSPLAT ""
-#endif
-
-/*
- * Jam implementation misc.
- */
-
-#ifndef MAXLINE
- #define MAXLINE 102400 /* longest 'together' actions */
-#endif
-
-#ifndef EXITOK
- #define EXITOK 0
- #define EXITBAD 1
-#endif
-
-#ifndef SPLITPATH
- #define SPLITPATH ':'
-#endif
-
-/* You probably do not need to muck with these. */
-
-#define MAXSYM 1024 /* longest symbol in the environment */
-#define MAXJPATH 1024 /* longest filename */
-
-#define MAXJOBS 64 /* silently enforced -j limit */
-#define MAXARGC 32 /* words in $(JAMSHELL) */
-
-/* Jam private definitions below. */
-
-#define DEBUG_MAX 14
-
-
-struct globs
-{
- int noexec;
- int jobs;
- int quitquick;
- int newestfirst; /* build newest sources first */
- int pipe_action;
- char debug[ DEBUG_MAX ];
- FILE * cmdout; /* print cmds, not run them */
- long timeout; /* number of seconds to limit actions to,
- * default 0 for no limit.
- */
- int dart; /* output build and test results formatted for Dart */
-};
-
-extern struct globs globs;
-
-#define DEBUG_MAKE ( globs.debug[ 1 ] ) /* show actions when executed */
-#define DEBUG_MAKEQ ( globs.debug[ 2 ] ) /* show even quiet actions */
-#define DEBUG_EXEC ( globs.debug[ 2 ] ) /* show text of actions */
-#define DEBUG_MAKEPROG ( globs.debug[ 3 ] ) /* show progress of make0 */
-#define DEBUG_BIND ( globs.debug[ 3 ] ) /* show when files bound */
-
-#define DEBUG_EXECCMD ( globs.debug[ 4 ] ) /* show execcmds()'s work */
-
-#define DEBUG_COMPILE ( globs.debug[ 5 ] ) /* show rule invocations */
-
-#define DEBUG_HEADER ( globs.debug[ 6 ] ) /* show result of header scan */
-#define DEBUG_BINDSCAN ( globs.debug[ 6 ] ) /* show result of dir scan */
-#define DEBUG_SEARCH ( globs.debug[ 6 ] ) /* show attempts at binding */
-
-#define DEBUG_VARSET ( globs.debug[ 7 ] ) /* show variable settings */
-#define DEBUG_VARGET ( globs.debug[ 8 ] ) /* show variable fetches */
-#define DEBUG_VAREXP ( globs.debug[ 8 ] ) /* show variable expansions */
-#define DEBUG_IF ( globs.debug[ 8 ] ) /* show 'if' calculations */
-#define DEBUG_LISTS ( globs.debug[ 9 ] ) /* show list manipulation */
-#define DEBUG_SCAN ( globs.debug[ 9 ] ) /* show scanner tokens */
-#define DEBUG_MEM ( globs.debug[ 9 ] ) /* show memory use */
-
-#define DEBUG_PROFILE ( globs.debug[ 10 ] ) /* dump rule execution times */
-#define DEBUG_PARSE ( globs.debug[ 11 ] ) /* debug parsing */
-#define DEBUG_GRAPH ( globs.debug[ 12 ] ) /* debug dependencies */
-#define DEBUG_FATE ( globs.debug[ 13 ] ) /* show changes to fate in make0() */
-
-/* Everyone gets the memory definitions. */
-#include "mem.h"
-
-/* They also get the profile functions. */
-#include "debug.h"
-
-#endif
diff --git a/tools/build/v2/engine/lists.c b/tools/build/v2/engine/lists.c
deleted file mode 100644
index c93fd7c090..0000000000
--- a/tools/build/v2/engine/lists.c
+++ /dev/null
@@ -1,526 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "object.h"
-# include "lists.h"
-# include "assert.h"
-
-/*
- * lists.c - maintain lists of objects
- *
- * 08/23/94 (seiwald) - new list_append()
- * 09/07/00 (seiwald) - documented lol_*() functions
- */
-
-struct freelist_node { struct freelist_node *next; };
-
-static struct freelist_node *freelist[32]; /* junkpile for list_free() */
-
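-/* Freelist bucket b holds blocks with room for ( 1 << b ) OBJECT pointers.
- * get_bucket() returns the smallest such bucket able to hold 'size' elements.
- */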
-static unsigned get_bucket( unsigned size )
-{
- unsigned bucket = 0;
- while ( size > ( 1u << bucket ) ) ++bucket;
- return bucket;
-}
-
-static LIST * list_alloc( unsigned size )
-{
- unsigned bucket = get_bucket( size );
- if ( freelist[ bucket ] )
- {
- struct freelist_node * result = freelist[ bucket ];
- freelist[ bucket ] = result->next;
- return (LIST *)result;
- }
- else
- {
- return (LIST *)BJAM_MALLOC( sizeof( LIST ) + ( 1u << bucket ) * sizeof( OBJECT * ) );
- }
-}
-
-static void list_dealloc( LIST * l )
-{
- unsigned size = list_length( l );
- unsigned bucket;
- struct freelist_node * node = (struct freelist_node *)l;
-
- if ( size == 0 ) return;
-
- bucket = get_bucket( size );
-
-#ifdef BJAM_NO_MEM_CACHE
-
- BJAM_FREE( node );
-
-#else
-
- node->next = freelist[ bucket ];
- freelist[ bucket ] = node;
-
-#endif
-
-}
-
-/*
- * list_append() - append a list onto another one, returning total
- */
-
-LIST * list_append( LIST * l, LIST * nl )
-{
- if ( list_empty( nl ) )
- {
- /* Just return l */
- }
- else if ( list_empty( l ) )
- {
- l = nl;
- }
- else
- {
- int l_size = list_length( l );
- int nl_size = list_length( nl );
- int size = l_size + nl_size;
- unsigned bucket;
- int i;
-
- bucket = get_bucket( size );
- /* Do we need to reallocate? */
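- /* l's block only holds the smallest power of two at or above l_size
- * entries; if l_size fits in half of the required bucket, that block is
- * too small for the combined list, so copy it into a bigger one.
- */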
- if ( l_size <= ( 1u << (bucket - 1) ) )
- {
- LIST * result = list_alloc( size );
- memcpy( list_begin( result ), list_begin( l ), l_size * sizeof( OBJECT * ) );
- list_dealloc( l );
- l = result;
- }
-
- l->impl.size = size;
- memcpy( list_begin( l ) + l_size, list_begin( nl ), nl_size * sizeof( OBJECT * ) );
- list_dealloc( nl );
- return l;
- }
-
- return l;
-}
-
-LISTITER list_begin( LIST * l )
-{
- if ( l )
- return (LISTITER)( (char *)l + sizeof(LIST) );
- else
- return 0;
-}
-
-LISTITER list_end( LIST * l )
-{
- if ( l )
- return list_begin( l ) + l->impl.size;
- else
- return 0;
-}
-
-LIST * list_new( OBJECT * value )
-{
- LIST * head;
- if ( freelist[ 0 ] )
- {
- struct freelist_node * result = freelist[ 0 ];
- freelist[ 0 ] = result->next;
- head = (LIST *)result;
- }
- else
- {
- head = BJAM_MALLOC( sizeof( LIST ) + sizeof( OBJECT * ) );
- }
-
- head->impl.size = 1;
- list_begin( head )[ 0 ] = value;
-
- return head;
-}
-
-/*
- * list_push_back() - tack a string onto the end of a list of strings
- */
-
-LIST * list_push_back( LIST * head, OBJECT * value )
-{
- unsigned int size = list_length( head );
- unsigned int i;
-
- if ( DEBUG_LISTS )
- printf( "list > %s <\n", object_str( value ) );
-
- /* If the size is a power of 2, reallocate. */
- if ( size == 0 )
- {
- head = list_alloc( 1 );
- }
- else if ( ( ( size - 1 ) & size ) == 0 )
- {
- LIST * l = list_alloc( size + 1 );
- memcpy( l, head, sizeof( LIST ) + size * sizeof( OBJECT * ) );
- list_dealloc( head );
- head = l;
- }
-
- list_begin( head )[ size ] = value;
- head->impl.size = size + 1;
-
- return head;
-}
-
-
-/*
- * list_copy() - copy a whole list of strings.
- */
-
-LIST * list_copy( LIST * l )
-{
- int size = list_length( l );
- int i;
- LIST * result;
-
- if ( size == 0 ) return L0;
-
- result = list_alloc( size );
- result->impl.size = size;
- for ( i = 0; i < size; ++i )
- {
- list_begin( result )[ i ] = object_copy( list_begin( l )[ i ] );
- }
- return result;
-}
-
-
-LIST * list_copy_range( LIST *l, LISTITER first, LISTITER last )
-{
- if ( first == last )
- {
- return L0;
- }
- else
- {
- int size = last - first;
- LIST * result = list_alloc( size );
- LISTITER dest = list_begin( result );
- result->impl.size = size;
- for ( ; first != last; ++first, ++dest )
- {
- *dest = object_copy( *first );
- }
- return result;
- }
-}
-
-
-/*
- * list_sublist() - copy a subset of a list of strings.
- */
-
-LIST * list_sublist( LIST * l, int start, int count )
-{
- int end = start + count;
- int size = list_length( l );
- if ( start >= size ) return L0;
- if ( end > size ) end = size;
- return list_copy_range( l, list_begin( l ) + start, list_begin( l ) + end );
-}
-
-
-static int str_ptr_compare( void const * va, void const * vb )
-{
- OBJECT * a = *( (OBJECT * *)va );
- OBJECT * b = *( (OBJECT * *)vb );
- return strcmp(object_str(a), object_str(b));
-}
-
-
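-/*
- * list_sort() - return a copy of the list, sorted by object string.
- */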
-LIST * list_sort( LIST * l )
-{
- int len;
- int ii;
- LIST * result;
-
- if ( !l )
- return L0;
-
- len = list_length( l );
- result = list_copy( l );
-
- qsort( list_begin( result ), len, sizeof( OBJECT * ), str_ptr_compare );
-
- return result;
-}
-
-
-/*
- * list_free() - free a list of strings
- */
-
-void list_free( LIST * head )
-{
- if ( !list_empty( head ) )
- {
- LISTITER iter = list_begin( head ), end = list_end( head );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- object_free( list_item( iter ) );
- }
- list_dealloc( head );
- }
-}
-
-
-/*
- * list_pop_front() - remove the front element from a list of strings
- */
-
-LIST * list_pop_front( LIST * l )
-{
- unsigned size = list_length( l );
- assert( size != 0 );
- --size;
- object_free( list_front( l ) );
-
- if ( size == 0 )
- {
- list_dealloc( l );
- return L0;
- }
- else if ( ( ( size - 1 ) & size ) == 0 )
- {
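- /* The reduced size is a power of two, so the remaining elements fit in
- * the next smaller bucket; move them there and release the old block.
- */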
- LIST * nl = list_alloc( size );
- nl->impl.size = size;
- memcpy( list_begin( nl ), list_begin( l ) + 1, size * sizeof( OBJECT * ) );
- list_dealloc( l );
- return nl;
- }
- else
- {
- l->impl.size = size;
- memmove( list_begin( l ), list_begin( l ) + 1, size * sizeof( OBJECT * ) );
- return l;
- }
-}
-
-LIST * list_reverse( LIST * l )
-{
- int size = list_length( l );
- if ( size == 0 ) return L0;
- else
- {
- LIST * result = list_alloc( size );
- int i;
- result->impl.size = size;
- for ( i = 0; i < size; ++i )
- {
- list_begin( result )[ i ] = object_copy( list_begin( l )[ size - i - 1 ] );
- }
- return result;
- }
-}
-
-int list_cmp( LIST * t, LIST * s )
-{
- int status = 0;
- LISTITER t_it = list_begin( t ), t_end = list_end( t );
- LISTITER s_it = list_begin( s ), s_end = list_end( s );
-
- while ( !status && ( t_it != t_end || s_it != s_end ) )
- {
- const char *st = t_it != t_end ? object_str( list_item( t_it ) ) : "";
- const char *ss = s_it != s_end ? object_str( list_item( s_it ) ) : "";
-
- status = strcmp( st, ss );
-
- t_it = t_it != t_end ? list_next( t_it ) : t_it;
- s_it = s_it != s_end ? list_next( s_it ) : s_it;
- }
-
- return status;
-}
-
-int list_is_sublist( LIST * sub, LIST * l )
-{
- LISTITER iter = list_begin( sub ), end = list_end( sub );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- if ( !list_in( l, list_item( iter ) ) )
- return 0;
- }
- return 1;
-}
-
-/*
- * list_print() - print a list of strings to stdout
- */
-
-void list_print( LIST * l )
-{
- LISTITER iter = list_begin( l ), end = list_end( l );
- if ( iter != end )
- {
- printf( "%s", object_str( list_item( iter ) ) );
- iter = list_next( iter );
- for ( ; iter != end; iter = list_next( iter ) )
- printf( " %s", object_str( list_item( iter ) ) );
- }
-}
-
-
-/*
- * list_length() - return the number of items in the list
- */
-
-int list_length( LIST * l )
-{
- if ( l )
- return l->impl.size;
- else
- return 0;
-}
-
-
-int list_in( LIST * l, OBJECT * value )
-{
- LISTITER iter = list_begin( l ), end = list_end( l );
- for ( ; iter != end; iter = list_next( iter ) )
- if ( object_equal( list_item( iter ), value ) )
- return 1;
- return 0;
-}
-
-
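-/*
- * list_unique() - copy a sorted list, dropping adjacent duplicate entries.
- */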
-LIST * list_unique( LIST * sorted_list )
-{
- LIST * result = L0;
- OBJECT * last_added = 0;
-
- LISTITER iter = list_begin( sorted_list ), end = list_end( sorted_list );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- if ( !last_added || !object_equal( list_item( iter ), last_added ) )
- {
- result = list_push_back( result, object_copy( list_item( iter ) ) );
- last_added = list_item( iter );
- }
- }
- return result;
-}
-
-void list_done()
-{
- int i;
- int total = 0;
- for ( i = 0; i < sizeof( freelist ) / sizeof( freelist[ 0 ] ); ++i )
- {
- struct freelist_node *l, *tmp;
- int bytes;
- for( l = freelist[ i ]; l; )
- {
- tmp = l;
- l = l->next;
- BJAM_FREE( tmp );
- }
- }
-}
-
-
-/*
- * lol_init() - initialize a LOL (list of lists).
- */
-
-void lol_init( LOL * lol )
-{
- lol->count = 0;
-}
-
-
-/*
- * lol_add() - append a LIST onto an LOL.
- */
-
-void lol_add( LOL * lol, LIST * l )
-{
- if ( lol->count < LOL_MAX )
- lol->list[ lol->count++ ] = l;
-}
-
-
-/*
- * lol_free() - free the LOL and its LISTs.
- */
-
-void lol_free( LOL * lol )
-{
- int i;
- for ( i = 0; i < lol->count; ++i )
- list_free( lol->list[ i ] );
- lol->count = 0;
-}
-
-
-/*
- * lol_get() - return one of the LISTs in the LOL.
- */
-
-LIST * lol_get( LOL * lol, int i )
-{
- return i < lol->count ? lol->list[ i ] : L0;
-}
-
-
-/*
- * lol_print() - debug print LISTS separated by ":".
- */
-
-void lol_print( LOL * lol )
-{
- int i;
-
- for ( i = 0; i < lol->count; ++i )
- {
- if ( i )
- printf( " : " );
- list_print( lol->list[ i ] );
- }
-}
-
-#ifdef HAVE_PYTHON
-
-PyObject *list_to_python(LIST *l)
-{
- PyObject *result = PyList_New(0);
- LISTITER iter = list_begin( l ), end = list_end( l );
-
- for (; iter != end; iter = list_next( iter ) )
- {
- PyObject* s = PyString_FromString(object_str(list_item(iter)));
- PyList_Append(result, s);
- Py_DECREF(s);
- }
-
- return result;
-}
-
-LIST *list_from_python(PyObject *l)
-{
- LIST * result = L0;
-
- Py_ssize_t i, n;
- n = PySequence_Size(l);
- for (i = 0; i < n; ++i)
- {
- PyObject *v = PySequence_GetItem(l, i);
- result = list_push_back(result, object_new (PyString_AsString(v)));
- Py_DECREF(v);
- }
-
- return result;
-}
-
-#endif
diff --git a/tools/build/v2/engine/lists.h b/tools/build/v2/engine/lists.h
deleted file mode 100644
index bc97261e57..0000000000
--- a/tools/build/v2/engine/lists.h
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * lists.h - the LIST structure and routines to manipulate them
- *
- * The whole of jam relies on lists of objects as a datatype. This
- * module, in conjunction with object.c, handles these relatively
- * efficiently.
- *
- * Structures defined:
- *
- * LIST - list of OBJECTs
- * LOL - list of LISTs
- *
- * External routines:
- *
- * list_append() - append a list onto another one, returning total
- * list_new() - create a new list holding a single object
- * list_copy() - copy a whole list of objects
- * list_sublist() - copy a subset of a list of objects
- * list_free() - free a list of objects
- * list_print() - print a list of objects to stdout
- * list_length() - return the number of items in the list
- *
- * lol_init() - initialize a LOL (list of lists)
- * lol_add() - append a LIST onto an LOL
- * lol_free() - free the LOL and its LISTs
- * lol_get() - return one of the LISTs in the LOL
- * lol_print() - debug print LISTS separated by ":"
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 08/23/94 (seiwald) - new list_append()
- */
-
-#ifndef LISTS_DWA20011022_H
-# define LISTS_DWA20011022_H
-
-#include "object.h"
-
-#ifdef HAVE_PYTHON
-#include <Python.h>
-#endif
-
-/*
- * LIST - list of strings
- */
-
-typedef struct _list LIST;
-
-struct _list {
- union {
- int size;
- OBJECT *align;
- } impl;
-};
-
-typedef OBJECT * * LISTITER;
-
-/*
- * LOL - list of LISTs
- */
-
-typedef struct _lol LOL;
-
-# define LOL_MAX 19
-
-struct _lol {
- int count;
- LIST *list[ LOL_MAX ];
-};
-
-LIST * list_new( OBJECT * value );
-LIST * list_append( LIST *l, LIST *nl );
-LIST * list_copy( LIST *l );
-LIST * list_copy_range( LIST *l, LISTITER first, LISTITER last );
-void list_free( LIST *head );
-LIST * list_push_back( LIST *head, OBJECT *string );
-void list_print( LIST *l );
-int list_length( LIST *l );
-LIST * list_sublist( LIST *l, int start, int count );
-LIST * list_pop_front( LIST *l );
-LIST * list_sort( LIST *l);
-LIST * list_unique( LIST *sorted_list);
-int list_in(LIST* l, OBJECT* value);
-LIST * list_reverse( LIST * );
-int list_cmp( LIST * lhs, LIST * rhs );
-int list_is_sublist( LIST * sub, LIST * l );
-void list_done();
-
-LISTITER list_begin( LIST * );
-LISTITER list_end( LIST * );
-# define list_next( it ) ((it) + 1)
-# define list_item( it ) (*(it))
-# define list_empty( l ) ( (l) == L0 )
-# define list_front( l ) list_item( list_begin( l ) )
-
-# define L0 ((LIST *)0)
-
-void lol_add( LOL *lol, LIST *l );
-void lol_init( LOL *lol );
-void lol_free( LOL *lol );
-LIST * lol_get( LOL *lol, int i );
-void lol_print( LOL *lol );
-void lol_build( LOL* lol, const char** elements );
-
-#ifdef HAVE_PYTHON
-
-PyObject *list_to_python(LIST *l);
-LIST *list_from_python(PyObject *l);
-
-#endif
-
-#endif
-
diff --git a/tools/build/v2/engine/make.c b/tools/build/v2/engine/make.c
deleted file mode 100644
index 96416cbd52..0000000000
--- a/tools/build/v2/engine/make.c
+++ /dev/null
@@ -1,819 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * make.c - bring a target up to date, once rules are in place.
- *
- * This module controls the execution of rules to bring a target and its
- * dependencies up to date. It is invoked after the targets, rules, etc.
- * described in rules.h have been created by interpreting the Jam files.
- *
- * This file contains the main make() entry point and the first pass make0().
- * The second pass, make1(), which actually does the command execution, is in
- * make1.c.
- *
- * External routines:
- * make() - make a target, given its name
- *
- * Internal routines:
- * make0() - bind and scan everything to make a TARGET
- * make0sort() - reorder TARGETS chain by their time (newest to oldest)
- *
- * 12/26/93 (seiwald) - allow NOTIME targets to be expanded via $(<), $(>).
- * 01/04/94 (seiwald) - print all targets, bounded, when tracing commands.
- * 04/08/94 (seiwald) - progress report now reflects only targets with actions.
- * 04/11/94 (seiwald) - Combined deps & headers into deps[2] in TARGET.
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 12/20/94 (seiwald) - make0() headers after determining fate of target, so
- * that headers are not seen as being dependent on
- * themselves.
- * 01/19/95 (seiwald) - distinguish between CANTFIND/CANTMAKE targets.
- * 02/02/95 (seiwald) - propagate leaf source time for new LEAVES rule.
- * 02/14/95 (seiwald) - NOUPDATE rule means don't update existing target.
- * 08/22/95 (seiwald) - NOUPDATE targets immune to anyhow (-a) flag.
- * 09/06/00 (seiwald) - NOCARE affects targets with sources/actions.
- * 03/02/01 (seiwald) - reverse NOCARE change.
- * 03/14/02 (seiwald) - TEMPORARY targets no longer take on parents age.
- * 03/16/02 (seiwald) - support for -g (reorder builds by source time).
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "rules.h"
-
-#ifdef OPT_HEADER_CACHE_EXT
- #include "hcache.h"
-#endif
-
-#include "search.h"
-#include "object.h"
-#include "make.h"
-#include "headers.h"
-#include "command.h"
-#include <assert.h>
-
-#ifndef max
- #define max( a,b ) ((a)>(b)?(a):(b))
-#endif
-
-static TARGETS * make0sort( TARGETS * c );
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- static void dependGraphOutput( TARGET * t, int depth );
-#endif
-
-static const char * target_fate[] =
-{
- "init", /* T_FATE_INIT */
- "making", /* T_FATE_MAKING */
- "stable", /* T_FATE_STABLE */
- "newer", /* T_FATE_NEWER */
- "temp", /* T_FATE_ISTMP */
- "touched", /* T_FATE_TOUCHED */
- "rebuild", /* T_FATE_REBUILD */
- "missing", /* T_FATE_MISSING */
- "needtmp", /* T_FATE_NEEDTMP */
- "old", /* T_FATE_OUTDATED */
- "update", /* T_FATE_UPDATE */
- "nofind", /* T_FATE_CANTFIND */
- "nomake" /* T_FATE_CANTMAKE */
-};
-
-static const char * target_bind[] =
-{
- "unbound",
- "missing",
- "parents",
- "exists",
-};
-
-# define spaces(x) ( " " + ( x > 20 ? 0 : 20-x ) )
-
-
-/*
- * make() - make a target, given its name.
- */
-
-int make( LIST * targets, int anyhow )
-{
- COUNTS counts[ 1 ];
- int status = 0; /* 1 if anything fails */
-
-#ifdef OPT_HEADER_CACHE_EXT
- hcache_init();
-#endif
-
- memset( (char *)counts, 0, sizeof( *counts ) );
-
- /* First bind all targets with LOCATE_TARGET setting. This is needed to
- * correctly handle dependencies on generated headers.
- */
- bind_explicitly_located_targets();
-
- {
- LISTITER iter, end;
- PROFILE_ENTER( MAKE_MAKE0 );
- for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
- if ( t->fate == T_FATE_INIT )
- make0( t, 0, 0, counts, anyhow );
- }
- PROFILE_EXIT( MAKE_MAKE0 );
- }
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_GRAPH )
- {
- LISTITER iter, end;
- for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
- dependGraphOutput( bindtarget( list_item( iter ) ), 0 );
- }
-#endif
-
- if ( DEBUG_MAKE )
- {
- if ( counts->targets )
- printf( "...found %d target%s...\n", counts->targets,
- counts->targets > 1 ? "s" : "" );
- if ( counts->temp )
- printf( "...using %d temp target%s...\n", counts->temp,
- counts->temp > 1 ? "s" : "" );
- if ( counts->updating )
- printf( "...updating %d target%s...\n", counts->updating,
- counts->updating > 1 ? "s" : "" );
- if ( counts->cantfind )
- printf( "...can't find %d target%s...\n", counts->cantfind,
- counts->cantfind > 1 ? "s" : "" );
- if ( counts->cantmake )
- printf( "...can't make %d target%s...\n", counts->cantmake,
- counts->cantmake > 1 ? "s" : "" );
- }
-
- status = counts->cantfind || counts->cantmake;
-
- {
- LISTITER iter, end;
- PROFILE_ENTER( MAKE_MAKE1 );
- for ( iter = list_begin( targets ), end = list_end( targets ); iter != end; iter = list_next( iter ) )
- status |= make1( bindtarget( list_item( iter ) ) );
- PROFILE_EXIT( MAKE_MAKE1 );
- }
-
- return status;
-}
-
-
-/* Force any dependants of t that have already at least begun being visited by
- * make0() to be updated.
- */
-
-static void update_dependants( TARGET * t )
-{
- TARGETS * q;
-
- for ( q = t->dependants; q; q = q->next )
- {
- TARGET * p = q->target;
- char fate0 = p->fate;
-
- /* If we have already at least begun visiting it and we are not already
- * rebuilding it for other reasons.
- */
- if ( ( fate0 != T_FATE_INIT ) && ( fate0 < T_FATE_BUILD ) )
- {
- p->fate = T_FATE_UPDATE;
-
- if ( DEBUG_FATE )
- {
- printf( "fate change %s from %s to %s (as dependant of %s)\n",
- object_str( p->name ), target_fate[ (int) fate0 ], target_fate[ (int) p->fate ], object_str( t->name ) );
- }
-
- /* If we are done visiting it, go back and make sure its dependants
- * get rebuilt.
- */
- if ( fate0 > T_FATE_MAKING )
- update_dependants( p );
- }
- }
-}
-
-
-/*
- * Make sure that all of t's rebuilds get rebuilt.
- */
-
-static void force_rebuilds( TARGET * t )
-{
- TARGETS * d;
- for ( d = t->rebuilds; d; d = d->next )
- {
- TARGET * r = d->target;
-
- /* If it is not already being rebuilt for other reasons. */
- if ( r->fate < T_FATE_BUILD )
- {
- if ( DEBUG_FATE )
- printf( "fate change %s from %s to %s (by rebuild)\n",
- object_str( r->name ), target_fate[ (int) r->fate ], target_fate[ T_FATE_REBUILD ] );
-
- /* Force rebuild it. */
- r->fate = T_FATE_REBUILD;
-
- /* And make sure its dependants are updated too. */
- update_dependants( r );
- }
- }
-}
-
-
-/*
- * make0() - bind and scan everything to make a TARGET.
- *
- * Recursively binds a target, searches for #included headers, calls itself on
- * those headers and any dependencies.
- */
-
-void make0
-(
- TARGET * t,
- TARGET * p, /* parent */
- int depth, /* for display purposes */
- COUNTS * counts, /* for reporting */
- int anyhow
-) /* forcibly touch all (real) targets */
-{
- TARGETS * c;
- TARGET * ptime = t;
- time_t last;
- time_t leaf;
- time_t hlast;
- int fate;
- char const * flag = "";
- SETTINGS * s;
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- int savedFate, oldTimeStamp;
-#endif
-
- /*
- * Step 1: initialize
- */
-
- if ( DEBUG_MAKEPROG )
- printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) );
-
- t->fate = T_FATE_MAKING;
-
- /*
- * Step 2: under the influence of "on target" variables,
- * bind the target and search for headers.
- */
-
- /* Step 2a: set "on target" variables. */
- s = copysettings( t->settings );
- pushsettings( root_module(), s );
-
- /* Step 2b: find and timestamp the target file (if it is a file). */
- if ( ( t->binding == T_BIND_UNBOUND ) && !( t->flags & T_FLAG_NOTFILE ) )
- {
- OBJECT * another_target;
- object_free( t->boundname );
- t->boundname = search( t->name, &t->time, &another_target,
- t->flags & T_FLAG_ISFILE );
- /* If it was detected that this target refers to an already existing and
- * bound one, we add an include dependency, so that every target
- * depending on us will depend on that other target as well.
- */
- if ( another_target )
- target_include( t, bindtarget( another_target ) );
-
- t->binding = t->time ? T_BIND_EXISTS : T_BIND_MISSING;
- }
-
- /* INTERNAL, NOTFILE header nodes have the time of their parents. */
- if ( p && ( t->flags & T_FLAG_INTERNAL ) )
- ptime = p;
-
- /* If temp file does not exist but parent does, use parent. */
- if ( p && ( t->flags & T_FLAG_TEMP ) &&
- ( t->binding == T_BIND_MISSING ) &&
- ( p->binding != T_BIND_MISSING ) )
- {
- t->binding = T_BIND_PARENTS;
- ptime = p;
- }
-
-#ifdef OPT_SEMAPHORE
- {
- LIST * var = var_get( root_module(), constant_JAM_SEMAPHORE );
- if ( !list_empty( var ) )
- {
- TARGET * semaphore = bindtarget( list_front( var ) );
- semaphore->progress = T_MAKE_SEMAPHORE;
- t->semaphore = semaphore;
- }
- }
-#endif
-
- /* Step 2c: If its a file, search for headers. */
- if ( t->binding == T_BIND_EXISTS )
- headers( t );
-
- /* Step 2d: reset "on target" variables. */
- popsettings( root_module(), s );
- freesettings( s );
-
- /*
- * Pause for a little progress reporting.
- */
-
- if ( DEBUG_BIND )
- {
- if ( ! object_equal( t->name, t->boundname ) )
- printf( "bind\t--\t%s%s: %s\n",
- spaces( depth ), object_str( t->name ), object_str( t->boundname ) );
-
- switch ( t->binding )
- {
- case T_BIND_UNBOUND:
- case T_BIND_MISSING:
- case T_BIND_PARENTS:
- printf( "time\t--\t%s%s: %s\n",
- spaces( depth ), object_str( t->name ), target_bind[ (int) t->binding ] );
- break;
-
- case T_BIND_EXISTS:
- printf( "time\t--\t%s%s: %s",
- spaces( depth ), object_str( t->name ), ctime( &t->time ) );
- break;
- }
- }
-
- /*
- * Step 3: recursively make0() dependencies & headers.
- */
-
- /* Step 3a: recursively make0() dependencies. */
- for ( c = t->depends; c; c = c->next )
- {
- int internal = t->flags & T_FLAG_INTERNAL;
-
- /* Warn about circular deps, except for includes, which include each
- * other a lot.
- */
- if ( c->target->fate == T_FATE_INIT )
- make0( c->target, ptime, depth + 1, counts, anyhow );
- else if ( c->target->fate == T_FATE_MAKING && !internal )
- printf( "warning: %s depends on itself\n", object_str( c->target->name ) );
- }
-
- /* Step 3b: recursively make0() internal includes node. */
- if ( t->includes )
- make0( t->includes, p, depth + 1, counts, anyhow );
-
- /* Step 3c: add dependencies' includes to our direct dependencies. */
- {
- TARGETS * incs = 0;
- for ( c = t->depends; c; c = c->next )
- if ( c->target->includes )
- incs = targetentry( incs, c->target->includes );
- t->depends = targetchain( t->depends, incs );
- }
-
- /*
- * Step 4: compute time & fate
- */
-
- /* Step 4a: pick up dependencies' time and fate */
- last = 0;
- leaf = 0;
- fate = T_FATE_STABLE;
- for ( c = t->depends; c; c = c->next )
- {
- /* If LEAVES has been applied, we only heed the timestamps of the leaf
- * source nodes.
- */
- leaf = max( leaf, c->target->leaf );
-
- if ( t->flags & T_FLAG_LEAVES )
- {
- last = leaf;
- continue;
- }
-
- last = max( last, c->target->time );
- fate = max( fate, c->target->fate );
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE )
- if ( fate < c->target->fate )
- printf( "fate change %s from %s to %s by dependency %s\n",
- object_str( t->name ), target_fate[(int) fate], target_fate[(int) c->target->fate],
- object_str( c->target->name ) );
-#endif
- }
-
- /* Step 4b: pick up included headers time */
-
- /*
- * If a header is newer than a temp source that includes it,
- * the temp source will need building.
- */
-
- hlast = t->includes ? t->includes->time : 0;
-
- /* Step 4c: handle NOUPDATE oddity.
- *
- * If a NOUPDATE file exists, mark it as having eternally old dependencies.
- * Do not inherit our fate from our dependencies. Decide fate based only on
- * other flags and our binding (done later).
- */
- if ( t->flags & T_FLAG_NOUPDATE )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE )
- if ( fate != T_FATE_STABLE )
- printf( "fate change %s back to stable, NOUPDATE.\n",
- object_str( t->name ) );
-#endif
-
- last = 0;
- t->time = 0;
-
- /* Do not inherit our fate from our dependencies. Decide fate based only
- * upon other flags and our binding (done later).
- */
- fate = T_FATE_STABLE;
- }
-
- /* Step 4d: determine fate: rebuild target or what? */
-
- /*
- In English:
- If can not find or make child, can not make target.
- If children changed, make target.
- If target missing, make it.
- If children newer, make target.
- If temp's children newer than parent, make temp.
- If temp's headers newer than parent, make temp.
- If deliberately touched, make it.
- If up-to-date temp file present, use it.
- If target newer than non-notfile parent, mark target newer.
- Otherwise, stable!
-
- Note this block runs from least to most stable:
- as we make it further down the list, the target's
- fate is getting stabler.
- */
-
-#ifdef OPT_GRAPH_DEBUG_EXT
- savedFate = fate;
- oldTimeStamp = 0;
-#endif
-
- if ( fate >= T_FATE_BROKEN )
- {
- fate = T_FATE_CANTMAKE;
- }
- else if ( fate >= T_FATE_SPOIL )
- {
- fate = T_FATE_UPDATE;
- }
- else if ( t->binding == T_BIND_MISSING )
- {
- fate = T_FATE_MISSING;
- }
- else if ( ( t->binding == T_BIND_EXISTS ) && ( last > t->time ) )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- oldTimeStamp = 1;
-#endif
- fate = T_FATE_OUTDATED;
- }
- else if ( ( t->binding == T_BIND_PARENTS ) && ( last > p->time ) )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- oldTimeStamp = 1;
-#endif
- fate = T_FATE_NEEDTMP;
- }
- else if ( ( t->binding == T_BIND_PARENTS ) && ( hlast > p->time ) )
- {
- fate = T_FATE_NEEDTMP;
- }
- else if ( t->flags & T_FLAG_TOUCHED )
- {
- fate = T_FATE_TOUCHED;
- }
- else if ( anyhow && !( t->flags & T_FLAG_NOUPDATE ) )
- {
- fate = T_FATE_TOUCHED;
- }
- else if ( ( t->binding == T_BIND_EXISTS ) && ( t->flags & T_FLAG_TEMP ) )
- {
- fate = T_FATE_ISTMP;
- }
- else if ( ( t->binding == T_BIND_EXISTS ) && p &&
- ( p->binding != T_BIND_UNBOUND ) && ( t->time > p->time ) )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- oldTimeStamp = 1;
-#endif
- fate = T_FATE_NEWER;
- }
- else
- {
- fate = T_FATE_STABLE;
- }
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE && ( fate != savedFate ) )
- {
- if ( savedFate == T_FATE_STABLE )
- printf( "fate change %s set to %s%s\n", object_str( t->name ),
- target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" );
- else
- printf( "fate change %s from %s to %s%s\n", object_str( t->name ),
- target_fate[ savedFate ], target_fate[ fate ],
- oldTimeStamp ? " (by timestamp)" : "" );
- }
-#endif
-
- /* Step 4e: handle missing files */
- /* If it is missing and there are no actions to create it, boom. */
- /* If we can not make a target we do not care about it, okay. */
- /* We could insist that there are updating actions for all missing */
- /* files, but if they have dependencies we just pretend it is a NOTFILE. */
-
- if ( ( fate == T_FATE_MISSING ) && !t->actions && !t->depends )
- {
- if ( t->flags & T_FLAG_NOCARE )
- {
-#ifdef OPT_GRAPH_DEBUG_EXT
- if ( DEBUG_FATE )
- printf( "fate change %s to STABLE from %s, "
- "no actions, no dependencies and do not care\n",
- object_str( t->name ), target_fate[ fate ] );
-#endif
- fate = T_FATE_STABLE;
- }
- else
- {
- printf( "don't know how to make %s\n", object_str( t->name ) );
- fate = T_FATE_CANTFIND;
- }
- }
-
- /* Step 4f: propagate dependencies' time & fate. */
- /* Set leaf time to be our time only if this is a leaf. */
-
- t->time = max( t->time, last );
- t->leaf = leaf ? leaf : t->time ;
- /* This target's fate may have been updated by virtue of following some
- * target's rebuilds list, so only allow it to be increased to the fate we
- * have calculated. Otherwise, grab its new fate.
- */
- if ( fate > t->fate )
- t->fate = fate;
- else
- fate = t->fate;
-
- /* Step 4g: if this target needs to be built, force rebuild everything in
- * this target's rebuilds list.
- */
- if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) )
- force_rebuilds( t );
-
- /*
- * Step 5: sort dependencies by their update time.
- */
-
- if ( globs.newestfirst )
- t->depends = make0sort( t->depends );
-
- /*
- * Step 6: a little harmless tabulating for tracing purposes
- */
-
- /* Do not count or report internal includes nodes. */
- if ( t->flags & T_FLAG_INTERNAL )
- return;
-
- if ( counts )
- {
-#ifdef OPT_IMPROVED_PATIENCE_EXT
- ++counts->targets;
-#else
- if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE )
- printf( "...patience...\n" );
-#endif
-
- if ( fate == T_FATE_ISTMP )
- ++counts->temp;
- else if ( fate == T_FATE_CANTFIND )
- ++counts->cantfind;
- else if ( ( fate == T_FATE_CANTMAKE ) && t->actions )
- ++counts->cantmake;
- else if ( ( fate >= T_FATE_BUILD ) && ( fate < T_FATE_BROKEN ) &&
- t->actions )
- ++counts->updating;
- }
-
- if ( !( t->flags & T_FLAG_NOTFILE ) && ( fate >= T_FATE_SPOIL ) )
- flag = "+";
- else if ( ( t->binding == T_BIND_EXISTS ) && p && ( t->time > p->time ) )
- flag = "*";
-
- if ( DEBUG_MAKEPROG )
- printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int) t->fate ],
- spaces( depth ), object_str( t->name ) );
-}
-
-
-#ifdef OPT_GRAPH_DEBUG_EXT
-
-static const char * target_name( TARGET * t )
-{
- static char buf[ 1000 ];
- if ( t->flags & T_FLAG_INTERNAL )
- {
- sprintf( buf, "%s (internal node)", object_str( t->name ) );
- return buf;
- }
- return object_str( t->name );
-}
-
-
-/*
- * dependGraphOutput() - output the DG after make0 has run.
- */
-
-static void dependGraphOutput( TARGET * t, int depth )
-{
- TARGETS * c;
-
- if ( ( t->flags & T_FLAG_VISITED ) || !t->name || !t->boundname )
- return;
-
- t->flags |= T_FLAG_VISITED;
-
- switch ( t->fate )
- {
- case T_FATE_TOUCHED:
- case T_FATE_MISSING:
- case T_FATE_OUTDATED:
- case T_FATE_UPDATE:
- printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) );
- break;
- default:
- printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) );
- break;
- }
-
- if ( ! object_equal( t->name, t->boundname ) )
- printf( " %s Loc: %s\n", spaces( depth ), object_str( t->boundname ) );
-
- switch ( t->fate )
- {
- case T_FATE_STABLE:
- printf( " %s : Stable\n", spaces( depth ) );
- break;
- case T_FATE_NEWER:
- printf( " %s : Newer\n", spaces( depth ) );
- break;
- case T_FATE_ISTMP:
- printf( " %s : Up to date temp file\n", spaces( depth ) );
- break;
- case T_FATE_NEEDTMP:
- printf( " %s : Temporary file, to be updated\n", spaces( depth ) );
- break;
- case T_FATE_TOUCHED:
- printf( " %s : Been touched, updating it\n", spaces( depth ) );
- break;
- case T_FATE_MISSING:
- printf( " %s : Missing, creating it\n", spaces( depth ) );
- break;
- case T_FATE_OUTDATED:
- printf( " %s : Outdated, updating it\n", spaces( depth ) );
- break;
- case T_FATE_REBUILD:
- printf( " %s : Rebuild, updating it\n", spaces( depth ) );
- break;
- case T_FATE_UPDATE:
- printf( " %s : Updating it\n", spaces( depth ) );
- break;
- case T_FATE_CANTFIND:
- printf( " %s : Can not find it\n", spaces( depth ) );
- break;
- case T_FATE_CANTMAKE:
- printf( " %s : Can make it\n", spaces( depth ) );
- break;
- }
-
- if ( t->flags & ~T_FLAG_VISITED )
- {
- printf( " %s : ", spaces( depth ) );
- if ( t->flags & T_FLAG_TEMP ) printf( "TEMPORARY " );
- if ( t->flags & T_FLAG_NOCARE ) printf( "NOCARE " );
- if ( t->flags & T_FLAG_NOTFILE ) printf( "NOTFILE " );
- if ( t->flags & T_FLAG_TOUCHED ) printf( "TOUCHED " );
- if ( t->flags & T_FLAG_LEAVES ) printf( "LEAVES " );
- if ( t->flags & T_FLAG_NOUPDATE ) printf( "NOUPDATE " );
- printf( "\n" );
- }
-
- for ( c = t->depends; c; c = c->next )
- {
- printf( " %s : Depends on %s (%s)", spaces( depth ),
- target_name( c->target ), target_fate[ (int) c->target->fate ] );
- if ( c->target->time == t->time )
- printf( " (max time)");
- printf( "\n" );
- }
-
- for ( c = t->depends; c; c = c->next )
- dependGraphOutput( c->target, depth + 1 );
-}
-#endif
-
-
-/*
- * make0sort() - reorder TARGETS chain by their time (newest to oldest).
- *
- * We walk chain, taking each item and inserting it on the sorted result, with
- * newest items at the front. This involves updating each of the TARGETS'
- * c->next and c->tail. Note that we make c->tail a valid prev pointer for every
- * entry. Normally, it is only valid at the head, where prev == tail. Note also
- * that while tail is a loop, next ends at the end of the chain.
- */
-
-static TARGETS * make0sort( TARGETS * chain )
-{
- PROFILE_ENTER( MAKE_MAKE0SORT );
-
- TARGETS * result = 0;
-
- /* Walk the current target list. */
- while ( chain )
- {
- TARGETS * c = chain;
- TARGETS * s = result;
-
- chain = chain->next;
-
- /* Find point s in result for c. */
- while ( s && ( s->target->time > c->target->time ) )
- s = s->next;
-
- /* Insert c in front of s (might be 0). Do not even think of deciphering
- * this.
- */
- c->next = s; /* good even if s = 0 */
- if ( result == s ) result = c; /* new head of chain? */
- if ( !s ) s = result; /* wrap to ensure a next */
- if ( result != c ) s->tail->next = c; /* not head? be prev's next */
- c->tail = s->tail; /* take on next's prev */
- s->tail = c; /* make next's prev us */
- }
-
- PROFILE_EXIT( MAKE_MAKE0SORT );
- return result;
-}
-
-
-static LIST * targets_to_update_ = L0;
-
-
-void mark_target_for_updating( OBJECT * target )
-{
- targets_to_update_ = list_push_back( targets_to_update_, object_copy( target ) );
-}
-
-
-LIST * targets_to_update()
-{
- return targets_to_update_;
-}
-
-
-void clear_targets_to_update()
-{
- list_free( targets_to_update_ );
- targets_to_update_ = L0;
-}
diff --git a/tools/build/v2/engine/make.h b/tools/build/v2/engine/make.h
deleted file mode 100644
index e0c7906583..0000000000
--- a/tools/build/v2/engine/make.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * make.h - bring a target up to date, once rules are in place
- */
-
-#ifndef MAKE_SW20111118_H
-#define MAKE_SW20111118_H
-
-#include "lists.h"
-#include "object.h"
-
-int make( LIST * targets, int anyhow );
-int make1( TARGET * t );
-
-typedef struct {
- int temp;
- int updating;
- int cantfind;
- int cantmake;
- int targets;
- int made;
-} COUNTS ;
-
-
-void make0( TARGET *t, TARGET *p, int depth,
- COUNTS *counts, int anyhow );
-
-
-/*
- * Specifies that the target should be updated.
- */
-void mark_target_for_updating( OBJECT * target );
-/*
- * Returns the list of all the targets previously passed to 'mark_target_for_updating'.
- */
-LIST * targets_to_update();
-/*
- * Clears/unmarks all targets that are currently marked for update.
- */
-void clear_targets_to_update();
-
-#endif
diff --git a/tools/build/v2/engine/make1.c b/tools/build/v2/engine/make1.c
deleted file mode 100644
index 47132419a8..0000000000
--- a/tools/build/v2/engine/make1.c
+++ /dev/null
@@ -1,1170 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * make1.c - execute command to bring targets up to date
- *
- * This module contains make1(), the entry point called by make() to
- * recursively descend the dependency graph executing update actions as
- * marked by make0().
- *
- * External routines:
- *
- * make1() - execute commands to update a TARGET and all of its dependencies.
- *
- * Internal routines, the recursive/asynchronous command executors:
- *
- * make1a() - recursively traverse dependency target tree, calling make1b().
- * make1atail() - started processing all dependencies so go on to make1b().
- * make1b() - when dependencies are up to date, build target with make1c().
- * make1c() - launch target's next command, call parents' make1b() if none.
- * make1d() - handle command execution completion and call back make1c().
- *
- * Internal support routines:
- *
- * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
- * make1list() - turn a list of targets into a LIST, for $(<) and $(>).
- * make1settings() - for vars that get bound values, build up replacement lists.
- * make1bind() - bind targets that weren't bound in dependency analysis.
- *
- * 04/16/94 (seiwald) - Split from make.c.
- * 04/21/94 (seiwald) - Handle empty "updated" actions.
- * 05/04/94 (seiwald) - async multiprocess (-j) support.
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources.
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 01/19/95 (seiwald) - distinguish between CANTFIND/CANTMAKE targets.
- * 01/22/94 (seiwald) - pass per-target JAMSHELL down to exec_cmd().
- * 02/28/95 (seiwald) - Handle empty "existing" actions.
- * 03/10/95 (seiwald) - Fancy counts.
- */
-
-#include "jam.h"
-
-#include "lists.h"
-#include "parse.h"
-#include "assert.h"
-#include "variable.h"
-#include "rules.h"
-#include "headers.h"
-
-#include "search.h"
-#include "object.h"
-#include "make.h"
-#include "command.h"
-#include "execcmd.h"
-#include "compile.h"
-#include "output.h"
-
-#include <stdlib.h>
-
-#if ! defined(NT) || defined(__GNUC__)
- #include <unistd.h> /* for unlink */
-#endif
-
-static CMD * make1cmds ( TARGET * );
-static LIST * make1list ( LIST *, TARGETS *, int flags );
-static SETTINGS * make1settings( struct module_t * module, LIST * vars );
-static void make1bind ( TARGET * );
-
-/* Ugly static - it is too hard to carry it through the callbacks. */
-
-static struct
-{
- int failed;
- int skipped;
- int total;
- int made;
-} counts[ 1 ] ;
-
-/* Target state - remove recursive calls by just keeping track of the state the
- * target is in.
- */
-typedef struct _state
-{
- struct _state * prev; /* previous state on stack */
- TARGET * t; /* current target */
- TARGET * parent; /* parent argument necessary for make1a() */
-#define T_STATE_MAKE1A 0 /* make1a() should be called */
-#define T_STATE_MAKE1ATAIL 1 /* make1atail() should be called */
-#define T_STATE_MAKE1B 2 /* make1b() should be called */
-#define T_STATE_MAKE1C 3 /* make1c() should be called */
-#define T_STATE_MAKE1D 4 /* make1d() should be called */
- int curstate; /* current state */
- int status;
-} state;
-
-static void make1a ( state * );
-static void make1atail ( state * );
-static void make1b ( state * );
-static void make1c ( state * );
-static void make1d ( state * );
-static void make_closure( void * closure, int status, timing_info *, const char *, const char * );
-
-typedef struct _stack
-{
- state * stack;
-} stack;
-
-static stack state_stack = { NULL };
-
-static state * state_freelist = NULL;
-
-
-static state * alloc_state()
-{
- if ( state_freelist != NULL )
- {
- state * pState = state_freelist;
- state_freelist = pState->prev;
- memset( pState, 0, sizeof( state ) );
- return pState;
- }
-
- return (state *)BJAM_MALLOC( sizeof( state ) );
-}
-
-
-static void free_state( state * pState )
-{
- pState->prev = state_freelist;
- state_freelist = pState;
-}
-
-
-static void clear_state_freelist()
-{
- while ( state_freelist != NULL )
- {
- state * pState = state_freelist;
- state_freelist = state_freelist->prev;
- BJAM_FREE( pState );
- }
-}
-
-
-static state * current_state( stack * pStack )
-{
- return pStack->stack;
-}
-
-
-static void pop_state( stack * pStack )
-{
- if ( pStack->stack != NULL )
- {
- state * pState = pStack->stack->prev;
- free_state( pStack->stack );
- pStack->stack = pState;
- }
-}
-
-
-static state * push_state( stack * pStack, TARGET * t, TARGET * parent, int curstate )
-{
- state * pState = alloc_state();
-
- pState->t = t;
- pState->parent = parent;
- pState->prev = pStack->stack;
- pState->curstate = curstate;
-
- pStack->stack = pState;
-
- return pStack->stack;
-}
-
-
-/*
- * Pushes a stack onto another stack, effectively reversing the order.
- */
-
-static void push_stack_on_stack( stack * pDest, stack * pSrc )
-{
- while ( pSrc->stack != NULL )
- {
- state * pState = pSrc->stack;
- pSrc->stack = pSrc->stack->prev;
- pState->prev = pDest->stack;
- pDest->stack = pState;
- }
-}
-
-
-/*
- * make1() - execute commands to update a TARGET and all of its dependencies.
- */
-
-static int intr = 0;
-
-int make1( TARGET * t )
-{
- state * pState;
-
- memset( (char *)counts, 0, sizeof( *counts ) );
-
- /* Recursively make the target and its dependencies. */
- push_state( &state_stack, t, NULL, T_STATE_MAKE1A );
-
- do
- {
- while ( ( pState = current_state( &state_stack ) ) != NULL )
- {
- if ( intr )
- pop_state( &state_stack );
-
- switch ( pState->curstate )
- {
- case T_STATE_MAKE1A : make1a ( pState ); break;
- case T_STATE_MAKE1ATAIL: make1atail( pState ); break;
- case T_STATE_MAKE1B : make1b ( pState ); break;
- case T_STATE_MAKE1C : make1c ( pState ); break;
- case T_STATE_MAKE1D : make1d ( pState ); break;
- }
- }
- }
- /* Wait for any outstanding commands to finish running. */
- while ( exec_wait() );
-
- clear_state_freelist();
-
- /* Talk about it. */
- if ( counts->failed )
- printf( "...failed updating %d target%s...\n", counts->failed,
- counts->failed > 1 ? "s" : "" );
- if ( DEBUG_MAKE && counts->skipped )
- printf( "...skipped %d target%s...\n", counts->skipped,
- counts->skipped > 1 ? "s" : "" );
- if ( DEBUG_MAKE && counts->made )
- printf( "...updated %d target%s...\n", counts->made,
- counts->made > 1 ? "s" : "" );
-
- return counts->total != counts->made;
-}
-
-
-/*
- * make1a() - recursively traverse target tree, calling make1b().
- *
- * Called to start processing a specified target. Does nothing if the target is
- * already being processed; otherwise it starts processing all of the target's
- * dependencies. Once all of its dependencies have started being processed, it
- * goes on to call make1b() (indirectly, via the helper make1atail() state).
- */
-
-static void make1a( state * pState )
-{
- TARGET * t = pState->t;
- TARGETS * c;
-
- /* If the parent is the first to try to build this target or this target is
- * in the make1c() quagmire, arrange for the parent to be notified when this
- * target is built.
- */
- if ( pState->parent )
- switch ( pState->t->progress )
- {
- case T_MAKE_INIT:
- case T_MAKE_ACTIVE:
- case T_MAKE_RUNNING:
- pState->t->parents = targetentry( pState->t->parents,
- pState->parent );
- ++pState->parent->asynccnt;
- }
-
- /*
- * If the target has been previously updated with -n in
- * effect, and we're ignoring -n, update it for real.
- */
- if ( !globs.noexec && pState->t->progress == T_MAKE_NOEXEC_DONE )
- {
- pState->t->progress = T_MAKE_INIT;
- }
-
- /* If this target is already being processed then do nothing. There is no
- * need to start processing the same target all over again.
- */
- if ( pState->t->progress != T_MAKE_INIT )
- {
- pop_state( &state_stack );
- return;
- }
-
- /* Asynccnt counts the dependencies preventing this target from proceeding
- * to make1b() for actual building. We start off with a count of 1 to
- * prevent anything from happening until we can notify all dependencies that
- * they are needed. This 1 is accounted for when we call make1b() ourselves,
- * below. Without this, if a dependency gets built before we finish processing
- * all of our other dependencies, our build might be triggered prematurely.
- */
- pState->t->asynccnt = 1;
-
- /* Add header nodes created during the building process. */
- {
- TARGETS * inc = 0;
- for ( c = t->depends; c; c = c->next )
- if ( c->target->rescanned && c->target->includes )
- inc = targetentry( inc, c->target->includes );
- t->depends = targetchain( t->depends, inc );
- }
-
- /* Guard against circular dependencies. */
- pState->t->progress = T_MAKE_ONSTACK;
-
- {
- stack temp_stack = { NULL };
- for ( c = t->depends; c && !intr; c = c->next )
- push_state( &temp_stack, c->target, pState->t, T_STATE_MAKE1A );
-
- /* Using stacks reverses the order of execution. Reverse it back. */
- push_stack_on_stack( &state_stack, &temp_stack );
- }
-
- pState->curstate = T_STATE_MAKE1ATAIL;
-}
-
-
-/*
- * make1atail() - started processing all dependencies so go on to make1b().
- */
-
-static void make1atail( state * pState )
-{
- pState->t->progress = T_MAKE_ACTIVE;
- /* Now that all of our dependencies have bumped up our asynccnt we can
- * remove our own internal bump added to prevent this target from being
- * built before all of its dependencies start getting processed.
- */
- pState->curstate = T_STATE_MAKE1B;
-}
-
-
-/*
- * make1b() - when dependencies are up to date, build target with make1c().
- *
- * Called after all dependencies have started being processed and after each of
- * them finishes its processing. The target actually goes on to getting built in
- * make1c() only after all of its dependencies have finished their processing.
- */
-
-static void make1b( state * pState )
-{
- TARGET * t = pState->t;
- TARGETS * c;
- TARGET * failed = 0;
- const char * failed_name = "dependencies";
-
- /* If any dependencies are still outstanding, wait until they call make1b()
- * to signal their completion.
- */
- if ( --pState->t->asynccnt )
- {
- pop_state( &state_stack );
- return;
- }
-
- /* Try to acquire a semaphore. If it is locked, wait until the target that
-     * locked it is built and signals its completion.
- */
-#ifdef OPT_SEMAPHORE
- if ( t->semaphore && t->semaphore->asynccnt )
- {
- /* Append 't' to the list of targets waiting on semaphore. */
- t->semaphore->parents = targetentry( t->semaphore->parents, t );
- t->asynccnt++;
-
- if ( DEBUG_EXECCMD )
- printf( "SEM: %s is busy, delaying launch of %s\n",
- object_str( t->semaphore->name ), object_str( t->name ) );
- pop_state( &state_stack );
- return;
- }
-#endif
-
- /* Now ready to build target 't', if dependencies built OK. */
-
- /* Collect status from dependencies. If -n was passed then
- * act as though all dependencies built correctly. The only
- * way they can fail is if UPDATE_NOW was called. If
- * the dependencies can't be found or we got an interrupt,
- * we can't get here.
- */
- if ( !globs.noexec )
- {
- for ( c = t->depends; c; c = c->next )
- if ( c->target->status > t->status && !( c->target->flags & T_FLAG_NOCARE ) )
- {
- failed = c->target;
- pState->t->status = c->target->status;
- }
- }
- /* If an internal header node failed to build, we want to output the target
- * that it failed on.
- */
- if ( failed )
- {
- failed_name = failed->flags & T_FLAG_INTERNAL
- ? failed->failed
- : object_str( failed->name );
- }
- t->failed = failed_name;
-
- /* If actions for building any of the dependencies have failed, bail.
- * Otherwise, execute all actions to make the current target.
- */
- if ( ( pState->t->status == EXEC_CMD_FAIL ) && pState->t->actions )
- {
- ++counts->skipped;
- if ( ( pState->t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD )
- {
- if ( !unlink( object_str( pState->t->boundname ) ) )
- printf( "...removing outdated %s\n", object_str( pState->t->boundname ) );
- }
- else
- printf( "...skipped %s for lack of %s...\n", object_str( pState->t->name ), failed_name );
- }
-
- if ( pState->t->status == EXEC_CMD_OK )
- switch ( pState->t->fate )
- {
- /* These are handled by the default case below now
- case T_FATE_INIT:
- case T_FATE_MAKING:
- */
-
- case T_FATE_STABLE:
- case T_FATE_NEWER:
- break;
-
- case T_FATE_CANTFIND:
- case T_FATE_CANTMAKE:
- pState->t->status = EXEC_CMD_FAIL;
- break;
-
- case T_FATE_ISTMP:
- if ( DEBUG_MAKE )
- printf( "...using %s...\n", object_str( pState->t->name ) );
- break;
-
- case T_FATE_TOUCHED:
- case T_FATE_MISSING:
- case T_FATE_NEEDTMP:
- case T_FATE_OUTDATED:
- case T_FATE_UPDATE:
- case T_FATE_REBUILD:
- /* Prepare commands for executing actions scheduled for this target
- * and then schedule transfer to make1c() state to proceed with
- * executing the prepared commands. Commands have their embedded
- * variables automatically expanded, including making use of any "on
- * target" variables.
- */
- if ( pState->t->actions )
- {
- ++counts->total;
- if ( DEBUG_MAKE && !( counts->total % 100 ) )
- printf( "...on %dth target...\n", counts->total );
-
- pState->t->cmds = (char *)make1cmds( pState->t );
- /* Set the target's "progress" so that make1c() counts it among
- * its successes/failures.
- */
- pState->t->progress = T_MAKE_RUNNING;
- }
- break;
-
- /* All possible fates should have been accounted for by now. */
- default:
- printf( "ERROR: %s has bad fate %d", object_str( pState->t->name ),
- pState->t->fate );
- abort();
- }
-
- /* Call make1c() to begin the execution of the chain of commands needed to
- * build the target. If we are not going to build the target (due to
- * dependency failures or no commands needing to be run) the chain will be
- * empty and make1c() will directly signal the target's completion.
- */
-
-#ifdef OPT_SEMAPHORE
- /* If there is a semaphore, indicate that it is in use. */
- if ( pState->t->semaphore )
- {
- ++pState->t->semaphore->asynccnt;
- if ( DEBUG_EXECCMD )
- printf( "SEM: %s now used by %s\n", object_str( pState->t->semaphore->name ),
- object_str( pState->t->name ) );
- }
-#endif
-
- pState->curstate = T_STATE_MAKE1C;
-}
-
-
-/*
- * make1c() - launch target's next command, call parents' make1b() if none.
- *
- * If there are (more) commands to run to build this target (and we have not hit
- * an error running earlier commands) we launch the command using exec_cmd(). If
- * there are no more commands to run, we collect the status from all the actions
- * and report our completion to all the parents.
- */
-
-static void make1c( state * pState )
-{
- CMD * cmd = (CMD *)pState->t->cmds;
-
- if ( cmd && ( pState->t->status == EXEC_CMD_OK ) )
- {
- const char * rule_name = 0;
- const char * target = 0;
-
- if ( DEBUG_MAKEQ ||
- ( !( cmd->rule->actions->flags & RULE_QUIETLY ) && DEBUG_MAKE ) )
- {
- rule_name = object_str( cmd->rule->name );
- target = object_str( list_front( lol_get( &cmd->args, 0 ) ) );
- if ( globs.noexec )
- out_action( rule_name, target, cmd->buf->value, "", "", EXIT_OK );
- }
-
- if ( globs.noexec )
- {
- pState->curstate = T_STATE_MAKE1D;
- pState->status = EXEC_CMD_OK;
- }
- else
- {
- /* Pop state first because exec_cmd() could push state. */
- pop_state( &state_stack );
- exec_cmd( cmd->buf->value, make_closure, pState->t, cmd->shell, rule_name,
- target );
- }
- }
- else
- {
- TARGETS * c;
- ACTIONS * actions;
-
- /* Collect status from actions, and distribute it as well. */
- for ( actions = pState->t->actions; actions; actions = actions->next )
- if ( actions->action->status > pState->t->status )
- pState->t->status = actions->action->status;
- for ( actions = pState->t->actions; actions; actions = actions->next )
- if ( pState->t->status > actions->action->status )
- actions->action->status = pState->t->status;
-
- /* Tally success/failure for those we tried to update. */
- if ( pState->t->progress == T_MAKE_RUNNING )
- switch ( pState->t->status )
- {
- case EXEC_CMD_OK : ++counts->made ; break;
- case EXEC_CMD_FAIL: ++counts->failed; break;
- }
-
- /* Tell parents their dependency has been built. */
- {
- stack temp_stack = { NULL };
- TARGET * t = pState->t;
- TARGET * additional_includes = NULL;
-
- if ( globs.noexec )
- t->progress = T_MAKE_NOEXEC_DONE;
- else
- t->progress = T_MAKE_DONE;
-
- /* Target has been updated so rescan it for dependencies. */
- if ( ( t->fate >= T_FATE_MISSING ) &&
- ( t->status == EXEC_CMD_OK ) &&
- !t->rescanned )
- {
- TARGET * saved_includes;
- TARGET * target_to_rescan = t;
- SETTINGS * s;
-
- target_to_rescan->rescanned = 1;
-
- if ( target_to_rescan->flags & T_FLAG_INTERNAL )
- target_to_rescan = t->original_target;
-
- /* Clean current includes. */
- saved_includes = target_to_rescan->includes;
- target_to_rescan->includes = 0;
-
- s = copysettings( target_to_rescan->settings );
- pushsettings( root_module(), s );
- headers( target_to_rescan );
- popsettings( root_module(), s );
- freesettings( s );
-
- if ( target_to_rescan->includes )
- {
- /* Link the old includes on to make sure that it gets
- * cleaned up correctly.
- */
- target_to_rescan->includes->includes = saved_includes;
- target_to_rescan->includes->rescanned = 1;
- /* Tricky. The parents have already been processed, but they
- * have not seen the internal node, because it was just
- * created. We need to make the calls to make1a() that would
- * have been made by the parents here, and also make sure
- * all unprocessed parents will pick up the includes. We
- * must make sure processing of the additional make1a()
- * invocations is done before make1b() which means this
- * target is built, otherwise the parent would be considered
- * built before this make1a() processing has even started.
- */
- make0( target_to_rescan->includes, target_to_rescan->parents->target, 0, 0, 0 );
- for ( c = target_to_rescan->parents; c; c = c->next )
- c->target->depends = targetentry( c->target->depends,
- target_to_rescan->includes );
- /* Will be processed below. */
- additional_includes = target_to_rescan->includes;
- }
- else
- {
- target_to_rescan->includes = saved_includes;
- }
- }
-
- if ( additional_includes )
- for ( c = t->parents; c; c = c->next )
- push_state( &temp_stack, additional_includes, c->target, T_STATE_MAKE1A );
-
- for ( c = t->parents; c; c = c->next )
- push_state( &temp_stack, c->target, NULL, T_STATE_MAKE1B );
-
-#ifdef OPT_SEMAPHORE
- /* If there is a semaphore, it is now free. */
- if ( t->semaphore )
- {
- assert( t->semaphore->asynccnt == 1 );
- --t->semaphore->asynccnt;
-
- if ( DEBUG_EXECCMD )
- printf( "SEM: %s is now free\n", object_str( t->semaphore->name ) );
-
- /* If anything is waiting, notify the next target. There is no
- * point in notifying all waiting targets, since they will be
- * notified again.
- */
- if ( t->semaphore->parents )
- {
- TARGETS * first = t->semaphore->parents;
- if ( first->next )
- first->next->tail = first->tail;
- t->semaphore->parents = first->next;
-
- if ( DEBUG_EXECCMD )
- printf( "SEM: placing %s on stack\n", object_str( first->target->name ) );
- push_state( &temp_stack, first->target, NULL, T_STATE_MAKE1B );
- BJAM_FREE( first );
- }
- }
-#endif
-
- /* Must pop state before pushing any more. */
- pop_state( &state_stack );
-
- /* Using stacks reverses the order of execution. Reverse it back. */
- push_stack_on_stack( &state_stack, &temp_stack );
- }
- }
-}
-
-
-/*
- * call_timing_rule() - Look up the __TIMING_RULE__ variable on the given
- * target, and if non-empty, invoke the rule it names, passing the given
- * timing_info.
- */
-
-static void call_timing_rule( TARGET * target, timing_info * time )
-{
- LIST * timing_rule;
-
- pushsettings( root_module(), target->settings );
- timing_rule = var_get( root_module(), constant_TIMING_RULE );
- popsettings( root_module(), target->settings );
-
- if ( !list_empty( timing_rule ) )
- {
- /* rule timing-rule ( args * : target : start end user system ) */
-
- /* Prepare the argument list. */
- FRAME frame[ 1 ];
- frame_init( frame );
-
- /* args * :: $(__TIMING_RULE__[2-]) */
- lol_add( frame->args, list_copy_range( timing_rule, list_next( list_begin( timing_rule ) ), list_end( timing_rule ) ) );
-
- /* target :: the name of the target */
- lol_add( frame->args, list_new( object_copy( target->name ) ) );
-
- /* start end user system :: info about the action command */
- lol_add( frame->args, list_push_back( list_push_back( list_push_back( list_new(
- outf_time ( time->start ) ),
- outf_time ( time->end ) ),
- outf_double( time->user ) ),
- outf_double( time->system ) ) );
-
- /* Call the rule. */
- evaluate_rule( list_front( timing_rule ), frame );
-
- /* Clean up. */
- frame_free( frame );
- }
-}
-
-
-/*
- * call_action_rule() - Look up the __ACTION_RULE__ variable on the given
- * target, and if non-empty, invoke the rule it names, passing the given info,
- * timing_info, executed command and command output.
- */
-
-static void call_action_rule
-(
- TARGET * target,
- int status,
- timing_info * time,
- const char * executed_command,
- const char * command_output
-)
-{
- LIST * action_rule;
-
- pushsettings( root_module(), target->settings );
- action_rule = var_get( root_module(), constant_ACTION_RULE );
- popsettings( root_module(), target->settings );
-
- if ( !list_empty( action_rule ) )
- {
- /* rule action-rule (
- args * :
- target :
- command status start end user system :
- output ? ) */
-
- /* Prepare the argument list. */
- FRAME frame[ 1 ];
- frame_init( frame );
-
- /* args * :: $(__ACTION_RULE__[2-]) */
- lol_add( frame->args, list_copy_range( action_rule, list_next( list_begin( action_rule ) ), list_end( action_rule ) ) );
-
- /* target :: the name of the target */
- lol_add( frame->args, list_new( object_copy( target->name ) ) );
-
- /* command status start end user system :: info about the action command */
- lol_add( frame->args,
- list_push_back( list_push_back( list_push_back( list_push_back( list_push_back( list_new(
- object_new( executed_command ) ),
- outf_int( status ) ),
- outf_time( time->start ) ),
- outf_time( time->end ) ),
- outf_double( time->user ) ),
- outf_double( time->system ) ) );
-
- /* output ? :: the output of the action command */
- if ( command_output )
- lol_add( frame->args, list_new( object_new( command_output ) ) );
- else
- lol_add( frame->args, L0 );
-
- /* Call the rule. */
- evaluate_rule( list_front( action_rule ), frame );
-
- /* Clean up. */
- frame_free( frame );
- }
-}
-
-
-/*
- * make_closure() - internal function passed as a notification callback for when
- * commands finish getting executed by the OS.
- */
-
-static void make_closure
-(
- void * closure,
- int status,
- timing_info * time,
- const char * executed_command,
- const char * command_output
-)
-{
- TARGET * built = (TARGET *)closure;
-
- call_timing_rule( built, time );
- if ( DEBUG_EXECCMD )
- printf( "%f sec system; %f sec user\n", time->system, time->user );
-
- call_action_rule( built, status, time, executed_command, command_output );
-
- push_state( &state_stack, built, NULL, T_STATE_MAKE1D )->status = status;
-}
-
-
-/*
- * make1d() - handle command execution completion and call back make1c().
- *
- * exec_cmd() has completed and now all we need to do is fiddle with the status
- * and call back to make1c() so it can run the next command scheduled for
- * building this target or close up the target's build process in case there are
- * no more commands scheduled for it. On interrupts, we bail heavily.
- */
-
-static void make1d( state * pState )
-{
- TARGET * t = pState->t;
- CMD * cmd = (CMD *)t->cmds;
- int status = pState->status;
-
- if ( t->flags & T_FLAG_FAIL_EXPECTED && !globs.noexec )
- {
- /* Invert execution result when FAIL_EXPECTED has been applied. */
- switch ( status )
- {
- case EXEC_CMD_FAIL: status = EXEC_CMD_OK ; break;
- case EXEC_CMD_OK: status = EXEC_CMD_FAIL; break;
- }
- }
-
- if ( ( status == EXEC_CMD_FAIL ) &&
- ( cmd->rule->actions->flags & RULE_IGNORE ) )
- status = EXEC_CMD_OK;
-
- /* On interrupt, set intr so _everything_ fails. */
- if ( status == EXEC_CMD_INTR )
- ++intr;
-
- /* Print command text on failure. */
- if ( ( status == EXEC_CMD_FAIL ) && DEBUG_MAKE )
- {
- if ( !DEBUG_EXEC )
- printf( "%s\n", cmd->buf->value );
-
- printf( "...failed %s ", object_str( cmd->rule->name ) );
- list_print( lol_get( &cmd->args, 0 ) );
- printf( "...\n" );
- }
-
- /* Treat failed commands as interrupts in case we were asked to stop the
- * build in case of any errors.
- */
- if ( ( status == EXEC_CMD_FAIL ) && globs.quitquick )
- ++intr;
-
- /* If the command was interrupted or failed and the target is not
- * "precious", remove the targets.
- */
- if (status != EXEC_CMD_OK)
- {
- LIST * targets = lol_get( &cmd->args, 0 );
- LISTITER iter = list_begin( targets ), end = list_end( targets );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- int need_unlink = 1;
- TARGET* t = bindtarget ( list_item( iter ) );
- if (t->flags & T_FLAG_PRECIOUS)
- {
- need_unlink = 0;
- }
- if (need_unlink && !unlink( object_str( list_item( iter ) ) ) )
- printf( "...removing %s\n", object_str( list_item( iter ) ) );
- }
- }
-
- /* Free this command and call make1c() to move onto the next one scheduled
- * for building this same target.
- */
- t->status = status;
- t->cmds = (char *)cmd_next( cmd );
- cmd_free( cmd );
- pState->curstate = T_STATE_MAKE1C;
-}
-
-
-/*
- * swap_settings() - replace the settings from the current module and target
- * with those from the new module and target
- */
-
-static void swap_settings
-(
- module_t * * current_module,
- TARGET * * current_target,
- module_t * new_module,
- TARGET * new_target
-)
-{
- if ( ( new_target == *current_target ) && ( new_module == *current_module ) )
- return;
-
- if ( *current_target )
- popsettings( *current_module, (*current_target)->settings );
-
- if ( new_target )
- pushsettings( new_module, new_target->settings );
-
- *current_module = new_module;
- *current_target = new_target;
-}
-
-
-/*
- * make1cmds() - turn ACTIONS into CMDs, grouping, splitting, etc.
- *
- * Essentially copies a chain of ACTIONs to a chain of CMDs, grouping
- * RULE_TOGETHER actions, splitting RULE_PIECEMEAL actions, and handling
- * RULE_NEWSRCS actions. The result is a chain of CMDs which can be expanded by
- * var_string() and executed using exec_cmd().
- */
-
-static CMD * make1cmds( TARGET * t )
-{
- CMD * cmds = 0;
- LIST * shell = L0;
- module_t * settings_module = 0;
- TARGET * settings_target = 0;
- ACTIONS * a0;
- int running_flag = globs.noexec ? A_RUNNING_NOEXEC : A_RUNNING;
-
- /* Step through actions. Actions may be shared with other targets or grouped
- * using RULE_TOGETHER, so actions already seen are skipped.
- */
- for ( a0 = t->actions ; a0; a0 = a0->next )
- {
- RULE * rule = a0->action->rule;
- rule_actions * actions = rule->actions;
- SETTINGS * boundvars;
- LIST * nt;
- LIST * ns;
- ACTIONS * a1;
- int start;
- int chunk;
- int length;
-
- /* Only do rules with commands to execute. If this action has already
- * been executed, use saved status.
- */
- if ( !actions || a0->action->running >= running_flag )
- continue;
-
- a0->action->running = running_flag;
-
- /* Make LISTS of targets and sources. If `execute together` has been
- * specified for this rule, tack on sources from each instance of this
- * rule for this target.
- */
- nt = make1list( L0, a0->action->targets, 0 );
- ns = make1list( L0, a0->action->sources, actions->flags );
- if ( actions->flags & RULE_TOGETHER )
- for ( a1 = a0->next; a1; a1 = a1->next )
- if ( a1->action->rule == rule && a1->action->running < running_flag )
- {
- ns = make1list( ns, a1->action->sources, actions->flags );
- a1->action->running = running_flag;
- }
-
- /* If doing only updated (or existing) sources, but none have been
- * updated (or exist), skip this action.
- */
- if ( list_empty( ns ) && ( actions->flags & ( RULE_NEWSRCS | RULE_EXISTING ) ) )
- {
- list_free( nt );
- continue;
- }
-
- swap_settings( &settings_module, &settings_target, rule->module, t );
- if ( list_empty( shell ) )
- {
- shell = var_get( rule->module, constant_JAMSHELL ); /* shell is per-target */
- }
-
- /* If we had 'actions xxx bind vars' we bind the vars now. */
- boundvars = make1settings( rule->module, actions->bindlist );
- pushsettings( rule->module, boundvars );
-
- /*
- * Build command, starting with all source args.
- *
- * If cmd_new returns 0, it is because the resulting command length is
- * > MAXLINE. In this case, we will slowly reduce the number of source
- * arguments presented until it does fit. This only applies to actions
- * that allow PIECEMEAL commands.
- *
- * While reducing slowly takes a bit of compute time to get things just
- * right, it is worth it to get as close to MAXLINE as possible, because
- * launching the commands we are executing is likely to be much more
- * compute intensive.
- *
- * Note we loop through at least once, for sourceless actions.
- */
-
- start = 0;
- chunk = length = list_length( ns );
-
- do
- {
- /* Build cmd: cmd_new consumes its lists. */
- CMD * cmd = cmd_new( rule,
- list_copy( nt ),
- list_sublist( ns, start, chunk ),
- list_copy( shell ) );
-
- if ( cmd )
- {
- /* It fit: chain it up. */
- if ( !cmds ) cmds = cmd;
- else cmds->tail->next = cmd;
- cmds->tail = cmd;
- start += chunk;
- }
- else if ( ( actions->flags & RULE_PIECEMEAL ) && ( chunk > 1 ) )
- {
- /* Reduce chunk size slowly. */
- chunk = chunk * 9 / 10;
- }
- else
- {
- /* Too long and not splittable. */
- printf( "%s actions too long (max %d):\n", object_str( rule->name ), MAXLINE
- );
-
- /* Tell the user what didn't fit. */
- cmd = cmd_new( rule, list_copy( nt ),
- list_sublist( ns, start, chunk ),
- list_new( object_copy( constant_percent ) ) );
- fputs( cmd->buf->value, stdout );
- exit( EXITBAD );
- }
- }
- while ( start < length );
-
- /* These were always copied when used. */
- list_free( nt );
- list_free( ns );
-
- /* Free the variables whose values were bound by 'actions xxx bind
- * vars'.
- */
- popsettings( rule->module, boundvars );
- freesettings( boundvars );
- }
-
- swap_settings( &settings_module, &settings_target, 0, 0 );
- return cmds;
-}
-
-
-/*
- * make1list() - turn a list of targets into a LIST, for $(<) and $(>).
- */
-
-static LIST * make1list( LIST * l, TARGETS * targets, int flags )
-{
- for ( ; targets; targets = targets->next )
- {
- TARGET * t = targets->target;
-
- if ( t->binding == T_BIND_UNBOUND )
- make1bind( t );
-
- if ( ( flags & RULE_EXISTING ) && ( flags & RULE_NEWSRCS ) )
- {
- if ( ( t->binding != T_BIND_EXISTS ) && ( t->fate <= T_FATE_STABLE ) )
- continue;
- }
- else
- {
- if ( ( flags & RULE_EXISTING ) && ( t->binding != T_BIND_EXISTS ) )
- continue;
-
- if ( ( flags & RULE_NEWSRCS ) && ( t->fate <= T_FATE_STABLE ) )
- continue;
- }
-
- /* Prohibit duplicates for RULE_TOGETHER. */
- if ( flags & RULE_TOGETHER )
- {
- LISTITER iter = list_begin( l ), end = list_end( l );
- for ( ; iter != end; iter = list_next( iter ) )
- if ( object_equal( list_item( iter ), t->boundname ) )
- break;
- if ( iter != end )
- continue;
- }
-
- /* Build new list. */
- l = list_push_back( l, object_copy( t->boundname ) );
- }
-
- return l;
-}
-
-
-/*
- * make1settings() - for vars that get bound values, build up replacement lists.
- */
-
-static SETTINGS * make1settings( struct module_t * module, LIST * vars )
-{
- SETTINGS * settings = 0;
-
- LISTITER vars_iter = list_begin( vars ), vars_end = list_end( vars );
- for ( ; vars_iter != vars_end; vars_iter = list_next( vars_iter ) )
- {
- LIST * l = var_get( module, list_item( vars_iter ) );
- LIST * nl = L0;
- LISTITER iter = list_begin( l ), end = list_end( l );
-
- for ( ; iter != end; iter = list_next( iter ) )
- {
- TARGET * t = bindtarget( list_item( iter ) );
-
- /* Make sure the target is bound. */
- if ( t->binding == T_BIND_UNBOUND )
- make1bind( t );
-
- /* Build a new list. */
- nl = list_push_back( nl, object_copy( t->boundname ) );
- }
-
- /* Add to settings chain. */
- settings = addsettings( settings, VAR_SET, list_item( vars_iter ), nl );
- }
-
- return settings;
-}
-
-
-/*
- * make1bind() - bind targets that were not bound during dependency analysis
- *
- * Spot the kludge! If a target is not in the dependency tree, it did not get
- * bound by make0(), so we have to do it here. Ugly.
- */
-
-static void make1bind( TARGET * t )
-{
- if ( t->flags & T_FLAG_NOTFILE )
- return;
-
- pushsettings( root_module(), t->settings );
- object_free( t->boundname );
- t->boundname = search( t->name, &t->time, 0, ( t->flags & T_FLAG_ISFILE ) );
- t->binding = t->time ? T_BIND_EXISTS : T_BIND_MISSING;
- popsettings( root_module(), t->settings );
-}
diff --git a/tools/build/v2/engine/mem.h b/tools/build/v2/engine/mem.h
deleted file mode 100644
index 71b2fb4be2..0000000000
--- a/tools/build/v2/engine/mem.h
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
-Copyright Rene Rivera 2006.
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or copy at
-http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#ifndef BJAM_MEM_H
-#define BJAM_MEM_H
-
-
-#ifdef OPT_BOEHM_GC
-
- /* Use Boehm GC memory allocator. */
- #include <gc.h>
- #define bjam_malloc_x(s) memset(GC_malloc(s),0,s)
- #define bjam_malloc_atomic_x(s) memset(GC_malloc_atomic(s),0,s)
- #define bjam_calloc_x(n,s) memset(GC_malloc((n)*(s)),0,(n)*(s))
- #define bjam_calloc_atomic_x(n,s) memset(GC_malloc_atomic((n)*(s)),0,(n)*(s))
- #define bjam_realloc_x(p,s) GC_realloc(p,s)
- #define bjam_free_x(p) GC_free(p)
- #define bjam_mem_init_x() GC_init(); GC_enable_incremental()
-
- #define bjam_malloc_raw_x(s) malloc(s)
- #define bjam_calloc_raw_x(n,s) calloc(n,s)
- #define bjam_realloc_raw_x(p,s) realloc(p,s)
- #define bjam_free_raw_x(p) free(p)
-
- #ifndef BJAM_NEWSTR_NO_ALLOCATE
- #define BJAM_NEWSTR_NO_ALLOCATE
- #endif
-
-#elif defined(OPT_DUMA)
-
- /* Use Duma memory debugging library. */
- #include <stdlib.h>
- #define _DUMA_CONFIG_H_
- #define DUMA_NO_GLOBAL_MALLOC_FREE
- #define DUMA_EXPLICIT_INIT
- #define DUMA_NO_THREAD_SAFETY
- #define DUMA_NO_CPP_SUPPORT
- /* #define DUMA_NO_LEAKDETECTION */
- /* #define DUMA_USE_FRAMENO */
- /* #define DUMA_PREFER_ATEXIT */
- /* #define DUMA_OLD_DEL_MACRO */
- /* #define DUMA_NO_HANG_MSG */
- #define DUMA_PAGE_SIZE 4096
- #define DUMA_MIN_ALIGNMENT 1
- /* #define DUMA_GNU_INIT_ATTR 0 */
- typedef unsigned int DUMA_ADDR;
- typedef unsigned int DUMA_SIZE;
- #include <duma.h>
- #define bjam_malloc_x(s) malloc(s)
- #define bjam_calloc_x(n,s) calloc(n,s)
- #define bjam_realloc_x(p,s) realloc(p,s)
- #define bjam_free_x(p) free(p)
-
- #ifndef BJAM_NEWSTR_NO_ALLOCATE
- #define BJAM_NEWSTR_NO_ALLOCATE
- #endif
-
-#else
-
- /* Standard C memory allocation. */
- #define bjam_malloc_x(s) malloc(s)
- #define bjam_calloc_x(n,s) calloc(n,s)
- #define bjam_realloc_x(p,s) realloc(p,s)
- #define bjam_free_x(p) free(p)
-
-#endif
-
-#ifndef bjam_malloc_atomic_x
- #define bjam_malloc_atomic_x(s) bjam_malloc_x(s)
-#endif
-#ifndef bjam_calloc_atomic_x
- #define bjam_calloc_atomic_x(n,s) bjam_calloc_x(n,s)
-#endif
-#ifndef bjam_mem_init_x
- #define bjam_mem_init_x()
-#endif
-#ifndef bjam_mem_close_x
- #define bjam_mem_close_x()
-#endif
-#ifndef bjam_malloc_raw_x
- #define bjam_malloc_raw_x(s) bjam_malloc_x(s)
-#endif
-#ifndef bjam_calloc_raw_x
- #define bjam_calloc_raw_x(n,s) bjam_calloc_x(n,s)
-#endif
-#ifndef bjam_realloc_raw_x
- #define bjam_realloc_raw_x(p,s) bjam_realloc_x(p,s)
-#endif
-#ifndef bjam_free_raw_x
- #define bjam_free_raw_x(p) bjam_free_x(p)
-#endif
-
-#ifdef OPT_DEBUG_PROFILE
-
- /* Profile tracing of memory allocations. */
- #define BJAM_MALLOC(s) (profile_memory(s), bjam_malloc_x(s))
- #define BJAM_MALLOC_ATOMIC(s) (profile_memory(s), bjam_malloc_atomic_x(s))
- #define BJAM_CALLOC(n,s) (profile_memory(n*s), bjam_calloc_x(n,s))
- #define BJAM_CALLOC_ATOMIC(n,s) (profile_memory(n*s), bjam_calloc_atomic_x(n,s))
- #define BJAM_REALLOC(p,s) (profile_memory(s), bjam_realloc_x(p,s))
- #define BJAM_FREE(p) bjam_free_x(p)
- #define BJAM_MEM_INIT() bjam_mem_init_x()
- #define BJAM_MEM_CLOSE() bjam_mem_close_x()
-
- #define BJAM_MALLOC_RAW(s) (profile_memory(s), bjam_malloc_raw_x(s))
- #define BJAM_CALLOC_RAW(n,s) (profile_memory(n*s), bjam_calloc_raw_x(n,s))
- #define BJAM_REALLOC_RAW(p,s) (profile_memory(s), bjam_realloc_raw_x(p,s))
- #define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
-
-#else
-
- /* No mem tracing. */
- #define BJAM_MALLOC(s) bjam_malloc_x(s)
- #define BJAM_MALLOC_ATOMIC(s) bjam_malloc_atomic_x(s)
- #define BJAM_CALLOC(n,s) bjam_calloc_x(n,s)
- #define BJAM_CALLOC_ATOMIC(n,s) bjam_calloc_atomic_x(n,s)
- #define BJAM_REALLOC(p,s) bjam_realloc_x(p,s)
- #define BJAM_FREE(p) bjam_free_x(p)
- #define BJAM_MEM_INIT() bjam_mem_init_x()
- #define BJAM_MEM_CLOSE() bjam_mem_close_x()
-
- #define BJAM_MALLOC_RAW(s) bjam_malloc_raw_x(s)
- #define BJAM_CALLOC_RAW(n,s) bjam_calloc_raw_x(n,s)
- #define BJAM_REALLOC_RAW(p,s) bjam_realloc_raw_x(p,s)
- #define BJAM_FREE_RAW(p) bjam_free_raw_x(p)
-
-#endif
-
-
-#endif
diff --git a/tools/build/v2/engine/modules.c b/tools/build/v2/engine/modules.c
deleted file mode 100644
index 8898d18bb0..0000000000
--- a/tools/build/v2/engine/modules.c
+++ /dev/null
@@ -1,436 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-#include "jam.h"
-
-#include "modules.h"
-#include "string.h"
-#include "hash.h"
-#include "object.h"
-#include "lists.h"
-#include "parse.h"
-#include "rules.h"
-#include "variable.h"
-#include "strings.h"
-#include "native.h"
-#include <assert.h>
-
-static struct hash * module_hash = 0;
-static module_t root;
-
-module_t * bindmodule( OBJECT * name )
-{
-
- if ( !name )
- {
- return &root;
- }
- else
- {
- PROFILE_ENTER( BINDMODULE );
-
- module_t * m;
- int found;
-
- if ( !module_hash )
- module_hash = hashinit( sizeof( module_t ), "modules" );
-
- m = (module_t *)hash_insert( module_hash, name, &found );
- if ( !found )
- {
- m->name = object_copy( name );
- m->variables = 0;
- m->variable_indices = 0;
- m->num_fixed_variables = 0;
- m->fixed_variables = 0;
- m->rules = 0;
- m->imported_modules = 0;
- m->class_module = 0;
- m->native_rules = 0;
- m->user_module = 0;
- }
-
- PROFILE_EXIT( BINDMODULE );
-
- return m;
- }
-}
-
-/*
- * demand_rules() - Get the module's "rules" hash on demand.
- */
-struct hash * demand_rules( module_t * m )
-{
- if ( !m->rules )
- m->rules = hashinit( sizeof( RULE ), "rules" );
- return m->rules;
-}
-
-
-/*
- * delete_module() - wipe out the module's rules and variables.
- */
-
-static void delete_rule_( void * xrule, void * data )
-{
- rule_free( (RULE *)xrule );
-}
-
-
-static void delete_native_rule( void * xrule, void * data )
-{
- native_rule_t * rule = (native_rule_t *)xrule;
- object_free( rule->name );
- if ( rule->procedure )
- function_free( rule->procedure );
-}
-
-
-static void delete_imported_modules( void * xmodule_name, void * data )
-{
- object_free( *(OBJECT * *)xmodule_name );
-}
-
-static void free_fixed_variable( void * xvar, void * data );
-
-void delete_module( module_t * m )
-{
- /* Clear out all the rules. */
- if ( m->rules )
- {
- hashenumerate( m->rules, delete_rule_, (void *)0 );
- hash_free( m->rules );
- m->rules = 0;
- }
-
- if ( m->native_rules )
- {
- hashenumerate( m->native_rules, delete_native_rule, (void *)0 );
- hash_free( m->native_rules );
- m->native_rules = 0;
- }
-
- if ( m->variables )
- {
- var_done( m );
- m->variables = 0;
- }
-
- if ( m->fixed_variables )
- {
- int i;
- for ( i = 0; i < m->num_fixed_variables; ++i )
- {
- list_free( m->fixed_variables[ i ] );
- }
- BJAM_FREE( m->fixed_variables );
- m->fixed_variables = 0;
- }
-
- if ( m->variable_indices )
- {
- hashenumerate( m->variable_indices, &free_fixed_variable, (void *)0 );
- hash_free( m->variable_indices );
- m->variable_indices = 0;
- }
-
- if ( m->imported_modules )
- {
- hashenumerate( m->imported_modules, delete_imported_modules, (void *)0 );
- hash_free( m->imported_modules );
- m->imported_modules = 0;
- }
-}
-
-
-struct module_stats
-{
- OBJECT * module_name;
- struct hashstats rules_stats[ 1 ];
- struct hashstats variables_stats[ 1 ];
- struct hashstats variable_indices_stats[ 1 ];
- struct hashstats imported_modules_stats[ 1 ];
-};
-
-
-static void module_stat( struct hash * hp, OBJECT * module, const char * name )
-{
- if ( hp )
- {
- struct hashstats stats[ 1 ];
- string id[ 1 ];
- hashstats_init( stats );
- string_new( id );
- string_append( id, object_str( module ) );
- string_push_back( id, ' ' );
- string_append( id, name );
-
- hashstats_add( stats, hp );
- hashstats_print( stats, id->value );
-
- string_free( id );
- }
-}
-
-
-static void class_module_stat( struct hashstats * stats, OBJECT * module, const char * name )
-{
- if ( stats->item_size )
- {
- string id[ 1 ];
- string_new( id );
- string_append( id, object_str( module ) );
- string_append( id, " object " );
- string_append( id, name );
-
- hashstats_print( stats, id->value );
-
- string_free( id );
- }
-}
-
-
-static void stat_module( void * xmodule, void * data )
-{
- module_t *m = (module_t *)xmodule;
-
- if ( DEBUG_MEM || DEBUG_PROFILE )
- {
- struct hash * class_info = (struct hash *)data;
- if ( m->class_module )
- {
- int found;
- struct module_stats * ms = (struct module_stats *)hash_insert( class_info, m->class_module->name, &found );
- if ( !found )
- {
- ms->module_name = m->class_module->name;
- hashstats_init( ms->rules_stats );
- hashstats_init( ms->variables_stats );
- hashstats_init( ms->variable_indices_stats );
- hashstats_init( ms->imported_modules_stats );
- }
-
- hashstats_add( ms->rules_stats, m->rules );
- hashstats_add( ms->variables_stats, m->variables );
- hashstats_add( ms->variable_indices_stats, m->variable_indices );
- hashstats_add( ms->imported_modules_stats, m->imported_modules );
- }
- else
- {
- module_stat( m->rules, m->name, "rules" );
- module_stat( m->variables, m->name, "variables" );
- module_stat( m->variable_indices, m->name, "fixed variables" );
- module_stat( m->imported_modules, m->name, "imported modules" );
- }
- }
-
- delete_module( m );
- object_free( m->name );
-}
-
-static void print_class_stats( void * xstats, void * data )
-{
- struct module_stats * stats = (struct module_stats *)xstats;
- class_module_stat( stats->rules_stats, stats->module_name, "rules" );
- class_module_stat( stats->variables_stats, stats->module_name, "variables" );
- class_module_stat( stats->variable_indices_stats, stats->module_name, "fixed variables" );
- class_module_stat( stats->imported_modules_stats, stats->module_name, "imported modules" );
-}
-
-
-static void delete_module_( void * xmodule, void * data )
-{
- module_t *m = (module_t *)xmodule;
-
- delete_module( m );
- object_free( m->name );
-}
-
-
-void modules_done()
-{
- if ( DEBUG_MEM || DEBUG_PROFILE )
- {
- struct hash * class_hash = hashinit( sizeof( struct module_stats ), "object info" );
- hashenumerate( module_hash, stat_module, (void *)class_hash );
- hashenumerate( class_hash, print_class_stats, (void *)0 );
- hash_free( class_hash );
- }
- hashenumerate( module_hash, delete_module_, (void *)0 );
- hashdone( module_hash );
- module_hash = 0;
- delete_module( &root );
-}
-
-module_t * root_module()
-{
- return &root;
-}
-
-
-void import_module( LIST * module_names, module_t * target_module )
-{
- PROFILE_ENTER( IMPORT_MODULE );
-
- struct hash * h;
- LISTITER iter, end;
-
- if ( !target_module->imported_modules )
- target_module->imported_modules = hashinit( sizeof( char * ), "imported" );
- h = target_module->imported_modules;
-
- iter = list_begin( module_names ), end = list_end( module_names );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- int found;
- OBJECT * s = list_item( iter );
- OBJECT * * ss = (OBJECT * *)hash_insert( h, s, &found );
- if( !found )
- {
- *ss = object_copy( s );
- }
- }
-
- PROFILE_EXIT( IMPORT_MODULE );
-}
-
-
-static void add_module_name( void * r_, void * result_ )
-{
- OBJECT * * r = (OBJECT * *)r_;
- LIST * * result = (LIST * *)result_;
-
- *result = list_push_back( *result, object_copy( *r ) );
-}
-
-
-LIST * imported_modules( module_t * module )
-{
- LIST * result = L0;
- if ( module->imported_modules )
- hashenumerate( module->imported_modules, add_module_name, &result );
- return result;
-}
-
-
-FUNCTION * function_bind_variables( FUNCTION * f, module_t * module, int * counter );
-FUNCTION * function_unbind_variables( FUNCTION * f );
-
-struct fixed_variable
-{
- OBJECT * key;
- int n;
-};
-
-struct bind_vars_t
-{
- module_t * module;
- int counter;
-};
-
-static void free_fixed_variable( void * xvar, void * data )
-{
- object_free( ( (struct fixed_variable *)xvar )->key );
-}
-
-static void bind_variables_for_rule( void * xrule, void * xdata )
-{
- RULE * rule = (RULE *)xrule;
- struct bind_vars_t * data = (struct bind_vars_t *)xdata;
- if ( rule->procedure && rule->module == data->module )
- rule->procedure = function_bind_variables( rule->procedure, data->module, &data->counter );
-}
-
-void module_bind_variables( struct module_t * m )
-{
- if ( m != root_module() && m->rules )
- {
- struct bind_vars_t data;
- data.module = m;
- data.counter = m->num_fixed_variables;
- hashenumerate( m->rules, &bind_variables_for_rule, &data );
- module_set_fixed_variables( m, data.counter );
- }
-}
-
-int module_add_fixed_var( struct module_t * m, OBJECT * name, int * counter )
-{
- struct fixed_variable * v;
- int found;
-
- assert( !m->class_module );
-
- if ( !m->variable_indices )
- m->variable_indices = hashinit( sizeof( struct fixed_variable ), "variable index table" );
-
- v = (struct fixed_variable *)hash_insert( m->variable_indices, name, &found );
- if ( !found )
- {
- v->key = object_copy( name );
- v->n = (*counter)++;
- }
-
- return v->n;
-}
-
-LIST * var_get_and_clear_raw( module_t * m, OBJECT * name );
-
-static void load_fixed_variable( void * xvar, void * data )
-{
- struct fixed_variable * var = (struct fixed_variable *)xvar;
- struct module_t * m = (struct module_t *)data;
- if ( var->n >= m->num_fixed_variables )
- {
- m->fixed_variables[ var->n ] = var_get_and_clear_raw( m, var->key );
- }
-}
-
-void module_set_fixed_variables( struct module_t * m, int n_variables )
-{
- /* Reallocate */
- struct hash * variable_indices;
- LIST * * fixed_variables = BJAM_MALLOC( n_variables * sizeof( LIST * ) );
- if ( m->fixed_variables )
- {
- memcpy( fixed_variables, m->fixed_variables, n_variables * sizeof( LIST * ) );
- BJAM_FREE( m->fixed_variables );
- }
- m->fixed_variables = fixed_variables;
- if ( m->class_module )
- {
- variable_indices = m->class_module->variable_indices;
- }
- else
- {
- variable_indices = m->variable_indices;
- }
- if ( variable_indices )
- hashenumerate( variable_indices, &load_fixed_variable, m );
- m->num_fixed_variables = n_variables;
-}
-
-int module_get_fixed_var( struct module_t * m_, OBJECT * name )
-{
- struct fixed_variable * v;
- struct module_t * m = m_;
-
- if ( m->class_module )
- {
- m = m->class_module;
- }
-
- if ( !m->variable_indices )
- return -1;
-
- v = (struct fixed_variable *)hash_find( m->variable_indices, name );
- if ( v && v->n < m_->num_fixed_variables )
- {
- return v->n;
- }
- else
- {
- return -1;
- }
-}
diff --git a/tools/build/v2/engine/modules.h b/tools/build/v2/engine/modules.h
deleted file mode 100644
index a7d9752b8c..0000000000
--- a/tools/build/v2/engine/modules.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-#ifndef MODULES_DWA10182001_H
-# define MODULES_DWA10182001_H
-
-#include "lists.h"
-
-struct module_t
-{
- OBJECT * name;
- struct hash * rules;
- struct hash * variables;
- struct hash * variable_indices;
- int num_fixed_variables;
- LIST * * fixed_variables;
- struct hash * imported_modules;
- struct module_t * class_module;
- struct hash * native_rules;
- int user_module;
-};
-
-typedef struct module_t module_t ; /* MSVC debugger gets confused unless this is provided */
-
-module_t * bindmodule( OBJECT * name );
-module_t * root_module();
-void delete_module( module_t * );
-
-void import_module( LIST * module_names, module_t * target_module );
-LIST* imported_modules(module_t* module);
-
-struct hash * demand_rules( module_t * );
-
-void module_bind_variables( struct module_t * m );
-
-/*
- * After calling module_add_fixed_var, module_set_fixed_variables
- * must be called before accessing any variables in the module.
- */
-int module_add_fixed_var( struct module_t * m, OBJECT * name, int * n );
-void module_set_fixed_variables( struct module_t * m, int n );
-
-/*
- * Returns the index of the variable or -1 if none exists.
- */
-int module_get_fixed_var( struct module_t * m, OBJECT * name );
-
-void modules_done();
-
-#endif
-
diff --git a/tools/build/v2/engine/modules/order.c b/tools/build/v2/engine/modules/order.c
deleted file mode 100644
index ed632be3ac..0000000000
--- a/tools/build/v2/engine/modules/order.c
+++ /dev/null
@@ -1,147 +0,0 @@
-/* Copyright Vladimir Prus 2004. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../lists.h"
-#include "../strings.h"
-#include "../object.h"
-#include "../variable.h"
-
-
-/* Use a quite kludgy approach: when we add an order dependency from 'a' to 'b',
-   just append 'b' to the value of variable 'a'.
-*/
-LIST *add_pair( FRAME *frame, int flags )
-{
- LIST* arg = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( arg ), end = list_end( arg );
-
- var_set( frame->module, list_item( iter ), list_copy_range( arg, list_next( iter ), end ), VAR_APPEND );
-
- return L0;
-}
-
-/** Given a list and a value, returns the position of that value in
-   the list, or -1 if not found.
-*/
-int list_index(LIST* list, OBJECT* value)
-{
- int result = 0;
- LISTITER iter = list_begin(list), end = list_end(list);
- for(; iter != end; iter = list_next(iter), ++result) {
- if (object_equal(list_item(iter), value))
- return result;
- }
- return -1;
-}
-
-enum colors { white, gray, black };
-
-/* Main routine of the topological sort. Calls itself recursively on all
-   adjacent vertices that have not yet been visited. After that, 'current_vertex'
- is added to '*result_ptr'.
-*/
-void do_ts(int** graph, int current_vertex, int* colors, int** result_ptr)
-{
- int i;
-
- colors[current_vertex] = gray;
- for(i = 0; graph[current_vertex][i] != -1; ++i) {
- int adjacent_vertex = graph[current_vertex][i];
-
- if (colors[adjacent_vertex] == white)
- do_ts(graph, adjacent_vertex, colors, result_ptr);
- /* The vertex is either black, in which case we don't have to do
-           anything, or gray, in which case we have a loop. If we have a loop,
- it's not clear what useful diagnostic we can emit, so we emit
- nothing. */
- }
- colors[current_vertex] = black;
- **result_ptr = current_vertex;
- (*result_ptr)++;
-}
-
-void topological_sort(int** graph, int num_vertices, int* result)
-{
- int i;
- int* colors = (int*)BJAM_CALLOC(num_vertices, sizeof(int));
- for (i = 0; i < num_vertices; ++i)
- colors[i] = white;
-
- for(i = 0; i < num_vertices; ++i)
- if (colors[i] == white)
- do_ts(graph, i, colors, &result);
-
- BJAM_FREE(colors);
-}
-
-LIST *order( FRAME *frame, int flags )
-{
- LIST* arg = lol_get( frame->args, 0 );
- LIST* result = L0;
- int src;
- LISTITER iter = list_begin(arg), end = list_end(arg);
-
- /* We need to create a graph of order dependencies between
- the passed objects. We assume that there are no duplicates
- passed to 'add_pair'.
- */
- int length = list_length(arg);
- int** graph = (int**)BJAM_CALLOC(length, sizeof(int*));
- int* order = (int*)BJAM_MALLOC((length+1)*sizeof(int));
-
- for(src = 0; iter != end; iter = list_next(iter), ++src) {
-        /* For all objects this one depends upon, add elements
-           to 'graph'. */
- LIST* dependencies = var_get(frame->module, list_item(iter));
- int index = 0;
- LISTITER dep_iter = list_begin(dependencies), dep_end = list_end(dependencies);
-
- graph[src] = (int*)BJAM_CALLOC(list_length(dependencies)+1, sizeof(int));
- for(; dep_iter != dep_end; dep_iter = list_next(dep_iter)) {
- int dst = list_index(arg, list_item(dep_iter));
- if (dst != -1)
- graph[src][index++] = dst;
- }
- graph[src][index] = -1;
- }
-
- topological_sort(graph, length, order);
-
- {
- int index = length-1;
- for(; index >= 0; --index) {
- int i;
- iter = list_begin(arg), end = list_end(arg);
- for (i = 0; i < order[index]; ++i, iter = list_next(iter));
- result = list_push_back(result, object_copy(list_item(iter)));
- }
- }
-
- /* Clean up */
- {
- int i;
- for(i = 0; i < length; ++i)
- BJAM_FREE(graph[i]);
- BJAM_FREE(graph);
- BJAM_FREE(order);
- }
-
- return result;
-}
-
-void init_order()
-{
- {
- const char* args[] = { "first", "second", 0 };
- declare_native_rule("class@order", "add-pair", args, add_pair, 1);
- }
-
- {
- const char* args[] = { "objects", "*", 0 };
- declare_native_rule("class@order", "order", args, order, 1);
- }
-
-
-}
diff --git a/tools/build/v2/engine/modules/path.c b/tools/build/v2/engine/modules/path.c
deleted file mode 100644
index ca243f03d6..0000000000
--- a/tools/build/v2/engine/modules/path.c
+++ /dev/null
@@ -1,32 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../timestamp.h"
-#include "../object.h"
-
-LIST *path_exists( FRAME *frame, int flags )
-{
- LIST* l = lol_get( frame->args, 0 );
-
- time_t time;
- timestamp(list_front(l), &time);
- if (time != 0)
- {
- return list_new(object_new("true"));
- }
- else
- {
- return L0;
- }
-}
-
-void init_path()
-{
- {
- const char* args[] = { "location", 0 };
- declare_native_rule("path", "exists", args, path_exists, 1);
- }
-
-}
diff --git a/tools/build/v2/engine/modules/property-set.c b/tools/build/v2/engine/modules/property-set.c
deleted file mode 100644
index 9d0c5cf633..0000000000
--- a/tools/build/v2/engine/modules/property-set.c
+++ /dev/null
@@ -1,117 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../timestamp.h"
-#include "../object.h"
-#include "../strings.h"
-#include "../lists.h"
-#include "../variable.h"
-#include "../compile.h"
-
-LIST* get_grist(char* f)
-{
- char* end = strchr(f, '>');
- string s[1];
- LIST* result;
-
- string_new(s);
-
- string_append_range(s, f, end+1);
- result = list_new(object_new(s->value));
-
- string_free(s);
- return result;
-}
-
-/*
-rule create ( raw-properties * )
-{
- raw-properties = [ sequence.unique
- [ sequence.insertion-sort $(raw-properties) ] ] ;
-
- local key = $(raw-properties:J=-:E=) ;
-
- if ! $(.ps.$(key))
- {
- .ps.$(key) = [ new property-set $(raw-properties) ] ;
- }
- return $(.ps.$(key)) ;
-}
-*/
-
-LIST *property_set_create( FRAME *frame, int flags )
-{
- LIST* properties = lol_get( frame->args, 0 );
- LIST* sorted = L0;
-#if 0
- LIST* order_sensitive = 0;
-#endif
- LIST* unique;
- LIST* val;
- string var[1];
- OBJECT* name;
- LISTITER iter, end;
-
-#if 0
- /* Sort all properties which are not order sensitive */
- for(tmp = properties; tmp; tmp = tmp->next) {
- LIST* g = get_grist(tmp->string);
- LIST* att = call_rule("feature.attributes", frame, g, 0);
- if (list_in(att, "order-sensitive")) {
- order_sensitive = list_new( order_sensitive, copystr(tmp->string));
- } else {
- sorted = list_new( sorted, copystr(tmp->string));
- }
- list_free(att);
- }
-
- sorted = list_sort(sorted);
- sorted = list_append(sorted, order_sensitive);
- unique = list_unique(sorted);
-#endif
- sorted = list_sort(properties);
- unique = list_unique(sorted);
-
- string_new(var);
- string_append(var, ".ps.");
-
- iter = list_begin( unique ), end = list_end( unique );
- for( ; iter != end; iter = list_next( iter ) ) {
- string_append(var, object_str( list_item( iter ) ));
- string_push_back(var, '-');
- }
- name = object_new(var->value);
- val = var_get(frame->module, name);
- if (list_empty(val))
- {
- OBJECT* rulename = object_new("new");
- val = call_rule(rulename, frame,
- list_append(list_new(object_new("property-set")), unique), 0);
- object_free(rulename);
-
- var_set(frame->module, name, list_copy(val), VAR_SET);
- }
- else
- {
- list_free(unique);
- val = list_copy(val);
- }
- object_free(name);
-
- string_free(var);
- /* The 'unique' variable is freed in 'call_rule'. */
- list_free(sorted);
-
- return val;
-
-}
-
-void init_property_set()
-{
- {
- const char* args[] = { "raw-properties", "*", 0 };
- declare_native_rule("property-set", "create", args, property_set_create, 1);
- }
-}
diff --git a/tools/build/v2/engine/modules/regex.c b/tools/build/v2/engine/modules/regex.c
deleted file mode 100644
index 9002f4bad3..0000000000
--- a/tools/build/v2/engine/modules/regex.c
+++ /dev/null
@@ -1,98 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../timestamp.h"
-#include "../object.h"
-#include "../strings.h"
-#include "../regexp.h"
-#include "../compile.h"
-
-/*
-rule transform ( list * : pattern : indices * )
-{
- indices ?= 1 ;
- local result ;
- for local e in $(list)
- {
- local m = [ MATCH $(pattern) : $(e) ] ;
- if $(m)
- {
- result += $(m[$(indices)]) ;
- }
- }
- return $(result) ;
-}
-*/
-LIST *regex_transform( FRAME *frame, int flags )
-{
- LIST* l = lol_get( frame->args, 0 );
- LIST* pattern = lol_get( frame->args, 1 );
- LIST* indices_list = lol_get(frame->args, 2);
- int* indices = 0;
- int size;
- int* p;
- LIST* result = L0;
-
- string buf[1];
- string_new(buf);
-
- if (!list_empty(indices_list))
- {
- LISTITER iter = list_begin(indices_list), end = list_end(indices_list);
- size = list_length(indices_list);
- indices = (int*)BJAM_MALLOC(size*sizeof(int));
- for(p = indices; iter != end; iter = list_next(iter))
- {
- *p++ = atoi(object_str(list_item(iter)));
- }
- }
- else
- {
- size = 1;
- indices = (int*)BJAM_MALLOC(sizeof(int));
- *indices = 1;
- }
-
- {
- /* Result is cached and intentionally never freed */
- regexp *re = regex_compile( list_front( pattern ) );
-
- LISTITER iter = list_begin( l ), end = list_end( l );
- for( ; iter != end; iter = list_next( iter ) )
- {
- if( regexec( re, object_str( list_item( iter ) ) ) )
- {
- int i = 0;
- for(; i < size; ++i)
- {
- int index = indices[i];
- /* Skip empty submatches. Not sure it's right in all cases,
- but surely is right for the case for which this routine
- is optimized -- header scanning.
- */
- if (re->startp[index] != re->endp[index])
- {
- string_append_range( buf, re->startp[index], re->endp[index] );
- result = list_push_back( result, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
- }
- }
- }
- string_free( buf );
- }
-
- BJAM_FREE(indices);
-
- return result;
-}
-
-void init_regex()
-{
- {
- const char* args[] = { "list", "*", ":", "pattern", ":", "indices", "*", 0 };
- declare_native_rule("regex", "transform", args, regex_transform, 2);
- }
-}
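
The capture extraction above can be exercised directly against the engine's regexp API. A minimal sketch, assuming the engine headers are on the include path and that regex_compile() (the caching wrapper used above) is declared in builtins.h; the pattern and input string are invented:

    #include "../object.h"
    #include "../regexp.h"
    #include "../builtins.h"   /* assumed home of regex_compile() */
    #include <stdio.h>

    /* Pull out capture group 1, the way regex_transform() does for each
     * list element: startp[n]/endp[n] bracket submatch n after regexec().
     */
    static void show_first_group( const char * text )
    {
        regexp * re = regex_compile( object_new( "#include[ ]*\"([^\"]*)\"" ) );
        if ( regexec( re, text ) && re->startp[ 1 ] != re->endp[ 1 ] )
            printf( "%.*s\n", (int)( re->endp[ 1 ] - re->startp[ 1 ] ),
                    re->startp[ 1 ] );
    }

    /* show_first_group( "#include \"foo/bar.h\"" ) prints foo/bar.h */
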
diff --git a/tools/build/v2/engine/modules/sequence.c b/tools/build/v2/engine/modules/sequence.c
deleted file mode 100644
index 2b539966de..0000000000
--- a/tools/build/v2/engine/modules/sequence.c
+++ /dev/null
@@ -1,47 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "../native.h"
-#include "../object.h"
-
-# ifndef max
-# define max( a,b ) ((a)>(b)?(a):(b))
-# endif
-
-
-LIST *sequence_select_highest_ranked( FRAME *frame, int flags )
-{
- /* Returns all elements of 'elements' whose corresponding element in the */
- /* parallel list 'rank' is equal to the maximum value in 'rank'. */
-
- LIST* elements = lol_get( frame->args, 0 );
- LIST* rank = lol_get( frame->args, 1 );
- LISTITER iter, end, elements_iter, elements_end;
-
- LIST* result = L0;
- LIST* tmp;
- int highest_rank = -1;
-
- iter = list_begin(rank), end = list_end(rank);
- for (; iter != end; iter = list_next(iter))
- highest_rank = max(highest_rank, atoi(object_str(list_item(iter))));
-
- iter = list_begin(rank), end = list_end(rank);
- elements_iter = list_begin(elements), elements_end = list_end(elements);
- for (; iter != end; iter = list_next(iter), elements_iter = list_next(elements_iter))
- if (atoi(object_str(list_item(iter))) == highest_rank)
- result = list_push_back(result, object_copy(list_item(elements_iter)));
-
- return result;
-}
-
-void init_sequence()
-{
- {
- const char* args[] = { "elements", "*", ":", "rank", "*", 0 };
- declare_native_rule("sequence", "select-highest-ranked", args,
- sequence_select_highest_ranked, 1);
- }
-
-}
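
For illustration (values invented): called with elements a b c and ranks 1 3 3, the loops above find the maximum rank 3 and return b c -- the same behaviour the Jam-level sequence.select-highest-ranked rule exposes to build scripts.
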
diff --git a/tools/build/v2/engine/native.c b/tools/build/v2/engine/native.c
deleted file mode 100644
index b1d4278313..0000000000
--- a/tools/build/v2/engine/native.c
+++ /dev/null
@@ -1,39 +0,0 @@
-/* Copyright Vladimir Prus 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "native.h"
-#include "hash.h"
-#include "object.h"
-#include "assert.h"
-
-void declare_native_rule( const char * module, const char * rule, const char * * args,
- LIST * (*f)( FRAME *, int ), int version )
-{
- OBJECT * module_obj = 0;
- module_t * m;
- if ( module )
- {
- module_obj = object_new( module );
- }
- m = bindmodule( module_obj );
- if ( module_obj )
- {
- object_free( module_obj );
- }
- if (m->native_rules == 0)
- {
- m->native_rules = hashinit( sizeof( native_rule_t ), "native rules");
- }
-
- {
- native_rule_t *np;
- OBJECT * name = object_new( rule );
- int found;
- np = (native_rule_t *)hash_insert( m->native_rules, name, &found );
- np->name = name;
- assert( !found );
- np->procedure = function_builtin( f, 0, args );
- np->version = version;
- }
-}
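
declare_native_rule() is how each init_*() function above wires a C implementation to a Jam rule name. A minimal sketch of adding one more (the module, rule name and behaviour are hypothetical, not part of this tree):

    #include "native.h"

    /* Hypothetical native rule "demo.identity ( items * )": the args array
     * spells out the Jam signature exactly as init_regex()/init_sequence() do.
     */
    static LIST * demo_identity( FRAME * frame, int flags )
    {
        LIST * items = lol_get( frame->args, 0 );  /* first argument list */
        return list_copy( items );                 /* hand back a fresh copy */
    }

    void init_demo()
    {
        const char * args[] = { "items", "*", 0 };
        declare_native_rule( "demo", "identity", args, demo_identity, 1 );
    }
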
diff --git a/tools/build/v2/engine/native.h b/tools/build/v2/engine/native.h
deleted file mode 100644
index cdd63c844b..0000000000
--- a/tools/build/v2/engine/native.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/* Copyright David Abrahams 2003. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#ifndef NATIVE_H_VP_2003_12_09
-#define NATIVE_H_VP_2003_12_09
-
-#include "lists.h"
-#include "object.h"
-#include "frames.h"
-#include "function.h"
-#include "rules.h"
-
-struct native_rule_t
-{
- OBJECT * name;
- FUNCTION * procedure;
- /* Version of the interface that the native rule provides. It is
- possible that we may want to change the parameter set of an existing
- native rule. In that case, the version number should be incremented
- so that Boost.Build can check for the version it relies on.
-
- Versions are numbered from 1.
- */
- int version;
-};
-
-/* MSVC debugger gets confused unless this is provided */
-typedef struct native_rule_t native_rule_t ;
-
-void declare_native_rule( const char * module, const char * rule, const char * * args,
- LIST * (*f)( FRAME *, int ), int version );
-
-
-
-#endif
diff --git a/tools/build/v2/engine/object.c b/tools/build/v2/engine/object.c
deleted file mode 100644
index 399f04ae84..0000000000
--- a/tools/build/v2/engine/object.c
+++ /dev/null
@@ -1,379 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- * Copyright 2011 Steven Watanabe
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "object.h"
-# include <stddef.h>
-# include <stdlib.h>
-# include <assert.h>
-
-/*
- * object.c - object manipulation routines
- *
- * External functions:
- *
- * object_new() - create an object from a string
- * object_copy() - return a copy of an object
- * object_free() - free an object
- * object_str() - get the string value of an object
- * object_done() - free string tables
- *
- * This implementation builds a hash table of all strings, so that multiple
- * calls of object_new() on the same string allocate memory for the string once.
- * Strings are never actually freed.
- */
-
-#define OBJECT_MAGIC 0xa762e0e3u
-
-#ifndef object_copy
-
-struct hash_header
-{
-#ifndef NDEBUG
- unsigned int magic;
-#endif
- unsigned int hash;
- struct hash_item * next;
-};
-
-#endif
-
-struct hash_item
-{
- struct hash_header header;
- char data[1];
-};
-
-#define ALLOC_ALIGNMENT ( sizeof( struct hash_item ) - sizeof( struct hash_header ) )
-
-typedef struct string_set
-{
- unsigned int num;
- unsigned int size;
- struct hash_item * * data;
-} string_set;
-
-static string_set strhash;
-static int strtotal = 0;
-static int strcount_in = 0;
-static int strcount_out = 0;
-
-
-/*
- * Immortal string allocator implementation speeds string allocation and cuts
- * down on internal fragmentation.
- */
-
-# define STRING_BLOCK 4096
-typedef struct strblock
-{
- struct strblock * next;
- char data[STRING_BLOCK];
-} strblock;
-
-static strblock * strblock_chain = 0;
-
-/* Storage remaining in the current strblock */
-static char * storage_start = 0;
-static char * storage_finish = 0;
-
-
-/*
- * allocate() - Allocate n bytes of immortal string storage.
- */
-
-static char * allocate( size_t n )
-{
-#ifdef BJAM_NEWSTR_NO_ALLOCATE
- return (char*)BJAM_MALLOC(n);
-#else
- /* See if we can grab storage from an existing block. */
- size_t remaining = storage_finish - storage_start;
- n = ((n + ALLOC_ALIGNMENT - 1) / ALLOC_ALIGNMENT) * ALLOC_ALIGNMENT;
- if ( remaining >= n )
- {
- char * result = storage_start;
- storage_start += n;
- return result;
- }
- else /* Must allocate a new block. */
- {
- strblock * new_block;
- size_t nalloc = n;
- if ( nalloc < STRING_BLOCK )
- nalloc = STRING_BLOCK;
-
- /* Allocate a new block and link into the chain. */
- new_block = (strblock *)BJAM_MALLOC( offsetof( strblock, data[0] ) + nalloc * sizeof( new_block->data[0] ) );
- if ( new_block == 0 )
- return 0;
- new_block->next = strblock_chain;
- strblock_chain = new_block;
-
- /* Take future allocations out of the larger remaining space. */
- if ( remaining < nalloc - n )
- {
- storage_start = new_block->data + n;
- storage_finish = new_block->data + nalloc;
- }
- return new_block->data;
- }
-#endif
-}
-
-static unsigned int hash_keyval( const char * key )
-{
- unsigned int hash = 0;
- unsigned i;
- unsigned int len = strlen( key );
-
- for ( i = 0; i < len / sizeof( unsigned int ); ++i )
- {
- unsigned int val;
- memcpy( &val, key, sizeof( unsigned int ) );
- hash = hash * 2147059363 + val;
- key += sizeof( unsigned int );
- }
-
- {
- unsigned int val = 0;
- memcpy( &val, key, len % sizeof( unsigned int ) );
- hash = hash * 2147059363 + val;
- }
-
- hash += (hash >> 17);
-
- return hash;
-}
-
-static void string_set_init(string_set * set)
-{
- set->size = 0;
- set->num = 4;
- set->data = (struct hash_item * *)BJAM_MALLOC( set->num * sizeof( struct hash_item * ) );
- memset( set->data, 0, set->num * sizeof( struct hash_item * ) );
-}
-
-static void string_set_done(string_set * set)
-{
- BJAM_FREE( set->data );
-}
-
-static void string_set_resize(string_set *set)
-{
- unsigned i;
- string_set new_set;
- new_set.num = set->num * 2;
- new_set.size = set->size;
- new_set.data = (struct hash_item * *)BJAM_MALLOC( sizeof( struct hash_item * ) * new_set.num );
- memset(new_set.data, 0, sizeof(struct hash_item *) * new_set.num);
- for ( i = 0; i < set->num; ++i )
- {
- while ( set->data[i] )
- {
- struct hash_item * temp = set->data[i];
- unsigned pos = temp->header.hash % new_set.num;
- set->data[i] = temp->header.next;
- temp->header.next = new_set.data[pos];
- new_set.data[pos] = temp;
- }
- }
- BJAM_FREE( set->data );
- *set = new_set;
-}
-
-static const char * string_set_insert ( string_set * set, const char * string )
-{
- unsigned hash = hash_keyval( string );
- unsigned pos = hash % set->num;
- unsigned l;
-
- struct hash_item * result;
-
- for ( result = set->data[pos]; result; result = result->header.next )
- {
- if ( strcmp( result->data, string ) == 0 )
- {
- return result->data;
- }
- }
-
- if( set->size >= set->num )
- {
- string_set_resize( set );
- pos = hash % set->num;
- }
-
- l = strlen( string );
- result = (struct hash_item *)allocate( sizeof( struct hash_header ) + l + 1 );
- result->header.hash = hash;
- result->header.next = set->data[pos];
-#ifndef NDEBUG
- result->header.magic = OBJECT_MAGIC;
-#endif
- memcpy( result->data, string, l + 1 );
- assert( hash_keyval( result->data ) == result->header.hash );
- set->data[pos] = result;
- strtotal += l + 1;
- ++set->size;
-
- return result->data;
-}
-
-
-static struct hash_item * object_get_item( OBJECT * obj )
-{
- return (struct hash_item *)( (char *)obj - offsetof( struct hash_item, data ) );
-}
-
-
-static void object_validate( OBJECT * obj )
-{
- assert( object_get_item( obj )->header.magic == OBJECT_MAGIC );
-}
-
-
-/*
- * object_new() - create an object from a string.
- */
-
-OBJECT * object_new( const char * string )
-{
-#ifdef BJAM_NO_MEM_CACHE
- int l = strlen( string );
- struct hash_item * m = (struct hash_item *)BJAM_MALLOC( sizeof(struct hash_header) + l + 1 );
-
- strtotal += l + 1;
- memcpy( m->data, string, l + 1 );
- m->header.magic = OBJECT_MAGIC;
- return (OBJECT *)m->data;
-#else
- if ( ! strhash.data )
- string_set_init( &strhash );
-
- strcount_in += 1;
-
- return (OBJECT *)string_set_insert( &strhash, string );
-#endif
-}
-
-#ifndef object_copy
-
-/*
- * object_copy() - return a copy of an object
- */
-
-OBJECT * object_copy( OBJECT * obj )
-{
- object_validate( obj );
-#ifdef BJAM_NO_MEM_CACHE
- return object_new( object_str( obj ) );
-#else
- strcount_in += 1;
- return obj;
-#endif
-}
-
-
-/*
- * object_free() - free an object
- */
-
-void object_free( OBJECT * obj )
-{
- object_validate( obj );
-#ifdef BJAM_NO_MEM_CACHE
- BJAM_FREE( object_get_item( obj ) );
-#endif
- strcount_out += 1;
-}
-
-
-/*
- * object_str() - return the string value of an object.
- */
-
-const char * object_str( OBJECT * obj )
-{
- object_validate( obj );
- return (const char *)obj;
-}
-
-
-/*
- * object_equal() - compare two objects
- */
-
-int object_equal( OBJECT * lhs, OBJECT * rhs )
-{
- object_validate( lhs );
- object_validate( rhs );
-#ifdef BJAM_NO_MEM_CACHE
- return strcmp(object_str(lhs), object_str(rhs)) == 0;
-#else
- assert( (lhs == rhs) == ( strcmp(object_str(lhs), object_str(rhs)) == 0 ) );
- return lhs == rhs;
-#endif
-}
-
-
-/*
- * object_hash() - returns the hash value of an object
- */
-
-unsigned int object_hash( OBJECT * obj )
-{
- object_validate( obj );
-#ifdef BJAM_NO_MEM_CACHE
- return hash_keyval( object_str( obj ) );
-#else
- return object_get_item( obj )->header.hash;
-#endif
-}
-
-#endif
-
-/*
- * object_done() - free string tables.
- */
-
-void object_done()
-{
-
-#ifdef BJAM_NEWSTR_NO_ALLOCATE
-
- unsigned i;
-
- for ( i = 0; i < strhash.num; ++i )
- {
- while ( strhash.data[i] )
- {
- struct hash_item * item = strhash.data[i];
- strhash.data[i] = item->header.next;
- BJAM_FREE( item );
- }
- }
-
-#else
-
- /* Reclaim string blocks. */
- while ( strblock_chain != 0 )
- {
- strblock * n = strblock_chain->next;
- BJAM_FREE(strblock_chain);
- strblock_chain = n;
- }
-
-#endif
-
- string_set_done( &strhash );
-
- if ( DEBUG_MEM )
- printf( "%dK in strings\n", strtotal / 1024 );
-
- /* printf( "--- %d strings of %d dangling\n", strcount_in-strcount_out, strcount_in ); */
-}
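
The interning contract spelled out in the header comment above (one allocation per distinct string, object_equal() as a pointer comparison unless BJAM_NO_MEM_CACHE is defined) can be seen from a short sketch; the string literal is arbitrary:

    #include "object.h"
    #include <assert.h>
    #include <string.h>

    static void object_interning_sketch( void )
    {
        OBJECT * a = object_new( "hello" );
        OBJECT * b = object_new( "hello" );      /* same table entry as 'a' */
        assert( object_equal( a, b ) );
        assert( strcmp( object_str( a ), "hello" ) == 0 );
        object_free( a );   /* only updates the in/out counters above;      */
        object_free( b );   /* the storage itself lives until object_done() */
    }
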
diff --git a/tools/build/v2/engine/object.h b/tools/build/v2/engine/object.h
deleted file mode 100644
index 1c123f8b0f..0000000000
--- a/tools/build/v2/engine/object.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2011 Steven Watanabe
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * object.h - object manipulation routines
- */
-
-#ifndef BOOST_JAM_OBJECT_H
-#define BOOST_JAM_OBJECT_H
-
-typedef struct _object OBJECT;
-
-OBJECT * object_new ( const char * );
-void object_done ( void );
-
-#if defined(NDEBUG) && !defined(BJAM_NO_MEM_CACHE)
-
-struct hash_header
-{
- unsigned int hash;
- struct hash_item * next;
-};
-
-#define object_str( obj ) ( (const char *)( obj ) )
-#define object_copy( obj ) ( obj )
-#define object_free( obj ) ( (void)0 )
-#define object_equal( lhs, rhs ) ( ( lhs ) == ( rhs ) )
-#define object_hash( obj ) ( ((struct hash_header *)( (char *)( obj ) - sizeof(struct hash_header) ))->hash )
-
-#else
-
-const char * object_str ( OBJECT * );
-OBJECT * object_copy ( OBJECT * );
-void object_free ( OBJECT * );
-int object_equal ( OBJECT *, OBJECT * );
-unsigned int object_hash ( OBJECT * );
-
-#endif
-
-#endif
diff --git a/tools/build/v2/engine/option.h b/tools/build/v2/engine/option.h
deleted file mode 100644
index 99ef620da5..0000000000
--- a/tools/build/v2/engine/option.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * option.h - command line option processing
- *
- * {o >o
- * \ -) "Command line option."
- */
-
-typedef struct bjam_option
-{
- char flag; /* filled in by getoption() */
- char *val; /* set to random address if true */
-} bjam_option;
-
-# define N_OPTS 256
-
-int getoptions( int argc, char **argv, char *opts, bjam_option *optv );
-char * getoptval( bjam_option *optv, char opt, int subopt );
diff --git a/tools/build/v2/engine/output.c b/tools/build/v2/engine/output.c
deleted file mode 100644
index a0154c6ea3..0000000000
--- a/tools/build/v2/engine/output.c
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- Copyright 2007 Rene Rivera
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#include "jam.h"
-#include "output.h"
-#include "object.h"
-#include <stdio.h>
-
-#define bjam_out (stdout)
-#define bjam_err (stderr)
-
-static void out_
-(
- char const * data,
- FILE * io
-)
-{
- while ( *data )
- {
- size_t len = strcspn(data,"\r");
- data += fwrite(data,1,len,io);
- if ( *data == '\r' ) ++data;
- }
-}
-
-
-void out_action
-(
- char const * action,
- char const * target,
- char const * command,
- char const * out_data,
- char const * err_data,
- int exit_reason
-)
-{
- /* Print out the action+target line. For a quiet action the caller passes
- * a null action, so the line is not printed.
- */
- if ( action )
- {
- fprintf( bjam_out, "%s %s\n", action, target );
- }
-
- /* Print out the command executed if given -d+2. */
- if ( DEBUG_EXEC )
- {
- fputs( command, bjam_out );
- fputc( '\n', bjam_out );
- }
-
- /* Print out the command executed to the command stream. */
- if ( globs.cmdout )
- {
- fputs( command, globs.cmdout );
- }
-
- switch ( exit_reason )
- {
- case EXIT_OK:
- break;
- case EXIT_FAIL:
- break;
- case EXIT_TIMEOUT:
- {
- /* Process expired, make user aware with explicit message. */
- if ( action )
- {
- /* But only output for non-quietly actions. */
- fprintf( bjam_out, "%ld second time limit exceeded\n", globs.timeout );
- }
- break;
- }
- default:
- break;
- }
-
- /* Print out the command output, if requested, or if the program failed. */
- if ( action || exit_reason != EXIT_OK)
- {
- /* But only output for non-quietly actions. */
- if ( ( 0 != out_data ) &&
- ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) ||
- ( globs.pipe_action == 0 ) ) )
- {
- out_( out_data, bjam_out );
- }
- if ( ( 0 != err_data ) &&
- ( globs.pipe_action & 2 /* STDERR_FILENO */ ) )
- {
- out_( err_data, bjam_err );
- }
- }
-
- fflush( bjam_out );
- fflush( bjam_err );
- fflush( globs.cmdout );
-}
-
-
-OBJECT * outf_int( int value )
-{
- char buffer[50];
- sprintf( buffer, "%i", value );
- return object_new( buffer );
-}
-
-
-OBJECT * outf_double( double value )
-{
- char buffer[50];
- sprintf( buffer, "%f", value );
- return object_new( buffer );
-}
-
-
-OBJECT * outf_time( time_t value )
-{
- char buffer[50];
- strftime( buffer, 49, "%Y-%m-%d %H:%M:%SZ", gmtime( &value ) );
- return object_new( buffer );
-}
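
out_action() is the single funnel through which action output reaches the user; a sketch of the call an executor would make after running an updating action (all values invented):

    #include "output.h"

    static void report_example( void )
    {
        /* Non-null action: the "action target" line is printed; captured
         * stdout/stderr follow according to globs.pipe_action, and EXIT_OK
         * suppresses the failure/timeout messages.
         */
        out_action( "compile.c.c", "bin/example.o",
                    "gcc -c -o bin/example.o example.c",
                    "",      /* captured stdout */
                    "",      /* captured stderr */
                    EXIT_OK );
    }
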
diff --git a/tools/build/v2/engine/output.h b/tools/build/v2/engine/output.h
deleted file mode 100644
index 64175e67ef..0000000000
--- a/tools/build/v2/engine/output.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- Copyright 2007 Rene Rivera
- Distributed under the Boost Software License, Version 1.0.
- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-*/
-
-#ifndef BJAM_OUTPUT_H
-#define BJAM_OUTPUT_H
-
-#include "object.h"
-#include <time.h>
-
-#define EXIT_OK 0
-#define EXIT_FAIL 1
-#define EXIT_TIMEOUT 2
-
-void out_action(
- const char * action,
- const char * target,
- const char * command,
- const char * out_data,
- const char * err_data,
- int exit_reason
- );
-
-OBJECT * outf_int( int value );
-OBJECT * outf_double( double value );
-OBJECT * outf_time( time_t value );
-
-#endif
diff --git a/tools/build/v2/engine/parse.c b/tools/build/v2/engine/parse.c
deleted file mode 100644
index 167ad1745a..0000000000
--- a/tools/build/v2/engine/parse.c
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "object.h"
-#include "modules.h"
-#include "frames.h"
-#include "function.h"
-
-/*
- * parse.c - make and destroy parse trees as driven by the parser
- *
- * 09/07/00 (seiwald) - ref count on PARSE to avoid freeing when used,
- * as per Matt Armstrong.
- * 09/11/00 (seiwald) - structure reworked to reflect that (*func)()
- * returns a LIST *.
- */
-
-static PARSE * yypsave;
-
-void parse_file( OBJECT * f, FRAME * frame )
-{
- /* Suspend scan of current file and push this new file in the stream. */
- yyfparse( f );
-
- /* Now parse each block of rules and execute it. Execute it outside of the
- * parser so that recursive calls to yyrun() work (no recursive yyparse's).
- */
-
- for ( ; ; )
- {
- PARSE * p;
- FUNCTION * func;
-
- /* Filled by yyparse() calling parse_save(). */
- yypsave = 0;
-
- /* If parse error or empty parse, outta here. */
- if ( yyparse() || !( p = yypsave ) )
- break;
-
- /* Run the parse tree. */
- func = function_compile( p );
- parse_free( p );
- list_free( function_run( func, frame, stack_global() ) );
- function_free( func );
- }
-}
-
-
-void parse_save( PARSE * p )
-{
- yypsave = p;
-}
-
-
-PARSE * parse_make(
- int type,
- PARSE * left,
- PARSE * right,
- PARSE * third,
- OBJECT * string,
- OBJECT * string1,
- int num )
-{
- PARSE * p = (PARSE *)BJAM_MALLOC( sizeof( PARSE ) );
-
- p->type = type;
- p->left = left;
- p->right = right;
- p->third = third;
- p->string = string;
- p->string1 = string1;
- p->num = num;
- p->refs = 1;
- p->rulename = 0;
-
- if ( left )
- {
- p->file = object_copy( left->file );
- p->line = left->line;
- }
- else
- {
- yyinput_stream( &p->file, &p->line );
- p->file = object_copy( p->file );
- }
-
- return p;
-}
-
-
-void parse_refer( PARSE * p )
-{
- ++p->refs;
-}
-
-
-void parse_free( PARSE * p )
-{
- if ( --p->refs )
- return;
-
- if ( p->string )
- object_free( p->string );
- if ( p->string1 )
- object_free( p->string1 );
- if ( p->left )
- parse_free( p->left );
- if ( p->right )
- parse_free( p->right );
- if ( p->third )
- parse_free( p->third );
- if ( p->rulename )
- object_free( p->rulename );
- if ( p->file )
- object_free( p->file );
-
- BJAM_FREE( (char *)p );
-}
diff --git a/tools/build/v2/engine/parse.h b/tools/build/v2/engine/parse.h
deleted file mode 100644
index 882a029f42..0000000000
--- a/tools/build/v2/engine/parse.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#ifndef PARSE_DWA20011020_H
-#define PARSE_DWA20011020_H
-
-#include "frames.h"
-#include "modules.h"
-#include "lists.h"
-
-/*
- * parse.h - make and destroy parse trees as driven by the parser.
- */
-
-#define PARSE_APPEND 0
-#define PARSE_FOREACH 1
-#define PARSE_IF 2
-#define PARSE_EVAL 3
-#define PARSE_INCLUDE 4
-#define PARSE_LIST 5
-#define PARSE_LOCAL 6
-#define PARSE_MODULE 7
-#define PARSE_CLASS 8
-#define PARSE_NULL 9
-#define PARSE_ON 10
-#define PARSE_RULE 11
-#define PARSE_RULES 12
-#define PARSE_SET 13
-#define PARSE_SETCOMP 14
-#define PARSE_SETEXEC 15
-#define PARSE_SETTINGS 16
-#define PARSE_SWITCH 17
-#define PARSE_WHILE 18
-
-/*
- * Parse tree node.
- */
-
-struct _PARSE {
- int type;
- PARSE * left;
- PARSE * right;
- PARSE * third;
- OBJECT * string;
- OBJECT * string1;
- int num;
- int refs;
- OBJECT * rulename;
- OBJECT * file;
- int line;
-};
-
-void parse_file( OBJECT *, FRAME * );
-void parse_save( PARSE * );
-
-PARSE * parse_make(
- int type,
- PARSE * left,
- PARSE * right,
- PARSE * third,
- OBJECT * string,
- OBJECT * string1,
- int num );
-
-void parse_refer ( PARSE * );
-void parse_free ( PARSE * );
-LIST * parse_evaluate( PARSE *, FRAME * );
-
-#endif
diff --git a/tools/build/v2/engine/patchlevel.h b/tools/build/v2/engine/patchlevel.h
deleted file mode 100644
index 18224c1c31..0000000000
--- a/tools/build/v2/engine/patchlevel.h
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* Keep JAMVERSYM in sync with VERSION. */
-/* It can be accessed as $(JAMVERSION) in the Jamfile. */
-
-#define VERSION_MAJOR 2011
-#define VERSION_MINOR 12
-#define VERSION_PATCH 0
-#define VERSION_MAJOR_SYM "2011"
-#define VERSION_MINOR_SYM "12"
-#define VERSION_PATCH_SYM "00"
-#define VERSION "2011.12"
-#define JAMVERSYM "JAMVERSION=2011.12"
diff --git a/tools/build/v2/engine/pathsys.h b/tools/build/v2/engine/pathsys.h
deleted file mode 100644
index 978dbf4a7a..0000000000
--- a/tools/build/v2/engine/pathsys.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * pathsys.h - PATHNAME struct
- */
-
-/*
- * PATHNAME - a name of a file, broken into <grist>dir/base/suffix(member)
- *
- * <grist> is salt to distinguish between targets that otherwise would
- * have the same name: it never appears in the bound name of a target.
- * (member) is an archive member name: the syntax is arbitrary, but must
- * agree in path_parse(), path_build() and the Jambase.
- */
-
-#ifndef PATHSYS_VP_20020211_H
-# define PATHSYS_VP_20020211_H
-
-#include "jam.h"
-#include "strings.h"
-#include "object.h"
-
-typedef struct _pathname PATHNAME;
-typedef struct _pathpart PATHPART;
-
-struct _pathpart
-{
- const char * ptr;
- int len;
-};
-
-struct _pathname
-{
- PATHPART part[6];
-
-#define f_grist part[0]
-#define f_root part[1]
-#define f_dir part[2]
-#define f_base part[3]
-#define f_suffix part[4]
-#define f_member part[5]
-};
-
-void path_build( PATHNAME * f, string * file, int binding );
-void path_build1( PATHNAME * f, string * file );
-
-void path_parse( const char * file, PATHNAME * f );
-void path_parent( PATHNAME * f );
-
-#ifdef NT
-
-/** Returns an object_new-allocated string with the long equivalent of
- 'short_path'. If none exists -- i.e. 'short_path' is already a long
- path -- it is returned unaltered. */
-OBJECT * short_path_to_long_path( OBJECT * short_path );
-
-#endif
-
-/** Given a path, returns an object that can be
- used as a unique key for that path. Equivalent
- paths such as a/b, A\B, and a\B on NT all yield the
- same key.
- */
-OBJECT * path_as_key( OBJECT * path );
-void path_add_key( OBJECT * path );
-
-#ifdef USE_PATHUNIX
-/** Returns a static pointer to the system dependent path to the temporary
- directory. NOTE: *without* a trailing path separator.
-*/
-const char * path_tmpdir( void );
-
-/** Returns a new temporary name.
-*/
-OBJECT * path_tmpnam( void );
-
-/** Returns a new temporary path.
-*/
-OBJECT * path_tmpfile( void );
-#endif
-
-/** Given the first argument to 'main', return a full path to
- our executable. Returns null in the unlikely case it
- cannot be determined. Caller is responsible for freeing
- the string.
-
- Implemented in jam.c
-*/
-char * executable_path (const char *argv0);
-
-void path_done( void );
-
-#endif
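
path_parse()/path_build() split and reassemble names around the PATHPART table above. A minimal sketch of the split (file name invented); the implementation follows in pathunix.c below:

    #include "pathsys.h"
    #include <stdio.h>

    static void pathname_sketch( void )
    {
        PATHNAME f;
        path_parse( "src/util/name.test.c(member)", &f );
        /* f.f_dir    -> "src/util"      f.f_base   -> "name.test"   */
        /* f.f_suffix -> ".c"            f.f_member -> "member"      */
        printf( "%.*s\n", f.f_base.len, f.f_base.ptr );   /* name.test */
    }
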
diff --git a/tools/build/v2/engine/pathunix.c b/tools/build/v2/engine/pathunix.c
deleted file mode 100644
index a8428df8d6..0000000000
--- a/tools/build/v2/engine/pathunix.c
+++ /dev/null
@@ -1,587 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-# include "pathsys.h"
-# include "strings.h"
-# include "object.h"
-# include "filesys.h"
-# include <time.h>
-# include <stdlib.h>
-# include <assert.h>
-# ifndef OS_NT
-# include <unistd.h>
-# endif
-
-# ifdef USE_PATHUNIX
-
-/*
- * pathunix.c - manipulate file names on UNIX, NT, OS2, AmigaOS
- *
- * External routines:
- *
- * path_parse() - split a file name into dir/base/suffix/member
- * path_build() - build a filename given dir/base/suffix/member
- * path_parent() - make a PATHNAME point to its parent dir
- *
- * path_parse() and path_build() just manipulate a string and a structure;
- * they do not make system calls.
- *
- * 04/08/94 (seiwald) - Coherent/386 support added.
- * 12/26/93 (seiwald) - handle dir/.suffix properly in path_build()
- * 12/19/94 (mikem) - solaris string table insanity support
- * 12/21/94 (wingerd) Use backslashes for pathnames - the NT way.
- * 02/14/95 (seiwald) - parse and build /xxx properly
- * 02/23/95 (wingerd) Compilers on NT can handle "/" in pathnames, so we
- * should expect hdr searches to come up with strings
- * like "thing/thing.h". So we need to test for "/" as
- * well as "\" when parsing pathnames.
- * 03/16/95 (seiwald) - fixed accursed typo on line 69.
- * 05/03/96 (seiwald) - split from filent.c, fileunix.c
- * 12/20/96 (seiwald) - when looking for the rightmost . in a file name,
- * don't include the archive member name.
- * 01/13/01 (seiwald) - turn on \ handling on UNIX, on by accident
- */
-
-/*
- * path_parse() - split a file name into dir/base/suffix/member
- */
-
-void path_parse( const char * file, PATHNAME * f )
-{
- const char * p;
- const char * q;
- const char * end;
-
- memset( (char *)f, 0, sizeof( *f ) );
-
- /* Look for <grist> */
-
- if ( ( file[0] == '<' ) && ( p = strchr( file, '>' ) ) )
- {
- f->f_grist.ptr = file;
- f->f_grist.len = p - file;
- file = p + 1;
- }
-
- /* Look for dir/ */
-
- p = strrchr( file, '/' );
-
-# if PATH_DELIM == '\\'
- /* On NT, look for dir\ as well */
- {
- char *p1 = strrchr( file, '\\' );
- p = p1 > p ? p1 : p;
- }
-# endif
-
- if ( p )
- {
- f->f_dir.ptr = file;
- f->f_dir.len = p - file;
-
- /* Special case for / - dirname is /, not "" */
-
- if ( !f->f_dir.len )
- f->f_dir.len = 1;
-
-# if PATH_DELIM == '\\'
- /* Special case for D:/ - dirname is D:/, not "D:" */
-
- if ( f->f_dir.len == 2 && file[1] == ':' )
- f->f_dir.len = 3;
-# endif
-
- file = p + 1;
- }
-
- end = file + strlen( file );
-
- /* Look for (member) */
-
- if ( ( p = strchr( file, '(' ) ) && ( end[ -1 ] == ')' ) )
- {
- f->f_member.ptr = p + 1;
- f->f_member.len = end - p - 2;
- end = p;
- }
-
- /* Look for .suffix */
- /* This would be memrchr() */
-
- p = 0;
- q = file;
-
- while ( ( q = (char *)memchr( q, '.', end - q ) ) )
- p = q++;
-
- if ( p )
- {
- f->f_suffix.ptr = p;
- f->f_suffix.len = end - p;
- end = p;
- }
-
- /* Leaves base */
-
- f->f_base.ptr = file;
- f->f_base.len = end - file;
-}
-
-/*
- * path_delims - the string of legal path delimiters
- */
-static char path_delims[] = {
- PATH_DELIM,
-# if PATH_DELIM == '\\'
- '/',
-# endif
- 0
-};
-
-/*
- * is_path_delim() - true iff c is a path delimiter
- */
-static int is_path_delim( char c )
-{
- char* p = strchr( path_delims, c );
- return p && *p;
-}
-
-/*
- * as_path_delim() - convert c to a path delimiter if it isn't one
- * already
- */
-static char as_path_delim( char c )
-{
- return is_path_delim( c ) ? c : PATH_DELIM;
-}
-
-/*
- * path_build() - build a filename given dir/base/suffix/member
- *
- * To avoid changing slash direction on NT when reconstituting paths,
- * instead of unconditionally appending PATH_DELIM we check the
- * past-the-end character of the previous path element. If it is in
- * path_delims, we append that, and only append PATH_DELIM as a last
- * resort. This heuristic is based on the fact that PATHNAME objects
- * are usually the result of calling path_parse, which leaves the
- * original slashes in the past-the-end position. Correctness depends
- * on the assumption that all strings are zero terminated, so a
- * past-the-end character will always be available.
- *
- * As an attendant patch, we had to ensure that backslashes are used
- * explicitly in timestamp.c
- */
-
-void
-path_build(
- PATHNAME *f,
- string *file,
- int binding )
-{
- file_build1( f, file );
-
- /* Don't prepend root if it's . or directory is rooted */
-# if PATH_DELIM == '/'
-
- if ( f->f_root.len
- && !( f->f_root.len == 1 && f->f_root.ptr[0] == '.' )
- && !( f->f_dir.len && f->f_dir.ptr[0] == '/' ) )
-
-# else /* NT */
-
- if ( f->f_root.len
- && !( f->f_root.len == 1 && f->f_root.ptr[0] == '.' )
- && !( f->f_dir.len && f->f_dir.ptr[0] == '/' )
- && !( f->f_dir.len && f->f_dir.ptr[0] == '\\' )
- && !( f->f_dir.len && f->f_dir.ptr[1] == ':' ) )
-
-# endif /* PATH_DELIM == '/' */
-
- {
- string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len );
- /* If 'root' already ends with a path delimiter,
- don't add yet another one. */
- if ( ! is_path_delim( f->f_root.ptr[f->f_root.len-1] ) )
- string_push_back( file, as_path_delim( f->f_root.ptr[f->f_root.len] ) );
- }
-
- if ( f->f_dir.len )
- string_append_range( file, f->f_dir.ptr, f->f_dir.ptr + f->f_dir.len );
-
- /* UNIX: Put / between dir and file */
- /* NT: Put \ between dir and file */
-
- if ( f->f_dir.len && ( f->f_base.len || f->f_suffix.len ) )
- {
- /* UNIX: Special case for dir / : don't add another / */
- /* NT: Special case for dir \ : don't add another \ */
-
-# if PATH_DELIM == '\\'
- if ( !( f->f_dir.len == 3 && f->f_dir.ptr[1] == ':' ) )
-# endif
- if ( !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[0] ) ) )
- string_push_back( file, as_path_delim( f->f_dir.ptr[f->f_dir.len] ) );
- }
-
- if ( f->f_base.len )
- {
- string_append_range( file, f->f_base.ptr, f->f_base.ptr + f->f_base.len );
- }
-
- if ( f->f_suffix.len )
- {
- string_append_range( file, f->f_suffix.ptr, f->f_suffix.ptr + f->f_suffix.len );
- }
-
- if ( f->f_member.len )
- {
- string_push_back( file, '(' );
- string_append_range( file, f->f_member.ptr, f->f_member.ptr + f->f_member.len );
- string_push_back( file, ')' );
- }
-}
-
-/*
- * path_parent() - make a PATHNAME point to its parent dir
- */
-
-void
-path_parent( PATHNAME *f )
-{
- /* just set everything else to nothing */
-
- f->f_base.ptr =
- f->f_suffix.ptr =
- f->f_member.ptr = "";
-
- f->f_base.len =
- f->f_suffix.len =
- f->f_member.len = 0;
-}
-
-#ifdef NT
-#include <windows.h>
-
-/* The definition of this in winnt.h is not ANSI-C compatible. */
-#undef INVALID_FILE_ATTRIBUTES
-#define INVALID_FILE_ATTRIBUTES ((DWORD)-1)
-
-OBJECT * path_as_key( OBJECT * path );
-static void path_write_key( char * path_, string * out );
-
-void ShortPathToLongPath( char * short_path, string * out )
-{
- const char * new_element;
- unsigned long saved_size;
- char * p;
-
- if ( short_path[0] == '\0' )
- {
- return;
- }
-
- if ( short_path[0] == '\\' && short_path[1] == '\0')
- {
- string_push_back( out, '\\' );
- return;
- }
-
- if ( short_path[1] == ':' &&
- ( short_path[2] == '\0' ||
- ( short_path[2] == '\\' && short_path[3] == '\0' ) ) )
- {
- string_push_back( out, toupper( short_path[0] ) );
- string_push_back( out, ':' );
- string_push_back( out, '\\' );
- return;
- }
-
- /* '/' already handled. */
- if ( ( p = strrchr( short_path, '\\' ) ) )
- {
- char saved;
- new_element = p + 1;
-
- /* special case \ */
- if ( p == short_path )
- ++p;
-
- /* special case D:\ */
- if ( p == short_path + 2 && short_path[1] == ':' )
- ++p;
-
- saved = *p;
- *p = '\0';
- path_write_key( short_path, out );
- *p = saved;
- }
- else
- {
- new_element = short_path;
- }
-
- if ( out->size && out->value[ out->size - 1 ] != '\\' )
- {
- string_push_back( out, '\\' );
- }
-
- saved_size = out->size;
- string_append( out, new_element );
-
- if ( ! ( new_element[0] == '.' && new_element[1] == '\0' ||
- new_element[0] == '.' && new_element[1] == '.'
- && new_element[2] == '\0' ) )
- {
- WIN32_FIND_DATA fd;
- HANDLE hf = 0;
- hf = FindFirstFile( out->value, &fd );
-
- /* If the file exists, replace the name. */
- if ( hf != INVALID_HANDLE_VALUE )
- {
- string_truncate( out, saved_size );
- string_append( out, fd.cFileName );
- FindClose( hf );
- }
- }
-}
-
-OBJECT * short_path_to_long_path( OBJECT * short_path )
-{
- return path_as_key( short_path );
-}
-
-struct path_key_entry
-{
- OBJECT * path;
- OBJECT * key;
-};
-
-static struct hash * path_key_cache;
-
-static void path_write_key( char * path_, string * out )
-{
- struct path_key_entry * result;
- OBJECT * path = object_new( path_ );
- int found;
-
- /* This is only called by path_as_key, which initializes the cache. */
- assert( path_key_cache );
-
- result = (struct path_key_entry *)hash_insert( path_key_cache, path, &found );
- if ( !found )
- {
- /* path_ is already normalized. */
- result->path = path;
- ShortPathToLongPath( path_, out );
- result->key = object_new( out->value );
- }
- else
- {
- object_free( path );
- string_append( out, object_str( result->key ) );
- }
-
-}
-
-static void normalize_path( string * path )
-{
- char * s;
- for ( s = path->value; s < path->value + path->size; ++s )
- {
- if ( *s == '/' )
- *s = '\\';
- else
- *s = tolower( *s );
- }
- /* Strip trailing "/" */
- if ( path->size != 0 && path->size != 3 && path->value[ path->size - 1 ] == '\\' )
- {
- string_pop_back( path );
- }
-}
-
-void path_add_key( OBJECT * path )
-{
- struct path_key_entry * result;
- int found;
-
- if ( ! path_key_cache )
- path_key_cache = hashinit( sizeof( struct path_key_entry ), "path to key" );
-
- result = (struct path_key_entry *)hash_insert( path_key_cache, path, &found );
- if ( !found )
- {
- string buf[1];
- OBJECT * normalized;
- struct path_key_entry * nresult;
- result->path = path;
- string_copy( buf, object_str( path ) );
- normalize_path( buf );
- normalized = object_new( buf->value );
- string_free( buf );
- nresult = (struct path_key_entry *)hash_insert( path_key_cache, normalized, &found );
- if ( !found || nresult == result )
- {
- nresult->path = object_copy( normalized );
- nresult->key = object_copy( path );
- }
- object_free( normalized );
- if ( nresult != result )
- {
- result->path = object_copy( path );
- result->key = object_copy( nresult->key );
- }
- }
-}
-
-OBJECT * path_as_key( OBJECT * path )
-{
- struct path_key_entry * result;
- int found;
-
- if ( ! path_key_cache )
- path_key_cache = hashinit( sizeof( struct path_key_entry ), "path to key" );
-
- result = (struct path_key_entry *)hash_insert( path_key_cache, path, &found );
- if ( !found )
- {
- string buf[1];
- OBJECT * normalized;
- struct path_key_entry * nresult;
- result->path = path;
- string_copy( buf, object_str( path ) );
- normalize_path( buf );
- normalized = object_new( buf->value );
- nresult = (struct path_key_entry *)hash_insert( path_key_cache, normalized, &found );
- if ( !found || nresult == result )
- {
- string long_path[1];
- nresult->path = normalized;
- string_new( long_path );
- ShortPathToLongPath( buf->value, long_path );
- nresult->path = object_copy( normalized );
- nresult->key = object_new( long_path->value );
- string_free( long_path );
- }
- string_free( buf );
- object_free( normalized );
- if ( nresult != result )
- {
- result->path = object_copy( path );
- result->key = object_copy( nresult->key );
- }
- }
-
- return object_copy( result->key );
-}
-
-static void free_path_key_entry( void * xentry, void * data )
-{
- struct path_key_entry * entry = (struct path_key_entry *)xentry;
- object_free( entry->path );
- object_free( entry->key );
-}
-
-void path_done( void )
-{
- if ( path_key_cache )
- {
- hashenumerate( path_key_cache, &free_path_key_entry, (void *)0 );
- hashdone( path_key_cache );
- }
-}
-
-#else
-
-void path_add_key( OBJECT * path )
-{
-}
-
-OBJECT * path_as_key( OBJECT * path )
-{
- return object_copy( path );
-}
-
-void path_done( void )
-{
-}
-
-#endif
-
-static string path_tmpdir_buffer[1];
-static const char * path_tmpdir_result = 0;
-
-const char * path_tmpdir()
-{
- if (!path_tmpdir_result)
- {
- # ifdef OS_NT
- DWORD pathLength = 0;
- pathLength = GetTempPath(pathLength,NULL);
- string_new(path_tmpdir_buffer);
- string_reserve(path_tmpdir_buffer,pathLength);
- pathLength = GetTempPathA(pathLength,path_tmpdir_buffer[0].value);
- path_tmpdir_buffer[0].value[pathLength-1] = '\0';
- path_tmpdir_buffer[0].size = pathLength-1;
- # else
- const char * t = getenv("TMPDIR");
- if (!t)
- {
- t = "/tmp";
- }
- string_new(path_tmpdir_buffer);
- string_append(path_tmpdir_buffer,t);
- # endif
- path_tmpdir_result = path_tmpdir_buffer[0].value;
- }
- return path_tmpdir_result;
-}
-
-OBJECT * path_tmpnam(void)
-{
- char name_buffer[64];
- # ifdef OS_NT
- unsigned long c0 = GetCurrentProcessId();
- # else
- unsigned long c0 = getpid();
- # endif
- static unsigned long c1 = 0;
- if (0 == c1) c1 = time(0)&0xffff;
- c1 += 1;
- sprintf(name_buffer,"jam%lx%lx.000",c0,c1);
- return object_new(name_buffer);
-}
-
-OBJECT * path_tmpfile(void)
-{
- OBJECT * result = 0;
- OBJECT * tmpnam;
-
- string file_path;
- string_copy(&file_path,path_tmpdir());
- string_push_back(&file_path,PATH_DELIM);
- tmpnam = path_tmpnam();
- string_append(&file_path,object_str(tmpnam));
- object_free(tmpnam);
- result = object_new(file_path.value);
- string_free(&file_path);
-
- return result;
-}
-
-
-# endif /* unix, NT, OS/2, AmigaOS */
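
A short sketch of the key equivalence that pathsys.h promises for NT (paths invented): both spellings normalise to the same lower-case, backslash-separated form, so they cache to the same key; on other platforms path_as_key() simply copies its argument.

    #include "pathsys.h"
    #include "object.h"

    static void path_key_sketch( void )
    {
        OBJECT * p1 = object_new( "some\\dir\\file" );
        OBJECT * p2 = object_new( "SOME/DIR/FILE" );
        OBJECT * k1 = path_as_key( p1 );
        OBJECT * k2 = path_as_key( p2 );
        /* On NT: object_equal( k1, k2 ) holds. */
        object_free( k1 ); object_free( k2 );
        object_free( p1 ); object_free( p2 );
    }
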
diff --git a/tools/build/v2/engine/pwd.c b/tools/build/v2/engine/pwd.c
deleted file mode 100644
index 93d8126032..0000000000
--- a/tools/build/v2/engine/pwd.c
+++ /dev/null
@@ -1,76 +0,0 @@
-/* Copyright Vladimir Prus 2002, Rene Rivera 2005. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "jam.h"
-#include "lists.h"
-#include "object.h"
-#include "pathsys.h"
-#include "mem.h"
-
-#include <limits.h>
-#include <errno.h>
-
-/* MinGW on windows declares PATH_MAX in limits.h */
-#if defined(NT) && ! defined(__GNUC__)
-#include <direct.h>
-#define PATH_MAX _MAX_PATH
-#else
-#include <unistd.h>
-#if defined(__COMO__)
- #include <linux/limits.h>
-#endif
-#endif
-
-#ifndef PATH_MAX
- #define PATH_MAX 1024
-#endif
-
-/* The current directory can't change in bjam, so optimize this to cache
-** the result.
-*/
-static OBJECT * pwd_result = NULL;
-
-
-LIST*
-pwd(void)
-{
- if (!pwd_result)
- {
- int buffer_size = PATH_MAX;
- char * result_buffer = 0;
- do
- {
- char * buffer = BJAM_MALLOC_RAW(buffer_size);
- result_buffer = getcwd(buffer,buffer_size);
- if (result_buffer)
- {
- #ifdef NT
- OBJECT * result = object_new(result_buffer);
- pwd_result = short_path_to_long_path(result);
- object_free( result );
- #else
- pwd_result = object_new(result_buffer);
- #endif
- }
- buffer_size *= 2;
- BJAM_FREE_RAW(buffer);
- }
- while (!pwd_result && errno == ERANGE);
-
- if (!pwd_result)
- {
- perror("can not get current directory");
- return L0;
- }
- }
- return list_new( object_copy( pwd_result ) );
-}
-
-void pwd_done( void )
-{
- if( pwd_result )
- {
- object_free( pwd_result );
- }
-}
diff --git a/tools/build/v2/engine/pwd.h b/tools/build/v2/engine/pwd.h
deleted file mode 100644
index e6ed268bb2..0000000000
--- a/tools/build/v2/engine/pwd.h
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Copyright Vladimir Prus 2002. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#ifndef PWD_H
-#define PWD_H
-
-LIST * pwd( void );
-void pwd_done( void );
-
-#endif
diff --git a/tools/build/v2/engine/regexp.c b/tools/build/v2/engine/regexp.c
deleted file mode 100644
index b51b85b09c..0000000000
--- a/tools/build/v2/engine/regexp.c
+++ /dev/null
@@ -1,1328 +0,0 @@
-/*
- * regcomp and regexec -- regsub and regerror are elsewhere
- *
- * Copyright (c) 1986 by University of Toronto.
- * Written by Henry Spencer. Not derived from licensed software.
- *
- * Permission is granted to anyone to use this software for any
- * purpose on any computer system, and to redistribute it freely,
- * subject to the following restrictions:
- *
- * 1. The author is not responsible for the consequences of use of
- * this software, no matter how awful, even if they arise
- * from defects in it.
- *
- * 2. The origin of this software must not be misrepresented, either
- * by explicit claim or by omission.
- *
- * 3. Altered versions must be plainly marked as such, and must not
- * be misrepresented as being the original software.
- *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
- *** hoptoad!gnu, on 27 Dec 1986, to add \n as an alternative to |
- *** to assist in implementing egrep.
- *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
- *** hoptoad!gnu, on 27 Dec 1986, to add \< and \> for word-matching
- *** as in BSD grep and ex.
- *** THIS IS AN ALTERED VERSION. It was altered by John Gilmore,
- *** hoptoad!gnu, on 28 Dec 1986, to optimize characters quoted with \.
- *** THIS IS AN ALTERED VERSION. It was altered by James A. Woods,
- *** ames!jaw, on 19 June 1987, to quash a regcomp() redundancy.
- *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
- *** seiwald@vix.com, on 28 August 1993, for use in jam. Regmagic.h
- *** was moved into regexp.h, and the include of regexp.h now uses "'s
- *** to avoid conflicting with the system regexp.h. Const, bless its
- *** soul, was removed so it can compile everywhere. The declaration
- *** of strchr() was in conflict on AIX, so it was removed (as it is
- *** happily defined in string.h).
- *** THIS IS AN ALTERED VERSION. It was altered by Christopher Seiwald
- *** seiwald@perforce.com, on 20 January 2000, to use function prototypes.
- *
- * Beware that some of this code is subtly aware of the way operator precedence
- * is structured in regular expressions. Serious changes in regular-expression
- * syntax might require a total rethink.
- */
-
-
-#include "jam.h"
-#include "regexp.h"
-#include <stdio.h>
-#include <ctype.h>
-#ifndef ultrix
- #include <stdlib.h>
-#endif
-#include <string.h>
-
-
-/*
- * The "internal use only" fields in regexp.h are present to pass info from
- * compile to execute that permits the execute phase to run lots faster on
- * simple cases. They are:
- *
- * regstart char that must begin a match; '\0' if none obvious.
- * reganch is the match anchored (at beginning-of-line only)?
- * regmust string (pointer into program) that match must include, or NULL.
- * regmlen length of regmust string.
- *
- * Regstart and reganch permit very fast decisions on suitable starting points
- * for a match, cutting down the work a lot. Regmust permits fast rejection of
- * lines that cannot possibly match. The regmust tests are costly enough that
- * regcomp() supplies a regmust only if the r.e. contains something potentially
- * expensive (at present, the only such thing detected is * or + at the start of
- * the r.e., which can involve a lot of backup). Regmlen is supplied because the
- * test in regexec() needs it and regcomp() is computing it anyway.
- */
-
-/*
- * Structure for regexp "program". This is essentially a linear encoding of a
- * nondeterministic finite-state machine (aka syntax charts or "railroad normal
- * form" in parsing technology). Each node is an opcode plus a "next" pointer,
- * possibly plus an operand. "Next" pointers of all nodes except BRANCH
- * implement concatenation; a "next" pointer with a BRANCH on both ends of it is
- * connecting two alternatives. [Here we have one of the subtle syntax
- * dependencies: an individual BRANCH, as opposed to a collection of them, is
- * never concatenated with anything because of operator precedence.] The operand
- * of some types of node is a literal string; for others, it is a node leading
- * into a sub-FSM. In particular, the operand of a BRANCH node is the first node
- * of the branch. [NB this is *not* a tree structure: the tail of the branch
- * connects to the thing following the set of BRANCHes.] The opcodes are:
- */
-
-/* definition number opnd? meaning */
-#define END 0 /* no End of program. */
-#define BOL 1 /* no Match "" at beginning of line. */
-#define EOL 2 /* no Match "" at end of line. */
-#define ANY 3 /* no Match any one character. */
-#define ANYOF 4 /* str Match any character in this string. */
-#define ANYBUT 5 /* str Match any character not in this string. */
-#define BRANCH 6 /* node Match this alternative, or the next... */
-#define BACK 7 /* no Match "", "next" ptr points backward. */
-#define EXACTLY 8 /* str Match this string. */
-#define NOTHING 9 /* no Match empty string. */
-#define STAR 10 /* node Match this (simple) thing 0 or more times. */
-#define PLUS 11 /* node Match this (simple) thing 1 or more times. */
-#define WORDA 12 /* no Match "" at wordchar, where prev is nonword */
-#define WORDZ 13 /* no Match "" at nonwordchar, where prev is word */
-#define OPEN 20 /* no Mark this point in input as start of #n. */
- /* OPEN+1 is number 1, etc. */
-#define CLOSE 30 /* no Analogous to OPEN. */
-
-
-/*
- * Opcode notes:
- *
- * BRANCH The set of branches constituting a single choice are hooked
- * together with their "next" pointers, since precedence prevents
- * anything being concatenated to any individual branch. The
- * "next" pointer of the last BRANCH in a choice points to the
- * thing following the whole choice. This is also where the
- * final "next" pointer of each individual branch points; each
- * branch starts with the operand node of a BRANCH node.
- *
- * BACK Normal "next" pointers all implicitly point forward; BACK
- * exists to make loop structures possible.
- *
- * STAR,PLUS '?', and complex '*' and '+', are implemented as circular
- * BRANCH structures using BACK. Simple cases (one character
- * per match) are implemented with STAR and PLUS for speed
- * and to minimize recursive plunges.
- *
- * OPEN,CLOSE ...are numbered at compile time.
- */
-
-/*
- * A node is one char of opcode followed by two chars of "next" pointer.
- * "Next" pointers are stored as two 8-bit pieces, high order first. The
- * value is a positive offset from the opcode of the node containing it.
- * An operand, if any, simply follows the node. (Note that much of the
- * code generation knows about this implicit relationship.)
- *
- * Using two bytes for the "next" pointer is vast overkill for most things,
- * but allows patterns to get big without disasters.
- */
-#define OP(p) (*(p))
-#define NEXT(p) (((*((p)+1)&0377)<<8) + (*((p)+2)&0377))
-#define OPERAND(p) ((p) + 3)
-
-/*
- * See regmagic.h for one further detail of program structure.
- */
-
-
-/*
- * Utility definitions.
- */
-#ifndef CHARBITS
-#define UCHARAT(p) ((int)*(const unsigned char *)(p))
-#else
-#define UCHARAT(p) ((int)*(p)&CHARBITS)
-#endif
-
-#define FAIL(m) { regerror(m); return(NULL); }
-#define ISMULT(c) ((c) == '*' || (c) == '+' || (c) == '?')
-
-/*
- * Flags to be passed up and down.
- */
-#define HASWIDTH 01 /* Known never to match null string. */
-#define SIMPLE 02 /* Simple enough to be STAR/PLUS operand. */
-#define SPSTART 04 /* Starts with * or +. */
-#define WORST 0 /* Worst case. */
-
-/*
- * Global work variables for regcomp().
- */
-static char *regparse; /* Input-scan pointer. */
-static int regnpar; /* () count. */
-static char regdummy;
-static char *regcode; /* Code-emit pointer; &regdummy = don't. */
-static long regsize; /* Code size. */
-
-/*
- * Forward declarations for regcomp()'s friends.
- */
-#ifndef STATIC
-#define STATIC static
-#endif
-STATIC char *reg( int paren, int *flagp );
-STATIC char *regbranch( int *flagp );
-STATIC char *regpiece( int *flagp );
-STATIC char *regatom( int *flagp );
-STATIC char *regnode( int op );
-STATIC char *regnext( register char *p );
-STATIC void regc( int b );
-STATIC void reginsert( char op, char *opnd );
-STATIC void regtail( char *p, char *val );
-STATIC void regoptail( char *p, char *val );
-#ifdef STRCSPN
-STATIC int strcspn();
-#endif
-
-/*
- - regcomp - compile a regular expression into internal code
- *
- * We can't allocate space until we know how big the compiled form will be,
- * but we can't compile it (and thus know how big it is) until we've got a
- * place to put the code. So we cheat: we compile it twice, once with code
- * generation turned off and size counting turned on, and once "for real".
- * This also means that we don't allocate space until we are sure that the
- * thing really will compile successfully, and we never have to move the
- * code and thus invalidate pointers into it. (Note that it has to be in
- * one piece because free() must be able to free it all.)
- *
- * Beware that the optimization-preparation code in here knows about some
- * of the structure of the compiled regexp.
- */
-regexp *
-regcomp( const char *exp )
-{
- register regexp *r;
- register char *scan;
- register char *longest;
- register unsigned len;
- int flags;
-
- if (exp == NULL)
- FAIL("NULL argument");
-
- /* First pass: determine size, legality. */
-#ifdef notdef
- if (exp[0] == '.' && exp[1] == '*') exp += 2; /* aid grep */
-#endif
- regparse = (char *)exp;
- regnpar = 1;
- regsize = 0L;
- regcode = &regdummy;
- regc(MAGIC);
- if (reg(0, &flags) == NULL)
- return(NULL);
-
- /* Small enough for pointer-storage convention? */
- if (regsize >= 32767L) /* Probably could be 65535L. */
- FAIL("regexp too big");
-
- /* Allocate space. */
- r = (regexp *)BJAM_MALLOC(sizeof(regexp) + (unsigned)regsize);
- if (r == NULL)
- FAIL("out of space");
-
- /* Second pass: emit code. */
- regparse = (char *)exp;
- regnpar = 1;
- regcode = r->program;
- regc(MAGIC);
- if (reg(0, &flags) == NULL)
- return(NULL);
-
- /* Dig out information for optimizations. */
- r->regstart = '\0'; /* Worst-case defaults. */
- r->reganch = 0;
- r->regmust = NULL;
- r->regmlen = 0;
- scan = r->program+1; /* First BRANCH. */
- if (OP(regnext(scan)) == END) { /* Only one top-level choice. */
- scan = OPERAND(scan);
-
- /* Starting-point info. */
- if (OP(scan) == EXACTLY)
- r->regstart = *OPERAND(scan);
- else if (OP(scan) == BOL)
- r->reganch++;
-
- /*
- * If there's something expensive in the r.e., find the
- * longest literal string that must appear and make it the
- * regmust. Resolve ties in favor of later strings, since
- * the regstart check works with the beginning of the r.e.
- * and avoiding duplication strengthens checking. Not a
- * strong reason, but sufficient in the absence of others.
- */
- if (flags&SPSTART) {
- longest = NULL;
- len = 0;
- for (; scan != NULL; scan = regnext(scan))
- if (OP(scan) == EXACTLY && strlen(OPERAND(scan)) >= len) {
- longest = OPERAND(scan);
- len = strlen(OPERAND(scan));
- }
- r->regmust = longest;
- r->regmlen = len;
- }
- }
-
- return(r);
-}
-
-/*
- - reg - regular expression, i.e. main body or parenthesized thing
- *
- * Caller must absorb opening parenthesis.
- *
- * Combining parenthesis handling with the base level of regular expression
- * is a trifle forced, but the need to tie the tails of the branches to what
- * follows makes it hard to avoid.
- */
-static char *
-reg(
- int paren, /* Parenthesized? */
- int *flagp )
-{
- register char *ret;
- register char *br;
- register char *ender;
- register int parno = 0;
- int flags;
-
- *flagp = HASWIDTH; /* Tentatively. */
-
- /* Make an OPEN node, if parenthesized. */
- if (paren) {
- if (regnpar >= NSUBEXP)
- FAIL("too many ()");
- parno = regnpar;
- regnpar++;
- ret = regnode(OPEN+parno);
- } else
- ret = NULL;
-
- /* Pick up the branches, linking them together. */
- br = regbranch(&flags);
- if (br == NULL)
- return(NULL);
- if (ret != NULL)
- regtail(ret, br); /* OPEN -> first. */
- else
- ret = br;
- if (!(flags&HASWIDTH))
- *flagp &= ~HASWIDTH;
- *flagp |= flags&SPSTART;
- while (*regparse == '|' || *regparse == '\n') {
- regparse++;
- br = regbranch(&flags);
- if (br == NULL)
- return(NULL);
- regtail(ret, br); /* BRANCH -> BRANCH. */
- if (!(flags&HASWIDTH))
- *flagp &= ~HASWIDTH;
- *flagp |= flags&SPSTART;
- }
-
- /* Make a closing node, and hook it on the end. */
- ender = regnode((paren) ? CLOSE+parno : END);
- regtail(ret, ender);
-
- /* Hook the tails of the branches to the closing node. */
- for (br = ret; br != NULL; br = regnext(br))
- regoptail(br, ender);
-
- /* Check for proper termination. */
- if (paren && *regparse++ != ')') {
- FAIL("unmatched ()");
- } else if (!paren && *regparse != '\0') {
- if (*regparse == ')') {
- FAIL("unmatched ()");
- } else
- FAIL("junk on end"); /* "Can't happen". */
- /* NOTREACHED */
- }
-
- return(ret);
-}
-
-/*
- - regbranch - one alternative of an | operator
- *
- * Implements the concatenation operator.
- */
-static char *
-regbranch( int *flagp )
-{
- register char *ret;
- register char *chain;
- register char *latest;
- int flags;
-
- *flagp = WORST; /* Tentatively. */
-
- ret = regnode(BRANCH);
- chain = NULL;
- while (*regparse != '\0' && *regparse != ')' &&
- *regparse != '\n' && *regparse != '|') {
- latest = regpiece(&flags);
- if (latest == NULL)
- return(NULL);
- *flagp |= flags&HASWIDTH;
- if (chain == NULL) /* First piece. */
- *flagp |= flags&SPSTART;
- else
- regtail(chain, latest);
- chain = latest;
- }
- if (chain == NULL) /* Loop ran zero times. */
- (void) regnode(NOTHING);
-
- return(ret);
-}
-
-/*
- - regpiece - something followed by possible [*+?]
- *
- * Note that the branching code sequences used for ? and the general cases
- * of * and + are somewhat optimized: they use the same NOTHING node as
- * both the endmarker for their branch list and the body of the last branch.
- * It might seem that this node could be dispensed with entirely, but the
- * endmarker role is not redundant.
- */
-static char *
-regpiece( int *flagp )
-{
- register char *ret;
- register char op;
- register char *next;
- int flags;
-
- ret = regatom(&flags);
- if (ret == NULL)
- return(NULL);
-
- op = *regparse;
- if (!ISMULT(op)) {
- *flagp = flags;
- return(ret);
- }
-
- if (!(flags&HASWIDTH) && op != '?')
- FAIL("*+ operand could be empty");
- *flagp = (op != '+') ? (WORST|SPSTART) : (WORST|HASWIDTH);
-
- if (op == '*' && (flags&SIMPLE))
- reginsert(STAR, ret);
- else if (op == '*') {
- /* Emit x* as (x&|), where & means "self". */
- reginsert(BRANCH, ret); /* Either x */
- regoptail(ret, regnode(BACK)); /* and loop */
- regoptail(ret, ret); /* back */
- regtail(ret, regnode(BRANCH)); /* or */
- regtail(ret, regnode(NOTHING)); /* null. */
- } else if (op == '+' && (flags&SIMPLE))
- reginsert(PLUS, ret);
- else if (op == '+') {
- /* Emit x+ as x(&|), where & means "self". */
- next = regnode(BRANCH); /* Either */
- regtail(ret, next);
- regtail(regnode(BACK), ret); /* loop back */
- regtail(next, regnode(BRANCH)); /* or */
- regtail(ret, regnode(NOTHING)); /* null. */
- } else if (op == '?') {
- /* Emit x? as (x|) */
- reginsert(BRANCH, ret); /* Either x */
- regtail(ret, regnode(BRANCH)); /* or */
- next = regnode(NOTHING); /* null. */
- regtail(ret, next);
- regoptail(ret, next);
- }
- regparse++;
- if (ISMULT(*regparse))
- FAIL("nested *?+");
-
- return(ret);
-}
-
-/*
- - regatom - the lowest level
- *
- * Optimization: gobbles an entire sequence of ordinary characters so that
- * it can turn them into a single node, which is smaller to store and
- * faster to run. Backslashed characters are exceptions, each becoming a
- * separate node; the code is simpler that way and it's not worth fixing.
- */
-static char *
-regatom( int *flagp )
-{
- register char *ret;
- int flags;
-
- *flagp = WORST; /* Tentatively. */
-
- switch (*regparse++) {
- /* FIXME: these chars only have meaning at beg/end of pat? */
- case '^':
- ret = regnode(BOL);
- break;
- case '$':
- ret = regnode(EOL);
- break;
- case '.':
- ret = regnode(ANY);
- *flagp |= HASWIDTH|SIMPLE;
- break;
- case '[': {
- register int classr;
- register int classend;
-
- if (*regparse == '^') { /* Complement of range. */
- ret = regnode(ANYBUT);
- regparse++;
- } else
- ret = regnode(ANYOF);
- if (*regparse == ']' || *regparse == '-')
- regc(*regparse++);
- while (*regparse != '\0' && *regparse != ']') {
- if (*regparse == '-') {
- regparse++;
- if (*regparse == ']' || *regparse == '\0')
- regc('-');
- else {
- classr = UCHARAT(regparse-2)+1;
- classend = UCHARAT(regparse);
- if (classr > classend+1)
- FAIL("invalid [] range");
- for (; classr <= classend; classr++)
- regc(classr);
- regparse++;
- }
- } else
- regc(*regparse++);
- }
- regc('\0');
- if (*regparse != ']')
- FAIL("unmatched []");
- regparse++;
- *flagp |= HASWIDTH|SIMPLE;
- }
- break;
- case '(':
- ret = reg(1, &flags);
- if (ret == NULL)
- return(NULL);
- *flagp |= flags&(HASWIDTH|SPSTART);
- break;
- case '\0':
- case '|':
- case '\n':
- case ')':
- FAIL("internal urp"); /* Supposed to be caught earlier. */
- break;
- case '?':
- case '+':
- case '*':
- FAIL("?+* follows nothing");
- break;
- case '\\':
- switch (*regparse++) {
- case '\0':
- FAIL("trailing \\");
- break;
- case '<':
- ret = regnode(WORDA);
- break;
- case '>':
- ret = regnode(WORDZ);
- break;
- /* FIXME: Someday handle \1, \2, ... */
- default:
- /* Handle general quoted chars in exact-match routine */
- goto de_fault;
- }
- break;
- de_fault:
- default:
- /*
- * Encode a string of characters to be matched exactly.
- *
- * This is a bit tricky due to quoted chars and due to
- * '*', '+', and '?' taking the SINGLE char previous
- * as their operand.
- *
- * On entry, the char at regparse[-1] is going to go
- * into the string, no matter what it is. (It could be
- * following a \ if we are entered from the '\' case.)
- *
- * Basic idea is to pick up a good char in ch and
- * examine the next char. If it's *+? then we twiddle.
- * If it's \ then we frozzle. If it's other magic char
- * we push ch and terminate the string. If none of the
- * above, we push ch on the string and go around again.
- *
- * regprev is used to remember where "the current char"
- * starts in the string, if due to a *+? we need to back
- * up and put the current char in a separate, 1-char, string.
- * When regprev is NULL, ch is the only char in the
- * string; this is used in *+? handling, and in setting
- * flags |= SIMPLE at the end.
- */
- {
- char *regprev;
- register char ch;
-
- regparse--; /* Look at cur char */
- ret = regnode(EXACTLY);
- for ( regprev = 0 ; ; ) {
- ch = *regparse++; /* Get current char */
- switch (*regparse) { /* look at next one */
-
- default:
- regc(ch); /* Add cur to string */
- break;
-
- case '.': case '[': case '(':
- case ')': case '|': case '\n':
- case '$': case '^':
- case '\0':
- /* FIXME, $ and ^ should not always be magic */
- magic:
- regc(ch); /* dump cur char */
- goto done; /* and we are done */
-
- case '?': case '+': case '*':
- if (!regprev) /* If just ch in str, */
- goto magic; /* use it */
- /* End mult-char string one early */
- regparse = regprev; /* Back up parse */
- goto done;
-
- case '\\':
- regc(ch); /* Cur char OK */
- switch (regparse[1]){ /* Look after \ */
- case '\0':
- case '<':
- case '>':
- /* FIXME: Someday handle \1, \2, ... */
- goto done; /* Not quoted */
- default:
- /* Backup point is \, scan * point is after it. */
- regprev = regparse;
- regparse++;
- continue; /* NOT break; */
- }
- }
- regprev = regparse; /* Set backup point */
- }
- done:
- regc('\0');
- *flagp |= HASWIDTH;
- if (!regprev) /* One char? */
- *flagp |= SIMPLE;
- }
- break;
- }
-
- return(ret);
-}
-
-/*
- - regnode - emit a node
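- *
- * A node is three bytes: the opcode followed by a two-byte "next" offset
- * that regtail() fills in later.  During the sizing pass (regcode ==
- * &regdummy) only regsize is advanced.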
- */
-static char * /* Location. */
-regnode( int op )
-{
- register char *ret;
- register char *ptr;
-
- ret = regcode;
- if (ret == &regdummy) {
- regsize += 3;
- return(ret);
- }
-
- ptr = ret;
- *ptr++ = op;
- *ptr++ = '\0'; /* Null "next" pointer. */
- *ptr++ = '\0';
- regcode = ptr;
-
- return(ret);
-}
-
-/*
- - regc - emit (if appropriate) a byte of code
- */
-static void
-regc( int b )
-{
- if (regcode != &regdummy)
- *regcode++ = b;
- else
- regsize++;
-}
-
-/*
- - reginsert - insert an operator in front of already-emitted operand
- *
- * Means relocating the operand.
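- *
- * The operand is shifted up by three bytes to make room for the new
- * node, whose "next" offset is left zeroed for regtail() to fill in.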
- */
-static void
-reginsert(
- char op,
- char *opnd )
-{
- register char *src;
- register char *dst;
- register char *place;
-
- if (regcode == &regdummy) {
- regsize += 3;
- return;
- }
-
- src = regcode;
- regcode += 3;
- dst = regcode;
- while (src > opnd)
- *--dst = *--src;
-
- place = opnd; /* Op node, where operand used to be. */
- *place++ = op;
- *place++ = '\0';
- *place++ = '\0';
-}
-
-/*
- - regtail - set the next-pointer at the end of a node chain
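- *
- * Walks to the last node in the chain and stores the offset to 'val' in
- * that node's two offset bytes (high byte first); for BACK nodes the
- * offset is taken in the reverse direction.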
- */
-static void
-regtail(
- char *p,
- char *val )
-{
- register char *scan;
- register char *temp;
- register int offset;
-
- if (p == &regdummy)
- return;
-
- /* Find last node. */
- scan = p;
- for (;;) {
- temp = regnext(scan);
- if (temp == NULL)
- break;
- scan = temp;
- }
-
- if (OP(scan) == BACK)
- offset = scan - val;
- else
- offset = val - scan;
- *(scan+1) = (offset>>8)&0377;
- *(scan+2) = offset&0377;
-}
-
-/*
- - regoptail - regtail on operand of first argument; nop if operandless
- */
-
-static void
-regoptail(
- char *p,
- char *val )
-{
- /* "Operandless" and "op != BRANCH" are synonymous in practice. */
- if (p == NULL || p == &regdummy || OP(p) != BRANCH)
- return;
- regtail(OPERAND(p), val);
-}
-
-/*
- * regexec and friends
- */
-
-/*
- * Global work variables for regexec().
- */
-static const char *reginput; /* String-input pointer. */
-static const char *regbol; /* Beginning of input, for ^ check. */
-static const char **regstartp; /* Pointer to startp array. */
-static const char **regendp; /* Ditto for endp. */
-
-/*
- * Forwards.
- */
-STATIC int regtry( regexp *prog, const char *string );
-STATIC int regmatch( char *prog );
-STATIC int regrepeat( char *p );
-
-#ifdef DEBUG
-int regnarrate = 0;
-void regdump();
-STATIC char *regprop();
-#endif
-
-/*
- - regexec - match a regexp against a string
- */
-int
-regexec(
- register regexp *prog,
- register const char *string )
-{
- register char *s;
-
- /* Be paranoid... */
- if (prog == NULL || string == NULL) {
- regerror("NULL parameter");
- return(0);
- }
-
- /* Check validity of program. */
- if (UCHARAT(prog->program) != MAGIC) {
- regerror("corrupted program");
- return(0);
- }
-
- /* If there is a "must appear" string, look for it. */
- if ( prog->regmust != NULL )
- {
- s = (char *)string;
- while ( ( s = strchr( s, prog->regmust[ 0 ] ) ) != NULL )
- {
- if ( !strncmp( s, prog->regmust, prog->regmlen ) )
- break; /* Found it. */
- ++s;
- }
- if ( s == NULL ) /* Not present. */
- return 0;
- }
-
- /* Mark beginning of line for ^ . */
- regbol = (char *)string;
-
- /* Simplest case: anchored match need be tried only once. */
- if ( prog->reganch )
- return regtry( prog, string );
-
- /* Messy cases: unanchored match. */
- s = (char *)string;
- if (prog->regstart != '\0')
- /* We know what char it must start with. */
- while ((s = strchr(s, prog->regstart)) != NULL) {
- if (regtry(prog, s))
- return(1);
- s++;
- }
- else
- /* We do not -- general case. */
- do {
- if ( regtry( prog, s ) )
- return( 1 );
- } while ( *s++ != '\0' );
-
- /* Failure. */
- return 0;
-}
-
-
-/*
- * regtry() - try match at specific point.
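- *
- * Clears the subexpression start/end slots, then matches from
- * program + 1, skipping the MAGIC byte.  On success the overall match
- * bounds are recorded in startp[ 0 ] and endp[ 0 ].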
- */
-
-static int /* 0 failure, 1 success */
-regtry(
- regexp *prog,
- const char *string )
-{
- register int i;
- register const char * * sp;
- register const char * * ep;
-
- reginput = string;
- regstartp = prog->startp;
- regendp = prog->endp;
-
- sp = prog->startp;
- ep = prog->endp;
- for ( i = NSUBEXP; i > 0; --i )
- {
- *sp++ = NULL;
- *ep++ = NULL;
- }
- if ( regmatch( prog->program + 1 ) )
- {
- prog->startp[ 0 ] = string;
- prog->endp[ 0 ] = reginput;
- return 1;
- }
- else
- return 0;
-}
-
-
-/*
- * regmatch() - main matching routine.
- *
- * Conceptually the strategy is simple: check to see whether the current node
- * matches, call self recursively to see whether the rest matches, and then act
- * accordingly. In practice we make some effort to avoid recursion, in
- * particular by going through "ordinary" nodes (that do not need to know
- * whether the rest of the match failed) by a loop instead of by recursion.
- */
-
-static int /* 0 failure, 1 success */
-regmatch( char * prog )
-{
- char * scan; /* Current node. */
- char * next; /* Next node. */
-
- scan = prog;
-#ifdef DEBUG
- if (scan != NULL && regnarrate)
- fprintf(stderr, "%s(\n", regprop(scan));
-#endif
- while (scan != NULL) {
-#ifdef DEBUG
- if (regnarrate)
- fprintf(stderr, "%s...\n", regprop(scan));
-#endif
- next = regnext(scan);
-
- switch (OP(scan)) {
- case BOL:
- if (reginput != regbol)
- return(0);
- break;
- case EOL:
- if (*reginput != '\0')
- return(0);
- break;
- case WORDA:
- /* Must be looking at a letter, digit, or _ */
- if ((!isalnum(*reginput)) && *reginput != '_')
- return(0);
- /* Prev must be BOL or nonword */
- if (reginput > regbol &&
- (isalnum(reginput[-1]) || reginput[-1] == '_'))
- return(0);
- break;
- case WORDZ:
- /* Must be looking at non letter, digit, or _ */
- if (isalnum(*reginput) || *reginput == '_')
- return(0);
- /* We don't care what the previous char was */
- break;
- case ANY:
- if (*reginput == '\0')
- return(0);
- reginput++;
- break;
- case EXACTLY: {
- register int len;
- register char *opnd;
-
- opnd = OPERAND(scan);
- /* Inline the first character, for speed. */
- if (*opnd != *reginput)
- return(0);
- len = strlen(opnd);
- if (len > 1 && strncmp(opnd, reginput, len) != 0)
- return(0);
- reginput += len;
- }
- break;
- case ANYOF:
- if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) == NULL)
- return(0);
- reginput++;
- break;
- case ANYBUT:
- if (*reginput == '\0' || strchr(OPERAND(scan), *reginput) != NULL)
- return(0);
- reginput++;
- break;
- case NOTHING:
- break;
- case BACK:
- break;
- case OPEN+1:
- case OPEN+2:
- case OPEN+3:
- case OPEN+4:
- case OPEN+5:
- case OPEN+6:
- case OPEN+7:
- case OPEN+8:
- case OPEN+9: {
- register int no;
- register const char *save;
-
- no = OP(scan) - OPEN;
- save = reginput;
-
- if (regmatch(next)) {
- /*
- * Don't set startp if some later
- * invocation of the same parentheses
- * already has.
- */
- if (regstartp[no] == NULL)
- regstartp[no] = save;
- return(1);
- } else
- return(0);
- }
- break;
- case CLOSE+1:
- case CLOSE+2:
- case CLOSE+3:
- case CLOSE+4:
- case CLOSE+5:
- case CLOSE+6:
- case CLOSE+7:
- case CLOSE+8:
- case CLOSE+9: {
- register int no;
- register const char *save;
-
- no = OP(scan) - CLOSE;
- save = reginput;
-
- if (regmatch(next)) {
- /*
- * Don't set endp if some later
- * invocation of the same parentheses
- * already has.
- */
- if (regendp[no] == NULL)
- regendp[no] = save;
- return(1);
- } else
- return(0);
- }
- break;
- case BRANCH: {
- register const char *save;
-
- if (OP(next) != BRANCH) /* No choice. */
- next = OPERAND(scan); /* Avoid recursion. */
- else {
- do {
- save = reginput;
- if (regmatch(OPERAND(scan)))
- return(1);
- reginput = save;
- scan = regnext(scan);
- } while (scan != NULL && OP(scan) == BRANCH);
- return(0);
- /* NOTREACHED */
- }
- }
- break;
- case STAR:
- case PLUS: {
- register char nextch;
- register int no;
- register const char *save;
- register int min;
-
- /*
- * Lookahead to avoid useless match attempts
- * when we know what character comes next.
- */
- nextch = '\0';
- if (OP(next) == EXACTLY)
- nextch = *OPERAND(next);
- min = (OP(scan) == STAR) ? 0 : 1;
- save = reginput;
- no = regrepeat(OPERAND(scan));
- while (no >= min) {
- /* If it could work, try it. */
- if (nextch == '\0' || *reginput == nextch)
- if (regmatch(next))
- return(1);
- /* Couldn't or didn't -- back up. */
- no--;
- reginput = save + no;
- }
- return(0);
- }
- break;
- case END:
- return(1); /* Success! */
- break;
- default:
- regerror("memory corruption");
- return(0);
- break;
- }
-
- scan = next;
- }
-
- /*
- * We get here only if there's trouble -- normally "case END" is
- * the terminating point.
- */
- regerror("corrupted pointers");
- return(0);
-}
-
-/*
- - regrepeat - repeatedly match something simple, report how many
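- *
- * Only meaningful for the simple operand types (ANY, EXACTLY, ANYOF,
- * ANYBUT); advances reginput past the matched run and returns its length.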
- */
-static int
-regrepeat( char *p )
-{
- register int count = 0;
- register const char *scan;
- register char *opnd;
-
- scan = reginput;
- opnd = OPERAND(p);
- switch (OP(p)) {
- case ANY:
- count = strlen(scan);
- scan += count;
- break;
- case EXACTLY:
- while (*opnd == *scan) {
- count++;
- scan++;
- }
- break;
- case ANYOF:
- while (*scan != '\0' && strchr(opnd, *scan) != NULL) {
- count++;
- scan++;
- }
- break;
- case ANYBUT:
- while (*scan != '\0' && strchr(opnd, *scan) == NULL) {
- count++;
- scan++;
- }
- break;
- default: /* Oh dear. Called inappropriately. */
- regerror("internal foulup");
- count = 0; /* Best compromise. */
- break;
- }
- reginput = scan;
-
- return(count);
-}
-
-/*
- - regnext - dig the "next" pointer out of a node
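- *
- * A zero offset means the next pointer has not been set and NULL is
- * returned; BACK nodes store their offset in the reverse direction.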
- */
-static char *
-regnext( register char *p )
-{
- register int offset;
-
- if (p == &regdummy)
- return(NULL);
-
- offset = NEXT(p);
- if (offset == 0)
- return(NULL);
-
- if (OP(p) == BACK)
- return(p-offset);
- else
- return(p+offset);
-}
-
-#ifdef DEBUG
-
-STATIC char *regprop();
-
-/*
- - regdump - dump a regexp onto stdout in vaguely comprehensible form
- */
-void
-regdump( regexp *r )
-{
- register char *s;
- register char op = EXACTLY; /* Arbitrary non-END op. */
- register char *next;
-
-
- s = r->program + 1;
- while (op != END) { /* While that wasn't END last time... */
- op = OP(s);
- printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */
- next = regnext(s);
- if (next == NULL) /* Next ptr. */
- printf("(0)");
- else
- printf("(%d)", (s-r->program)+(next-s));
- s += 3;
- if (op == ANYOF || op == ANYBUT || op == EXACTLY) {
- /* Literal string, where present. */
- while (*s != '\0') {
- putchar(*s);
- s++;
- }
- s++;
- }
- putchar('\n');
- }
-
- /* Header fields of interest. */
- if (r->regstart != '\0')
- printf("start `%c' ", r->regstart);
- if (r->reganch)
- printf("anchored ");
- if (r->regmust != NULL)
- printf("must have \"%s\"", r->regmust);
- printf("\n");
-}
-
-/*
- - regprop - printable representation of opcode
- */
-static char *
-regprop( char *op )
-{
- register char *p;
- static char buf[50];
-
- (void) strcpy(buf, ":");
-
- switch (OP(op)) {
- case BOL:
- p = "BOL";
- break;
- case EOL:
- p = "EOL";
- break;
- case ANY:
- p = "ANY";
- break;
- case ANYOF:
- p = "ANYOF";
- break;
- case ANYBUT:
- p = "ANYBUT";
- break;
- case BRANCH:
- p = "BRANCH";
- break;
- case EXACTLY:
- p = "EXACTLY";
- break;
- case NOTHING:
- p = "NOTHING";
- break;
- case BACK:
- p = "BACK";
- break;
- case END:
- p = "END";
- break;
- case OPEN+1:
- case OPEN+2:
- case OPEN+3:
- case OPEN+4:
- case OPEN+5:
- case OPEN+6:
- case OPEN+7:
- case OPEN+8:
- case OPEN+9:
- sprintf(buf+strlen(buf), "OPEN%d", OP(op)-OPEN);
- p = NULL;
- break;
- case CLOSE+1:
- case CLOSE+2:
- case CLOSE+3:
- case CLOSE+4:
- case CLOSE+5:
- case CLOSE+6:
- case CLOSE+7:
- case CLOSE+8:
- case CLOSE+9:
- sprintf(buf+strlen(buf), "CLOSE%d", OP(op)-CLOSE);
- p = NULL;
- break;
- case STAR:
- p = "STAR";
- break;
- case PLUS:
- p = "PLUS";
- break;
- case WORDA:
- p = "WORDA";
- break;
- case WORDZ:
- p = "WORDZ";
- break;
- default:
- regerror("corrupted opcode");
- break;
- }
- if (p != NULL)
- (void) strcat(buf, p);
- return(buf);
-}
-#endif
-
-/*
- * The following is provided for those people who do not have strcspn() in
- * their C libraries. They should get off their butts and do something
- * about it; at least one public-domain implementation of those (highly
- * useful) string routines has been published on Usenet.
- */
-#ifdef STRCSPN
-/*
- * strcspn - find length of initial segment of s1 consisting entirely
- * of characters not from s2
- */
-
-static int
-strcspn(
- char *s1,
- char *s2 )
-{
- register char *scan1;
- register char *scan2;
- register int count;
-
- count = 0;
- for (scan1 = s1; *scan1 != '\0'; scan1++) {
- for (scan2 = s2; *scan2 != '\0';) /* ++ moved down. */
- if (*scan1 == *scan2++)
- return(count);
- count++;
- }
- return(count);
-}
-#endif
diff --git a/tools/build/v2/engine/regexp.h b/tools/build/v2/engine/regexp.h
deleted file mode 100644
index fccfb7dff8..0000000000
--- a/tools/build/v2/engine/regexp.h
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Definitions etc. for regexp(3) routines.
- *
- * Caveat: this is V8 regexp(3) [actually, a reimplementation thereof],
- * not the System V one.
- */
-#ifndef REGEXP_DWA20011023_H
-# define REGEXP_DWA20011023_H
-
-#define NSUBEXP 10
-typedef struct regexp {
- const char *startp[NSUBEXP];
- const char *endp[NSUBEXP];
- char regstart; /* Internal use only. */
- char reganch; /* Internal use only. */
- char *regmust; /* Internal use only. */
- int regmlen; /* Internal use only. */
- char program[1]; /* Unwarranted chumminess with compiler. */
-} regexp;
-
-regexp *regcomp( const char *exp );
-int regexec( regexp *prog, const char *string );
-void regerror( const char *s );
-
-/*
- * The first byte of the regexp internal "program" is actually this magic
- * number; the start node begins in the second byte.
- */
-#define MAGIC 0234
-
-#endif
-
diff --git a/tools/build/v2/engine/rules.c b/tools/build/v2/engine/rules.c
deleted file mode 100644
index b9ff6191f5..0000000000
--- a/tools/build/v2/engine/rules.c
+++ /dev/null
@@ -1,724 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-# include "jam.h"
-# include "lists.h"
-# include "parse.h"
-# include "variable.h"
-# include "rules.h"
-# include "object.h"
-# include "hash.h"
-# include "modules.h"
-# include "search.h"
-# include "lists.h"
-# include "pathsys.h"
-# include "timestamp.h"
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-/*
- * rules.c - access to RULEs, TARGETs, and ACTIONs
- *
- * External routines:
- *
- * bindrule() - return pointer to RULE, creating it if necessary.
- * bindtarget() - return pointer to TARGET, creating it if necessary.
- * touch_target() - mark a target to simulate being new.
- * targetlist() - turn list of target names into a TARGET chain.
- * targetentry() - add a TARGET to a chain of TARGETS.
- * actionlist() - append to an ACTION chain.
- * addsettings() - add a deferred "set" command to a target.
- * pushsettings() - set all target specific variables.
- * popsettings() - reset target specific variables to their pre-push values.
- * freesettings() - delete a settings list.
- * rules_done() - free RULE and TARGET tables.
- *
- * 04/12/94 (seiwald) - actionlist() now just appends a single action.
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- */
-
-static void set_rule_actions( RULE *, rule_actions * );
-static void set_rule_body ( RULE *, FUNCTION * procedure );
-
-static struct hash * targethash = 0;
-
-
-/*
- * target_include() - adds the 'included' TARGET to the list of targets included
- * by the 'including' TARGET. Such targets are modeled as dependencies of the
- * internal include node belonging to the 'including' TARGET.
- */
-
-void target_include( TARGET * including, TARGET * included )
-{
- TARGET * internal;
- if ( !including->includes )
- {
- including->includes = copytarget( including );
- including->includes->original_target = including;
- }
- internal = including->includes;
- internal->depends = targetentry( internal->depends, included );
-}
-
-
-/*
- * enter_rule() - return pointer to RULE, creating it if necessary in
- * target_module.
- */
-
-static RULE * enter_rule( OBJECT * rulename, module_t * target_module )
-{
- int found;
- RULE * r;
-
- r = (RULE *)hash_insert( demand_rules(target_module), rulename, &found );
- if ( !found )
- {
- r->name = object_copy( rulename );
- r->procedure = 0;
- r->module = 0;
- r->actions = 0;
- r->exported = 0;
- r->module = target_module;
- }
- return r;
-}
-
-
-/*
- * define_rule() - return pointer to RULE, creating it if necessary in
- * target_module. Prepare it to accept a body or action originating in
- * src_module.
- */
-
-static RULE * define_rule
-(
- module_t * src_module,
- OBJECT * rulename,
- module_t * target_module
-)
-{
- RULE * r = enter_rule( rulename, target_module );
- if ( r->module != src_module ) /* if the rule was imported from elsewhere, clear it now */
- {
- set_rule_body( r, 0 );
- set_rule_actions( r, 0 );
- r->module = src_module; /* r will be executed in the source module */
- }
- return r;
-}
-
-
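-/*
- * rule_free() - release a RULE's name, procedure, and actions.
- */
-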
-void rule_free( RULE * r )
-{
- object_free( r->name );
- r->name = 0;
- if ( r->procedure )
- function_free( r->procedure );
- r->procedure = 0;
- if ( r->actions )
- actions_free( r->actions );
- r->actions = 0;
-}
-
-
-/*
- * bindtarget() - return pointer to TARGET, creating it if necessary.
- */
-
-TARGET * bindtarget( OBJECT * target_name )
-{
- int found;
- TARGET * t;
-
- if ( !targethash )
- targethash = hashinit( sizeof( TARGET ), "targets" );
-
- t = (TARGET *)hash_insert( targethash, target_name, &found );
- if ( !found )
- {
- memset( (char *)t, '\0', sizeof( *t ) );
- t->name = object_copy( target_name );
- t->boundname = object_copy( t->name ); /* default for T_FLAG_NOTFILE */
- }
-
- return t;
-}
-
-
-static void bind_explicitly_located_target( void * xtarget, void * data )
-{
- TARGET * t = (TARGET *)xtarget;
- if ( !( t->flags & T_FLAG_NOTFILE ) )
- {
- /* Check if there's a setting for LOCATE */
- SETTINGS * s = t->settings;
- for ( ; s ; s = s->next )
- {
- if ( strcmp( object_str( s->symbol ), "LOCATE" ) == 0 )
- {
- pushsettings( root_module(), t->settings );
- /* We are binding a target with explicit LOCATE. So third
- * argument is of no use: nothing will be returned through it.
- */
- object_free( t->boundname );
- t->boundname = search( t->name, &t->time, 0, 0 );
- popsettings( root_module(), t->settings );
- break;
- }
- }
- }
-}
-
-
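-/*
- * bind_explicitly_located_targets() - re-bind every file target that has an
- * explicit LOCATE setting so its bound name reflects that location.
- */
-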
-void bind_explicitly_located_targets()
-{
- if ( targethash )
- hashenumerate( targethash, bind_explicitly_located_target, (void *)0 );
-}
-
-
-/*
- * copytarget() - make a new target with the old target's name.
- *
- * Not entered into hash table -- for internal nodes.
- */
-
-TARGET * copytarget( const TARGET * ot )
-{
- TARGET * t = (TARGET *)BJAM_MALLOC( sizeof( *t ) );
- memset( (char *)t, '\0', sizeof( *t ) );
- t->name = object_copy( ot->name );
- t->boundname = object_copy( t->name );
-
- t->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL;
-
- return t;
-}
-
-
-/*
- * touch_target() - mark a target to simulate being new.
- */
-
-void touch_target( OBJECT * t )
-{
- bindtarget( t )->flags |= T_FLAG_TOUCHED;
-}
-
-
-/*
- * targetlist() - turn list of target names into a TARGET chain.
- *
- * Inputs:
- * chain existing TARGETS to append to
- * targets list of target names
- */
-
-TARGETS * targetlist( TARGETS * chain, LIST * target_names )
-{
- LISTITER iter = list_begin( target_names ), end = list_end( target_names );
- for ( ; iter != end; iter = list_next( iter ) )
- chain = targetentry( chain, bindtarget( list_item( iter ) ) );
- return chain;
-}
-
-
-/*
- * targetentry() - add a TARGET to a chain of TARGETS.
- *
- * Inputs:
- * chain existing TARGETS to append to
- * target new target to append
- */
-
-TARGETS * targetentry( TARGETS * chain, TARGET * target )
-{
- TARGETS * c = (TARGETS *)BJAM_MALLOC( sizeof( TARGETS ) );
- c->target = target;
-
- if ( !chain ) chain = c;
- else chain->tail->next = c;
- chain->tail = c;
- c->next = 0;
-
- return chain;
-}
-
-
-/*
- * targetchain() - append two TARGET chains.
- *
- * Inputs:
- *  chain    existing TARGETS to append to
- *  targets  TARGETS chain to append
- */
-
-TARGETS * targetchain( TARGETS * chain, TARGETS * targets )
-{
- if ( !targets ) return chain;
- if ( !chain ) return targets;
-
- chain->tail->next = targets;
- chain->tail = targets->tail;
-
- return chain;
-}
-
-/*
- * action_free() - decrement the ACTION's reference count
- * and (maybe) free it.
- */
-
-void action_free ( ACTION * action )
-{
- if ( --action->refs == 0 )
- {
- freetargets( action->targets );
- freetargets( action->sources );
- BJAM_FREE( action );
- }
-}
-
-/*
- * actionlist() - append to an ACTION chain.
- */
-
-ACTIONS * actionlist( ACTIONS * chain, ACTION * action )
-{
- ACTIONS * actions = (ACTIONS *)BJAM_MALLOC( sizeof( ACTIONS ) );
-
- actions->action = action;
-
- ++action->refs;
- if ( !chain ) chain = actions;
- else chain->tail->next = actions;
- chain->tail = actions;
- actions->next = 0;
-
- return chain;
-}
-
-static SETTINGS * settings_freelist;
-
-
-/*
- * addsettings() - add a deferred "set" command to a target.
- *
- * Adds a variable setting (varname=list) onto a chain of settings for a
- * particular target. 'flag' controls the relationship between new and old
- * values in the same way as in var_set() function (see variable.c). Returns
- * the head of the settings chain.
- */
-
-SETTINGS * addsettings( SETTINGS * head, int flag, OBJECT * symbol, LIST * value )
-{
- SETTINGS * v;
-
- /* Look for previous settings. */
- for ( v = head; v; v = v->next )
- if ( object_equal( v->symbol, symbol ) )
- break;
-
- /* If not previously set, alloc a new. */
- /* If appending, do so. */
- /* Else free old and set new. */
- if ( !v )
- {
- v = settings_freelist;
-
- if ( v )
- settings_freelist = v->next;
- else
- v = (SETTINGS *)BJAM_MALLOC( sizeof( *v ) );
-
- v->symbol = object_copy( symbol );
- v->value = value;
- v->next = head;
- head = v;
- }
- else if ( flag == VAR_APPEND )
- {
- v->value = list_append( v->value, value );
- }
- else if ( flag != VAR_DEFAULT )
- {
- list_free( v->value );
- v->value = value;
- }
- else
- list_free( value );
-
- /* Return (new) head of list. */
- return head;
-}
-
-
-/*
- * pushsettings() - set all target specific variables.
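- *
- * Each value is swapped with the module variable of the same name via
- * var_swap(), so popsettings() can restore the previous values simply by
- * swapping again.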
- */
-
-void pushsettings( struct module_t * module, SETTINGS * v )
-{
- for ( ; v; v = v->next )
- v->value = var_swap( module, v->symbol, v->value );
-}
-
-
-/*
- * popsettings() - reset target specific variables to their pre-push values.
- */
-
-void popsettings( struct module_t * module, SETTINGS * v )
-{
- pushsettings( module, v ); /* just swap again */
-}
-
-
-/*
- * copysettings() - duplicate a settings list, returning the new copy.
- */
-
-SETTINGS * copysettings( SETTINGS * head )
-{
- SETTINGS * copy = 0;
- SETTINGS * v;
- for ( v = head; v; v = v->next )
- copy = addsettings( copy, VAR_SET, v->symbol, list_copy( v->value ) );
- return copy;
-}
-
-
-/*
- * freetargets() - delete a targets list.
- */
-
-void freetargets( TARGETS * chain )
-{
- while ( chain )
- {
- TARGETS * n = chain->next;
- BJAM_FREE( chain );
- chain = n;
- }
-}
-
-
-/*
- * freeactions() - delete an action list.
- */
-
-void freeactions( ACTIONS * chain )
-{
- while ( chain )
- {
- ACTIONS * n = chain->next;
- action_free( chain->action );
- BJAM_FREE( chain );
- chain = n;
- }
-}
-
-
-/*
- * freesettings() - delete a settings list.
- */
-
-void freesettings( SETTINGS * v )
-{
- while ( v )
- {
- SETTINGS * n = v->next;
- object_free( v->symbol );
- list_free( v->value );
- v->next = settings_freelist;
- settings_freelist = v;
- v = n;
- }
-}
-
-
-static void freetarget( void * xt, void * data )
-{
- TARGET * t = (TARGET *)xt;
- if ( t->name ) object_free ( t->name );
- if ( t->boundname ) object_free ( t->boundname );
- if ( t->settings ) freesettings( t->settings );
- if ( t->depends ) freetargets ( t->depends );
- if ( t->dependants ) freetargets ( t->dependants );
- if ( t->parents ) freetargets ( t->parents );
- if ( t->actions ) freeactions ( t->actions );
-
- if ( t->includes )
- {
- freetarget( t->includes, (void *)0 );
- BJAM_FREE( t->includes );
- }
-}
-
-
-/*
- * rules_done() - free RULE and TARGET tables.
- */
-
-void rules_done()
-{
- if ( targethash )
- {
- hashenumerate( targethash, freetarget, 0 );
- hashdone( targethash );
- }
- while ( settings_freelist )
- {
- SETTINGS * n = settings_freelist->next;
- BJAM_FREE( settings_freelist );
- settings_freelist = n;
- }
-}
-
-
-/*
- * actions_refer() - add a new reference to the given actions.
- */
-
-void actions_refer( rule_actions * a )
-{
- ++a->reference_count;
-}
-
-
-/*
- * actions_free() - release a reference to the given actions.
- */
-
-void actions_free( rule_actions * a )
-{
- if ( --a->reference_count <= 0 )
- {
- function_free( a->command );
- list_free( a->bindlist );
- BJAM_FREE( a );
- }
-}
-
-/*
- * set_rule_body() - set the argument list and procedure of the given rule.
- */
-
-static void set_rule_body( RULE * rule, FUNCTION * procedure )
-{
- if ( procedure )
- function_refer( procedure );
- if ( rule->procedure )
- function_free( rule->procedure );
- rule->procedure = procedure;
-}
-
-
-/*
- * global_rule_name() - given a rule, return the name for a corresponding rule in the
- * global module.
- */
-
-static OBJECT * global_rule_name( RULE * r )
-{
- if ( r->module == root_module() )
- return object_copy( r->name );
-
- {
- char name[4096] = "";
- if ( r->module->name )
- {
- strncat( name, object_str( r->module->name ), sizeof( name ) - 1 );
- strncat( name, ".", sizeof( name ) - 1 );
- }
- strncat( name, object_str( r->name ), sizeof( name ) - 1 );
- return object_new( name );
- }
-}
-
-
-/*
- * global_rule() - given a rule, produce the corresponding entry in the global
- * module.
- */
-
-static RULE * global_rule( RULE * r )
-{
- if ( r->module == root_module() )
- return r;
-
- {
- OBJECT * name = global_rule_name( r );
- RULE * result = define_rule( r->module, name, root_module() );
- object_free( name );
- return result;
- }
-}
-
-
-/*
- * new_rule_body() - make a new rule named rulename in the given module, with
- * the given argument list and procedure. If exported is true, the rule is
- * exported to the global module as modulename.rulename.
- */
-
-RULE * new_rule_body( module_t * m, OBJECT * rulename, FUNCTION * procedure, int exported )
-{
- RULE * local = define_rule( m, rulename, m );
- local->exported = exported;
- set_rule_body( local, procedure );
-
- /* Mark the procedure with the global rule name, regardless of whether the
- * rule is exported. That gives us something reasonably identifiable that we
- * can use, e.g. in profiling output. Only do this once, since this could be
- * called multiple times with the same procedure.
- */
- if ( function_rulename( procedure ) == 0 )
- function_set_rulename( procedure, global_rule_name( local ) );
-
- return local;
-}
-
-
-static void set_rule_actions( RULE * rule, rule_actions * actions )
-{
- if ( actions )
- actions_refer( actions );
- if ( rule->actions )
- actions_free( rule->actions );
- rule->actions = actions;
-}
-
-
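-/*
- * actions_new() - allocate a rule_actions holding the given command, bind
- * list, and flags. The reference count starts at zero; set_rule_actions()
- * bumps it via actions_refer().
- */
-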
-static rule_actions * actions_new( FUNCTION * command, LIST * bindlist, int flags )
-{
- rule_actions * result = (rule_actions *)BJAM_MALLOC( sizeof( rule_actions ) );
- function_refer( command );
- result->command = command;
- result->bindlist = bindlist;
- result->flags = flags;
- result->reference_count = 0;
- return result;
-}
-
-
-RULE * new_rule_actions( module_t * m, OBJECT * rulename, FUNCTION * command, LIST * bindlist, int flags )
-{
- RULE * local = define_rule( m, rulename, m );
- RULE * global = global_rule( local );
- set_rule_actions( local, actions_new( command, bindlist, flags ) );
- set_rule_actions( global, local->actions );
- return local;
-}
-
-
-/*
- * Looks for a rule in the specified module, and returns it, if found. First
- * checks if the rule is present in the module's rule table. Second, if the
- * name of the rule is of the form name1.name2 and name1 is in the list of
- * imported modules, looks in module 'name1' for rule 'name2'.
- */
-
-RULE * lookup_rule( OBJECT * rulename, module_t * m, int local_only )
-{
- RULE * r;
- RULE * result = 0;
- module_t * original_module = m;
-
- if ( m->class_module )
- m = m->class_module;
-
- if ( m->rules && ( r = (RULE *)hash_find( m->rules, rulename ) ) )
- result = r;
- else if ( !local_only && m->imported_modules )
- {
- /* Try splitting the name into module and rule. */
- char *p = strchr( object_str( rulename ), '.' ) ;
- if ( p )
- {
- string buf[1];
- OBJECT * module_part;
- OBJECT * rule_part;
- string_new( buf );
- string_append_range( buf, object_str( rulename ), p );
- module_part = object_new( buf->value );
- rule_part = object_new( p + 1 );
-            /* Now module_part holds the module name and rule_part holds the
-             * rule name.
-             */
- if ( hash_find( m->imported_modules, module_part ) )
- result = lookup_rule( rule_part, bindmodule( module_part ), 1 );
- object_free( rule_part );
- object_free( module_part );
- string_free( buf );
- }
- }
-
- if ( result )
- {
- if ( local_only && !result->exported )
- result = 0;
- else
- {
-            /* The lookup started in a class module. We have found a rule in
-             * the class module which is marked for execution in that module,
-             * or in some instance of it. Mark it for execution in the
-             * instance where we started the lookup.
-             */
- int execute_in_class = ( result->module == m );
- int execute_in_some_instance = ( result->module->class_module &&
- ( result->module->class_module == m ) );
- if ( ( original_module != m ) &&
- ( execute_in_class || execute_in_some_instance ) )
- result->module = original_module;
- }
- }
-
- return result;
-}
-
-
-RULE * bindrule( OBJECT * rulename, module_t * m )
-{
- RULE * result = lookup_rule( rulename, m, 0 );
- if ( !result )
- result = lookup_rule( rulename, root_module(), 0 );
- /* We have only one caller, 'evaluate_rule', which will complain about
- * calling an undefined rule. We could issue the error here, but we do not
- * have the necessary information, such as frame.
- */
- if ( !result )
- result = enter_rule( rulename, m );
- return result;
-}
-
-
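-/*
- * import_rule() - make rule 'source' visible in module 'm' under 'name',
- * sharing its procedure and actions.
- */
-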
-RULE * import_rule( RULE * source, module_t * m, OBJECT * name )
-{
- RULE * dest = define_rule( source->module, name, m );
- set_rule_body( dest, source->procedure );
- set_rule_actions( dest, source->actions );
- return dest;
-}
-
-
-void rule_localize( RULE * rule, module_t * m )
-{
- rule->module = m;
- if ( rule->procedure )
- {
- FUNCTION * procedure = function_unbind_variables( rule->procedure );
- function_refer( procedure );
- function_free( rule->procedure );
- rule->procedure = procedure;
- }
-}
-
diff --git a/tools/build/v2/engine/rules.h b/tools/build/v2/engine/rules.h
deleted file mode 100644
index 823fbd1f55..0000000000
--- a/tools/build/v2/engine/rules.h
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#ifndef RULES_DWA_20011020_H
-#define RULES_DWA_20011020_H
-
-#include "modules.h"
-#include "jam.h"
-#include "function.h"
-
-
-/*
- * rules.h - targets, rules, and related information
- *
- * This file describes the structures holding the targets, rules, and
- * related information accumulated by interpreting the statements
- * of the jam files.
- *
- * The following are defined:
- *
- * RULE - a generic jam rule, the product of RULE and ACTIONS.
- * ACTIONS - a chain of ACTIONs.
- * ACTION - a RULE instance with targets and sources.
- * SETTINGS - variables to set when executing a TARGET's ACTIONS.
- * TARGETS - a chain of TARGETs.
- * TARGET - an entity (e.g. a file) that can be built.
- *
- * 04/11/94 (seiwald) - Combined deps & headers into deps[2] in TARGET.
- * 04/12/94 (seiwald) - actionlist() now just appends a single action.
- * 06/01/94 (seiwald) - new 'actions existing' does existing sources
- * 12/20/94 (seiwald) - NOTIME renamed NOTFILE.
- * 01/19/95 (seiwald) - split DONTKNOW into CANTFIND/CANTMAKE.
- * 02/02/95 (seiwald) - new LEAVES modifier on targets.
- * 02/14/95 (seiwald) - new NOUPDATE modifier on targets.
- */
-
-typedef struct _rule RULE;
-typedef struct _target TARGET;
-typedef struct _targets TARGETS;
-typedef struct _action ACTION;
-typedef struct _actions ACTIONS;
-typedef struct _settings SETTINGS ;
-
-/* RULE - a generic jam rule, the product of RULE and ACTIONS. */
-
-/* Build actions corresponding to a rule. */
-struct rule_actions
-{
- int reference_count;
- FUNCTION * command; /* command string from ACTIONS */
- LIST * bindlist;
- int flags; /* modifiers on ACTIONS */
-
-#define RULE_NEWSRCS 0x01 /* $(>) is updated sources only */
-#define RULE_TOGETHER 0x02 /* combine actions on single target */
-#define RULE_IGNORE 0x04 /* ignore return status of executes */
-#define RULE_QUIETLY 0x08 /* do not mention it unless verbose */
-#define RULE_PIECEMEAL 0x10 /* split exec so each $(>) is small */
-#define RULE_EXISTING 0x20 /* $(>) is pre-existing sources only */
-};
-
-typedef struct rule_actions rule_actions;
-typedef struct argument_list argument_list;
-
-struct _rule
-{
- OBJECT * name;
- FUNCTION * procedure;
- rule_actions * actions; /* build actions, or NULL for no actions */
- module_t * module; /* module in which this rule is executed */
- int exported; /* nonzero if this rule is supposed to appear in
- * the global module and be automatically
- * imported into other modules
- */
-};
-
-/* ACTIONS - a chain of ACTIONs. */
-struct _actions
-{
- ACTIONS * next;
- ACTIONS * tail; /* valid only for head */
- ACTION * action;
-};
-
-/* ACTION - a RULE instance with targets and sources. */
-struct _action
-{
- RULE * rule;
- TARGETS * targets;
- TARGETS * sources; /* aka $(>) */
- char running; /* has been started */
-#define A_INIT 0
-#define A_RUNNING_NOEXEC 1
-#define A_RUNNING 2
- char status; /* see TARGET status */
- int refs;
-};
-
-/* SETTINGS - variables to set when executing a TARGET's ACTIONS. */
-struct _settings
-{
- SETTINGS * next;
- OBJECT * symbol; /* symbol name for var_set() */
- LIST * value; /* symbol value for var_set() */
-};
-
-/* TARGETS - a chain of TARGETs. */
-struct _targets
-{
- TARGETS * next;
- TARGETS * tail; /* valid only for head */
- TARGET * target;
-};
-
-/* TARGET - an entity (e.g. a file) that can be built. */
-struct _target
-{
- OBJECT * name;
- OBJECT * boundname; /* if search() relocates target */
- ACTIONS * actions; /* rules to execute, if any */
- SETTINGS * settings; /* variables to define */
-
- short flags; /* status info */
-
-#define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */
-#define T_FLAG_NOCARE 0x0002 /* NOCARE applied */
-#define T_FLAG_NOTFILE 0x0004 /* NOTFILE applied */
-#define T_FLAG_TOUCHED 0x0008 /* ALWAYS applied or -t target */
-#define T_FLAG_LEAVES 0x0010 /* LEAVES applied */
-#define T_FLAG_NOUPDATE 0x0020 /* NOUPDATE applied */
-#define T_FLAG_VISITED 0x0040 /* CWM: Used in debugging */
-
-/* This flag has been added to support a new built-in rule named "RMBAD". It is
- * used to force removal of outdated targets whose dependencies fail to build.
- */
-#define T_FLAG_RMOLD 0x0080 /* RMBAD applied */
-
-/* This flag was added to support a new built-in rule named "FAIL_EXPECTED" used
- * to indicate that the result of running a given action should be inverted,
- * i.e. ok <=> fail. This is useful for launching certain test runs from a
- * Jamfile.
- */
-#define T_FLAG_FAIL_EXPECTED 0x0100 /* FAIL_EXPECTED applied */
-
-#define T_FLAG_INTERNAL 0x0200 /* internal INCLUDES node */
-
-/* Indicates that the target must be a file. This prevents matching non-files,
- * like directories, when a target is searched.
- */
-#define T_FLAG_ISFILE 0x0400
-
-#define T_FLAG_PRECIOUS 0x0800
-
- char binding; /* how target relates to a real file or
- * folder
- */
-
-#define T_BIND_UNBOUND 0 /* a disembodied name */
-#define T_BIND_MISSING 1 /* could not find real file */
-#define T_BIND_PARENTS 2 /* using parent's timestamp */
-#define T_BIND_EXISTS 3 /* real file, timestamp valid */
-
- TARGETS * depends; /* dependencies */
- TARGETS * dependants; /* the inverse of dependencies */
- TARGETS * rebuilds; /* targets that should be force-rebuilt
- * whenever this one is
- */
- TARGET * includes; /* internal includes node */
- TARGET * original_target; /* original_target->includes = this */
- char rescanned;
-
- time_t time; /* update time */
- time_t leaf; /* update time of leaf sources */
-
- char fate; /* make0()'s diagnosis */
-
-#define T_FATE_INIT 0 /* nothing done to target */
-#define T_FATE_MAKING 1 /* make0(target) on stack */
-
-#define T_FATE_STABLE 2 /* target did not need updating */
-#define T_FATE_NEWER 3 /* target newer than parent */
-
-#define T_FATE_SPOIL 4 /* >= SPOIL rebuilds parents */
-#define T_FATE_ISTMP 4 /* unneeded temp target oddly present */
-
-#define T_FATE_BUILD 5 /* >= BUILD rebuilds target */
-#define T_FATE_TOUCHED 5 /* manually touched with -t */
-#define T_FATE_REBUILD 6
-#define T_FATE_MISSING 7 /* is missing, needs updating */
-#define T_FATE_NEEDTMP 8 /* missing temp that must be rebuilt */
-#define T_FATE_OUTDATED 9 /* is out of date, needs updating */
-#define T_FATE_UPDATE 10 /* deps updated, needs updating */
-
-#define T_FATE_BROKEN 11 /* >= BROKEN ruins parents */
-#define T_FATE_CANTFIND 11 /* no rules to make missing target */
-#define T_FATE_CANTMAKE 12 /* can not find dependencies */
-
- char progress; /* tracks make1() progress */
-
-#define T_MAKE_INIT 0 /* make1(target) not yet called */
-#define T_MAKE_ONSTACK 1 /* make1(target) on stack */
-#define T_MAKE_ACTIVE 2 /* make1(target) in make1b() */
-#define T_MAKE_RUNNING 3 /* make1(target) running commands */
-#define T_MAKE_DONE 4 /* make1(target) done */
-#define T_MAKE_NOEXEC_DONE 5 /* make1(target) done with -n in effect */
-
-#ifdef OPT_SEMAPHORE
- #define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */
-#endif
-
-#ifdef OPT_SEMAPHORE
- TARGET * semaphore; /* used in serialization */
-#endif
-
- char status; /* exec_cmd() result */
-
- int asynccnt; /* child deps outstanding */
- TARGETS * parents; /* used by make1() for completion */
- char * cmds; /* type-punned command list */
-
- const char * failed;
-};
-
-
-/* Action related functions. */
-void action_free ( ACTION * );
-ACTIONS * actionlist ( ACTIONS *, ACTION * );
-void freeactions ( ACTIONS * );
-SETTINGS * addsettings ( SETTINGS *, int flag, OBJECT * symbol, LIST * value );
-void pushsettings ( struct module_t * module, SETTINGS * );
-void popsettings ( struct module_t * module, SETTINGS * );
-SETTINGS * copysettings ( SETTINGS * );
-void freesettings ( SETTINGS * );
-void actions_refer( rule_actions * );
-void actions_free ( rule_actions * );
-
-/* Rule related functions. */
-RULE * bindrule ( OBJECT * rulename, module_t * );
-RULE * import_rule ( RULE * source, module_t *, OBJECT * name );
-void rule_localize ( RULE * rule, module_t * module );
-RULE * new_rule_body ( module_t *, OBJECT * rulename, FUNCTION * func, int exprt );
-RULE * new_rule_actions( module_t *, OBJECT * rulename, FUNCTION * command, LIST * bindlist, int flags );
-void rule_free ( RULE * );
-
-/* Target related functions. */
-void bind_explicitly_located_targets();
-TARGET * bindtarget ( OBJECT * target_name );
-TARGET * copytarget ( TARGET const * t );
-void freetargets ( TARGETS * );
-TARGETS * targetchain ( TARGETS * chain, TARGETS * );
-TARGETS * targetentry ( TARGETS * chain, TARGET * );
-void target_include ( TARGET * including, TARGET * included );
-TARGETS * targetlist ( TARGETS * chain, LIST * target_names );
-void touch_target ( OBJECT * t );
-void clear_includes ( TARGET * );
-
-/* Final module cleanup. */
-void rules_done();
-
-#endif
diff --git a/tools/build/v2/engine/scan.c b/tools/build/v2/engine/scan.c
deleted file mode 100644
index 915ec21f44..0000000000
--- a/tools/build/v2/engine/scan.c
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "parse.h"
-#include "scan.h"
-#include "jamgram.h"
-#include "jambase.h"
-#include "object.h"
-#include "constants.h"
-
-/*
- * scan.c - the jam yacc scanner
- *
- * 12/26/93 (seiwald) - bump buf in yylex to 10240 - yuk.
- * 09/16/94 (seiwald) - check for overflows, unmatched {}'s, etc.
- * Also handle tokens abutting EOF by remembering
- *                      to return EOF no matter how many times yylex()
- * reinvokes yyline().
- * 02/11/95 (seiwald) - honor only punctuation keywords if SCAN_PUNCT.
- * 07/27/95 (seiwald) - Include jamgram.h after scan.h, so that YYSTYPE is
- * defined before Linux's yacc tries to redefine it.
- */
-
-struct keyword
-{
- char * word;
- int type;
-} keywords[] =
-{
-#include "jamgramtab.h"
- { 0, 0 }
-};
-
-struct include
-{
- struct include * next; /* next serial include file */
- char * string; /* pointer into current line */
- char * * strings; /* for yyfparse() -- text to parse */
- FILE * file; /* for yyfparse() -- file being read */
- OBJECT * fname; /* for yyfparse() -- file name */
- int line; /* line counter for error messages */
- char buf[ 512 ]; /* for yyfparse() -- line buffer */
-};
-
-static struct include * incp = 0; /* current file; head of chain */
-
-static int scanmode = SCAN_NORMAL;
-static int anyerrors = 0;
-
-
-static char * symdump( YYSTYPE * );
-
-#define BIGGEST_TOKEN 10240 /* no single token can be larger */
-
-
-/*
- * Set parser mode: normal, string, or punctuation keywords.
- */
-
-void yymode( int n )
-{
- scanmode = n;
-}
-
-
-void yyerror( const char * s )
-{
- /* We use yylval instead of incp to access the error location information as
- * the incp pointer will already be reset to 0 in case the error occurred at
- * EOF.
- *
- * The two may differ only if we get an error while reading a lexical token
-     * spanning multiple lines, e.g. a multi-line string literal or action body,
- * in which case yylval location information will hold the information about
- * where this token started while incp will hold the information about where
- * reading it broke.
- *
- * TODO: Test the theory about when yylval and incp location information are
- * the same and when they differ.
- */
- printf( "%s:%d: %s at %s\n", object_str( yylval.file ), yylval.line, s, symdump( &yylval ) );
- ++anyerrors;
-}
-
-
-int yyanyerrors()
-{
- return anyerrors != 0;
-}
-
-
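-/*
- * yyfparse() - push an include file onto the scanner's input stack.
- *
- * The file is not opened here; yyline() opens it lazily on the first read
- * (a name of "-" reads stdin). A file name of "+" selects the built-in
- * jambase strings instead of a file.
- */
-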
-void yyfparse( OBJECT * s )
-{
- struct include * i = (struct include *)BJAM_MALLOC( sizeof( *i ) );
-
- /* Push this onto the incp chain. */
- i->string = "";
- i->strings = 0;
- i->file = 0;
- i->fname = object_copy( s );
- i->line = 0;
- i->next = incp;
- incp = i;
-
- /* If the filename is "+", it means use the internal jambase. */
- if ( !strcmp( object_str( s ), "+" ) )
- i->strings = jambase;
-}
-
-
-/*
- * yyline() - read new line and return first character.
- *
- * Fabricates a continuous stream of characters across include files, returning
- * EOF at the bitter end.
- */
-
-int yyline()
-{
- struct include * i = incp;
-
- if ( !incp )
- return EOF;
-
- /* Once we start reading from the input stream, we reset the include
- * insertion point so that the next include file becomes the head of the
- * list.
- */
-
- /* If there is more data in this line, return it. */
- if ( *i->string )
- return *i->string++;
-
- /* If we are reading from an internal string list, go to the next string. */
- if ( i->strings )
- {
- if ( *i->strings )
- {
- ++i->line;
- i->string = *(i->strings++);
- return *i->string++;
- }
- }
- else
- {
- /* If necessary, open the file. */
- if ( !i->file )
- {
- FILE * f = stdin;
- if ( strcmp( object_str( i->fname ), "-" ) && !( f = fopen( object_str( i->fname ), "r" ) ) )
- perror( object_str( i->fname ) );
- i->file = f;
- }
-
- /* If there is another line in this file, start it. */
- if ( i->file && fgets( i->buf, sizeof( i->buf ), i->file ) )
- {
- ++i->line;
- i->string = i->buf;
- return *i->string++;
- }
- }
-
- /* This include is done. Free it up and return EOF so yyparse() returns to
- * parse_file().
- */
-
- incp = i->next;
-
- /* Close file, free name. */
- if ( i->file && ( i->file != stdin ) )
- fclose( i->file );
- object_free( i->fname );
- BJAM_FREE( (char *)i );
-
- return EOF;
-}
-
-
-/*
- * yylex() - set yylval to current token; return its type.
- *
- * Macros to move things along:
- *
- * yychar() - return and advance character; invalid after EOF.
- * yyprev() - back up one character; invalid before yychar().
- *
- * yychar() returns a continuous stream of characters, until it hits the EOF of
- * the current include file.
- */
-
-#define yychar() ( *incp->string ? *incp->string++ : yyline() )
-#define yyprev() ( incp->string-- )
-
-int yylex()
-{
- int c;
- char buf[ BIGGEST_TOKEN ];
- char * b = buf;
-
- if ( !incp )
- goto eof;
-
- /* Get first character (whitespace or of token). */
- c = yychar();
-
- if ( scanmode == SCAN_STRING )
- {
- /* If scanning for a string (action's {}'s), look for the closing brace.
- * We handle matching braces, if they match.
- */
-
- int nest = 1;
-
- while ( ( c != EOF ) && ( b < buf + sizeof( buf ) ) )
- {
- if ( c == '{' )
- ++nest;
-
- if ( ( c == '}' ) && !--nest )
- break;
-
- *b++ = c;
-
- c = yychar();
-
- /* Turn trailing "\r\n" sequences into plain "\n" for Cygwin. */
- if ( ( c == '\n' ) && ( b[ -1 ] == '\r' ) )
- --b;
- }
-
- /* We ate the ending brace -- regurgitate it. */
- if ( c != EOF )
- yyprev();
-
- /* Check for obvious errors. */
- if ( b == buf + sizeof( buf ) )
- {
- yyerror( "action block too big" );
- goto eof;
- }
-
- if ( nest )
- {
- yyerror( "unmatched {} in action block" );
- goto eof;
- }
-
- *b = 0;
- yylval.type = STRING;
- yylval.string = object_new( buf );
- yylval.file = incp->fname;
- yylval.line = incp->line;
- }
- else
- {
- char * b = buf;
- struct keyword * k;
- int inquote = 0;
- int notkeyword;
-
- /* Eat white space. */
- for ( ;; )
- {
- /* Skip past white space. */
- while ( ( c != EOF ) && isspace( c ) )
- c = yychar();
-
- /* Not a comment? */
- if ( c != '#' )
- break;
-
- /* Swallow up comment line. */
- while ( ( ( c = yychar() ) != EOF ) && ( c != '\n' ) ) ;
- }
-
- /* c now points to the first character of a token. */
- if ( c == EOF )
- goto eof;
-
- yylval.file = incp->fname;
- yylval.line = incp->line;
-
- /* While scanning the word, disqualify it for (expensive) keyword lookup
- * when we can: $anything, "anything", \anything
- */
- notkeyword = c == '$';
-
- /* Look for white space to delimit word. "'s get stripped but preserve
- * white space. \ protects next character.
- */
- while
- (
- ( c != EOF ) &&
- ( b < buf + sizeof( buf ) ) &&
- ( inquote || !isspace( c ) )
- )
- {
- if ( c == '"' )
- {
- /* begin or end " */
- inquote = !inquote;
- notkeyword = 1;
- }
- else if ( c != '\\' )
- {
- /* normal char */
- *b++ = c;
- }
- else if ( ( c = yychar() ) != EOF )
- {
- /* \c */
- if (c == 'n')
- c = '\n';
- else if (c == 'r')
- c = '\r';
- else if (c == 't')
- c = '\t';
- *b++ = c;
- notkeyword = 1;
- }
- else
- {
- /* \EOF */
- break;
- }
-
- c = yychar();
- }
-
- /* Check obvious errors. */
- if ( b == buf + sizeof( buf ) )
- {
- yyerror( "string too big" );
- goto eof;
- }
-
- if ( inquote )
- {
- yyerror( "unmatched \" in string" );
- goto eof;
- }
-
- /* We looked ahead a character - back up. */
- if ( c != EOF )
- yyprev();
-
- /* Scan token table. Do not scan if it is obviously not a keyword or if
-         * it is alphabetic when we're looking for punctuation.
- */
-
- *b = 0;
- yylval.type = ARG;
-
- if ( !notkeyword && !( isalpha( *buf ) && ( scanmode == SCAN_PUNCT ) ) )
- for ( k = keywords; k->word; ++k )
- if ( ( *buf == *k->word ) && !strcmp( k->word, buf ) )
- {
- yylval.type = k->type;
- yylval.keyword = k->word; /* used by symdump */
- break;
- }
-
- if ( yylval.type == ARG )
- yylval.string = object_new( buf );
- }
-
- if ( DEBUG_SCAN )
- printf( "scan %s\n", symdump( &yylval ) );
-
- return yylval.type;
-
-eof:
- /* We do not reset yylval.file & yylval.line here so unexpected EOF error
- * messages would include correct error location information.
- */
- yylval.type = EOF;
- return yylval.type;
-}
-
-
-static char * symdump( YYSTYPE * s )
-{
- static char buf[ BIGGEST_TOKEN + 20 ];
- switch ( s->type )
- {
- case EOF : sprintf( buf, "EOF" ); break;
- case 0 : sprintf( buf, "unknown symbol %s", object_str( s->string ) ); break;
- case ARG : sprintf( buf, "argument %s" , object_str( s->string ) ); break;
- case STRING: sprintf( buf, "string \"%s\"" , object_str( s->string ) ); break;
- default : sprintf( buf, "keyword %s" , s->keyword ); break;
- }
- return buf;
-}
-
-
-/*
- * Get information about the current file and line, for those epsilon
- * transitions that produce a parse.
- */
-
-void yyinput_stream( OBJECT * * name, int * line )
-{
- if ( incp )
- {
- *name = incp->fname;
- *line = incp->line;
- }
- else
- {
- *name = constant_builtin;
- *line = -1;
- }
-}
diff --git a/tools/build/v2/engine/scan.h b/tools/build/v2/engine/scan.h
deleted file mode 100644
index b672d00262..0000000000
--- a/tools/build/v2/engine/scan.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * scan.h - the jam yacc scanner
- *
- * External functions:
- *
- * yyerror( char *s ) - print a parsing error message.
- * yyfparse( char *s ) - scan include file s.
- * yylex() - parse the next token, returning its type.
- * yymode() - adjust lexicon of scanner.
- * yyparse() - declaration for yacc parser.
- * yyanyerrors() - indicate if any parsing errors occurred.
- *
- * The yymode() function is for the parser to adjust the lexicon of the scanner.
- * Aside from normal keyword scanning, there is a mode to handle action strings
- * (look only for the closing }) and a mode to ignore most keywords when looking
- * for a punctuation keyword. This allows non-punctuation keywords to be used in
- * lists without quoting.
- */
-
-/*
- * YYSTYPE - value of a lexical token
- */
-
-#define YYSTYPE YYSYMBOL
-
-typedef struct _YYSTYPE
-{
- int type;
- OBJECT * string;
- PARSE * parse;
- LIST * list;
- int number;
- OBJECT * file;
- int line;
- const char * keyword;
-} YYSTYPE;
-
-extern YYSTYPE yylval;
-
-void yymode( int n );
-void yyerror( const char * s );
-int yyanyerrors();
-void yyfparse( OBJECT * s );
-int yyline();
-int yylex();
-int yyparse();
-void yyinput_stream( OBJECT * * name, int * line );
-
-# define SCAN_NORMAL 0 /* normal parsing */
-# define SCAN_STRING 1 /* look only for matching } */
-# define SCAN_PUNCT 2 /* only punctuation keywords */
diff --git a/tools/build/v2/engine/search.c b/tools/build/v2/engine/search.c
deleted file mode 100644
index e3d287a679..0000000000
--- a/tools/build/v2/engine/search.c
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "search.h"
-#include "timestamp.h"
-#include "pathsys.h"
-#include "variable.h"
-#include "object.h"
-#include "compile.h"
-#include "strings.h"
-#include "hash.h"
-#include "filesys.h"
-#include <string.h>
-
-
-typedef struct _binding
-{
- OBJECT * binding;
- OBJECT * target;
-} BINDING;
-
-static struct hash *explicit_bindings = 0;
-
-
-void call_bind_rule
-(
- OBJECT * target_,
- OBJECT * boundname_
-)
-{
- LIST * bind_rule = var_get( root_module(), constant_BINDRULE );
- if ( !list_empty( bind_rule ) )
- {
- OBJECT * target = object_copy( target_ );
- OBJECT * boundname = object_copy( boundname_ );
- if ( boundname && target )
- {
- /* Prepare the argument list. */
- FRAME frame[1];
- frame_init( frame );
-
- /* First argument is the target name. */
- lol_add( frame->args, list_new( target ) );
-
- lol_add( frame->args, list_new( boundname ) );
- if ( lol_get( frame->args, 1 ) )
- list_free( evaluate_rule( list_front( bind_rule ), frame ) );
-
- /* Clean up */
- frame_free( frame );
- }
- else
- {
- if ( boundname )
- object_free( boundname );
- if ( target )
- object_free( target );
- }
- }
-}
-
-/*
- * search.c - find a target along $(SEARCH) or $(LOCATE)
- *
- * First, check whether LOCATE is set. If so, use it to determine the
- * location of the target and return it, regardless of whether anything
- * exists at that location.
- *
- * Second, examine all directories in SEARCH. If a file already exists
- * there, or another target with the same name was placed at that location
- * via a LOCATE setting, stop and return the location. In the case of a
- * previous target, return its name via the third argument.
- *
- * This behaviour makes it possible to handle dependencies on generated
- * files. If the caller does not expect the target to be generated, 0 can
- * be passed as the third argument.
- */
-
-OBJECT *
-search(
- OBJECT * target,
- time_t *time,
- OBJECT * * another_target,
- int file
-)
-{
- PATHNAME f[1];
- LIST * varlist;
- string buf[1];
- int found = 0;
- /* Will be set to 1 if target location is specified via LOCATE. */
- int explicitly_located = 0;
- OBJECT * boundname = 0;
-
- if ( another_target )
- *another_target = 0;
-
- if (! explicit_bindings )
- explicit_bindings = hashinit( sizeof(BINDING),
- "explicitly specified locations");
-
- string_new( buf );
- /* Parse the filename */
-
- path_parse( object_str( target ), f );
-
- f->f_grist.ptr = 0;
- f->f_grist.len = 0;
-
- varlist = var_get( root_module(), constant_LOCATE );
- if ( !list_empty( varlist ) )
- {
- OBJECT * key;
- f->f_root.ptr = object_str( list_front( varlist ) );
- f->f_root.len = strlen( object_str( list_front( varlist ) ) );
-
- path_build( f, buf, 1 );
-
- if ( DEBUG_SEARCH )
- printf( "locate %s: %s\n", object_str( target ), buf->value );
-
- explicitly_located = 1;
-
- key = object_new( buf->value );
- timestamp( key, time );
- object_free( key );
- found = 1;
- }
- else if ( varlist = var_get( root_module(), constant_SEARCH ), !list_empty( varlist ) )
- {
- LISTITER iter = list_begin( varlist ), end = list_end( varlist );
- for ( ; iter != end; iter = list_next( iter ) )
- {
- BINDING * ba;
- file_info_t *ff;
- OBJECT * key;
- OBJECT * test_path;
-
- f->f_root.ptr = object_str( list_item( iter ) );
- f->f_root.len = strlen( object_str( list_item( iter ) ) );
-
- string_truncate( buf, 0 );
- path_build( f, buf, 1 );
-
- if ( DEBUG_SEARCH )
- printf( "search %s: %s\n", object_str( target ), buf->value );
-
- test_path = object_new( buf->value );
- key = path_as_key( test_path );
- object_free( test_path );
- ff = file_query( key );
- timestamp( key, time );
-
- if ( ( ba = (BINDING *)hash_find( explicit_bindings, key ) ) )
- {
- if ( DEBUG_SEARCH )
- printf(" search %s: found explicitly located target %s\n",
- object_str( target ), object_str( ba->target ) );
- if ( another_target )
- *another_target = ba->target;
- found = 1;
- object_free( key );
- break;
- }
- else if ( ff && ff->time )
- {
- if ( !file || ff->is_file )
- {
- found = 1;
- object_free( key );
- break;
- }
- }
- object_free( key );
- }
- }
-
- if ( !found )
- {
- /* Look for the obvious */
- /* This is a questionable move. Should we look in the */
- /* obvious place if SEARCH is set? */
- OBJECT * key;
-
- f->f_root.ptr = 0;
- f->f_root.len = 0;
-
- string_truncate( buf, 0 );
- path_build( f, buf, 1 );
-
- if ( DEBUG_SEARCH )
- printf( "search %s: %s\n", object_str( target ), buf->value );
-
- key = object_new( buf->value );
- timestamp( key, time );
- object_free( key );
- }
-
- boundname = object_new( buf->value );
- string_free( buf );
-
- if ( explicitly_located )
- {
- int found;
- BINDING * ba;
- OBJECT * key = path_as_key( boundname );
- /* CONSIDER: we probably should issue a warning if another file
- is explicitly bound to the same location. This might break
- compatibility, though. */
- ba = (BINDING *)hash_insert( explicit_bindings, key, &found );
- if ( !found )
- {
- ba->binding = key;
- ba->target = target;
- }
- else
- {
- object_free( key );
- }
- }
-
- /* prepare a call to BINDRULE if the variable is set */
- call_bind_rule( target, boundname );
-
- return boundname;
-}
-
-
-static void free_binding( void * xbinding, void * data )
-{
- BINDING * binding = (BINDING *)xbinding;
- object_free( binding->binding );
-}
-
-void search_done( void )
-{
- if ( explicit_bindings )
- {
- hashenumerate( explicit_bindings, free_binding, (void *)0 );
- hashdone( explicit_bindings );
- }
-}
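
The header comment of search.c above spells out the binding order: an explicit LOCATE setting always decides the location, otherwise each SEARCH directory is probed for an existing file or a target already bound there, and finally the bare name is used. A condensed C sketch of just that precedence follows; the helpers join_path, file_exists and explicitly_bound are hypothetical stand-ins, and the real implementation above additionally records timestamps and explicit bindings.

    /* Illustrative sketch of the LOCATE / SEARCH precedence only; the helper
     * functions declared extern here are hypothetical. */
    #include <stddef.h>

    extern char * join_path( const char * dir, const char * name );
    extern int    file_exists( const char * path );
    extern int    explicitly_bound( const char * path );

    const char * bind_target( const char * name,
                              const char * locate,          /* may be NULL */
                              const char * const * search,
                              size_t n_search )
    {
        size_t i;

        /* 1. LOCATE decides unconditionally, whether or not the file exists. */
        if ( locate )
            return join_path( locate, name );

        /* 2. Otherwise take the first SEARCH directory that already holds the
         *    file, or that another target was explicitly bound to via LOCATE. */
        for ( i = 0; i < n_search; ++i )
        {
            const char * candidate = join_path( search[ i ], name );
            if ( file_exists( candidate ) || explicitly_bound( candidate ) )
                return candidate;
        }

        /* 3. Fall back to the bare name in the current directory. */
        return name;
    }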
diff --git a/tools/build/v2/engine/search.h b/tools/build/v2/engine/search.h
deleted file mode 100644
index 0b6583996b..0000000000
--- a/tools/build/v2/engine/search.h
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * search.h - find a target along $(SEARCH) or $(LOCATE)
- */
-
-#ifndef SEARCH_SW20111118_H
-#define SEARCH_SW20111118_H
-
-#include "object.h"
-#include <time.h>
-
-OBJECT * search( OBJECT * target, time_t * time, OBJECT * * another_target, int file );
-void search_done( void );
-
-#endif
diff --git a/tools/build/v2/engine/strings.c b/tools/build/v2/engine/strings.c
deleted file mode 100644
index 8956123779..0000000000
--- a/tools/build/v2/engine/strings.c
+++ /dev/null
@@ -1,201 +0,0 @@
-/* Copyright David Abrahams 2004. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-#include "jam.h"
-#include "strings.h"
-#include <stdlib.h>
-#include <string.h>
-#include <assert.h>
-#include <stdio.h>
-
-
-#ifndef NDEBUG
-# define JAM_STRING_MAGIC ((char)0xcf)
-# define JAM_STRING_MAGIC_SIZE 4
-static void assert_invariants( string* self )
-{
- int i;
-
- if ( self->value == 0 )
- {
- assert( self->size == 0 );
- assert( self->capacity == 0 );
- assert( self->opt[0] == 0 );
- return;
- }
-
- assert( self->size < self->capacity );
- assert( ( self->capacity <= sizeof(self->opt) ) == ( self->value == self->opt ) );
- assert( strlen( self->value ) == self->size );
-
- for (i = 0; i < 4; ++i)
- {
- assert( self->magic[i] == JAM_STRING_MAGIC );
- assert( self->value[self->capacity + i] == JAM_STRING_MAGIC );
- }
-}
-#else
-# define JAM_STRING_MAGIC_SIZE 0
-# define assert_invariants(x) do {} while (0)
-#endif
-
-void string_new( string* s )
-{
- s->value = s->opt;
- s->size = 0;
- s->capacity = sizeof(s->opt);
- s->opt[0] = 0;
-#ifndef NDEBUG
- memset(s->magic, JAM_STRING_MAGIC, sizeof(s->magic));
-#endif
- assert_invariants( s );
-}
-
-void string_free( string* s )
-{
- assert_invariants( s );
- if ( s->value != s->opt )
- BJAM_FREE( s->value );
- string_new( s );
-}
-
-static void string_reserve_internal( string* self, size_t capacity )
-{
- if ( self->value == self->opt )
- {
- self->value = (char*)BJAM_MALLOC_ATOMIC( capacity + JAM_STRING_MAGIC_SIZE );
- self->value[0] = 0;
- strncat( self->value, self->opt, sizeof(self->opt) );
- assert( strlen( self->value ) <= self->capacity ); /* This is a regression test */
- }
- else
- {
- self->value = (char*)BJAM_REALLOC( self->value, capacity + JAM_STRING_MAGIC_SIZE );
- }
-#ifndef NDEBUG
- memcpy( self->value + capacity, self->magic, JAM_STRING_MAGIC_SIZE );
-#endif
- self->capacity = capacity;
-}
-
-void string_reserve( string* self, size_t capacity )
-{
- assert_invariants( self );
- if ( capacity <= self->capacity )
- return;
- string_reserve_internal( self, capacity );
- assert_invariants( self );
-}
-
-static void extend_full( string* self, char const* start, char const* finish )
-{
- size_t new_size = self->capacity + ( finish - start );
- size_t new_capacity = self->capacity;
- size_t old_size = self->capacity;
- while ( new_capacity < new_size + 1)
- new_capacity <<= 1;
- string_reserve_internal( self, new_capacity );
- memcpy( self->value + old_size, start, new_size - old_size );
- self->value[new_size] = 0;
- self->size = new_size;
-}
-
-void string_append( string* self, char const* rhs )
-{
- char* p = self->value + self->size;
- char* end = self->value + self->capacity;
- assert_invariants( self );
-
- while ( *rhs && p != end)
- *p++ = *rhs++;
-
- if ( p != end )
- {
- *p = 0;
- self->size = p - self->value;
- }
- else
- {
- extend_full( self, rhs, rhs + strlen(rhs) );
- }
- assert_invariants( self );
-}
-
-void string_append_range( string* self, char const* start, char const* finish )
-{
- char* p = self->value + self->size;
- char* end = self->value + self->capacity;
- assert_invariants( self );
-
- while ( p != end && start != finish )
- *p++ = *start++;
-
- if ( p != end )
- {
- *p = 0;
- self->size = p - self->value;
- }
- else
- {
- extend_full( self, start, finish );
- }
- assert_invariants( self );
-}
-
-void string_copy( string* s, char const* rhs )
-{
- string_new( s );
- string_append( s, rhs );
-}
-
-void string_truncate( string* self, size_t n )
-{
- assert_invariants( self );
- assert( n <= self->capacity );
- self->value[self->size = n] = 0;
- assert_invariants( self );
-}
-
-void string_pop_back( string* self )
-{
- string_truncate( self, self->size - 1 );
-}
-
-void string_push_back( string* self, char x )
-{
- string_append_range( self, &x, &x + 1 );
-}
-
-char string_back( string* self )
-{
- assert_invariants( self );
- return self->value[self->size - 1];
-}
-
-#ifndef NDEBUG
-void string_unit_test()
-{
- string s[1];
- int i;
- char buffer[sizeof(s->opt) * 2 + 2];
- int limit = sizeof(buffer) > 254 ? 254 : sizeof(buffer);
-
- string_new(s);
-
- for (i = 0; i < limit; ++i)
- {
- string_push_back( s, (char)(i + 1) );
- };
-
- for (i = 0; i < limit; ++i)
- {
- assert( i < s->size );
- assert( s->value[i] == (char)(i + 1));
- }
-
- string_free(s);
-
-}
-#endif
-
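strings.c implements a growable string with a small in-place buffer (opt, 32 bytes) and, in debug builds, magic guard bytes around the heap block to catch overruns. A short usage sketch of the API declared in strings.h, assuming the engine sources are available to compile and link against:

    /* Usage sketch for the jam string API (see strings.h). */
    #include "strings.h"
    #include <stdio.h>

    void build_greeting( void )
    {
        string s[ 1 ];

        string_new( s );                     /* starts in the 32-byte opt buffer */
        string_append( s, "hello" );
        string_push_back( s, ',' );
        string_append( s, " world" );        /* grows onto the heap when needed  */
        string_truncate( s, 5 );             /* back to just "hello"             */

        printf( "%s (size %lu)\n", s->value, s->size );

        string_free( s );                    /* releases heap storage, if any    */
    }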
diff --git a/tools/build/v2/engine/strings.h b/tools/build/v2/engine/strings.h
deleted file mode 100644
index 33c77bd7f8..0000000000
--- a/tools/build/v2/engine/strings.h
+++ /dev/null
@@ -1,34 +0,0 @@
-#ifndef STRINGS_DWA20011024_H
-# define STRINGS_DWA20011024_H
-
-/* Copyright David Abrahams 2004. Distributed under the Boost */
-/* Software License, Version 1.0. (See accompanying */
-/* file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */
-
-# include <stddef.h>
-
-typedef struct string
-{
- char* value;
- unsigned long size;
- unsigned long capacity;
- char opt[32];
-#ifndef NDEBUG
- char magic[4];
-#endif
-} string;
-
-void string_new( string* );
-void string_copy( string*, char const* );
-void string_free( string* );
-void string_append( string*, char const* );
-void string_append_range( string*, char const*, char const* );
-void string_push_back( string* s, char x );
-void string_reserve( string*, size_t );
-void string_truncate( string*, size_t );
-void string_pop_back( string* );
-char string_back( string* );
-void string_unit_test();
-
-#endif
-
diff --git a/tools/build/v2/engine/subst.c b/tools/build/v2/engine/subst.c
deleted file mode 100644
index 156670f1e9..0000000000
--- a/tools/build/v2/engine/subst.c
+++ /dev/null
@@ -1,113 +0,0 @@
-#include <stddef.h>
-#include "jam.h"
-#include "regexp.h"
-#include "hash.h"
-
-#include "object.h"
-#include "lists.h"
-#include "compile.h"
-#include "frames.h"
-#include "builtins.h"
-
-struct regex_entry
-{
- OBJECT* pattern;
- regexp* regex;
-};
-typedef struct regex_entry regex_entry;
-
-static struct hash* regex_hash;
-
-regexp* regex_compile( OBJECT* pattern )
-{
- int found;
- regex_entry * e ;
-
- if ( !regex_hash )
- regex_hash = hashinit(sizeof(regex_entry), "regex");
-
- e = (regex_entry *)hash_insert( regex_hash, pattern, &found );
- if ( !found )
- {
- e->pattern = object_copy( pattern );
- e->regex = regcomp( (char*)pattern );
- }
-
- return e->regex;
-}
-
-LIST * builtin_subst( FRAME * frame, int flags )
-{
- LIST* result = L0;
- LIST* arg1 = lol_get( frame->args, 0 );
- LISTITER iter = list_begin( arg1 ), end = list_end( arg1 );
-
- if ( iter != end && list_next( iter ) != end && list_next( list_next( iter ) ) != end )
- {
-
- const char* source = object_str( list_item( iter ) );
- OBJECT * pattern = list_item( list_next( iter ) );
- regexp* repat = regex_compile( pattern );
-
- if ( regexec( repat, (char*)source) )
- {
- LISTITER subst = list_next( iter );
-
- while ( ( subst = list_next( subst ) ) != end )
- {
-# define BUFLEN 4096
- char buf[BUFLEN + 1];
- const char* in = object_str( list_item( subst ) );
- char* out = buf;
-
- for ( ; *in && out < buf + BUFLEN; ++in )
- {
- if ( *in == '\\' || *in == '$' )
- {
- ++in;
- if ( *in == 0 )
- {
- break;
- }
- else if ( *in >= '0' && *in <= '9' )
- {
- unsigned n = *in - '0';
- const size_t srclen = repat->endp[n] - repat->startp[n];
- const size_t remaining = buf + BUFLEN - out;
- const size_t len = srclen < remaining ? srclen : remaining;
- memcpy( out, repat->startp[n], len );
- out += len;
- continue;
- }
- /* fall through and copy the next character */
- }
- *out++ = *in;
- }
- *out = 0;
-
- result = list_push_back( result, object_new( buf ) );
-#undef BUFLEN
- }
- }
- }
-
- return result;
-}
-
-
-static void free_regex( void * xregex, void * data )
-{
- regex_entry * regex = (regex_entry *)xregex;
- object_free( regex->pattern );
- BJAM_FREE( regex->regex );
-}
-
-
-void regex_done()
-{
- if ( regex_hash )
- {
- hashenumerate( regex_hash, free_regex, (void *)0 );
- hashdone( regex_hash );
- }
-}
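
builtin_subst above compiles each pattern once, caches the compiled expression in a hash keyed by the pattern object, and expands $0..$9 back-references into a bounded buffer for every remaining argument. The sketch below illustrates the same back-reference expansion using POSIX <regex.h> instead of jam's bundled regexp; it is an approximation for illustration, not the engine's code.

    /* Illustrative only: $N back-reference expansion with POSIX <regex.h>. */
    #include <regex.h>
    #include <string.h>

    #define NGROUPS 10
    #define OUTLEN  4096

    /* Expand occurrences of $0..$9 (or \0..\9) in 'fmt' using the match of
     * 'pattern' against 'source'; 'out' must hold OUTLEN + 1 bytes. */
    int subst( const char * source, const char * pattern, const char * fmt,
               char * out )
    {
        regex_t    re;
        regmatch_t m[ NGROUPS ];
        char *     o   = out;
        char *     end = out + OUTLEN;

        if ( regcomp( &re, pattern, REG_EXTENDED ) != 0 )
            return -1;
        if ( regexec( &re, source, NGROUPS, m, 0 ) != 0 )
        {
            regfree( &re );
            return -1;
        }

        for ( ; *fmt && o < end; ++fmt )
        {
            if ( ( *fmt == '$' || *fmt == '\\' ) &&
                 fmt[ 1 ] >= '0' && fmt[ 1 ] <= '9' )
            {
                int n = fmt[ 1 ] - '0';
                if ( m[ n ].rm_so != -1 )
                {
                    size_t len  = (size_t)( m[ n ].rm_eo - m[ n ].rm_so );
                    size_t room = (size_t)( end - o );
                    if ( len > room ) len = room;
                    memcpy( o, source + m[ n ].rm_so, len );
                    o += len;
                }
                ++fmt;          /* skip the digit */
                continue;
            }
            *o++ = *fmt;
        }
        *o = 0;

        regfree( &re );
        return 0;
    }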
diff --git a/tools/build/v2/engine/timestamp.c b/tools/build/v2/engine/timestamp.c
deleted file mode 100644
index 6e4ed7326a..0000000000
--- a/tools/build/v2/engine/timestamp.c
+++ /dev/null
@@ -1,223 +0,0 @@
-/*
- * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-# include "jam.h"
-
-# include "hash.h"
-# include "filesys.h"
-# include "pathsys.h"
-# include "timestamp.h"
-# include "object.h"
-# include "strings.h"
-
-/*
- * timestamp.c - get the timestamp of a file or archive member
- *
- * 09/22/00 (seiwald) - downshift names on OS2, too
- */
-
-/*
- * BINDING - all known files
- */
-
-typedef struct _binding BINDING;
-
-struct _binding {
- OBJECT * name;
- short flags;
-
-# define BIND_SCANNED 0x01 /* if directory or arch, has been scanned */
-
- short progress;
-
-# define BIND_INIT 0 /* never seen */
-# define BIND_NOENTRY 1 /* timestamp requested but file never found */
-# define BIND_SPOTTED 2 /* file found but not timed yet */
-# define BIND_MISSING 3 /* file found but can't get timestamp */
-# define BIND_FOUND 4 /* file found and time stamped */
-
- time_t time; /* update time - 0 if the file does not exist */
-};
-
-static struct hash * bindhash = 0;
-static void time_enter( void *, OBJECT *, int, time_t );
-
-static char * time_progress[] =
-{
- "INIT",
- "NOENTRY",
- "SPOTTED",
- "MISSING",
- "FOUND"
-};
-
-
-/*
- * timestamp() - return timestamp on a file, if present.
- */
-
-void timestamp( OBJECT * target, time_t * time )
-{
- PROFILE_ENTER( timestamp );
-
- PATHNAME f1;
- PATHNAME f2;
- int found;
- BINDING * b;
- string buf[ 1 ];
-
- target = path_as_key( target );
-
- string_new( buf );
-
- if ( !bindhash )
- bindhash = hashinit( sizeof( BINDING ), "bindings" );
-
- /* Quick path - is it there? */
-
- b = (BINDING *)hash_insert( bindhash, target, &found );
- if ( !found )
- {
- b->name = object_copy( target ); /* never freed */
- b->time = b->flags = 0;
- b->progress = BIND_INIT;
- }
-
- if ( b->progress != BIND_INIT )
- goto afterscanning;
-
- b->progress = BIND_NOENTRY;
-
- /* Not found - have to scan for it. */
- path_parse( object_str( target ), &f1 );
-
- /* Scan directory if not already done so. */
- {
- int found;
- BINDING * b;
- OBJECT * name;
-
- f2 = f1;
- f2.f_grist.len = 0;
- path_parent( &f2 );
- path_build( &f2, buf, 0 );
-
- name = object_new( buf->value );
-
- b = (BINDING *)hash_insert( bindhash, name, &found );
- if ( !found )
- {
- b->name = object_copy( name );
- b->time = b->flags = 0;
- b->progress = BIND_INIT;
- }
-
- if ( !( b->flags & BIND_SCANNED ) )
- {
- file_dirscan( name, time_enter, bindhash );
- b->flags |= BIND_SCANNED;
- }
-
- object_free( name );
- }
-
- /* Scan archive if not already done so. */
- if ( f1.f_member.len )
- {
- int found;
- BINDING * b;
- OBJECT * name;
-
- f2 = f1;
- f2.f_grist.len = 0;
- f2.f_member.len = 0;
- string_truncate( buf, 0 );
- path_build( &f2, buf, 0 );
-
- name = object_new( buf->value );
-
- b = (BINDING *)hash_insert( bindhash, name, &found );
- if ( !found )
- {
- b->name = object_copy( name );
- b->time = b->flags = 0;
- b->progress = BIND_INIT;
- }
-
- if ( !( b->flags & BIND_SCANNED ) )
- {
- file_archscan( buf->value, time_enter, bindhash );
- b->flags |= BIND_SCANNED;
- }
-
- object_free( name );
- }
-
- afterscanning:
-
- if ( b->progress == BIND_SPOTTED )
- {
- b->progress = file_time( b->name, &b->time ) < 0
- ? BIND_MISSING
- : BIND_FOUND;
- }
-
- *time = b->progress == BIND_FOUND ? b->time : 0;
- string_free( buf );
-
- object_free( target );
-
- PROFILE_EXIT( timestamp );
-}
-
-
-static void time_enter( void * closure, OBJECT * target, int found, time_t time )
-{
- int item_found;
- BINDING * b;
- struct hash * bindhash = (struct hash *)closure;
-
- target = path_as_key( target );
-
- b = (BINDING *)hash_insert( bindhash, target, &item_found );
- if ( !item_found )
- {
- b->name = object_copy( target );
- b->flags = 0;
- }
-
- b->time = time;
- b->progress = found ? BIND_FOUND : BIND_SPOTTED;
-
- if ( DEBUG_BINDSCAN )
- printf( "time ( %s ) : %s\n", object_str( target ), time_progress[ b->progress ] );
-
- object_free( target );
-}
-
-static void free_timestamps ( void * xbinding, void * data )
-{
- object_free( ((BINDING *)xbinding)->name );
-}
-
-/*
- * stamps_done() - free timestamp tables.
- */
-
-void stamps_done()
-{
- if ( bindhash )
- {
- hashenumerate( bindhash, free_timestamps, (void *)0 );
- hashdone( bindhash );
- }
-}
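
timestamp.c caches file times in a hash of BINDING records and moves each entry through a small progression: INIT, then NOENTRY or SPOTTED after scanning the parent directory or archive exactly once, then MISSING or FOUND once the file is actually timed. A tiny hypothetical sketch of that lazy, state-tracked lookup; the cache itself is omitted and scan_parent_dir and stat_file are invented stubs.

    /* Illustrative sketch of the BINDING progress idea only. */
    #include <time.h>

    enum progress { P_INIT, P_NOENTRY, P_SPOTTED, P_MISSING, P_FOUND };

    struct binding
    {
        const char *  name;
        enum progress progress;
        time_t        time;        /* valid only when progress == P_FOUND */
    };

    extern void scan_parent_dir( struct binding * b ); /* marks b SPOTTED/NOENTRY */
    extern int  stat_file( const char * name, time_t * t );

    time_t cached_timestamp( struct binding * b )
    {
        if ( b->progress == P_INIT )
        {
            b->progress = P_NOENTRY;
            scan_parent_dir( b );          /* done at most once per directory */
        }

        if ( b->progress == P_SPOTTED )
            b->progress = stat_file( b->name, &b->time ) < 0 ? P_MISSING
                                                             : P_FOUND;

        return b->progress == P_FOUND ? b->time : 0;
    }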
diff --git a/tools/build/v2/engine/timestamp.h b/tools/build/v2/engine/timestamp.h
deleted file mode 100644
index 26b7e8d1c9..0000000000
--- a/tools/build/v2/engine/timestamp.h
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Copyright 1993, 1995 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * timestamp.h - get the timestamp of a file or archive member
- */
-
-#ifndef TIMESTAMP_H_SW_2011_11_18
-#define TIMESTAMP_H_SW_2011_11_18
-
-#include "object.h"
-#include "time.h"
-
-void timestamp( OBJECT * target, time_t * time );
-void stamps_done();
-
-#endif
diff --git a/tools/build/v2/engine/variable.c b/tools/build/v2/engine/variable.c
deleted file mode 100644
index 21eedf395a..0000000000
--- a/tools/build/v2/engine/variable.c
+++ /dev/null
@@ -1,353 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/* This file is ALSO:
- * Copyright 2001-2004 David Abrahams.
- * Copyright 2005 Reece H. Dunn.
- * Copyright 2005 Rene Rivera.
- * Distributed under the Boost Software License, Version 1.0.
- * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- */
-
-#include "jam.h"
-#include "lists.h"
-#include "parse.h"
-#include "variable.h"
-#include "hash.h"
-#include "filesys.h"
-#include "object.h"
-#include "strings.h"
-#include "pathsys.h"
-#include "modules.h"
-#include <stdlib.h>
-#include <stdio.h>
-
-/*
- * variable.c - handle Jam multi-element variables.
- *
- * External routines:
- *
- * var_defines() - load a bunch of variable=value settings.
- * var_string() - expand a string with variables in it.
- * var_get() - get value of a user defined symbol.
- * var_set() - set a variable in jam's user defined symbol table.
- * var_swap() - swap a variable's value with the given one.
- * var_done() - free variable tables.
- *
- * Internal routines:
- *
- * var_enter() - make new var symbol table entry, returning var ptr.
- * var_dump() - dump a variable to stdout.
- *
- * 04/13/94 (seiwald) - added shorthand L0 for null list pointer
- * 08/23/94 (seiwald) - Support for '+=' (append to variable)
- * 01/22/95 (seiwald) - split environment variables at blanks or :'s
- * 05/10/95 (seiwald) - split path variables at SPLITPATH (not :)
- * 09/11/00 (seiwald) - defunct var_list() removed
- */
-
-/*
- * VARIABLE - a user defined multi-value variable
- */
-
-typedef struct _variable VARIABLE ;
-
-struct _variable
-{
- OBJECT * symbol;
- LIST * value;
-};
-
-static LIST * * var_enter( struct module_t * module, OBJECT * symbol );
-static void var_dump( OBJECT * symbol, LIST * value, char * what );
-
-
-/*
- * var_defines() - load a bunch of variable=value settings
- *
- * If preprocess is false, take the value verbatim.
- *
- * Otherwise, if the variable value is enclosed in quotes, strip the
- * quotes.
- *
- * Otherwise, if the variable name ends in PATH, split the value at :'s.
- *
- * Otherwise, split the value at blanks.
- */
-
-void var_defines( struct module_t * module, char * const * e, int preprocess )
-{
- string buf[1];
-
- string_new( buf );
-
- for ( ; *e; ++e )
- {
- char * val;
- OBJECT * varname;
-
-# ifdef OS_MAC
- /* On the mac (MPW), the var=val is actually var\0val */
- /* Think different. */
-
- if ( ( val = strchr( *e, '=' ) ) || ( val = *e + strlen( *e ) ) )
-# else
- if ( ( val = strchr( *e, '=' ) ) )
-# endif
- {
- LIST * l = L0;
- char * pp;
- char * p;
-# ifdef OPT_NO_EXTERNAL_VARIABLE_SPLIT
- char split = '\0';
-# else
- # ifdef OS_MAC
- char split = ',';
- # else
- char split = ' ';
- # endif
-# endif
- size_t len = strlen( val + 1 );
-
- int quoted = ( val[1] == '"' ) && ( val[len] == '"' ) &&
- ( len > 1 );
-
- if ( quoted && preprocess )
- {
- string_append_range( buf, val + 2, val + len );
- l = list_push_back( l, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
- else
- {
- /* Split *PATH at :'s, not spaces. */
- if ( val - 4 >= *e )
- {
- if ( !strncmp( val - 4, "PATH", 4 ) ||
- !strncmp( val - 4, "Path", 4 ) ||
- !strncmp( val - 4, "path", 4 ) )
- split = SPLITPATH;
- }
-
- /* Do the split. */
- for
- (
- pp = val + 1;
- preprocess && ( ( p = strchr( pp, split ) ) != 0 );
- pp = p + 1
- )
- {
- string_append_range( buf, pp, p );
- l = list_push_back( l, object_new( buf->value ) );
- string_truncate( buf, 0 );
- }
-
- l = list_push_back( l, object_new( pp ) );
- }
-
- /* Get name. */
- string_append_range( buf, *e, val );
- varname = object_new( buf->value );
- var_set( module, varname, l, VAR_SET );
- object_free( varname );
- string_truncate( buf, 0 );
- }
- }
- string_free( buf );
-}
-
-
-static LIST * saved_var = L0;
-
-/*
- * var_get() - get value of a user defined symbol.
- *
- * Returns NULL if symbol unset.
- */
-
-LIST * var_get( struct module_t * module, OBJECT * symbol )
-{
- LIST * result = L0;
-#ifdef OPT_AT_FILES
- /* Some "fixed" variables... */
- if ( object_equal( symbol, constant_TMPDIR ) )
- {
- list_free( saved_var );
- result = saved_var = list_new( object_new( path_tmpdir() ) );
- }
- else if ( object_equal( symbol, constant_TMPNAME ) )
- {
- list_free( saved_var );
- result = saved_var = list_new( path_tmpnam() );
- }
- else if ( object_equal( symbol, constant_TMPFILE ) )
- {
- list_free( saved_var );
- result = saved_var = list_new( path_tmpfile() );
- }
- else if ( object_equal( symbol, constant_STDOUT ) )
- {
- list_free( saved_var );
- result = saved_var = list_new( object_copy( constant_STDOUT ) );
- }
- else if ( object_equal( symbol, constant_STDERR ) )
- {
- list_free( saved_var );
- result = saved_var = list_new( object_copy( constant_STDERR ) );
- }
- else
-#endif
- {
- VARIABLE * v;
- int n;
-
- if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 )
- {
- if ( DEBUG_VARGET )
- var_dump( symbol, module->fixed_variables[ n ], "get" );
- result = module->fixed_variables[ n ];
- }
- else if ( module->variables && ( v = (VARIABLE *)hash_find( module->variables, symbol ) ) )
- {
- if ( DEBUG_VARGET )
- var_dump( v->symbol, v->value, "get" );
- result = v->value;
- }
- }
- return result;
-}
-
-
-LIST * var_get_and_clear_raw( module_t * module, OBJECT * symbol )
-{
- LIST * result = L0;
- VARIABLE * v;
-
- if ( module->variables && ( v = (VARIABLE *)hash_find( module->variables, symbol ) ) )
- {
- result = v->value;
- v->value = L0;
- }
-
- return result;
-}
-
-/*
- * var_set() - set a variable in Jam's user defined symbol table.
- *
- * 'flag' controls the relationship between new and old values of the variable:
- * SET replaces the old with the new; APPEND appends the new to the old; DEFAULT
- * only uses the new if the variable was previously unset.
- *
- * Copies symbol. Takes ownership of value.
- */
-
-void var_set( struct module_t * module, OBJECT * symbol, LIST * value, int flag )
-{
- LIST * * v = var_enter( module, symbol );
-
- if ( DEBUG_VARSET )
- var_dump( symbol, value, "set" );
-
- switch ( flag )
- {
- case VAR_SET:
- /* Replace value */
- list_free( *v );
- *v = value;
- break;
-
- case VAR_APPEND:
- /* Append value */
- *v = list_append( *v, value );
- break;
-
- case VAR_DEFAULT:
- /* Set only if unset */
- if ( list_empty( *v ) )
- *v = value;
- else
- list_free( value );
- break;
- }
-}
-
-
-/*
- * var_swap() - swap a variable's value with the given one.
- */
-
-LIST * var_swap( struct module_t * module, OBJECT * symbol, LIST * value )
-{
- LIST * * v = var_enter( module, symbol );
- LIST * oldvalue = *v;
- if ( DEBUG_VARSET )
- var_dump( symbol, value, "set" );
- *v = value;
- return oldvalue;
-}
-
-
-/*
- * var_enter() - make new var symbol table entry, returning var ptr.
- */
-
-static LIST * * var_enter( struct module_t * module, OBJECT * symbol )
-{
- int found;
- VARIABLE * v;
- int n;
-
- if ( ( n = module_get_fixed_var( module, symbol ) ) != -1 )
- {
- return &module->fixed_variables[ n ];
- }
-
- if ( !module->variables )
- module->variables = hashinit( sizeof( VARIABLE ), "variables" );
-
- v = (VARIABLE *)hash_insert( module->variables, symbol, &found );
- if ( !found )
- {
- v->symbol = object_copy( symbol );
- v->value = L0;
- }
-
- return &v->value;
-}
-
-
-/*
- * var_dump() - dump a variable to stdout.
- */
-
-static void var_dump( OBJECT * symbol, LIST * value, char * what )
-{
- printf( "%s %s = ", what, object_str( symbol ) );
- list_print( value );
- printf( "\n" );
-}
-
-
-/*
- * var_done() - free variable tables.
- */
-
-static void delete_var_( void * xvar, void * data )
-{
- VARIABLE * v = (VARIABLE *)xvar;
- object_free( v->symbol );
- list_free( v-> value );
-}
-
-
-void var_done( struct module_t * module )
-{
- list_free( saved_var );
- saved_var = L0;
- hashenumerate( module->variables, delete_var_, (void *)0 );
- hash_free( module->variables );
-}
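
var_defines() above applies simple splitting rules to each name=value setting: when preprocessing, a fully quoted value is taken verbatim with the quotes stripped, a name ending in PATH/Path/path is split at the platform path separator, and anything else is split at blanks. A compact hypothetical sketch of just that decision, with the engine's LIST handling replaced by an invented emit_element callback:

    /* Illustrative sketch of the splitting rule only; emit_element() and
     * SPLIT_PATH_CHAR are hypothetical stand-ins for the real list handling. */
    #include <string.h>

    #define SPLIT_PATH_CHAR ':'   /* SPLITPATH on Unix-like systems */

    extern void emit_element( const char * start, const char * finish );

    void split_value( const char * name, const char * value, int preprocess )
    {
        size_t vlen  = strlen( value );
        size_t nlen  = strlen( name );
        char   split = ' ';
        const char * p;
        const char * s = value;

        if ( !preprocess )
        {
            emit_element( value, value + vlen );          /* verbatim */
            return;
        }

        if ( vlen > 1 && value[ 0 ] == '"' && value[ vlen - 1 ] == '"' )
        {
            emit_element( value + 1, value + vlen - 1 );  /* strip the quotes */
            return;
        }

        if ( nlen >= 4 && ( !strcmp( name + nlen - 4, "PATH" ) ||
                            !strcmp( name + nlen - 4, "Path" ) ||
                            !strcmp( name + nlen - 4, "path" ) ) )
            split = SPLIT_PATH_CHAR;                      /* split *PATH at ':' */

        while ( ( p = strchr( s, split ) ) != 0 )
        {
            emit_element( s, p );
            s = p + 1;
        }
        emit_element( s, s + strlen( s ) );
    }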
diff --git a/tools/build/v2/engine/variable.h b/tools/build/v2/engine/variable.h
deleted file mode 100644
index aa27d56d6b..0000000000
--- a/tools/build/v2/engine/variable.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 1993, 2000 Christopher Seiwald.
- *
- * This file is part of Jam - see jam.c for Copyright information.
- */
-
-/*
- * variable.h - handle jam multi-element variables
- */
-
-#ifndef VARIABLE_SW20111119_H
-#define VARIABLE_SW20111119_H
-
-struct module_t;
-
-void var_defines( struct module_t * module, char * const * e, int preprocess );
-LIST * var_get( struct module_t * module, OBJECT * symbol );
-void var_set( struct module_t * module, OBJECT * symbol, LIST * value, int flag );
-LIST * var_swap( struct module_t * module, OBJECT * symbol, LIST * value );
-void var_done( struct module_t * module );
-
-/*
- * Defines for var_set().
- */
-
-# define VAR_SET 0 /* override previous value */
-# define VAR_APPEND 1 /* append to previous value */
-# define VAR_DEFAULT 2 /* set only if no previous value */
-
-#endif
diff --git a/tools/build/v2/example/boost-build.jam b/tools/build/v2/example/boost-build.jam
deleted file mode 100644
index efcc231fe2..0000000000
--- a/tools/build/v2/example/boost-build.jam
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-boost-build ../kernel ;
diff --git a/tools/build/v2/example/versioned/hello.cpp b/tools/build/v2/example/versioned/hello.cpp
deleted file mode 100644
index 11fef734b3..0000000000
--- a/tools/build/v2/example/versioned/hello.cpp
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright Rene Rivera, 2003
-// Distributed under the Boost Software License, Version 1.0.
-// (See accompanying file LICENSE_1_0.txt
-// or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-#include <iostream>
-
-int main()
-{
- std::cout << "Hello there!\n";
- return 1;
-}
diff --git a/tools/build/v2/example/versioned/jamfile.jam b/tools/build/v2/example/versioned/jamfile.jam
deleted file mode 100644
index 913cdf4d78..0000000000
--- a/tools/build/v2/example/versioned/jamfile.jam
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright 2003 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-#~ exe hello : hello.cpp : <version>1.0 ;
-lib hello : hello.cpp : <version>1.0 ;
-
-symlink hello_debug hello_release : hello/<variant>debug hello/<variant>release ;
-symlink links/hello_release : hello/<variant>release ;
diff --git a/tools/build/v2/example/versioned/jamroot.jam b/tools/build/v2/example/versioned/jamroot.jam
deleted file mode 100644
index 981d3eb508..0000000000
--- a/tools/build/v2/example/versioned/jamroot.jam
+++ /dev/null
@@ -1,8 +0,0 @@
-# Copyright 2003 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import gcc ;
-import toolset ;
-import modifiers ;
diff --git a/tools/build/v2/hacking.txt b/tools/build/v2/hacking.txt
deleted file mode 100644
index 2acc253370..0000000000
--- a/tools/build/v2/hacking.txt
+++ /dev/null
@@ -1,154 +0,0 @@
-Copyright 2003, 2006 Vladimir Prus
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
- ----------------------------------
- Boost.Build contributor guidelines
- ----------------------------------
-
-Boost.Build is an open-source project. This means that we welcome and appreciate
-all contributions --- be they ideas, bug reports, or patches. This document
-contains guidelines which help to ensure that development goes smoothly and
-changes are made quickly.
-
-The guidelines are not mandatory, and you can decide for yourself which ones to
-follow. But note that the 10 minutes you save by not writing a comment, for
-example, might lead to a significantly longer delay for everyone.
-
-Before contributing, make sure you are subscribed to our mailing list
-
- boost-build@lists.boost.org
-
-Additional resources include
-
- - The issue tracker
- http://zigzag.cs.msu.su/boost.build
-
- - commits mailing list:
- boost-build@lists.sourceforge.net
- http://sourceforge.net/mailarchive/forum.php?forum_id=9097
-
-
-BUGS and PATCHES
-
-Both bugs and patches can be sent to our mailing list.
-
-When reporting a bug, please try to provide the following information.
-
- - What you did. A minimal reproducible testcase is very much appreciated.
- A shell script with some annotations is much better than a verbose
- description of the problem. A regression test is best (see test/test_system.html).
- - What you got.
- - What you expected.
- - What version of Boost.Build and Boost.Jam you used. If possible,
- please try to test with the CVS HEAD state.
-
-When submitting a patch, please:
-
- - make a single patch for a single logical change
- - follow the policies and coding conventions below,
- - send patches in unified diff format,
- (using either "cvs diff -u" or "diff -u")
- - provide a log message together with the patch
- - put the patch and the log message as attachment to your email.
-
-The purpose of a log message is to communicate what was changed, and *why*.
-Without a good log message, you might spend a lot of time later wondering where
-a strange piece of code came from and why it was necessary.
-
-A good log message mentions each changed file and each rule/method, saying
-what happened to it, and why. Consider the following log message:
-
- Better direct request handling.
-
- * new/build-request.jam
- (directly-requested-properties-adjuster): Redo.
-
- * new/targets.jam
- (main-target.generate-really): Adjust properties here.
-
- * new/virtual-target.jam
- (register-actual-name): New rule.
- (virtual-target.actualize-no-scanner): Call the above, to detect bugs
- where two virtual targets correspond to one Jam target name.
-
-The log messages for the last two files are good. They tell what was changed.
-The change to the first file is clearly undercommented.
-
-It's OK to use terse log messages for uninteresting changes, like ones induced
-by interface changes elsewhere.
-
-
-POLICIES.
-
-1. Testing.
-
-All serious changes must be tested. New rules must be tested by the module where
-they are declared. The test system (test/test_system.html) should be used to
-verify user-observable behaviour.
-
-2. Documentation.
-
-It turns out that it's hard to have too many comments, but it's easy to have too
-few. Please prepend each rule with a comment saying what the rule does and what
-the arguments mean. Stop for a minute and consider whether the comment makes
-sense to anybody else and completely describes what the rule does. Generic
-phrases like "adjusts properties" are really not enough.
-
-When applicable, make changes to the user documentation as well.
-
-
-CODING CONVENTIONS.
-
- 1. All names of rules and variables are lowercase with "-" to separate
- words.
-
- rule call-me-ishmael ( ) ...
-
- 2. Names with dots in them are "intended globals". Ordinary globals use a
- dot prefix:
-
- .foobar
- $(.foobar)
-
- 3. Pseudofunctions or associations are <parameter>.<property>:
-
- $(argument).name = hello ;
- $($(argument).name)
-
- 4. Class attribute names are prefixed with "self.":
-
- self.x
- $(self.x)
-
- 5. Builtin rules are called via their ALL_UPPERCASE_NAMES:
-
- DEPENDS $(target) : $(sources) ;
-
- 6. Opening and closing braces go on separate lines:
-
- if $(a)
- {
- #
- }
- else
- {
- #
- }
-
-HTML DOCUMENTATION.
-
- Please pass HTML files through HTML Tidy (http://tidy.sf.net) before
- committing. This has two important purposes:
- - detecting bad HTML
- - converting files to a uniform indentation style, which reverses the effect
- of different editors and makes differences between revisions much smaller
- and easier to review.
-
- Alas, the way Tidy indents HTML differs between versions. Please use the
- version available at
-
- http://tidy.sourceforge.net/src/old/tidy_src_020411.tgz
-
- and "-i -wrap 78" command line parameters.
diff --git a/tools/build/v2/index.html b/tools/build/v2/index.html
deleted file mode 100644
index 63df93fecb..0000000000
--- a/tools/build/v2/index.html
+++ /dev/null
@@ -1,165 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-
-<!-- Copyright 2004 Aleksey Gurtovoy -->
-<!-- Copyright 2004, 2005, 2006 Vladimir Prus -->
-<!-- Distributed under the Boost Software License, Version 1.0. -->
-<!-- (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) -->
-
-<html>
- <head>
- <meta name="generator" content=
- "HTML Tidy for Linux/x86 (vers 1st April 2002), see www.w3.org">
- <meta name="generator" content="Microsoft FrontPage 5.0">
- <meta http-equiv="Content-Type" content=
- "text/html; charset=windows-1252">
- <!-- tidy options: &dash;&dash;tidy-mark false -i -wrap 78 !-->
-<style type="text/css">
-div.sidebar {
- margin-left: 1em ;
- border: medium outset ;
- padding: 0em 1em ;
- background-color: #adbed2;
- border-color: #000000;
- border-width: 1;
- width: 40% ;
- float: right ;
- clear: right }
-
-div.sidebar p.rubric {
- font-family: sans-serif ;
- font-size: medium }
-</style>
-
- <title>Boost.Build V2</title>
- </head>
-
- <body bgcolor="#FFFFFF" text="#000000">
-
- <p align="center"><img src="boost_build.png" width="396" height="60" alt="Boost.Build V2"></img>
-
- <div class="contents sidebar topic" id="index">
- <p>
- <b>Quick access</b>
- <ul>
- <li>Download: <a href=
- "http://prdownloads.sourceforge.net/boost/boost-build-2.0-m12.zip">[zip]
- </a>, <a href=
- "http://prdownloads.sourceforge.net/boost/boost-build-2.0-m12.tar.bz2">[tar.bz2]
- </a>
- <li>Nightly build: <a href="http://boost.org/boost-build2/boost-build.zip">[zip]</a>,
- <a href="http://boost.org/boost-build2/boost-build.tar.bz2">[tar.bz2]</a>
- <li><a href="../../../doc/html/bbv2.html">Documentation</a>
- (<a href="doc/userman.pdf">PDF</a>)
- <li><a
- href="http://www.crystalclearsoftware.com/cgi-bin/boost_wiki/wiki.pl?Boost.Build_V2">Wiki
- (User-contributed documentation)</a>
- <li>Feedback: <a
- href="http://lists.boost.org/mailman/listinfo.cgi/boost-build">[mailing list]</a>,
- <a
- href="news://news.gmane.org/gmane.comp.lib.boost.build">[newsgroup]</a>
- <ul>
- <li>Before posting, <a href="http://lists.boost.org/mailman/listinfo.cgi/boost-build">subscribe</a>
- <!--
- <li><form method="get" action="http://search.gmane.org/">
- <input type="text" name="query">
- <input type="hidden" name="group" value="gmane.comp.lib.boost.build">
- <input type="submit" value="Search">
- </form> -->
- </ul>
- <li><a href="https://trac.lvk.cs.msu.su/boost.build">Bug tracker</a>
-<!-- <li>Rate Boost.Build: <a href="http://freshmeat.net/rate/38012/">Freshmeat</a> -->
- </ul>
- </p>
- </div>
-
-<!-- <h1>Boost.Build V2</h1> -->
-
-
- <h2>Overview</h2>
-
- <p>Boost.Build is an easy way to build C++ projects, everywhere. You
- name your executables and libraries and list their sources. Boost.Build
- takes care of compiling your sources with the right options, creating
- static and shared libraries, making executables, and other chores --
- whether you're using gcc, msvc, or a dozen more supported C++
- compilers -- on Windows, OSX, Linux and commercial UNIX systems.
-
- <p>Some of the most important features:
- <ul>
- <li><b>Simple and high level build description</b>. In most
- cases a name of target and list of sources is all you need.</li>
-
- <li><b>Portability</b>. The most important build properties have symbolic
- names that work everywhere. Why memorize the compiler flags necessary
- for a multi-threaded 64-bit shared library, if Boost.Build can do it for you?
-
- <li><b>Variant builds</b>. When you build the same project
- twice with different properties, all produced files are placed
- in different directories, so you can build with 2 versions of
- gcc, or both debug and release variants in one invocation.</li>
-
- <li><b>Global dependencies</b>. No matter what directory you build
- in, Boost.Build will always check all dependencies in your entire
- project, preventing inconsistent binaries. And it's easy to
- use one Boost.Build project in another, again with full dependency
- tracking.
-
- <li><b>Usage requirements</b>. A target can specify properties,
- like include paths and preprocessor defines, that are necessary to use
- it. Those properties will be automatically applied whenever the target
- is used.</li>
-
- <li><b>Standalone</b>. Boost.Build's only dependency is a C compiler,
- so it's easy to set up. You can even include all of Boost.Build in your
- project. Boost.Build does not depend on C++ Boost in any way.</li>
- </ul>
-
- <h2>Status and future</h2>
-
- <p>Boost.Build is ready to use today, and new features are being actively
- developed.
-
- <p>The current version is 2.0 Milestone 12, which added support for
- precompiled headers on gcc and added 3 new C++ compilers
- (<a href="http://svn.boost.org/svn/boost/trunk/tools/build/v2/changes.txt">full changelog</a>).
-
- <p>Milestone 13 is planned as a bugfix release. Milestone 14 will
- focus on improving user documentation. Milestone 15 will see most
- of Boost.Build reimplemented in Python, to make extending
- Boost.Build even easier for end users (see <a href="https://trac.lvk.cs.msu.su/boost.build/wiki/PythonPort">PythonPort</a>).
- The specific issues planned for each release can be found on the
- <a href="https://trac.lvk.cs.msu.su/boost.build/roadmap">roadmap</a>.
-
-
-
- <h2>Feedback and contributing</h2>
-
- <p>Should you have any questions or comments, we'd be glad to hear them.
- Post everything to the <a href="http://lists.boost.org/mailman/listinfo.cgi/boost-build">mailing list</a>.</p>
-
- <p>Bugs and feature requests can be entered at our
- <a href="https://trac.lvk.cs.msu.su/boost.build">bug tracker</a>.
-
- <p>If you'd like to help with development, just pick a bug
- in the tracker that you'd like to fix, or feel free to implement
- any feature you like. There's a separate
- <a href="hacking.txt">guidelines document</a> for working on code.</p>
- <hr>
-
- <p>&copy; Copyright David Abrahams and Vladimir Prus 2002-2007.
- Permission to copy, use, modify, sell and distribute this document is
- granted provided this copyright notice appears in all copies. This
- document is provided "as is" without express or implied warranty, and
- with no claim as to its suitability for any purpose.</p>
-
- <p>Revised
- <!--webbot bot="Timestamp" s-type="EDITED" s-format="%d %B, %Y" startspan
- -->Oct 4, 2007
- <!--webbot bot="Timestamp" endspan i-checksum="13972"
- -->
- </p>
-
- </body>
-</html>
-
diff --git a/tools/build/v2/kernel/bootstrap.jam b/tools/build/v2/kernel/bootstrap.jam
deleted file mode 100644
index 89048af922..0000000000
--- a/tools/build/v2/kernel/bootstrap.jam
+++ /dev/null
@@ -1,263 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2005, 2006 Rene Rivera
-# Copyright 2003, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# First of all, check the jam version
-
-if $(JAM_VERSION:J="") < 030112
-{
- ECHO "error: Boost.Jam version 3.1.12 or later required" ;
- EXIT ;
-}
-
-local required-rules = GLOB-RECURSIVELY HAS_NATIVE_RULE ;
-
-for local r in $(required-rules)
-{
- if ! $(r) in [ RULENAMES ]
- {
- ECHO "error: builtin rule '$(r)' is not present" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
-}
-
-local native =
- regex transform 2
- ;
-while $(native)
-{
- if ! [ HAS_NATIVE_RULE $(native[1]) :
- $(native[2]) :
- $(native[3]) ]
- {
- ECHO "error: missing native rule '$(native[1]).$(native[2])'" ;
- ECHO "error: or interface version of that rule is too low" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
- native = $(native[4-]) ;
-}
-
-# Check that the builtin .ENVIRON module is present. We don't have a
-# builtin to check that a module is present, so we assume that the PATH
-# environment variable is always set and verify that the .ENVIRON module
-# has a non-empty value for that variable.
-module .ENVIRON
-{
- local p = $(PATH) $(Path) $(path) ;
- if ! $(p)
- {
- ECHO "error: no builtin module .ENVIRON is found" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
-}
-
-# Check that @() functionality is present. Similarly to modules,
-# we don't have a way to test that directly. Instead we check that
-# $(TMPNAME) functionality is present which was added at roughly
-# the same time (more precisely it was added just before).
-{
- if ! $(TMPNAME)
- {
- ECHO "error: no @() functionality found" ;
- ECHO "error: your version of bjam is likely out of date" ;
- ECHO "error: please get a fresh version from SVN." ;
- EXIT ;
- }
-}
-
-# Make sure that the \n escape is available.
-if "\n" = "n"
-{
- if $(OS) = CYGWIN
- {
- ECHO "warning: escape sequences are not supported" ;
- ECHO "warning: this will cause major misbehaviour on cygwin" ;
- ECHO "warning: your version of bjam is likely out of date" ;
- ECHO "warning: please get a fresh version from SVN." ;
- }
-}
-
-# Bootstrap the module system. Then bring the import rule into the global module.
-#
-SEARCH on <module@>modules.jam = $(.bootstrap-file:D) ;
-module modules { include <module@>modules.jam ; }
-IMPORT modules : import : : import ;
-
-{
- # Add module subdirectories to the BOOST_BUILD_PATH, which allows
- # us to make an incremental refactoring step by moving modules to
- # the appropriate subdirectories, thereby achieving some physical
- # separation of different layers without changing all of our code
- # to specify subdirectories in import statements or use an extra
- # level of qualification on imported names.
-
- local subdirs =
- kernel # only the most-intrinsic modules: modules, errors
- util # low-level substrate: string/number handling, etc.
- build # essential elements of the build system architecture
- tools # toolsets for handling specific build jobs and targets.
- contrib # user contributed (unreviewed) modules
- . # build-system.jam lives here
- ;
- local whereami = [ NORMALIZE_PATH $(.bootstrap-file:DT) ] ;
- BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
-
- modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
-
- modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
-}
-
-# Reload the modules, to clean up things. The modules module can tolerate
-# being included twice.
-#
-import modules ;
-
-# Process option plugins first to allow them to prevent loading
-# the rest of the build system.
-#
-import option ;
-local dont-build = [ option.process ] ;
-
-# Should we skip building, i.e. loading the build system, according
-# to the options processed?
-#
-if ! $(dont-build)
-{
- if ! --python in $(ARGV)
- {
- # Allow users to override the build system file from the
- # command-line (mostly for testing)
- local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
- build-system ?= build-system ;
-
- # Use last element in case of multiple command-line options
- import $(build-system[-1]) ;
- }
- else
- {
- ECHO "Boost.Build V2 Python port (experimental)" ;
-
- # Define additional interface that is exposed to Python code. Python code will
- # also have access to select bjam builtins in the 'bjam' module, but some
- # things are easier to define outside C.
- module python_interface
- {
- rule load ( module-name : location )
- {
- USER_MODULE $(module-name) ;
- # Make all rules in the loaded module available in
- # the global namespace, so that we don't have
- # to bother specifying "right" module when calling
- # from Python.
- module $(module-name)
- {
- __name__ = $(1) ;
- include $(2) ;
- local rules = [ RULENAMES $(1) ] ;
- IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
- }
- }
-
- rule peek ( module-name ? : variables + )
- {
- module $(<)
- {
- return $($(>)) ;
- }
- }
-
- rule set-variable ( module-name : name : value * )
- {
- module $(<)
- {
- $(>) = $(3) ;
- }
- }
-
- rule set-top-level-targets ( targets * )
- {
- DEPENDS all : $(targets) ;
- }
-
- rule call-in-module ( m : rulename : * )
- {
- module $(m)
- {
- return [ $(2) $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- }
- }
-
-
- rule set-update-action ( action : targets * : sources * : properties * )
- {
- $(action) $(targets) : $(sources) : $(properties) ;
- }
-
- rule set-update-action-in-module ( m : action : targets * : sources * : properties * )
- {
- module $(m)
- {
- $(2) $(3) : $(4) : $(5) ;
- }
- }
-
- rule set-target-variable ( targets + : variable : value * : append ? )
- {
- if $(append)
- {
- $(variable) on $(targets) += $(value) ;
- }
- else
- {
- $(variable) on $(targets) = $(value) ;
- }
- }
-
- rule get-target-variable ( targets + : variable )
- {
- return [ on $(targets) return $($(variable)) ] ;
- }
-
- rule import-rules-from-parent ( parent-module : this-module : user-rules * )
- {
- IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
- EXPORT $(this-module) : $(user-rules) ;
- }
-
- rule mark-included ( targets * : includes * ) {
- NOCARE $(includes) ;
- INCLUDES $(targets) : $(includes) ;
- ISFILE $(includes) ;
- }
- }
-
- PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
- modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
-
- module PyBB
- {
- local ok = [ bootstrap $(root) ] ;
- if ! $(ok)
- {
- EXIT ;
- }
- }
-
-
- #PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
-
- #module PyBB
- #{
- # main ;
- #}
-
- }
-}
diff --git a/tools/build/v2/kernel/class.jam b/tools/build/v2/kernel/class.jam
deleted file mode 100644
index b8e55af355..0000000000
--- a/tools/build/v2/kernel/class.jam
+++ /dev/null
@@ -1,420 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2005 Rene Rivera
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Polymorphic class system built on top of core Jam facilities.
-#
-# Classes are defined by 'class' keywords::
-#
-# class myclass
-# {
-# rule __init__ ( arg1 ) # constructor
-# {
-# self.attribute = $(arg1) ;
-# }
-#
-# rule method1 ( ) # method
-# {
-# return [ method2 ] ;
-# }
-#
-# rule method2 ( ) # method
-# {
-# return $(self.attribute) ;
-# }
-# }
-#
-# The __init__ rule is the constructor, and sets member variables.
-#
-# New instances are created by invoking [ new <class> <args...> ]:
-#
-# local x = [ new myclass foo ] ; # x is a new myclass object
-# assert.result foo : [ $(x).method1 ] ; # $(x).method1 returns "foo"
-#
-# Derived classes are created by mentioning base classes in the declaration::
-#
-# class derived : myclass
-# {
-# rule __init__ ( arg )
-# {
-# myclass.__init__ $(arg) ; # call base __init__
-#
-# }
-#
-# rule method2 ( ) # method override
-# {
-# return $(self.attribute)XXX ;
-# }
-# }
-#
-# All methods operate virtually, replacing behavior in the base classes. For
-# example::
-#
-# local y = [ new derived foo ] ; # y is a new derived object
-# assert.result fooXXX : [ $(y).method1 ] ; # $(y).method1 returns "fooXXX"
-#
-# Each class instance is its own core Jam module. All instance attributes and
-# methods are accessible without additional qualification from within the class
-# instance. All rules imported in the class declaration, or visible in base classes,
-# are also visible. Base methods are available in qualified form:
-# base-name.method-name. By convention, attribute names are prefixed with
-# "self.".
-
-import modules ;
-import numbers ;
-
-
-rule xinit ( instance : class )
-{
- module $(instance)
- {
- __class__ = $(2) ;
- __name__ = $(1) ;
- }
-}
-
-
-rule new ( class args * : * )
-{
- .next-instance ?= 1 ;
- local id = object($(class))@$(.next-instance) ;
-
- xinit $(id) : $(class) ;
-
- INSTANCE $(id) : class@$(class) ;
- IMPORT_MODULE $(id) ;
- $(id).__init__ $(args) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-
- # Bump the next unique object name.
- .next-instance = [ numbers.increment $(.next-instance) ] ;
-
- # Return the name of the new instance.
- return $(id) ;
-}
-
-
-rule bases ( class )
-{
- module class@$(class)
- {
- return $(__bases__) ;
- }
-}
-
-
-rule is-derived ( class : bases + )
-{
- local stack = $(class) ;
- local visited found ;
- while ! $(found) && $(stack)
- {
- local top = $(stack[1]) ;
- stack = $(stack[2-]) ;
- if ! ( $(top) in $(visited) )
- {
- visited += $(top) ;
- stack += [ bases $(top) ] ;
-
- if $(bases) in $(visited)
- {
- found = true ;
- }
- }
- }
- return $(found) ;
-}
-
-
-# Returns true if the 'value' is a class instance.
-#
-rule is-instance ( value )
-{
- return [ MATCH "^(object\\()[^@]+\\)@.*" : $(value) ] ;
-}
-
-
-# Check if the given value is of the given type.
-#
-rule is-a (
- instance # The value to check.
- : type # The type to test for.
-)
-{
- if [ is-instance $(instance) ]
- {
- return [ class.is-derived [ modules.peek $(instance) : __class__ ] : $(type) ] ;
- }
-}
-
-
-local rule typecheck ( x )
-{
- local class-name = [ MATCH "^\\[(.*)\\]$" : [ BACKTRACE 1 ] ] ;
- if ! [ is-a $(x) : $(class-name) ]
- {
- return "Expected an instance of "$(class-name)" but got \""$(x)"\" for argument" ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
-
- # This will be the construction function for a class called 'myclass'.
- #
- class myclass
- {
- import assert ;
-
- rule __init__ ( x_ * : y_ * )
- {
- # Set some instance variables.
- x = $(x_) ;
- y = $(y_) ;
- foo += 10 ;
- }
-
- rule set-x ( newx * )
- {
- x = $(newx) ;
- }
-
- rule get-x ( )
- {
- return $(x) ;
- }
-
- rule set-y ( newy * )
- {
- y = $(newy) ;
- }
-
- rule get-y ( )
- {
- return $(y) ;
- }
-
- rule f ( )
- {
- return [ g $(x) ] ;
- }
-
- rule g ( args * )
- {
- if $(x) in $(y)
- {
- return $(x) ;
- }
- else if $(y) in $(x)
- {
- return $(y) ;
- }
- else
- {
- return ;
- }
- }
-
- rule get-class ( )
- {
- return $(__class__) ;
- }
-
- rule get-instance ( )
- {
- return $(__name__) ;
- }
-
- rule invariant ( )
- {
- assert.equal 1 : 1 ;
- }
-
- rule get-foo ( )
- {
- return $(foo) ;
- }
- }
-# class myclass ;
-
- class derived1 : myclass
- {
- rule __init__ ( z_ )
- {
- myclass.__init__ $(z_) : X ;
- z = $(z_) ;
- }
-
- # Override g.
- #
- rule g ( args * )
- {
- return derived1.g ;
- }
-
- rule h ( )
- {
- return derived1.h ;
- }
-
- rule get-z ( )
- {
- return $(z) ;
- }
-
- # Check that 'assert.equal' visible in base class is visible here.
- #
- rule invariant2 ( )
- {
- assert.equal 2 : 2 ;
- }
-
- # Check that 'assert.variable-not-empty' visible in base class is
- # visible here.
- #
- rule invariant3 ( )
- {
- local v = 10 ;
- assert.variable-not-empty v ;
- }
- }
-# class derived1 : myclass ;
-
- class derived2 : myclass
- {
- rule __init__ ( )
- {
- myclass.__init__ 1 : 2 ;
- }
-
- # Override g.
- #
- rule g ( args * )
- {
- return derived2.g ;
- }
-
- # Test the ability to call base class functions with qualification.
- #
- rule get-x ( )
- {
- return [ myclass.get-x ] ;
- }
- }
-# class derived2 : myclass ;
-
- class derived2a : derived2
- {
- rule __init__
- {
- derived2.__init__ ;
- }
- }
-# class derived2a : derived2 ;
-
- local rule expect_derived2 ( [derived2] x ) { }
-
- local a = [ new myclass 3 4 5 : 4 5 ] ;
- local b = [ new derived1 4 ] ;
- local b2 = [ new derived1 4 ] ;
- local c = [ new derived2 ] ;
- local d = [ new derived2 ] ;
- local e = [ new derived2a ] ;
-
- expect_derived2 $(d) ;
- expect_derived2 $(e) ;
-
- # Argument checking is set up to call exit(1) directly on failure, and we
- # can not hijack that with try, so we should better not do this test by
- # default. We could fix this by having errors look up and invoke the EXIT
- # rule instead; EXIT can be hijacked (;-)
- if --fail-typecheck in [ modules.peek : ARGV ]
- {
- try ;
- {
- expect_derived2 $(a) ;
- }
- catch
- "Expected an instance of derived2 but got" instead
- ;
- }
-
- #try ;
- #{
- # new bad_subclass ;
- #}
- #catch
- # bad_subclass.bad_subclass failed to call base class constructor myclass.__init__
- # ;
-
- #try ;
- #{
- # class bad_subclass ;
- #}
- #catch bad_subclass has already been declared ;
-
- assert.result 3 4 5 : $(a).get-x ;
- assert.result 4 5 : $(a).get-y ;
- assert.result 4 : $(b).get-x ;
- assert.result X : $(b).get-y ;
- assert.result 4 : $(b).get-z ;
- assert.result 1 : $(c).get-x ;
- assert.result 2 : $(c).get-y ;
- assert.result 4 5 : $(a).f ;
- assert.result derived1.g : $(b).f ;
- assert.result derived2.g : $(c).f ;
- assert.result derived2.g : $(d).f ;
-
- assert.result 10 : $(b).get-foo ;
-
- $(a).invariant ;
- $(b).invariant2 ;
- $(b).invariant3 ;
-
- # Check that the __class__ attribute is getting properly set.
- assert.result myclass : $(a).get-class ;
- assert.result derived1 : $(b).get-class ;
- assert.result $(a) : $(a).get-instance ;
-
- $(a).set-x a.x ;
- $(b).set-x b.x ;
- $(c).set-x c.x ;
- $(d).set-x d.x ;
- assert.result a.x : $(a).get-x ;
- assert.result b.x : $(b).get-x ;
- assert.result c.x : $(c).get-x ;
- assert.result d.x : $(d).get-x ;
-
- class derived3 : derived1 derived2
- {
- rule __init__ ( )
- {
- }
- }
-
- assert.result : bases myclass ;
- assert.result myclass : bases derived1 ;
- assert.result myclass : bases derived2 ;
- assert.result derived1 derived2 : bases derived3 ;
-
- assert.true is-derived derived1 : myclass ;
- assert.true is-derived derived2 : myclass ;
- assert.true is-derived derived3 : derived1 ;
- assert.true is-derived derived3 : derived2 ;
- assert.true is-derived derived3 : derived1 derived2 myclass ;
- assert.true is-derived derived3 : myclass ;
-
- assert.false is-derived myclass : derived1 ;
-
- assert.true is-instance $(a) ;
- assert.false is-instance bar ;
-
- assert.true is-a $(a) : myclass ;
- assert.true is-a $(c) : derived2 ;
- assert.true is-a $(d) : myclass ;
- assert.false is-a literal : myclass ;
-}
diff --git a/tools/build/v2/kernel/errors.jam b/tools/build/v2/kernel/errors.jam
deleted file mode 100644
index 63b11e8674..0000000000
--- a/tools/build/v2/kernel/errors.jam
+++ /dev/null
@@ -1,274 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Print a stack backtrace leading to this rule's caller. Each argument
-# represents a line of output to be printed after the first line of the
-# backtrace.
-#
-rule backtrace ( skip-frames prefix messages * : * )
-{
- local frame-skips = 5 9 13 17 21 25 29 33 37 41 45 49 53 57 61 65 69 73 77 81 ;
- local drop-elements = $(frame-skips[$(skip-frames)]) ;
- if ! ( $(skip-frames) in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 )
- {
- ECHO "warning: backtrace doesn't support skipping $(skip-frames) frames;"
- "using 1 instead." ;
- drop-elements = 5 ;
- }
-
- local args = $(.args) ;
- if $(.user-modules-only)
- {
- local bt = [ nearest-user-location ] ;
- ECHO "$(prefix) at $(bt) " ;
- for local n in $(args)
- {
- if $($(n))-is-not-empty
- {
- ECHO $(prefix) $($(n)) ;
- }
- }
- }
- else
- {
- # Get the whole backtrace, then drop the initial quadruples
- # corresponding to the frames that must be skipped.
- local bt = [ BACKTRACE ] ;
- bt = $(bt[$(drop-elements)-]) ;
-
- while $(bt)
- {
- local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
- ECHO $(bt[1]):$(bt[2]): "in" $(bt[4]) "from module" $(m) ;
-
- # The first time through, print each argument on a separate line.
- for local n in $(args)
- {
- if $($(n))-is-not-empty
- {
- ECHO $(prefix) $($(n)) ;
- }
- }
- args = ; # Kill args so that this never happens again.
-
- # Move on to the next quadruple.
- bt = $(bt[5-]) ;
- }
- }
-}
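-
-# Editorial note, not part of the original module: the BACKTRACE builtin used
-# above returns the call stack as quadruples of the form
-#
-#   <file> <line> <module-name-with-trailing-dot> <rule-name>
-#
-# so the loop above prints one "file:line: in <rule> from module <module>"
-# line per frame. The frame layout shown here is inferred from the code above
-# and is illustrative only.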
-
-.args ?= messages 2 3 4 5 6 7 8 9 ;
-.disabled ?= ;
-.last-error-$(.args) ?= ;
-
-
-# try-catch --
-#
-# This is not really an exception-handling mechanism, but it does allow us to
-# perform some error-checking on our error-checking. Errors are suppressed after
-# a try, and the first one is recorded. Use catch to check that the error
-# message matched expectations.
-
-# Begin looking for error messages.
-#
-rule try ( )
-{
- .disabled += true ;
- .last-error-$(.args) = ;
-}
-
-
-# Stop looking for error messages; generate an error if an argument of messages
-# is not found in the corresponding argument in the error call.
-#
-rule catch ( messages * : * )
-{
- .disabled = $(.disabled[2-]) ; # Pop the stack.
-
- import sequence ;
-
- if ! $(.last-error-$(.args))-is-not-empty
- {
- error-skip-frames 3 expected an error, but none occurred ;
- }
- else
- {
- for local n in $(.args)
- {
- if ! $($(n)) in $(.last-error-$(n))
- {
- local v = [ sequence.join $($(n)) : " " ] ;
- v ?= "" ;
- local joined = [ sequence.join $(.last-error-$(n)) : " " ] ;
-
- .last-error-$(.args) = ;
- error-skip-frames 3 expected \"$(v)\" in argument $(n) of error
- : got \"$(joined)\" instead ;
- }
- }
- }
-}
-
-
-rule error-skip-frames ( skip-frames messages * : * )
-{
- if ! $(.disabled)
- {
- backtrace $(skip-frames) error: $(messages) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- EXIT ;
- }
- else if ! $(.last-error-$(.args))
- {
- for local n in $(.args)
- {
- # Add an extra empty string so that we always have
- # something in the event of an error
- .last-error-$(n) = $($(n)) "" ;
- }
- }
-}
-
-if --no-error-backtrace in [ modules.peek : ARGV ]
-{
- .no-error-backtrace = true ;
-}
-
-
-# Print an error message with a stack backtrace and exit.
-#
-rule error ( messages * : * )
-{
- if $(.no-error-backtrace)
- {
- # Print each argument on a separate line.
- for local n in $(.args)
- {
- if $($(n))-is-not-empty
- {
- if ! $(first-printed)
- {
- ECHO error: $($(n)) ;
- first-printed = true ;
- }
- else
- {
- ECHO $($(n)) ;
- }
- }
- }
- EXIT ;
- }
- else
- {
- error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-}
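-
-# Illustrative usage (not part of the original file); each colon-separated
-# argument group becomes one line of the error report:
-#
-#   import errors ;
-#   errors.error "no toolset configured" : "while loading project foo" ;
-#
-# The message text above is made up for the example.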
-
-
-# Same as 'error', but the generated backtrace will include only user files.
-#
-rule user-error ( messages * : * )
-{
- .user-modules-only = 1 ;
- error-skip-frames 3 $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-}
-
-
-# Print a warning message with a stack backtrace; unlike 'error', this does
-# not exit.
-#
-rule warning
-{
- backtrace 2 warning: $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
-}
-
-
-# Convert an arbitrary argument list into a list with ":" separators and quoted
-# elements representing the same information. This is mostly useful for
-# formatting descriptions of arguments with which a rule was called when
-# reporting an error.
-#
-rule lol->list ( * )
-{
- local result ;
- local remaining = 1 2 3 4 5 6 7 8 9 ;
- while $($(remaining))
- {
- local n = $(remaining[1]) ;
- remaining = $(remaining[2-]) ;
-
- if $(n) != 1
- {
- result += ":" ;
- }
- result += \"$($(n))\" ;
- }
- return $(result) ;
-}
-
-
-# Return the file:line for the nearest entry in the backtrace which
-# corresponds to a user module.
-#
-rule nearest-user-location ( )
-{
- local bt = [ BACKTRACE ] ;
-
- local result ;
- while $(bt) && ! $(result)
- {
- local m = [ MATCH ^(.+)\\.$ : $(bt[3]) ] ;
-        local user-modules = ([Jj]amroot(.jam|.v2|)|[Jj]amfile(.jam|.v2|)|user-config.jam|site-config.jam|project-root.jam) ;
-
- if [ MATCH $(user-modules) : $(bt[1]:D=) ]
- {
- result = $(bt[1]):$(bt[2]) ;
- }
- bt = $(bt[5-]) ;
- }
- return $(result) ;
-}
-
-
-# If the optimized rule is available in Jam, use it.
-if NEAREST_USER_LOCATION in [ RULENAMES ]
-{
- rule nearest-user-location ( )
- {
- local r = [ NEAREST_USER_LOCATION ] ;
- return $(r[1]):$(r[2]) ;
- }
-}
-
-
-rule __test__ ( )
-{
- # Show that we can correctly catch an expected error.
- try ;
- {
- error an error occurred : somewhere ;
- }
- catch an error occurred : somewhere ;
-
- # Show that unexpected errors generate real errors.
- try ;
- {
- try ;
- {
- error an error occurred : somewhere ;
- }
- catch an error occurred : nowhere ;
- }
- catch expected \"nowhere\" in argument 2 ;
-
- # Show that not catching an error where one was expected is an error.
- try ;
- {
- try ;
- {
- }
- catch ;
- }
- catch expected an error, but none occurred ;
-}
diff --git a/tools/build/v2/kernel/modules.jam b/tools/build/v2/kernel/modules.jam
deleted file mode 100644
index 1f75354fc2..0000000000
--- a/tools/build/v2/kernel/modules.jam
+++ /dev/null
@@ -1,354 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Essentially an include guard; ensures that no module is loaded multiple times.
-.loaded ?= ;
-
-# A list of modules currently being loaded for error reporting of circular
-# dependencies.
-.loading ?= ;
-
-# A list of modules needing to be tested using their __test__ rule.
-.untested ?= ;
-
-# A list of modules which have been tested using their __test__ rule.
-.tested ?= ;
-
-
-# Runs internal Boost Build unit tests for the specified module. The module's
-# __test__ rule is executed in its own module to eliminate any inadvertent
-# effects of testing module dependencies (such as assert) on the module itself.
-#
-local rule run-module-test ( m )
-{
- local tested-modules = [ modules.peek modules : .tested ] ;
-
- if ( ! $(m) in $(tested-modules) ) # Avoid recursive test invocations.
- && ( ( --debug in $(argv) ) || ( --debug-module=$(m) in $(argv) ) )
- {
- modules.poke modules : .tested : $(tested-modules) $(m) ;
-
- if ! ( __test__ in [ RULENAMES $(m) ] )
- {
- local argv = [ peek : ARGV ] ;
- if ! ( --quiet in $(argv) ) && ( --debug-tests in $(argv) )
- {
- ECHO warning: no __test__ rule defined in module $(m) ;
- }
- }
- else
- {
- if ! ( --quiet in $(argv) )
- {
- ECHO testing module $(m)... ;
- }
-
- local test-module = __test-$(m)__ ;
- IMPORT $(m) : [ RULENAMES $(m) ] : $(test-module) : [ RULENAMES $(m) ] ;
- IMPORT $(m) : __test__ : $(test-module) : __test__ : LOCALIZE ;
- module $(test-module)
- {
- __test__ ;
- }
- }
- }
-}
-
-
-# Return the binding of the given module.
-#
-rule binding ( module )
-{
- return $($(module).__binding__) ;
-}
-
-
-# Sets the module-local value of a variable. This is the most reliable way to
-# set a module-local variable in a different module; it eliminates issues of
-# name shadowing due to dynamic scoping.
-#
-rule poke ( module-name ? : variables + : value * )
-{
- module $(<)
- {
- $(>) = $(3) ;
- }
-}
-
-
-# Returns the module-local value of a variable. This is the most reliable way to
-# examine a module-local variable in a different module; it eliminates issues of
-# name shadowing due to dynamic scoping.
-#
-rule peek ( module-name ? : variables + )
-{
- module $(<)
- {
- return $($(>)) ;
- }
-}
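-
-# Illustrative usage of poke/peek (not part of the original file); the module
-# and variable names below are made up:
-#
-#   modules.poke my-module : MY-FLAGS : <optimization>speed ;
-#   local flags = [ modules.peek my-module : MY-FLAGS ] ;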
-
-
-# Call the given rule locally in the given module. Use this for rules accepting
-# rule names as arguments, so that the passed rule may be invoked in the context
-# of the rule's caller (for example, if the rule accesses module globals or is a
-# local rule). Note that rules called this way may accept at most 8 parameters.
-#
-rule call-in ( module-name ? : rule-name args * : * )
-{
- module $(module-name)
- {
- return [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- }
-}
-
-
-# Given a possibly qualified rule name and arguments, remove any initial module
-# qualification from the rule and invoke it in that module. If there is no
-# module qualification, the rule is invoked in the global module. Note that
-# rules called this way may accept at most 8 parameters.
-#
-rule call-locally ( qualified-rule-name args * : * )
-{
- local module-rule = [ MATCH (.*)\\.(.*) : $(qualified-rule-name) ] ;
- local rule-name = $(module-rule[2]) ;
- rule-name ?= $(qualified-rule-name) ;
- # We pass only 8 parameters here since Boost Jam allows at most 9 rule
- # parameter positions and the call-in rule already uses up the initial
- # position for the module name.
- return [ call-in $(module-rule[1]) : $(rule-name) $(args) : $(2) : $(3) :
- $(4) : $(5) : $(6) : $(7) : $(8) ] ;
-}
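-
-# Illustrative usage (not part of the original file): invoking a qualified
-# rule name in its defining module, here using the standard 'path' module as
-# an example, might look like
-#
-#   local jamfiles = [ modules.call-locally path.glob . : *.jam ] ;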
-
-
-# Load the indicated module if it is not already loaded.
-#
-rule load (
- module-name # Name of module to load. Rules will be defined in this
- # module.
- : filename ? # (partial) path to file; Defaults to $(module-name).jam.
- : search * # Directories in which to search for filename. Defaults to
- # $(BOOST_BUILD_PATH).
-)
-{
- # Avoid loading modules twice.
- if ! ( $(module-name) in $(.loaded) )
- {
- filename ?= $(module-name).jam ;
-
- # Mark the module loaded so we do not try to load it recursively.
- .loaded += $(module-name) ;
-
- # Suppress tests if any module loads are already in progress.
- local suppress-test = $(.loading[1]) ;
-
- # Push this module on the loading stack.
- .loading += $(module-name) ;
-
- # Remember that it is untested.
- .untested += $(module-name) ;
-
- # Insert the new module's __name__ and __file__ globals.
- poke $(module-name) : __name__ : $(module-name) ;
- poke $(module-name) : __file__ : $(filename) ;
-
- module $(module-name)
- {
- # Add some grist so that the module will have a unique target name.
- local module-target = $(__file__:G=module@) ;
-
- local search = $(3) ;
- search ?= [ modules.peek : BOOST_BUILD_PATH ] ;
- SEARCH on $(module-target) = $(search) ;
- BINDRULE on $(module-target) = modules.record-binding ;
-
- include $(module-target) ;
-
- # Allow the module to see its own names with full qualification.
- local rules = [ RULENAMES $(__name__) ] ;
- IMPORT $(__name__) : $(rules) : $(__name__) : $(__name__).$(rules) ;
- }
-
- if $(module-name) != modules && ! [ binding $(module-name) ]
- {
- import errors ;
- errors.error "Could not find module" $(module-name) in $(search) ;
- }
-
- # Pop the loading stack. Must happen before testing or we will run into
- # a circular loading dependency.
- .loading = $(.loading[1--2]) ;
-
- # Run any pending tests if this is an outer load.
- if ! $(suppress-test)
- {
- local argv = [ peek : ARGV ] ;
- for local m in $(.untested)
- {
- run-module-test $(m) ;
- }
- .untested = ;
- }
- }
- else if $(module-name) in $(.loading)
- {
- import errors ;
- errors.error loading \"$(module-name)\"
- : circular module loading dependency:
- : $(.loading)" ->" $(module-name) ;
- }
-}
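-
-# Illustrative usage (not part of the original file); the module name and
-# search path below are made up:
-#
-#   modules.load my-extensions : : /path/to/extra/jam/modules ;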
-
-
-# This helper is used by load (above) to record the binding (path) of each
-# loaded module.
-#
-rule record-binding ( module-target : binding )
-{
- $(.loading[-1]).__binding__ = $(binding) ;
-}
-
-
-# Transform each path in the list, converting all backslashes to forward
-# slashes and removing all detectable redundancy. Something like this is
-# probably needed in path.jam, but I am not sure of that, do not fully
-# understand it, and am not ready to move all of path.jam into the kernel.
-#
-local rule normalize-raw-paths ( paths * )
-{
- local result ;
- for p in $(paths:T)
- {
- result += [ NORMALIZE_PATH $(p) ] ;
- }
- return $(result) ;
-}
-
-
-.cwd = [ PWD ] ;
-
-
-# Load the indicated module and import rule names into the current module. Any
-# members of rules-opt will be available without qualification in the caller's
-# module. Any members of rename-opt will be taken as the names of the rules in
-# the caller's module, in place of the names they have in the imported module.
-# If rules-opt = '*', all rules from the indicated module are imported into the
-# caller's module. If rename-opt is supplied, it must have the same number of
-# elements as rules-opt.
-#
-rule import ( module-names + : rules-opt * : rename-opt * )
-{
- if ( $(rules-opt) = * || ! $(rules-opt) ) && $(rename-opt)
- {
- import errors ;
- errors.error "Rule aliasing is only available for explicit imports." ;
- }
-
- if $(module-names[2]) && ( $(rules-opt) || $(rename-opt) )
- {
- import errors ;
- errors.error "When loading multiple modules, no specific rules or"
- "renaming is allowed" ;
- }
-
- local caller = [ CALLER_MODULE ] ;
-
- # Import each specified module
- for local m in $(module-names)
- {
- if ! $(m) in $(.loaded)
- {
- # If the importing module isn't already in the BOOST_BUILD_PATH,
- # prepend it to the path. We don't want to invert the search order
- # of modules that are already there.
-
- local caller-location ;
- if $(caller)
- {
- caller-location = [ binding $(caller) ] ;
- caller-location = $(caller-location:D) ;
- caller-location = [ normalize-raw-paths $(caller-location:R=$(.cwd)) ] ;
- }
-
- local search = [ peek : BOOST_BUILD_PATH ] ;
- search = [ normalize-raw-paths $(search:R=$(.cwd)) ] ;
-
- if $(caller-location) && ! $(caller-location) in $(search)
- {
- search = $(caller-location) $(search) ;
- }
-
- load $(m) : : $(search) ;
- }
-
- IMPORT_MODULE $(m) : $(caller) ;
-
- if $(rules-opt)
- {
- local source-names ;
- if $(rules-opt) = *
- {
- local all-rules = [ RULENAMES $(m) ] ;
- source-names = $(all-rules) ;
- }
- else
- {
- source-names = $(rules-opt) ;
- }
- local target-names = $(rename-opt) ;
- target-names ?= $(source-names) ;
- IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ;
- }
- }
-}
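-
-# Illustrative usage of the renaming form (not part of the original file):
-#
-#   import path : native make : native-path make-path ;
-#
-# which would make path.native and path.make callable locally as 'native-path'
-# and 'make-path'.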
-
-
-# Define exported copies in $(target-module) of all rules exported from
-# $(source-module). Also make them available in the global module with
-# qualification, so that it is just as though the rules were defined originally
-# in $(target-module).
-#
-rule clone-rules ( source-module target-module )
-{
- local rules = [ RULENAMES $(source-module) ] ;
-
- IMPORT $(source-module) : $(rules) : $(target-module) : $(rules) : LOCALIZE ;
- EXPORT $(target-module) : $(rules) ;
- IMPORT $(target-module) : $(rules) : : $(target-module).$(rules) ;
-}
-
-
-# These rules need to be available in all modules to implement module loading
-# itself and other fundamental operations.
-local globalize = peek poke record-binding ;
-IMPORT modules : $(globalize) : : modules.$(globalize) ;
-
-
-rule __test__ ( )
-{
- import assert ;
- import modules : normalize-raw-paths ;
-
- module modules.__test__
- {
- foo = bar ;
- }
-
- assert.result bar : peek modules.__test__ : foo ;
-
- poke modules.__test__ : foo : bar baz ;
- assert.result bar baz : peek modules.__test__ : foo ;
-
- assert.result c:/foo/bar : normalize-raw-paths c:/x/../foo/./xx/yy/../../bar ;
- assert.result . : normalize-raw-paths . ;
- assert.result .. : normalize-raw-paths .. ;
- assert.result ../.. : normalize-raw-paths ../.. ;
- assert.result .. : normalize-raw-paths ./.. ;
- assert.result / / : normalize-raw-paths / \\ ;
- assert.result a : normalize-raw-paths a ;
- assert.result a : normalize-raw-paths a/ ;
- assert.result /a : normalize-raw-paths /a/ ;
- assert.result / : normalize-raw-paths /a/.. ;
-}
diff --git a/tools/build/v2/notes/README.txt b/tools/build/v2/notes/README.txt
deleted file mode 100644
index c2e6ff6298..0000000000
--- a/tools/build/v2/notes/README.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-Copyright 2005 Vladimir Prus
-Distributed under the Boost Software License, Version 1.0.
-(See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-This directory contains various development notes. Some of them
-may eventually find their way into the documentation, while others
-are purely implementation comments.
diff --git a/tools/build/v2/options/help.jam b/tools/build/v2/options/help.jam
deleted file mode 100644
index b507e1edd6..0000000000
--- a/tools/build/v2/options/help.jam
+++ /dev/null
@@ -1,212 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2006 Rene Rivera
-# Copyright 2003, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module is the plug-in handler for the --help and --help-.*
-# command-line options
-import modules ;
-import assert ;
-import doc : do-scan set-option set-output set-output-file print-help-usage print-help-top ;
-import sequence ;
-import set ;
-import project ;
-import print ;
-import os ;
-import version ;
-import path ;
-
-# A list of names that look like possible modules but really are not modules.
-#
-.not-modules =
- boost-build bootstrap site-config test user-config
- -tools allyourbase boost-base features python stlport testing unit-tests ;
-
-# The help system options are parsed here and handed off to the doc
-# module to translate into documentation requests and actions. The
-# understood options are:
-#
-# --help-disable-<option>
-# --help-doc-options
-# --help-enable-<option>
-# --help-internal
-# --help-options
-# --help-usage
-# --help-output <type>
-# --help-output-file <file>
-# --help [<module-or-class>]
-#
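-# For illustration only (this comment is not part of the original file),
-# typical invocations would be:
-#
-#   bjam --help              # overall help
-#   bjam --help path         # help for the 'path' module
-#   bjam --help path.glob    # help for the 'glob' rule in the 'path' module
-#   bjam --help-options      # summary of the options listed above
-#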
-rule process (
- command # The option.
- : values * # The values, starting after the "=".
- )
-{
- assert.result --help : MATCH ^(--help).* : $(command) ;
- local did-help = ;
- switch $(command)
- {
- case --help-internal :
- local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
- path-to-modules ?= . ;
- local possible-modules = [ GLOB $(path-to-modules) : *\\.jam ] ;
- local not-modules = [ GLOB $(path-to-modules) : *$(.not-modules)\\.jam ] ;
- local modules-to-list =
- [ sequence.insertion-sort
- [ set.difference $(possible-modules:D=:S=) : $(not-modules:D=:S=) ] ] ;
- local modules-to-scan ;
- for local m in $(modules-to-list)
- {
- local module-files = [ GLOB $(path-to-modules) : $(m)\\.jam ] ;
- modules-to-scan += $(module-files[1]) ;
- }
- do-scan $(modules-to-scan) : print-help-all ;
- did-help = true ;
-
- case --help-enable-* :
- local option = [ MATCH --help-enable-(.*) : $(command) ] ; option = $(option:L) ;
- set-option $(option) : enabled ;
- did-help = true ;
-
- case --help-disable-* :
- local option = [ MATCH --help-disable-(.*) : $(command) ] ; option = $(option:L) ;
- set-option $(option) ;
- did-help = true ;
-
- case --help-output :
- set-output $(values[1]) ;
- did-help = true ;
-
- case --help-output-file :
- set-output-file $(values[1]) ;
- did-help = true ;
-
- case --help-doc-options :
- local doc-module-spec = [ split-symbol doc ] ;
- do-scan $(doc-module-spec[1]) : print-help-options ;
- did-help = true ;
-
- case --help-options :
- print-help-usage ;
- did-help = true ;
-
- case --help :
- local spec = $(values[1]) ;
- if $(spec)
- {
- local spec-parts = [ split-symbol $(spec) ] ;
- if $(spec-parts)
- {
- if $(spec-parts[2])
- {
- do-scan $(spec-parts[1]) : print-help-classes $(spec-parts[2]) ;
- do-scan $(spec-parts[1]) : print-help-rules $(spec-parts[2]) ;
- do-scan $(spec-parts[1]) : print-help-variables $(spec-parts[2]) ;
- }
- else
- {
- do-scan $(spec-parts[1]) : print-help-module ;
- }
- }
- else
- {
- EXIT "Unrecognized help option '"$(command)" "$(spec)"'." ;
- }
- }
- else
- {
- version.print ;
- ECHO ;
- # First print documentation from the current Jamfile, if any.
- # FIXME: Generally, this duplication of project.jam logic is bad.
- local names = [ modules.peek project : JAMROOT ]
- [ modules.peek project : JAMFILE ] ;
- local project-file = [ path.glob . : $(names) ] ;
- if ! $(project-file)
- {
- project-file = [ path.glob-in-parents . : $(names) ] ;
- }
-
- for local p in $(project-file)
- {
- do-scan $(p) : print-help-project $(p) ;
- }
-
- # Next any user-config help.
- local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
- local user-config = [ GLOB $(user-path) : user-config.jam ] ;
- if $(user-config)
- {
- do-scan $(user-config[1]) : print-help-config user $(user-config[1]) ;
- }
-
- # Next any site-config help.
- local site-config = [ GLOB $(user-path) : site-config.jam ] ;
- if $(site-config)
- {
- do-scan $(site-config[1]) : print-help-config site $(site-config[1]) ;
- }
-
- # Then the overall help.
- print-help-top ;
- }
- did-help = true ;
- }
- if $(did-help)
- {
- UPDATE all ;
- NOCARE all ;
- }
- return $(did-help) ;
-}
-
-# Split a reference to a symbol into module and symbol parts.
-#
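-# For illustration only (not part of the original file): given "path.glob"
-# this rule returns the full path to path.jam followed by "glob"; given just
-# "path" it returns only the module path; a name that resolves to no module
-# file yields an empty result.
-#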
-local rule split-symbol (
- symbol # The symbol to split.
- )
-{
- local path-to-modules = [ modules.peek : BOOST_BUILD_PATH ] ;
- path-to-modules ?= . ;
- local module-name = $(symbol) ;
- local symbol-name = ;
- local result = ;
- while ! $(result)
- {
- local module-path = [ GLOB $(path-to-modules) : $(module-name)\\.jam ] ;
- if $(module-path)
- {
-            # 'module-name' in fact refers to a module. Return the full module
-            # path and the symbol within it. If the 'symbol' passed to this
-            # rule is already a module, 'symbol-name' will be empty; otherwise
-            # it was initialized on a previous loop iteration. In case there
-            # are several modules by this name, use the first one.
- result = $(module-path[1]) $(symbol-name) ;
- }
- else
- {
- if ! $(module-name:S)
- {
- result = - ;
- }
- else
- {
- local next-symbol-part = [ MATCH ^.(.*) : $(module-name:S) ] ;
- if $(symbol-name)
- {
- symbol-name = $(next-symbol-part).$(symbol-name) ;
- }
- else
- {
- symbol-name = $(next-symbol-part) ;
- }
- module-name = $(module-name:B) ;
- }
- }
- }
- if $(result) != -
- {
- return $(result) ;
- }
-}
diff --git a/tools/build/v2/roll.sh b/tools/build/v2/roll.sh
deleted file mode 100755
index 4a8f52ec4b..0000000000
--- a/tools/build/v2/roll.sh
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/bin/bash
-
-# Copyright 2004 Aleksey Gurtovoy
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-set -e
-
-# Capture the version
-revision=`svnversion .`
-echo "SVN Revision $revision" >> timestamp.txt
-date >> timestamp.txt
-
-# This one is not fully finished
-rm -rf example/versioned
-
-# Remove unnecessary top-level files
-find . -maxdepth 1 -type f | egrep -v "boost-build.jam|timestamp.txt|roll.sh|bootstrap.jam|build-system.jam|boost_build.png|index.html|hacking.txt|site-config.jam|user-config.jam|bootstrap.sh|bootstrap.bat|Jamroot.jam" | xargs rm -f
-
-# Build the documentation
-touch doc/jamroot.jam
-export BOOST_BUILD_PATH=`pwd`
-./bootstrap.sh
-cd doc
-../bjam --v2
-../bjam --v2 pdf
-cp `find bin -name "*.pdf"` ../..
-mv ../../standalone.pdf ../../userman.pdf
-cp ../../userman.pdf .
-rm -rf bin
-cd ..
-rm bjam
-
-# Get the boost logo.
-wget http://boost.sf.net/boost-build2/boost.png
-
-# Adjust the links, so they work with the standalone package
-perl -pi -e 's%../../../boost.png%boost.png%' index.html
-perl -pi -e 's%../../../doc/html/bbv2.html%doc/html/index.html%' index.html
-perl -pi -e 's%../../../doc/html/bbv2.installation.html%doc/html/bbv2.installation.html%' index.html
-
-# Make packages
-find . -name ".svn" | xargs rm -rf
-rm roll.sh
-chmod a+x engine/build.bat
-cd .. && zip -r boost-build.zip boost-build && tar --bzip2 -cf boost-build.tar.bz2 boost-build
-# Copy packages to a location where they are grabbed for beta.boost.org
-cp userman.pdf boost-build.zip boost-build.tar.bz2 ~/public_html/boost_build_nightly
-cd boost-build
-
-chmod -R u+w *
-# Upload docs to sourceforge
-x=`cat <<EOF
-<script src="http://www.google-analytics.com/urchin.js" type="text/javascript">
-</script>
-<script type="text/javascript">
-_uacct = "UA-2917240-2";
-urchinTracker();
-</script>
-EOF`
-echo $x
-perl -pi -e "s|</body>|$x</body>|" index.html `find doc -name '*.html'`
-scp -r doc example boost_build.png *.html hacking.txt vladimir_prus,boost@web.sourceforge.net:/home/groups/b/bo/boost/htdocs/boost-build2
-scp ../userman.pdf vladimir_prus,boost@web.sourceforge.net:/home/groups/b/bo/boost/htdocs/boost-build2/doc
diff --git a/tools/build/v2/test/BoostBuild.py b/tools/build/v2/test/BoostBuild.py
deleted file mode 100644
index 9ad9202877..0000000000
--- a/tools/build/v2/test/BoostBuild.py
+++ /dev/null
@@ -1,949 +0,0 @@
-# Copyright 2002-2005 Vladimir Prus.
-# Copyright 2002-2003 Dave Abrahams.
-# Copyright 2006 Rene Rivera.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import TestCmd
-import copy
-import fnmatch
-import glob
-import math
-import os
-import re
-import shutil
-import string
-import StringIO
-import sys
-import tempfile
-import time
-import traceback
-import tree
-import types
-
-from xml.sax.saxutils import escape
-
-
-annotations = []
-
-
-def print_annotation(name, value, xml):
-    """Writes some named bits of information about the test run.
- """
- if xml:
- print escape(name) + " {{{"
- print escape(value)
- print "}}}"
- else:
- print name + " {{{"
- print value
- print "}}}"
-
-def flush_annotations(xml=0):
- global annotations
- for ann in annotations:
- print_annotation(ann[0], ann[1], xml)
- annotations = []
-
-def clear_annotations():
- global annotations
- annotations = []
-
-defer_annotations = 0
-
-
-def set_defer_annotations(n):
- global defer_annotations
- defer_annotations = n
-
-
-def annotation(name, value):
- """Records an annotation about the test run.
- """
- annotations.append((name, value))
- if not defer_annotations:
- flush_annotations()
-
-
-def get_toolset():
- toolset = None;
- for arg in sys.argv[1:]:
- if not arg.startswith('-'):
- toolset = arg
- return toolset or 'gcc'
-
-
-# Detect the host OS.
-windows = False
-cygwin = False
-if os.environ.get('OS', '').lower().startswith('windows'):
- windows = True
-
-if os.__dict__.has_key('uname') and \
- os.uname()[0].lower().startswith('cygwin'):
- windows = True
- cygwin = True
-
-suffixes = {}
-
-
-# Configuration stating whether Boost Build is expected to automatically prepend
-# prefixes to built library targets.
-lib_prefix = "lib"
-dll_prefix = "lib"
-
-# Prepare the map of suffixes
-def prepare_suffix_map(toolset):
- global windows
- global suffixes
- global cygwin
- global lib_prefix
- global dll_prefix
- suffixes = {'.exe': '', '.dll': '.so', '.lib': '.a', '.obj': '.o'}
- suffixes['.implib'] = '.no_implib_files_on_this_platform'
- if windows:
- suffixes = {}
- if toolset in ["gcc"]:
- suffixes['.lib'] = '.a' # static libs have '.a' suffix with mingw...
- suffixes['.obj'] = '.o'
- if cygwin:
- suffixes['.implib'] = '.lib.a'
- else:
- suffixes['.implib'] = '.lib'
- if os.__dict__.has_key('uname') and (os.uname()[0] == 'Darwin'):
- suffixes['.dll'] = '.dylib'
-
- lib_prefix = "lib"
- dll_prefix = "lib"
- if cygwin:
- dll_prefix = "cyg"
- elif windows and not toolset in ["gcc"]:
- dll_prefix = None
-
-
-def re_remove(sequence, regex):
- me = re.compile(regex)
- result = filter(lambda x: me.match(x), sequence)
- if 0 == len(result):
- raise ValueError()
- for r in result:
- sequence.remove(r)
-
-
-def glob_remove(sequence, pattern):
- result = fnmatch.filter(sequence, pattern)
- if 0 == len(result):
- raise ValueError()
- for r in result:
- sequence.remove(r)
-
-
-
-#
-# FIXME: this is copy-pasted from TestSCons.py
-# Should be moved to TestCmd.py?
-#
-if os.name == 'posix':
- def _failed(self, status=0):
- if self.status is None:
- return None
- return _status(self) != status
- def _status(self):
- if os.WIFEXITED(self.status):
- return os.WEXITSTATUS(self.status)
- else:
- return -1
-elif os.name == 'nt':
- def _failed(self, status=0):
- return not self.status is None and self.status != status
- def _status(self):
- return self.status
-
-
-class Tester(TestCmd.TestCmd):
- """Main tester class for Boost Build.
-
- Optional arguments:
-
- `arguments` - Arguments passed to the run executable.
- `executable` - Name of the executable to invoke.
-      `match`                        - Function to use for comparing actual and
-                                       expected file contents.
- `boost_build_path` - Boost build path to be passed to the run
- executable.
-      `translate_suffixes`           - Whether to update suffixes on the file
- names passed from the test script so they
- match those actually created by the current
- toolset. For example, static library files
- are specified by using the .lib suffix but
- when the 'gcc' toolset is used it actually
- creates them using the .a suffix.
- `pass_toolset` - Whether the test system should pass the
- specified toolset to the run executable.
- `use_test_config` - Whether the test system should tell the run
- executable to read in the test_config.jam
- configuration file.
- `ignore_toolset_requirements` - Whether the test system should tell the run
- executable to ignore toolset requirements.
- `workdir` - indicates an absolute directory where the
- test will be run from.
-
- Optional arguments inherited from the base class:
-
- `description` - Test description string displayed in case of
- a failed test.
- `subdir' - List of subdirectories to automatically
- create under the working directory. Each
- subdirectory needs to be specified
- separately parent coming before its child.
- `verbose` - Flag that may be used to enable more verbose
- test system output. Note that it does not
- also enable more verbose build system
- output like the --verbose command line
- option does.
- """
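-
-    # A minimal usage sketch (added for illustration, not part of the original
-    # file); a typical test script drives this class roughly as follows:
-    #
-    #   t = BoostBuild.Tester()
-    #   t.write("jamroot.jam", "exe hello : hello.cpp ;")
-    #   t.write("hello.cpp", "int main() {}\n")
-    #   t.run_build_system()
-    #   t.expect_addition("bin/$toolset/debug/hello.exe")
-    #   t.cleanup()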
- def __init__(self, arguments="", executable="bjam",
- match=TestCmd.match_exact, boost_build_path=None,
- translate_suffixes=True, pass_toolset=True, use_test_config=True,
- ignore_toolset_requirements=True, workdir="", pass_d0=True, **keywords):
-
- self.original_workdir = os.getcwd()
- if workdir != '' and not os.path.isabs(workdir):
- raise "Parameter workdir <"+workdir+"> must point to an absolute directory: "
-
- self.last_build_time_start = 0
- self.last_build_time_finish = 0
- self.translate_suffixes = translate_suffixes
- self.use_test_config = use_test_config
-
- self.toolset = get_toolset()
- self.pass_toolset = pass_toolset
- self.ignore_toolset_requirements = ignore_toolset_requirements
-
- prepare_suffix_map(pass_toolset and self.toolset or 'gcc')
-
- if not '--default-bjam' in sys.argv:
- jam_build_dir = ""
- if os.name == 'nt':
- jam_build_dir = "bin.ntx86"
- elif (os.name == 'posix') and os.__dict__.has_key('uname'):
- if os.uname()[0].lower().startswith('cygwin'):
- jam_build_dir = "bin.cygwinx86"
- if 'TMP' in os.environ and os.environ['TMP'].find('~') != -1:
- print 'Setting $TMP to /tmp to get around problem with short path names'
- os.environ['TMP'] = '/tmp'
- elif os.uname()[0] == 'Linux':
- cpu = os.uname()[4]
- if re.match("i.86", cpu):
- jam_build_dir = "bin.linuxx86";
- else:
- jam_build_dir = "bin.linux" + os.uname()[4]
- elif os.uname()[0] == 'SunOS':
- jam_build_dir = "bin.solaris"
- elif os.uname()[0] == 'Darwin':
- if os.uname()[4] == 'i386':
- jam_build_dir = "bin.macosxx86"
- else:
- jam_build_dir = "bin.macosxppc"
- elif os.uname()[0] == "AIX":
- jam_build_dir = "bin.aix"
- elif os.uname()[0] == "IRIX64":
- jam_build_dir = "bin.irix"
- elif os.uname()[0] == "FreeBSD":
- jam_build_dir = "bin.freebsd"
- elif os.uname()[0] == "OSF1":
- jam_build_dir = "bin.osf"
- else:
- raise "Don't know directory where Jam is built for this system: " + os.name + "/" + os.uname()[0]
- else:
- raise "Don't know directory where Jam is built for this system: " + os.name
-
- # Find where jam_src is located. Try for the debug version if it is
- # lying around.
- dirs = [os.path.join('../engine', jam_build_dir + '.debug'),
- os.path.join('../engine', jam_build_dir),
- ]
- for d in dirs:
- if os.path.exists(d):
- jam_build_dir = d
- break
- else:
- print "Cannot find built Boost.Jam"
- sys.exit(1)
-
- verbosity = ['-d0', '--quiet']
- if not pass_d0:
- verbosity = []
- if '--verbose' in sys.argv:
- keywords['verbose'] = True
- verbosity = ['-d+2']
-
- if boost_build_path is None:
- boost_build_path = self.original_workdir + "/.."
-
- program_list = []
-
- if '--default-bjam' in sys.argv:
- program_list.append(executable)
- inpath_bjam = True
- else:
- program_list.append(os.path.join(jam_build_dir, executable))
- inpath_bjam = None
- program_list.append('-sBOOST_BUILD_PATH="' + boost_build_path + '"')
- if verbosity:
- program_list += verbosity
- if arguments:
- program_list += arguments.split(" ")
-
- TestCmd.TestCmd.__init__(
- self
- , program=program_list
- , match=match
- , workdir=workdir
- , inpath=inpath_bjam
- , **keywords)
-
- os.chdir(self.workdir)
-
- def cleanup(self):
- try:
- TestCmd.TestCmd.cleanup(self)
- os.chdir(self.original_workdir)
- except AttributeError:
- # When this is called during TestCmd.TestCmd.__del__ we can have
- # both 'TestCmd' and 'os' unavailable in our scope. Do nothing in
- # this case.
- pass
-
- #
- # Methods that change the working directory's content.
- #
- def set_tree(self, tree_location):
- # It is not possible to remove the current directory.
- d = os.getcwd()
- os.chdir(os.path.dirname(self.workdir))
- shutil.rmtree(self.workdir, ignore_errors=False)
-
- if not os.path.isabs(tree_location):
- tree_location = os.path.join(self.original_workdir, tree_location)
- shutil.copytree(tree_location, self.workdir)
-
- os.chdir(d)
-
- def make_writable(unused, dir, entries):
- for e in entries:
- name = os.path.join(dir, e)
- os.chmod(name, os.stat(name)[0] | 0222)
-
- os.path.walk(".", make_writable, None)
-
- def write(self, file, content):
- self.wait_for_time_change_since_last_build()
- nfile = self.native_file_name(file)
- try:
- os.makedirs(os.path.dirname(nfile))
- except Exception, e:
- pass
- open(nfile, "wb").write(content)
-
- def rename(self, old, new):
- try:
- os.makedirs(os.path.dirname(new))
- except:
- pass
-
- try:
- os.remove(new)
- except:
- pass
-
- os.rename(old, new)
- self.touch(new);
-
- def copy(self, src, dst):
- self.wait_for_time_change_since_last_build()
- try:
- self.write(dst, self.read(src, 1))
- except:
- self.fail_test(1)
-
- def copy_preserving_timestamp(self, src, dst):
- src_name = self.native_file_name(src)
- dst_name = self.native_file_name(dst)
- stats = os.stat(src_name)
- self.write(dst, self.read(src, 1))
- os.utime(dst_name, (stats.st_atime, stats.st_mtime))
-
- def touch(self, names):
- self.wait_for_time_change_since_last_build()
- for name in self.adjust_names(names):
- os.utime(self.native_file_name(name), None)
-
- def rm(self, names):
- if not type(names) == types.ListType:
- names = [names]
-
- if names == ["."]:
- # If we're deleting the entire workspace, there's no
- # need to wait for a clock tick.
- self.last_build_time_start = 0
- self.last_build_time_finish = 0
-
- self.wait_for_time_change_since_last_build()
-
- # Avoid attempts to remove the current directory.
- os.chdir(self.original_workdir)
- for name in names:
- n = self.native_file_name(name)
- n = glob.glob(n)
- if n: n = n[0]
- if not n:
- n = self.glob_file(string.replace(name, "$toolset", self.toolset+"*"))
- if n:
- if os.path.isdir(n):
- shutil.rmtree(n, ignore_errors=False)
- else:
- os.unlink(n)
-
- # Create working dir root again in case we removed it.
- if not os.path.exists(self.workdir):
- os.mkdir(self.workdir)
- os.chdir(self.workdir)
-
- def expand_toolset(self, name):
-        """Expands $toolset in the given file to the tested toolset.
- """
- content = self.read(name)
- content = string.replace(content, "$toolset", self.toolset)
- self.write(name, content)
-
- def dump_stdio(self):
- annotation("STDOUT", self.stdout())
- annotation("STDERR", self.stderr())
-
- #
- # FIXME: Large portion copied from TestSCons.py, should be moved?
- #
- def run_build_system(self, extra_args="", subdir="", stdout=None, stderr="",
- status=0, match=None, pass_toolset=None, use_test_config=None,
- ignore_toolset_requirements=None, expected_duration=None, **kw):
-
- self.last_build_time_start = time.time()
-
- try:
- if os.path.isabs(subdir):
- if stderr:
- print "You must pass a relative directory to subdir <"+subdir+">."
- status = 1
- return
-
- self.previous_tree = tree.build_tree(self.workdir)
-
- if match is None:
- match = self.match
-
- if pass_toolset is None:
- pass_toolset = self.pass_toolset
-
- if use_test_config is None:
- use_test_config = self.use_test_config
-
- if ignore_toolset_requirements is None:
- ignore_toolset_requirements = self.ignore_toolset_requirements
-
- try:
- kw['program'] = []
- kw['program'] += self.program
- if extra_args:
- kw['program'] += extra_args.split(" ")
- if pass_toolset:
- kw['program'].append("toolset=" + self.toolset)
- if use_test_config:
- kw['program'].append('--test-config="%s"'
- % os.path.join(self.original_workdir, "test-config.jam"))
- if ignore_toolset_requirements:
- kw['program'].append("--ignore-toolset-requirements")
- if "--python" in sys.argv:
- kw['program'].append("--python")
- kw['chdir'] = subdir
- self.last_program_invocation = kw['program']
- apply(TestCmd.TestCmd.run, [self], kw)
- except:
- self.dump_stdio()
- raise
- finally:
- self.last_build_time_finish = time.time()
-
- if (status != None) and _failed(self, status):
- expect = ''
- if status != 0:
- expect = " (expected %d)" % status
-
- annotation("failure", '"%s" returned %d%s'
- % (kw['program'], _status(self), expect))
-
- annotation("reason", "unexpected status returned by bjam")
- self.fail_test(1)
-
- if not (stdout is None) and not match(self.stdout(), stdout):
- annotation("failure", "Unexpected stdout")
- annotation("Expected STDOUT", stdout)
- annotation("Actual STDOUT", self.stdout())
- stderr = self.stderr()
- if stderr:
- annotation("STDERR", stderr)
- self.maybe_do_diff(self.stdout(), stdout)
- self.fail_test(1, dump_stdio=False)
-
- # Intel tends to produce some messages to stderr which make tests fail.
- intel_workaround = re.compile("^xi(link|lib): executing.*\n", re.M)
- actual_stderr = re.sub(intel_workaround, "", self.stderr())
-
- if not (stderr is None) and not match(actual_stderr, stderr):
- annotation("failure", "Unexpected stderr")
- annotation("Expected STDERR", stderr)
- annotation("Actual STDERR", self.stderr())
- annotation("STDOUT", self.stdout())
- self.maybe_do_diff(actual_stderr, stderr)
- self.fail_test(1, dump_stdio=False)
-
- if not expected_duration is None:
- actual_duration = self.last_build_time_finish - self.last_build_time_start
- if (actual_duration > expected_duration):
- print "Test run lasted %f seconds while it was expected to " \
- "finish in under %f seconds." % (actual_duration,
- expected_duration)
- self.fail_test(1, dump_stdio=False)
-
- self.tree = tree.build_tree(self.workdir)
- self.difference = tree.trees_difference(self.previous_tree, self.tree)
- if self.difference.empty():
- # If nothing was changed, there's no need to wait
- self.last_build_time_start = 0
- self.last_build_time_finish = 0
- self.difference.ignore_directories()
- self.unexpected_difference = copy.deepcopy(self.difference)
-
- def glob_file(self, name):
- result = None
- if hasattr(self, 'difference'):
- for f in self.difference.added_files+self.difference.modified_files+self.difference.touched_files:
- if fnmatch.fnmatch(f, name):
- result = self.native_file_name(f)
- break
- if not result:
- result = glob.glob(self.native_file_name(name))
- if result:
- result = result[0]
- return result
-
- def read(self, name, binary=False):
- try:
- if self.toolset:
- name = string.replace(name, "$toolset", self.toolset+"*")
- name = self.glob_file(name)
- openMode = "r"
- if binary:
- openMode += "b"
- else:
- openMode += "U"
- return open(name, openMode).read()
- except:
- annotation("failure", "Could not open '%s'" % name)
- self.fail_test(1)
- return ''
-
- def read_and_strip(self, name):
- if not self.glob_file(name):
- return ''
- f = open(self.glob_file(name), "rb")
- lines = f.readlines()
- result = string.join(map(string.rstrip, lines), "\n")
- if lines and lines[-1][-1] == '\n':
- return result + '\n'
- else:
- return result
-
- def fail_test(self, condition, dump_stdio=True, *args):
- if not condition:
- return
-
- if hasattr(self, 'difference'):
- f = StringIO.StringIO()
- self.difference.pprint(f)
- annotation("changes caused by the last build command", f.getvalue())
-
- if dump_stdio:
- self.dump_stdio()
-
- if '--preserve' in sys.argv:
- print
- print "*** Copying the state of working dir into 'failed_test' ***"
- print
- path = os.path.join(self.original_workdir, "failed_test")
- if os.path.isdir(path):
- shutil.rmtree(path, ignore_errors=False)
- elif os.path.exists(path):
- raise "Path " + path + " already exists and is not a directory";
- shutil.copytree(self.workdir, path)
- print "The failed command was:"
- print ' '.join(self.last_program_invocation)
-
- at = TestCmd.caller(traceback.extract_stack(), 0)
- annotation("stacktrace", at)
- sys.exit(1)
-
- # A number of methods below check expectations with actual difference
- # between directory trees before and after a build. All the 'expect*'
- # methods require exact names to be passed. All the 'ignore*' methods allow
- # wildcards.
-
- # All names can be lists, which are taken to be directory components.
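-    # For example (illustrative, not part of the original file):
-    #
-    #   t.expect_addition("bin/$toolset/debug/hello.obj")   # exact name
-    #   t.ignore("*.pdb")                                    # wildcard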
- def expect_addition(self, names):
- for name in self.adjust_names(names):
- try:
- glob_remove(self.unexpected_difference.added_files, name)
- except:
- annotation("failure", "File %s not added as expected" % name)
- self.fail_test(1)
-
- def ignore_addition(self, wildcard):
- self.ignore_elements(self.unexpected_difference.added_files, wildcard)
-
- def expect_removal(self, names):
- for name in self.adjust_names(names):
- try:
- glob_remove(self.unexpected_difference.removed_files, name)
- except:
- annotation("failure", "File %s not removed as expected" % name)
- self.fail_test(1)
-
- def ignore_removal(self, wildcard):
- self.ignore_elements(self.unexpected_difference.removed_files, wildcard)
-
- def expect_modification(self, names):
- for name in self.adjust_names(names):
- try:
- glob_remove(self.unexpected_difference.modified_files, name)
- except:
- annotation("failure", "File %s not modified as expected" % name)
- self.fail_test(1)
-
- def ignore_modification(self, wildcard):
- self.ignore_elements(self.unexpected_difference.modified_files, \
- wildcard)
-
- def expect_touch(self, names):
- d = self.unexpected_difference
- for name in self.adjust_names(names):
- # We need to check both touched and modified files. The reason is
- # that:
- # (1) Windows binaries such as obj, exe or dll files have slight
- # differences even with identical inputs due to Windows PE
- # format headers containing an internal timestamp.
- # (2) Intel's compiler for Linux has the same behaviour.
- filesets = [d.modified_files, d.touched_files]
-
- while filesets:
- try:
- glob_remove(filesets[-1], name)
- break
- except ValueError:
- filesets.pop()
-
- if not filesets:
- annotation("failure", "File %s not touched as expected" % name)
- self.fail_test(1)
-
- def ignore_touch(self, wildcard):
- self.ignore_elements(self.unexpected_difference.touched_files, wildcard)
-
- def ignore(self, wildcard):
- self.ignore_elements(self.unexpected_difference.added_files, wildcard)
- self.ignore_elements(self.unexpected_difference.removed_files, wildcard)
- self.ignore_elements(self.unexpected_difference.modified_files, wildcard)
- self.ignore_elements(self.unexpected_difference.touched_files, wildcard)
-
- def expect_nothing(self, names):
- for name in self.adjust_names(names):
- if name in self.difference.added_files:
- annotation("failure",
- "File %s added, but no action was expected" % name)
- self.fail_test(1)
- if name in self.difference.removed_files:
- annotation("failure",
- "File %s removed, but no action was expected" % name)
- self.fail_test(1)
- pass
- if name in self.difference.modified_files:
- annotation("failure",
- "File %s modified, but no action was expected" % name)
- self.fail_test(1)
- if name in self.difference.touched_files:
- annotation("failure",
- "File %s touched, but no action was expected" % name)
- self.fail_test(1)
-
- def expect_nothing_more(self):
- # Not totally sure about this change, but I do not see a good
- # alternative.
- if windows:
- self.ignore('*.ilk') # MSVC incremental linking files.
- self.ignore('*.pdb') # MSVC program database files.
- self.ignore('*.rsp') # Response files.
- self.ignore('*.tds') # Borland debug symbols.
- self.ignore('*.manifest') # MSVC DLL manifests.
-
- # Debug builds of bjam built with gcc produce this profiling data.
- self.ignore('gmon.out')
- self.ignore('*/gmon.out')
-
- self.ignore("bin/config.log")
-
- self.ignore("*.pyc")
-
- if not self.unexpected_difference.empty():
- annotation('failure', 'Unexpected changes found')
- output = StringIO.StringIO()
- self.unexpected_difference.pprint(output)
- annotation("unexpected changes", output.getvalue())
- self.fail_test(1)
-
- def __expect_line(self, content, expected, expected_to_exist):
- expected = expected.strip()
- lines = content.splitlines()
- found = False
- for line in lines:
- line = line.strip()
- if fnmatch.fnmatch(line, expected):
- found = True
- break
-
- if expected_to_exist and not found:
- annotation("failure",
- "Did not find expected line:\n%s\nin output:\n%s" %
- (expected, content))
- self.fail_test(1)
- if not expected_to_exist and found:
- annotation("failure",
- "Found an unexpected line:\n%s\nin output:\n%s" %
- (expected, content))
- self.fail_test(1)
-
- def expect_output_line(self, line, expected_to_exist=True):
- self.__expect_line(self.stdout(), line, expected_to_exist)
-
- def expect_content_line(self, name, line, expected_to_exist=True):
- content = self.__read_file(name)
- self.__expect_line(content, line, expected_to_exist)
-
- def __read_file(self, name, exact=False):
- name = self.adjust_names(name)[0]
- result = ""
- try:
- if exact:
- result = self.read(name)
- else:
- result = string.replace(self.read_and_strip(name), "\\", "/")
- except (IOError, IndexError):
- print "Note: could not open file", name
- self.fail_test(1)
- return result
-
- def expect_content(self, name, content, exact=False):
- actual = self.__read_file(name, exact)
- content = string.replace(content, "$toolset", self.toolset+"*")
-
- matched = False
- if exact:
- matched = fnmatch.fnmatch(actual, content)
- else:
- def sorted_(x):
- x.sort()
- return x
- actual_ = map(lambda x: sorted_(x.split()), actual.splitlines())
- content_ = map(lambda x: sorted_(x.split()), content.splitlines())
- if len(actual_) == len(content_):
- matched = map(
- lambda x, y: map(lambda n, p: fnmatch.fnmatch(n, p), x, y),
- actual_, content_)
- matched = reduce(
- lambda x, y: x and reduce(
- lambda a, b: a and b,
- y),
- matched)
-
- if not matched:
- print "Expected:\n"
- print content
- print "Got:\n"
- print actual
- self.fail_test(1)
-
- def maybe_do_diff(self, actual, expected):
- if os.environ.has_key("DO_DIFF") and os.environ["DO_DIFF"] != '':
- e = tempfile.mktemp("expected")
- a = tempfile.mktemp("actual")
- open(e, "w").write(expected)
- open(a, "w").write(actual)
- print "DIFFERENCE"
- if os.system("diff -u " + e + " " + a):
- print "Unable to compute difference: diff -u %s %s" % (e, a)
- os.unlink(e)
- os.unlink(a)
- else:
- print "Set environmental variable 'DO_DIFF' to examine difference."
-
- # Helpers.
- def mul(self, *arguments):
- if len(arguments) == 0:
- return None
-
- here = arguments[0]
- if type(here) == type(''):
- here = [here]
-
- if len(arguments) > 1:
- there = apply(self.mul, arguments[1:])
- result = []
- for i in here:
- for j in there:
- result.append(i + j)
- return result
-
- return here
-
- # Internal methods.
- def ignore_elements(self, list, wildcard):
-        """Removes, in place, the elements of 'list' that match the given wildcard.
- """
- list[:] = filter(lambda x, w=wildcard: not fnmatch.fnmatch(x, w), list)
-
- def adjust_lib_name(self, name):
- global lib_prefix
- global dll_prefix
- result = name
-
- pos = string.rfind(name, ".")
- if pos != -1:
- suffix = name[pos:]
- if suffix == ".lib":
- (head, tail) = os.path.split(name)
- if lib_prefix:
- tail = lib_prefix + tail
- result = os.path.join(head, tail)
- elif suffix == ".dll":
- (head, tail) = os.path.split(name)
- if dll_prefix:
- tail = dll_prefix + tail
- result = os.path.join(head, tail)
- # If we want to use this name in a Jamfile, we better convert \ to /, as
- # otherwise we would have to quote \.
- result = string.replace(result, "\\", "/")
- return result
-
- def adjust_suffix(self, name):
- if not self.translate_suffixes:
- return name
-
- pos = string.rfind(name, ".")
- if pos != -1:
- suffix = name[pos:]
- name = name[:pos]
-
- if suffixes.has_key(suffix):
- suffix = suffixes[suffix]
- else:
- suffix = ''
-
- return name + suffix
-
-    # Accepts either a string or a list of strings and returns a list of strings.
- # Adjusts suffixes on all names.
- def adjust_names(self, names):
- if type(names) == types.StringType:
- names = [names]
- r = map(self.adjust_lib_name, names)
- r = map(self.adjust_suffix, r)
- r = map(lambda x, t=self.toolset: string.replace(x, "$toolset", t+"*"), r)
- return r
-
- def native_file_name(self, name):
- name = self.adjust_names(name)[0]
- elements = string.split(name, "/")
- return os.path.normpath(apply(os.path.join, [self.workdir]+elements))
-
-    # Wait until the current time is no longer equal to the time the last
-    # "run_build_system" call finished. Used to avoid subsequent builds
-    # treating existing files as 'current'.
- def wait_for_time_change_since_last_build(self):
- while 1:
- # In fact, I'm not sure why "+ 2" as opposed to "+ 1" is needed but
- # empirically, "+ 1" sometimes causes 'touch' and other functions
- # not to bump the file time enough for a rebuild to happen.
- if math.floor(time.time()) < math.floor(self.last_build_time_finish) + 2:
- time.sleep(0.1)
- else:
- break
-
-
-class List:
-
- def __init__(self, s=""):
- elements = []
- if isinstance(s, type("")):
-            # Have to handle escaped spaces correctly.
- s = string.replace(s, "\ ", '\001')
- elements = string.split(s)
- else:
- elements = s;
-
- self.l = []
- for e in elements:
- self.l.append(string.replace(e, '\001', ' '))
-
- def __len__(self):
- return len(self.l)
-
- def __getitem__(self, key):
- return self.l[key]
-
- def __setitem__(self, key, value):
- self.l[key] = value
-
- def __delitem__(self, key):
- del self.l[key]
-
- def __str__(self):
- return str(self.l)
-
- def __repr__(self):
- return (self.__module__ + '.List('
- + repr(string.join(self.l, ' '))
- + ')')
-
- def __mul__(self, other):
- result = List()
- if not isinstance(other, List):
- other = List(other)
- for f in self:
- for s in other:
- result.l.append(f + s)
- return result
-
- def __rmul__(self, other):
- if not isinstance(other, List):
- other = List(other)
- return List.__mul__(other, self)
-
- def __add__(self, other):
- result = List()
- result.l = self.l[:] + other.l[:]
- return result
-
-# Quickie tests. Should use doctest instead.
-if __name__ == '__main__':
- assert str(List("foo bar") * "/baz") == "['foo/baz', 'bar/baz']"
- assert repr("foo/" * List("bar baz")) == "__main__.List('foo/bar foo/baz')"
- print 'tests passed'
diff --git a/tools/build/v2/test/TestCmd.py b/tools/build/v2/test/TestCmd.py
deleted file mode 100644
index b14e942655..0000000000
--- a/tools/build/v2/test/TestCmd.py
+++ /dev/null
@@ -1,651 +0,0 @@
-"""
-TestCmd.py: a testing framework for commands and scripts.
-
-The TestCmd module provides a framework for portable automated testing of
-executable commands and scripts (in any language, not just Python), especially
-commands and scripts that require file system interaction.
-
-In addition to running tests and evaluating conditions, the TestCmd module
-manages and cleans up one or more temporary workspace directories, and provides
-methods for creating files and directories in those workspace directories from
-in-line data (here-documents), allowing tests to be completely self-contained.
-
-A TestCmd environment object is created via the usual invocation:
-
- test = TestCmd()
-
-The TestCmd module provides pass_test(), fail_test(), and no_result() unbound
-methods that report test results for use with the Aegis change management
-system. These methods terminate the test immediately, reporting PASSED, FAILED
-or NO RESULT respectively and exiting with status 0 (success), 1 or 2
-respectively. This allows for a distinction between an actual failed test and a
-test that could not be properly evaluated because of an external condition (such
-as a full file system or incorrect permissions).
-"""
-
-# Copyright 2000 Steven Knight
-# This module is free software, and you may redistribute it and/or modify
-# it under the same terms as Python itself, so long as this copyright message
-# and disclaimer are retained in their original form.
-#
-# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
-# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
-# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-# DAMAGE.
-#
-# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-# PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
-# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
-# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
-
-# Copyright 2002-2003 Vladimir Prus.
-# Copyright 2002-2003 Dave Abrahams.
-# Copyright 2006 Rene Rivera.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-
-from string import join, split
-
-__author__ = "Steven Knight <knight@baldmt.com>"
-__revision__ = "TestCmd.py 0.D002 2001/08/31 14:56:12 software"
-__version__ = "0.02"
-
-from types import *
-
-import os
-import os.path
-import popen2
-import re
-import shutil
-import stat
-import sys
-import tempfile
-import traceback
-
-
-tempfile.template = 'testcmd.'
-
-_Cleanup = []
-
-def _clean():
- global _Cleanup
- list = _Cleanup[:]
- _Cleanup = []
- list.reverse()
- for test in list:
- test.cleanup()
-
-sys.exitfunc = _clean
-
-
-def caller(tblist, skip):
- string = ""
- arr = []
- for file, line, name, text in tblist:
- if file[-10:] == "TestCmd.py":
- break
- arr = [(file, line, name, text)] + arr
- atfrom = "at"
- for file, line, name, text in arr[skip:]:
- if name == "?":
- name = ""
- else:
- name = " (" + name + ")"
- string = string + ("%s line %d of %s%s\n" % (atfrom, line, file, name))
- atfrom = "\tfrom"
- return string
-
-
-def fail_test(self=None, condition=True, function=None, skip=0):
- """Cause the test to fail.
-
- By default, the fail_test() method reports that the test FAILED and exits
- with a status of 1. If a condition argument is supplied, the test fails only
- if the condition is true.
- """
- if not condition:
- return
- if not function is None:
- function()
- of = ""
- desc = ""
- sep = " "
- if not self is None:
- if self.program:
- of = " of " + join(self.program, " ")
- sep = "\n\t"
- if self.description:
- desc = " [" + self.description + "]"
- sep = "\n\t"
-
- at = caller(traceback.extract_stack(), skip)
-
- sys.stderr.write("FAILED test" + of + desc + sep + at + """
-in directory: """ + os.getcwd() )
- sys.exit(1)
-
-
-def no_result(self=None, condition=True, function=None, skip=0):
- """Causes a test to exit with no valid result.
-
- By default, the no_result() method reports NO RESULT for the test and exits
-    with a status of 2. If a condition argument is supplied, NO RESULT is
-    reported only if the condition is true.
- """
- if not condition:
- return
- if not function is None:
- function()
- of = ""
- desc = ""
- sep = " "
- if not self is None:
- if self.program:
- of = " of " + self.program
- sep = "\n\t"
- if self.description:
- desc = " [" + self.description + "]"
- sep = "\n\t"
-
- at = caller(traceback.extract_stack(), skip)
- sys.stderr.write("NO RESULT for test" + of + desc + sep + at)
- sys.exit(2)
-
-
-def pass_test(self=None, condition=True, function=None):
- """Causes a test to pass.
-
- By default, the pass_test() method reports PASSED for the test and exits
- with a status of 0. If a condition argument is supplied, the test passes
- only if the condition is true.
- """
- if not condition:
- return
- if not function is None:
- function()
- sys.stderr.write("PASSED\n")
- sys.exit(0)
-
-
-def match_exact(lines=None, matches=None):
-    """Returns whether the given lists of lines (or strings containing
-    newline-separated lines) contain exactly the same data.
- """
- if not type(lines) is ListType:
- lines = split(lines, "\n")
- if not type(matches) is ListType:
- matches = split(matches, "\n")
- if len(lines) != len(matches):
- return
- for i in range(len(lines)):
- if lines[i] != matches[i]:
- return
- return 1
-
-
-def match_re(lines=None, res=None):
-    """Matches the given lists of lines (or strings containing newline-separated
-    lines) one by one, interpreting the lines in the res parameter as regular
-    expressions.
- """
- if not type(lines) is ListType:
- lines = split(lines, "\n")
- if not type(res) is ListType:
- res = split(res, "\n")
- if len(lines) != len(res):
- return
- for i in range(len(lines)):
- if not re.compile("^" + res[i] + "$").search(lines[i]):
- return
- return 1
-
-
-class TestCmd:
- """Class TestCmd.
- """
-
- def __init__(self, description=None, program=None, workdir=None,
- subdir=None, verbose=False, match=None, inpath=None):
-
- self._cwd = os.getcwd()
- self.description_set(description)
- if inpath:
- self.program = program
- else:
- self.program_set(program)
- self.verbose_set(verbose)
- if not match is None:
- self.match_func = match
- else:
- self.match_func = match_re
- self._dirlist = []
- self._preserve = {'pass_test': 0, 'fail_test': 0, 'no_result': 0}
- if os.environ.has_key('PRESERVE') and not os.environ['PRESERVE'] is '':
- self._preserve['pass_test'] = os.environ['PRESERVE']
- self._preserve['fail_test'] = os.environ['PRESERVE']
- self._preserve['no_result'] = os.environ['PRESERVE']
- else:
- try:
- self._preserve['pass_test'] = os.environ['PRESERVE_PASS']
- except KeyError:
- pass
- try:
- self._preserve['fail_test'] = os.environ['PRESERVE_FAIL']
- except KeyError:
- pass
- try:
- self._preserve['no_result'] = os.environ['PRESERVE_NO_RESULT']
- except KeyError:
- pass
- self._stdout = []
- self._stderr = []
- self.status = None
- self.condition = 'no_result'
- self.workdir_set(workdir)
- self.subdir(subdir)
-
- def __del__(self):
- self.cleanup()
-
- def __repr__(self):
- return "%x" % id(self)
-
- def cleanup(self, condition=None):
- """Removes any temporary working directories for the specified TestCmd
- environment. If the environment variable PRESERVE was set when the
- TestCmd environment was created, temporary working directories are not
- removed. If any of the environment variables PRESERVE_PASS,
- PRESERVE_FAIL or PRESERVE_NO_RESULT were set when the TestCmd
- environment was created, then temporary working directories are not
- removed if the test passed, failed or had no result, respectively.
- Temporary working directories are also preserved for conditions
- specified via the preserve method.
-
- Typically, this method is not called directly, but is used when the
- script exits to clean up temporary working directories as appropriate
- for the exit status.
- """
- if not self._dirlist:
- return
- if condition is None:
- condition = self.condition
- if self._preserve[condition]:
- for dir in self._dirlist:
- print "Preserved directory", dir
- else:
- list = self._dirlist[:]
- list.reverse()
- for dir in list:
- self.writable(dir, 1)
- shutil.rmtree(dir, ignore_errors = 1)
-
- self._dirlist = []
- self.workdir = None
- os.chdir(self._cwd)
- try:
- global _Cleanup
- _Cleanup.remove(self)
- except (AttributeError, ValueError):
- pass
-
- def description_set(self, description):
- """Set the description of the functionality being tested.
- """
- self.description = description
-
- def fail_test(self, condition=True, function=None, skip=0):
- """Cause the test to fail.
- """
- if not condition:
- return
- self.condition = 'fail_test'
- fail_test(self = self,
- condition = condition,
- function = function,
- skip = skip)
-
- def match(self, lines, matches):
- """Compare actual and expected file contents.
- """
- return self.match_func(lines, matches)
-
- def match_exact(self, lines, matches):
- """Compare actual and expected file contents.
- """
- return match_exact(lines, matches)
-
- def match_re(self, lines, res):
- """Compare actual and expected file contents.
- """
- return match_re(lines, res)
-
- def no_result(self, condition=True, function=None, skip=0):
- """Report that the test could not be run.
- """
- if not condition:
- return
- self.condition = 'no_result'
- no_result(self = self,
- condition = condition,
- function = function,
- skip = skip)
-
- def pass_test(self, condition=True, function=None):
- """Cause the test to pass.
- """
- if not condition:
- return
- self.condition = 'pass_test'
- pass_test(self = self, condition = condition, function = function)
-
- def preserve(self, *conditions):
- """Arrange for the temporary working directories for the specified
- TestCmd environment to be preserved for one or more conditions. If no
- conditions are specified, arranges for the temporary working directories
- to be preserved for all conditions.
- """
- if conditions is ():
- conditions = ('pass_test', 'fail_test', 'no_result')
- for cond in conditions:
- self._preserve[cond] = 1
-
- def program_set(self, program):
- """Set the executable program or script to be tested.
- """
- if program and program[0] and not os.path.isabs(program[0]):
- program[0] = os.path.join(self._cwd, program[0])
- self.program = program
-
- def read(self, file, mode='rb'):
- """Reads and returns the contents of the specified file name. The file
- name may be a list, in which case the elements are concatenated with the
- os.path.join() method. The file is assumed to be under the temporary
- working directory unless it is an absolute path name. The I/O mode for
- the file may be specified; it must begin with an 'r'. The default is
- 'rb' (binary read).
- """
- if type(file) is ListType:
- file = apply(os.path.join, tuple(file))
- if not os.path.isabs(file):
- file = os.path.join(self.workdir, file)
- if mode[0] != 'r':
- raise ValueError, "mode must begin with 'r'"
- return open(file, mode).read()
-
- def run(self, program=None, arguments=None, chdir=None, stdin=None):
- """Runs a test of the program or script for the test environment.
- Standard output and error output are saved for future retrieval via the
- stdout() and stderr() methods.
- """
- if chdir:
- oldcwd = os.getcwd()
- if not os.path.isabs(chdir):
- chdir = os.path.join(self.workpath(chdir))
- if self.verbose:
- sys.stderr.write("chdir(" + chdir + ")\n")
- os.chdir(chdir)
- cmd = []
- if program and program[0]:
- if program[0] != self.program[0] and not os.path.isabs(program[0]):
- program[0] = os.path.join(self._cwd, program[0])
- cmd += program
- else:
- cmd += self.program
- if arguments:
- cmd += arguments.split(" ")
- if self.verbose:
- sys.stderr.write(join(cmd, " ") + "\n")
- try:
- p = popen2.Popen3(cmd, 1)
- except AttributeError:
- # We end up here in case the popen2.Popen3 class is not available
- # (e.g. on Windows). We will be using the os.popen3() Python API
- # which takes a string parameter and so needs its executable quoted
- # in case its name contains spaces.
- cmd[0] = '"' + cmd[0] + '"'
- command_string = join(cmd, " ")
- if ( os.name == 'nt' ):
- # This is a workaround for a longstanding Python bug on Windows
- # when using os.popen(), os.system() and similar functions to
- # execute a command containing quote characters. The bug seems
- # to be related to the quote stripping functionality used by the
- # Windows cmd.exe interpreter when its /S is not specified.
- #
- # Cleaned up quote from the cmd.exe help screen as displayed on
- # Windows XP SP2:
- #
- # 1. If all of the following conditions are met, then quote
- # characters on the command line are preserved:
- #
- # - no /S switch
- # - exactly two quote characters
- # - no special characters between the two quote
- # characters, where special is one of: &<>()@^|
- # - there are one or more whitespace characters between
- # the two quote characters
- # - the string between the two quote characters is the
- # name of an executable file.
- #
- # 2. Otherwise, old behavior is to see if the first character
- # is a quote character and if so, strip the leading
- # character and remove the last quote character on the
- # command line, preserving any text after the last quote
- # character.
- #
- # This causes some commands containing quotes not to be executed
- # correctly. For example:
- #
- # "\Long folder name\aaa.exe" --name="Jurko" --no-surname
- #
- # would get its outermost quotes stripped and would be executed
- # as:
- #
- # \Long folder name\aaa.exe" --name="Jurko --no-surname
- #
- # which would report an error about '\Long' not being a valid
- # command.
- #
- # cmd.exe help seems to indicate it would be enough to add an
- # extra space character in front of the command to avoid this
- # but this does not work, most likely due to the shell first
- # stripping all leading whitespace characters from the command.
- #
- # Solution implemented here is to quote the whole command in
-            # case it contains any quote characters. Note though this will
- # not work correctly should Python ever fix this bug.
- # (01.05.2008.) (Jurko)
- if command_string.find('"') != -1:
- command_string = '"' + command_string + '"'
- (tochild, fromchild, childerr) = os.popen3(command_string)
- if stdin:
- if type(stdin) is ListType:
- for line in stdin:
- tochild.write(line)
- else:
- tochild.write(stdin)
- tochild.close()
- self._stdout.append(fromchild.read())
- self._stderr.append(childerr.read())
- fromchild.close()
- self.status = childerr.close()
- if not self.status:
- self.status = 0
- except:
- raise
- else:
- if stdin:
- if type(stdin) is ListType:
- for line in stdin:
- p.tochild.write(line)
- else:
- p.tochild.write(stdin)
- p.tochild.close()
- self._stdout.append(p.fromchild.read())
- self._stderr.append(p.childerr.read())
- self.status = p.wait()
-
- if self.verbose:
- sys.stdout.write(self._stdout[-1])
- sys.stderr.write(self._stderr[-1])
-
- if chdir:
- os.chdir(oldcwd)
-
- def stderr(self, run=None):
- """Returns the error output from the specified run number. If there is
- no specified run number, then returns the error output of the last run.
- If the run number is less than zero, then returns the error output from
- that many runs back from the current run.
- """
- if not run:
- run = len(self._stderr)
- elif run < 0:
- run = len(self._stderr) + run
- run = run - 1
- if (run < 0):
- return ''
- return self._stderr[run]
-
- def stdout(self, run=None):
- """Returns the standard output from the specified run number. If there
- is no specified run number, then returns the standard output of the last
- run. If the run number is less than zero, then returns the standard
- output from that many runs back from the current run.
- """
- if not run:
- run = len(self._stdout)
- elif run < 0:
- run = len(self._stdout) + run
- run = run - 1
- if (run < 0):
- return ''
- return self._stdout[run]
-
- def subdir(self, *subdirs):
- """Create new subdirectories under the temporary working directory, one
- for each argument. An argument may be a list, in which case the list
- elements are concatenated using the os.path.join() method.
- Subdirectories multiple levels deep must be created using a separate
- argument for each level:
-
- test.subdir('sub', ['sub', 'dir'], ['sub', 'dir', 'ectory'])
-
- Returns the number of subdirectories actually created.
- """
- count = 0
- for sub in subdirs:
- if sub is None:
- continue
- if type(sub) is ListType:
- sub = apply(os.path.join, tuple(sub))
- new = os.path.join(self.workdir, sub)
- try:
- os.mkdir(new)
- except:
- pass
- else:
- count = count + 1
- return count
-
- def unlink (self, file):
- """Unlinks the specified file name. The file name may be a list, in
- which case the elements are concatenated using the os.path.join()
- method. The file is assumed to be under the temporary working directory
- unless it is an absolute path name.
- """
- if type(file) is ListType:
- file = apply(os.path.join, tuple(file))
- if not os.path.isabs(file):
- file = os.path.join(self.workdir, file)
- os.unlink(file)
-
- def verbose_set(self, verbose):
- """Set the verbose level.
- """
- self.verbose = verbose
-
- def workdir_set(self, path):
- """Creates a temporary working directory with the specified path name.
- If the path is a null string (''), a unique directory name is created.
- """
-
- if os.path.isabs(path):
- self.workdir = path
- else:
- if (path != None):
- if path == '':
- path = tempfile.mktemp()
- if path != None:
- os.mkdir(path)
- self._dirlist.append(path)
- global _Cleanup
- try:
- _Cleanup.index(self)
- except ValueError:
- _Cleanup.append(self)
- # We'd like to set self.workdir like this:
- # self.workdir = path
- # But symlinks in the path will report things differently from
- # os.getcwd(), so chdir there and back to fetch the canonical
- # path.
- cwd = os.getcwd()
- os.chdir(path)
- self.workdir = os.getcwd()
- os.chdir(cwd)
- else:
- self.workdir = None
-
- def workpath(self, *args):
- """Returns the absolute path name to a subdirectory or file within the
- current temporary working directory. Concatenates the temporary working
- directory name with the specified arguments using the os.path.join()
- method.
- """
- return apply(os.path.join, (self.workdir,) + tuple(args))
-
- def writable(self, top, write):
- """Make the specified directory tree writable (write == 1) or not
- (write == None).
- """
-
- def _walk_chmod(arg, dirname, names):
- st = os.stat(dirname)
- os.chmod(dirname, arg(st[stat.ST_MODE]))
- for name in names:
- n = os.path.join(dirname, name)
- st = os.stat(n)
- os.chmod(n, arg(st[stat.ST_MODE]))
-
- def _mode_writable(mode):
- return stat.S_IMODE(mode|0200)
-
- def _mode_non_writable(mode):
- return stat.S_IMODE(mode&~0200)
-
- if write:
- f = _mode_writable
- else:
- f = _mode_non_writable
- try:
- os.path.walk(top, _walk_chmod, f)
- except:
- pass # Ignore any problems changing modes.
-
- def write(self, file, content, mode='wb'):
- """Writes the specified content text (second argument) to the specified
- file name (first argument). The file name may be a list, in which case
- the elements are concatenated using the os.path.join() method. The file
- is created under the temporary working directory. Any subdirectories in
- the path must already exist. The I/O mode for the file may be specified;
- it must begin with a 'w'. The default is 'wb' (binary write).
- """
- if type(file) is ListType:
- file = apply(os.path.join, tuple(file))
- if not os.path.isabs(file):
- file = os.path.join(self.workdir, file)
- if mode[0] != 'w':
- raise ValueError, "mode must begin with 'w'"
- open(file, mode).write(content)
diff --git a/tools/build/v2/test/abs_workdir.py b/tools/build/v2/test/abs_workdir.py
deleted file mode 100644
index 29fab06fa4..0000000000
--- a/tools/build/v2/test/abs_workdir.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Niklaus Giger, 2005-03-15
-# Testing whether we can run a test in absolute directories. There are no tests
-# for temporary directories as this is implicitly tested in a lot of other cases.
-
-import BoostBuild
-import os
-import string
-
-t = BoostBuild.Tester(arguments="pwd", executable="jam", workdir=os.getcwd(),
- pass_toolset=0)
-
-t.write("jamroot.jam", """
-actions print_pwd { pwd ; }
-print_pwd pwd ;
-ALWAYS pwd ;
-""")
-
-t.run_build_system(status=0)
-
-if 'TMP' in os.environ:
- tmp_dir = os.environ.get('TMP')
-else:
- tmp_dir = "/tmp"
-
-if string.rfind(t.stdout(), tmp_dir) != -1:
- t.fail_test(1)
-
-if string.rfind(t.stdout(), 'build/v2/test') == -1:
- t.fail_test(1)
-
-t.run_build_system(status=1, subdir="/must/fail/with/absolute/path",
- stderr=None)
-
-t.cleanup()
diff --git a/tools/build/v2/test/absolute_sources.py b/tools/build/v2/test/absolute_sources.py
deleted file mode 100644
index 4053b35ca1..0000000000
--- a/tools/build/v2/test/absolute_sources.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that sources with absolute names are handled OK.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-path-constant TOP : . ;
-""")
-
-t.write("jamfile.jam", """
-local pwd = [ PWD ] ;
-ECHO $(pwd) XXXXX ;
-exe hello : $(pwd)/hello.cpp $(TOP)/empty.cpp ;
-""")
-
-t.write("hello.cpp", "int main() {}\n")
-
-t.write("empty.cpp", "\n")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/hello.exe")
-t.rm(".")
-
-# Test a contrived case in which an absolute name is used in a standalone
-# project (not a Jamfile). Moreover, the target with an absolute name is
-# returned by 'alias' and used from another project.
-t.write("a.cpp", """
-int main() {}
-""")
-
-t.write("jamfile.jam", """
-exe a : /standalone//a ;
-""")
-
-t.write("jamroot.jam", """
-import standalone ;
-""")
-
-t.write("standalone.jam", """
-import project ;
-
-project.initialize $(__name__) ;
-project standalone ;
-
-local pwd = [ PWD ] ;
-alias a : $(pwd)/a.cpp ;
-""")
-
-t.write("standalone.py", """
-from b2.manager import get_manager
-
-# FIXME: this is ugly as death
-get_manager().projects().initialize(__name__)
-
-import os ;
-
-# This use of list as parameter is also ugly.
-project(['standalone'])
-
-pwd = os.getcwd()
-alias('a', [os.path.join(pwd, 'a.cpp')])
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.exe")
-
-# Test absolute path in target ids.
-t.rm(".")
-
-t.write("d1/jamroot.jam", "")
-
-t.write("d1/jamfile.jam", """
-exe a : a.cpp ;
-""")
-
-t.write("d1/a.cpp", """
-int main() {}
-""")
-
-t.write("d2/jamroot.jam", "")
-
-t.write("d2/jamfile.jam", """
-local pwd = [ PWD ] ;
-alias x : $(pwd)/../d1//a ;
-""")
-
-t.run_build_system(subdir="d2")
-t.expect_addition("d1/bin/$toolset/debug/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/alias.py b/tools/build/v2/test/alias.py
deleted file mode 100644
index 107b12e87e..0000000000
--- a/tools/build/v2/test/alias.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-
-################################################################################
-#
-# test_alias_rule()
-# -----------------
-#
-################################################################################
-
-def test_alias_rule(t):
- """Basic alias rule test.
- """
-
- t.write("jamroot.jam", """
-exe a : a.cpp ;
-exe b : b.cpp ;
-exe c : c.cpp ;
-
-alias bin1 : a ;
-alias bin2 : a b ;
-
-alias src : s.cpp ;
-exe hello : hello.cpp src ;
-""")
-
- t.write("a.cpp", "int main() {}\n")
- t.copy("a.cpp", "b.cpp")
- t.copy("a.cpp", "c.cpp")
- t.copy("a.cpp", "hello.cpp")
- t.write("s.cpp", "")
-
- # Check that targets to which "bin1" refers are updated, and only those.
- t.run_build_system("bin1")
- t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * "a.exe a.obj")
- t.expect_nothing_more()
-
- # Try again with "bin2"
- t.run_build_system("bin2")
- t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * "b.exe b.obj")
- t.expect_nothing_more()
-
- # Try building everything, making sure 'hello' target is created.
- t.run_build_system()
- t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * \
- "hello.exe hello.obj")
- t.expect_addition("bin/$toolset/debug/s.obj")
- t.expect_addition(BoostBuild.List("bin/$toolset/debug/") * "c.exe c.obj")
- t.expect_nothing_more()
-
-
-################################################################################
-#
-# test_alias_source_usage_requirements()
-# --------------------------------------
-#
-################################################################################
-
-def test_alias_source_usage_requirements(t):
- """Check whether usage requirements are propagated via "alias". In case they
- are not, linking will fail as there will be no main() function defined
- anywhere in the source.
- """
-
- t.write("jamroot.jam", """
-lib l : l.cpp : : : <define>WANT_MAIN ;
-alias la : l ;
-exe main : main.cpp la ;
-""")
-
- t.write("l.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-foo() {}
-""")
-
- t.write("main.cpp", """
-#ifdef WANT_MAIN
-int main() {}
-#endif
-""")
-
- t.run_build_system()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-t = BoostBuild.Tester()
-
-test_alias_rule(t)
-test_alias_source_usage_requirements(t)
-
-t.cleanup()
diff --git a/tools/build/v2/test/alternatives.py b/tools/build/v2/test/alternatives.py
deleted file mode 100644
index 6d423212f0..0000000000
--- a/tools/build/v2/test/alternatives.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test main target alternatives.
-
-import BoostBuild
-import string
-
-t = BoostBuild.Tester()
-
-# Test that basic alternatives selection works.
-t.write("jamroot.jam", "")
-
-t.write("jamfile.jam", """
-exe a : a_empty.cpp ;
-exe a : a.cpp : <variant>release ;
-""")
-
-t.write("a_empty.cpp", "")
-
-t.write("a.cpp", "int main() {}\n")
-
-t.run_build_system("release")
-
-t.expect_addition("bin/$toolset/release/a.exe")
-
-# Test that alternative selection works for ordinary properties, in particular
-# user-defined.
-t.write("jamroot.jam", "")
-
-t.write("jamfile.jam", """
-import feature ;
-feature.feature X : off on : propagated ;
-exe a : b.cpp ;
-exe a : a.cpp : <X>on ;
-""")
-t.write("b.cpp", "int main() {}\n")
-
-t.rm("bin")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/b.obj")
-
-t.run_build_system("X=on")
-t.expect_addition("bin/$toolset/debug/X-on/a.obj")
-
-t.rm("bin")
-
-# Test that everything works ok even with default build.
-t.write("jamfile.jam", """
-exe a : a_empty.cpp : <variant>release ;
-exe a : a.cpp : <variant>debug ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.exe")
-
-# Test that only properties which are in the build request matter for
-# alternative selection. In other words, an alternative with <variant>release
-# is better than one with <variant>debug when building the release version.
-t.write("jamfile.jam", """
-exe a : a_empty.cpp : <variant>debug ;
-exe a : a.cpp : <variant>release ;
-""")
-
-t.run_build_system("release")
-t.expect_addition("bin/$toolset/release/a.exe")
-
-# Test that free properties do not matter. We really do not want the
-# <cxxflags> property in the build request to affect alternative selection.
-t.write("jamfile.jam", """
-exe a : a_empty.cpp : <variant>debug <define>FOO <include>BAR ;
-exe a : a.cpp : <variant>release ;
-""")
-
-t.rm("bin/$toolset/release/a.exe")
-t.run_build_system("release define=FOO")
-t.expect_addition("bin/$toolset/release/a.exe")
-
-# Test that ambiguity is reported correctly.
-t.write("jamfile.jam", """
-exe a : a_empty.cpp ;
-exe a : a.cpp ;
-""")
-t.run_build_system("--no-error-backtrace", status=None)
-t.fail_test(string.find(t.stdout(), "No best alternative") == -1)
-
-# Another ambiguity test: two matching properties in one alternative are neither
-# better nor worse than a single one in another alternative.
-t.write("jamfile.jam", """
-exe a : a_empty.cpp : <optimization>off <profiling>off ;
-exe a : a.cpp : <debug-symbols>on ;
-""")
-
-t.run_build_system("--no-error-backtrace", status=None)
-t.fail_test(string.find(t.stdout(), "No best alternative") == -1)
-
-# Test that we can have an alternative without sources.
-t.write("jamfile.jam", """
-alias specific-sources ;
-import feature ;
-feature.extend os : MAGIC ;
-alias specific-sources : b.cpp : <os>MAGIC ;
-exe a : a.cpp specific-sources ;
-""")
-t.rm("bin")
-t.run_build_system()
-
-t.cleanup()
diff --git a/tools/build/v2/test/boost-build.jam b/tools/build/v2/test/boost-build.jam
deleted file mode 100644
index ad68a288ed..0000000000
--- a/tools/build/v2/test/boost-build.jam
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Assume BOOST_BUILD_PATH points to the 'test' directory.
-# We need to leave 'test' there, so that 'test-config.jam'
-# can be found, but also add the parent directory, to find
-# all the other modules.
-
-BOOST_BUILD_PATH = $(BOOST_BUILD_PATH)/.. $(BOOST_BUILD_PATH) ;
-
-# Find the boost build system in the ../kernel directory.
-boost-build ../kernel ;
diff --git a/tools/build/v2/test/build_dir.py b/tools/build/v2/test/build_dir.py
deleted file mode 100644
index c5bcbc5b97..0000000000
--- a/tools/build/v2/test/build_dir.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that we can change the build directory using the 'build-dir' project
-# attribute.
-
-import BoostBuild
-import string
-import os
-
-t = BoostBuild.Tester()
-
-
-# Test that top-level project can affect build dir.
-t.write("jamroot.jam", "import gcc ;")
-t.write("jamfile.jam", """
-project : build-dir build ;
-exe a : a.cpp ;
-build-project src ;
-""")
-
-t.write("a.cpp", "int main() {}\n")
-
-t.write("src/jamfile.jam", "exe b : b.cpp ; ")
-
-t.write("src/b.cpp", "int main() {}\n")
-
-t.run_build_system()
-
-t.expect_addition(["build/$toolset/debug/a.exe",
- "build/src/$toolset/debug/b.exe"])
-
-# Test that building from child projects works.
-t.run_build_system(subdir='src')
-t.ignore("build/config.log")
-t.expect_nothing_more()
-
-# Test that a project can override the build dir.
-t.write("jamfile.jam", """
-exe a : a.cpp ;
-build-project src ;
-""")
-
-t.write("src/jamfile.jam", """
-project : build-dir build ;
-exe b : b.cpp ;
-""")
-
-t.run_build_system()
-t.expect_addition(["bin/$toolset/debug/a.exe",
- "src/build/$toolset/debug/b.exe"])
-
-# Now test the '--build-dir' option.
-t.rm(".")
-t.write("jamroot.jam", "")
-
-# Test that we get a warning when no project id is specified.
-t.run_build_system("--build-dir=foo")
-t.fail_test(string.find(t.stdout(),
- "warning: the --build-dir option will be ignored") == -1)
-
-t.write("jamroot.jam", """
-project foo ;
-exe a : a.cpp ;
-build-project sub ;
-""")
-t.write("a.cpp", "int main() {}\n")
-t.write("sub/jamfile.jam", "exe b : b.cpp ;\n")
-t.write("sub/b.cpp", "int main() {}\n")
-
-t.run_build_system("--build-dir=build")
-t.expect_addition(["build/foo/$toolset/debug/a.exe",
- "build/foo/sub/$toolset/debug/b.exe"])
-
-t.write("jamroot.jam", """
-project foo : build-dir bin.v2 ;
-exe a : a.cpp ;
-build-project sub ;
-""")
-
-t.run_build_system("--build-dir=build")
-t.expect_addition(["build/foo/bin.v2/$toolset/debug/a.exe",
- "build/foo/bin.v2/sub/$toolset/debug/b.exe"])
-
-# Try building in a subdir. We expect that the entire build tree will be in
-# 'sub/build'. Today, I am not sure if this is what the user expects, but let it
-# be.
-t.rm('build')
-t.run_build_system("--build-dir=build", subdir="sub")
-t.expect_addition(["sub/build/foo/bin.v2/sub/$toolset/debug/b.exe"])
-
-t.write("jamroot.jam", """
-project foo : build-dir %s ;
-exe a : a.cpp ;
-build-project sub ;
-""" % string.replace(os.getcwd(), '\\', '\\\\'))
-
-t.run_build_system("--build-dir=build", status=1)
-t.fail_test(string.find(t.stdout(),
- "Absolute directory specified via 'build-dir' project attribute") == -1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/build_file.py b/tools/build/v2/test/build_file.py
deleted file mode 100644
index 609690fea4..0000000000
--- a/tools/build/v2/test/build_file.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Copyright (C) Jurko Gospodnetic 2008.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that we can explicitly request a file (not a target) to be built by
-# specifying its name on the command line.
-
-import BoostBuild
-
-
-################################################################################
-#
-# test_building_file_from_specific_project()
-# ------------------------------------------
-#
-################################################################################
-
-def test_building_file_from_specific_project():
- t = BoostBuild.Tester()
-
- t.write("jamroot.jam", """
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp ;
-build-project sub ;
-""")
- t.write("hello.cpp", "int main() {}")
- t.write("sub/jamfile.jam", """
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp ;
-exe sub : hello.cpp ;
-""")
- t.write("sub/hello.cpp", "int main() {}")
-
- t.run_build_system("sub " + t.adjust_suffix("hello.obj"))
- t.expect_output_line("*depends on itself*", False)
- t.expect_addition("sub/bin/$toolset/debug/hello.obj")
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_building_file_from_specific_target()
-# -----------------------------------------
-#
-################################################################################
-
-def test_building_file_from_specific_target():
- t = BoostBuild.Tester()
-
- t.write("jamroot.jam", """
-exe hello1 : hello1.cpp ;
-exe hello2 : hello2.cpp ;
-exe hello3 : hello3.cpp ;
-""")
- t.write("hello1.cpp", "int main() {}")
- t.write("hello2.cpp", "int main() {}")
- t.write("hello3.cpp", "int main() {}")
-
- t.run_build_system("hello1 " + t.adjust_suffix("hello1.obj"))
- t.expect_addition("bin/$toolset/debug/hello1.obj")
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_building_missing_file_from_specific_target()
-# -------------------------------------------------
-#
-################################################################################
-
-def test_building_missing_file_from_specific_target():
- t = BoostBuild.Tester()
-
- t.write("jamroot.jam", """
-exe hello1 : hello1.cpp ;
-exe hello2 : hello2.cpp ;
-exe hello3 : hello3.cpp ;
-""")
- t.write("hello1.cpp", "int main() {}")
- t.write("hello2.cpp", "int main() {}")
- t.write("hello3.cpp", "int main() {}")
-
- t.run_build_system("hello1 " + t.adjust_suffix("hello2.obj"), status=1)
- t.expect_output_line("don't know how to make*" + t.adjust_suffix("hello2.obj"))
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_building_multiple_files_with_different_names()
-# ---------------------------------------------------
-#
-################################################################################
-
-def test_building_multiple_files_with_different_names():
- t = BoostBuild.Tester()
-
- t.write("jamroot.jam", """
-exe hello1 : hello1.cpp ;
-exe hello2 : hello2.cpp ;
-exe hello3 : hello3.cpp ;
-""")
- t.write("hello1.cpp", "int main() {}")
- t.write("hello2.cpp", "int main() {}")
- t.write("hello3.cpp", "int main() {}")
-
- t.run_build_system(
- t.adjust_suffix("hello1.obj") + " " +
- t.adjust_suffix("hello2.obj"))
- t.expect_addition("bin/$toolset/debug/hello1.obj")
- t.expect_addition("bin/$toolset/debug/hello2.obj")
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_building_multiple_files_with_the_same_name()
-# -------------------------------------------------
-#
-################################################################################
-
-def test_building_multiple_files_with_the_same_name():
- t = BoostBuild.Tester()
-
- t.write("jamroot.jam", """
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp ;
-build-project sub ;
-""")
- t.write("hello.cpp", "int main() {}")
- t.write("sub/jamfile.jam", """
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp ;
-exe sub : hello.cpp ;
-""")
- t.write("sub/hello.cpp", "int main() {}")
-
- t.run_build_system(t.adjust_suffix("hello.obj"))
- t.expect_output_line("*depends on itself*", False)
- t.expect_addition("bin/$toolset/debug/hello.obj")
- t.expect_addition("sub/bin/$toolset/debug/hello.obj")
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-test_building_file_from_specific_project()
-test_building_file_from_specific_target()
-test_building_missing_file_from_specific_target()
-test_building_multiple_files_with_different_names()
-test_building_multiple_files_with_the_same_name()
diff --git a/tools/build/v2/test/build_no.py b/tools/build/v2/test/build_no.py
deleted file mode 100644
index f6e6f9305e..0000000000
--- a/tools/build/v2/test/build_no.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that the <build>no property prevents a target from being built.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello : hello.cpp : <variant>debug:<build>no ;
-""")
-
-t.write("hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system()
-t.expect_nothing_more()
-
-t.run_build_system("release")
-t.expect_addition("bin/$toolset/release/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/builtin_echo.py b/tools/build/v2/test/builtin_echo.py
deleted file mode 100755
index 4d57e96bfc..0000000000
--- a/tools/build/v2/test/builtin_echo.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2012 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests the ECHO rule.
-
-import BoostBuild
-
-def test_echo(name):
- t = BoostBuild.Tester(pass_toolset=0)
-
- t.write("file.jam", """
- %s ;
- UPDATE ;
- """ % name)
- t.run_build_system("-ffile.jam", stdout="\n")
-
- t.write("file.jam", """
- %s a message ;
- UPDATE ;
- """ % name)
- t.run_build_system("-ffile.jam", stdout="a message\n")
-
- t.cleanup()
-
-test_echo("ECHO")
-test_echo("Echo")
-test_echo("echo")
diff --git a/tools/build/v2/test/builtin_exit.py b/tools/build/v2/test/builtin_exit.py
deleted file mode 100755
index 2e4fd1215c..0000000000
--- a/tools/build/v2/test/builtin_exit.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2012 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests the EXIT rule.
-
-import BoostBuild
-
-def test_exit(name):
- t = BoostBuild.Tester(pass_toolset=0)
-
- t.write("file.jam", """
- %s ;
- """ % name)
- t.run_build_system("-ffile.jam", status=1, stdout="\n")
- t.rm(".")
-
- t.write("file.jam", """
- %s : 0 ;
- """ % name)
- t.run_build_system("-ffile.jam", stdout="\n")
- t.rm(".")
-
- t.write("file.jam", """
- %s : 1 ;
- """ % name)
- t.run_build_system("-ffile.jam", status=1, stdout="\n")
- t.rm(".")
-
- t.write("file.jam", """
- %s : 2 ;
- """ % name)
- t.run_build_system("-ffile.jam", status=2, stdout="\n")
- t.rm(".")
-
- t.write("file.jam", """
- %s a message ;
- """ % name)
- t.run_build_system("-ffile.jam", status=1, stdout="a message\n")
- t.rm(".")
-
- t.write("file.jam", """
- %s a message : 0 ;
- """ % name)
- t.run_build_system("-ffile.jam", stdout="a message\n")
- t.rm(".")
-
- t.cleanup()
-
-test_exit("EXIT")
-test_exit("Exit")
-test_exit("exit")
diff --git a/tools/build/v2/test/c_file.py b/tools/build/v2/test/c_file.py
deleted file mode 100644
index 28fe253112..0000000000
--- a/tools/build/v2/test/c_file.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that C files are compiled by a C compiler.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-project ;
-exe hello : hello.cpp a.c ;
-""")
-
-t.write("hello.cpp", """
-extern "C" int foo();
-int main() { return foo(); }
-""")
-
-t.write("a.c", """
-// This will not compile unless in C mode.
-int foo()
-{
- int new = 0;
- new = (new+1)*7;
- return new;
-}
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/chain.py b/tools/build/v2/test/chain.py
deleted file mode 100644
index a7cf1b8314..0000000000
--- a/tools/build/v2/test/chain.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests that:
-# 1) the 'make' rule correctly assigns types to produced targets, and
-# 2) if 'make' creates targets of type CPP, they are correctly used.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# In order to correctly link this app, 'b.cpp', created by a 'make' rule, should
-# be compiled.
-
-t.write("jamroot.jam", "import gcc ;")
-
-t.write("jamfile.jam", r'''
-import os ;
-if [ os.name ] = NT
-{
- actions create
- {
- echo int main() {} > $(<)
- }
-}
-else
-{
- actions create
- {
- echo "int main() {}" > $(<)
- }
-}
-
-IMPORT $(__name__) : create : : create ;
-
-exe a : l dummy.cpp ;
-
-# Needs to be static lib for Windows - main() cannot appear in DLL.
-static-lib l : a.cpp b.cpp ;
-
-make b.cpp : : create ;
-''')
-
-t.write("a.cpp", "")
-
-t.write("dummy.cpp", "// msvc needs at least one object file\n")
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/clean.py b/tools/build/v2/test/clean.py
deleted file mode 100644
index 8c95e6127d..0000000000
--- a/tools/build/v2/test/clean.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("a.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-exe a : a.cpp sub1//sub1 sub2//sub2 sub3//sub3 ;
-""")
-
-t.write("sub1/jamfile.jam", """
-lib sub1 : sub1.cpp sub1_2 ../sub2//sub2 ;
-lib sub1_2 : sub1_2.cpp ;
-""")
-
-t.write("sub1/sub1.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void sub1() {}
-""")
-
-t.write("sub1/sub1_2.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void sub1() {}
-""")
-
-t.write("sub2/jamfile.jam", """
-lib sub2 : sub2.cpp ;
-""")
-
-t.write("sub2/sub2.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void sub2() {}
-""")
-
-t.write("sub3/jamroot.jam", """
-lib sub3 : sub3.cpp ;
-""")
-
-t.write("sub3/sub3.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void sub3() {}
-""")
-
-# The 'clean' should not remove files under separate jamroot.jam.
-t.run_build_system()
-t.run_build_system("--clean")
-t.expect_removal("bin/$toolset/debug/a.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
-t.expect_removal("sub2/bin/$toolset/debug/sub2.obj")
-t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
-
-# The 'clean-all' removes everything it can reach.
-t.run_build_system()
-t.run_build_system("--clean-all")
-t.expect_removal("bin/$toolset/debug/a.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
-t.expect_removal("sub2/bin/$toolset/debug/sub2.obj")
-t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
-
-# The 'clean' together with project target removes only under that project.
-t.run_build_system()
-t.run_build_system("sub1 --clean")
-t.expect_nothing("bin/$toolset/debug/a.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
-t.expect_nothing("sub2/bin/$toolset/debug/sub2.obj")
-t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
-
-# And 'clean-all' removes everything.
-t.run_build_system()
-t.run_build_system("sub1 --clean-all")
-t.expect_nothing("bin/$toolset/debug/a.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
-t.expect_removal("sub1/bin/$toolset/debug/sub1_2.obj")
-t.expect_removal("sub2/bin/$toolset/debug/sub2.obj")
-t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
-
-# If main target is explicitly named, we should not remove files from other
-# targets.
-t.run_build_system()
-t.run_build_system("sub1//sub1 --clean")
-t.expect_removal("sub1/bin/$toolset/debug/sub1.obj")
-t.expect_nothing("sub1/bin/$toolset/debug/sub1_2.obj")
-t.expect_nothing("sub2/bin/$toolset/debug/sub2.obj")
-t.expect_nothing("sub3/bin/$toolset/debug/sub3.obj")
-
-# Regression test: sources of the 'cast' rule were mistakenly deleted.
-t.rm(".")
-t.write("jamroot.jam", """
-import cast ;
-cast a cpp : a.h ;
-""")
-t.write("a.h", "")
-t.run_build_system("--clean")
-t.expect_nothing("a.h")
-
-t.cleanup()
diff --git a/tools/build/v2/test/composite.py b/tools/build/v2/test/composite.py
deleted file mode 100644
index ca3fb3272c..0000000000
--- a/tools/build/v2/test/composite.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that composite properties are handled correctly.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello : hello.cpp : <variant>release ;
-""")
-
-t.write("hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/release/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/conditionals.py b/tools/build/v2/test/conditionals.py
deleted file mode 100644
index b1d78a5406..0000000000
--- a/tools/build/v2/test/conditionals.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test conditional properties.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# Arrange a project which will build only if 'a.cpp' is compiled with the
-# "STATIC" define.
-t.write("a.cpp", """
-#ifdef STATIC
-int main() {}
-#endif
-""")
-
-# Test conditionals in target requirements.
-t.write("jamroot.jam", "exe a : a.cpp : <link>static:<define>STATIC ;")
-t.run_build_system("link=static")
-t.expect_addition("bin/$toolset/debug/link-static/a.exe")
-t.rm("bin")
-
-# Test conditionals in project requirements.
-t.write("jamroot.jam", """
-project : requirements <link>static:<define>STATIC ;
-exe a : a.cpp ;
-""")
-t.run_build_system("link=static")
-t.expect_addition("bin/$toolset/debug/link-static/a.exe")
-t.rm("bin")
-
-# Regression test for a bug found by Ali Azarbayejani. Conditionals inside usage
-# requirements were not being evaluated.
-t.write("jamroot.jam", """
-lib l : l.cpp : : : <link>static:<define>STATIC ;
-exe a : a.cpp l ;
-""")
-t.write("l.cpp", "int i;")
-t.run_build_system("link=static")
-t.expect_addition("bin/$toolset/debug/link-static/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/conditionals3.py b/tools/build/v2/test/conditionals3.py
deleted file mode 100644
index ca328ff1b7..0000000000
--- a/tools/build/v2/test/conditionals3.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that conditional properties work, even if the property is free and its
-# value includes a colon.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello : hello.cpp : <variant>debug:<define>CLASS=Foo::Bar ;
-""")
-
-t.write("hello.cpp", """
-namespace Foo { class Bar { } ; }
-int main()
-{
- CLASS c;
- c; // Disables the unused variable warning.
-}
-""")
-
-t.run_build_system(stdout=None, stderr=None)
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/conditionals_multiple.py b/tools/build/v2/test/conditionals_multiple.py
deleted file mode 100755
index d58d86c27e..0000000000
--- a/tools/build/v2/test/conditionals_multiple.py
+++ /dev/null
@@ -1,312 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that properties conditioned on more than one other property work as
-# expected.
-
-import BoostBuild
-
-
-################################################################################
-#
-# test_multiple_conditions()
-# --------------------------
-#
-################################################################################
-
-def test_multiple_conditions():
- """Basic tests for properties conditioned on multiple other properties.
- """
-
- t = BoostBuild.Tester("--user-config= --ignore-site-config toolset=testToolset",
- pass_toolset=False, use_test_config=False)
-
- t.write("testToolset.jam", """
-import feature ;
-feature.extend toolset : testToolset ;
-rule init ( ) { }
-""")
-
- t.write("testToolset.py", """
-from b2.build import feature
-feature.extend('toolset', ["testToolset"])
-def init ( ):
- pass
-""")
-
- t.write("jamroot.jam", """
-import feature ;
-import notfile ;
-import toolset ;
-
-feature.feature description : : free incidental ;
-feature.feature aaa : 1 0 : incidental ;
-feature.feature bbb : 1 0 : incidental ;
-feature.feature ccc : 1 0 : incidental ;
-
-rule buildRule ( name : targets ? : properties * )
-{
- for local description in [ feature.get-values description : $(properties) ]
- {
- ECHO "description:" /$(description)/ ;
- }
-}
-
-notfile testTarget1 : @buildRule : :
- <description>d
- <aaa>0:<description>a0
- <aaa>1:<description>a1
- <aaa>0,<bbb>0:<description>a0-b0
- <aaa>0,<bbb>1:<description>a0-b1
- <aaa>1,<bbb>0:<description>a1-b0
- <aaa>1,<bbb>1:<description>a1-b1
- <aaa>0,<bbb>0,<ccc>0:<description>a0-b0-c0
- <aaa>0,<bbb>0,<ccc>1:<description>a0-b0-c1
- <aaa>0,<bbb>1,<ccc>1:<description>a0-b1-c1
- <aaa>1,<bbb>0,<ccc>1:<description>a1-b0-c1
- <aaa>1,<bbb>1,<ccc>0:<description>a1-b1-c0
- <aaa>1,<bbb>1,<ccc>1:<description>a1-b1-c1 ;
-""")
-
- t.run_build_system("aaa=1 bbb=1 ccc=1")
- t.expect_output_line("description: /d/" )
- t.expect_output_line("description: /a0/" , False)
- t.expect_output_line("description: /a1/" )
- t.expect_output_line("description: /a0-b0/" , False)
- t.expect_output_line("description: /a0-b1/" , False)
- t.expect_output_line("description: /a1-b0/" , False)
- t.expect_output_line("description: /a1-b1/" )
- t.expect_output_line("description: /a0-b0-c0/", False)
- t.expect_output_line("description: /a0-b0-c1/", False)
- t.expect_output_line("description: /a0-b1-c1/", False)
- t.expect_output_line("description: /a1-b0-c1/", False)
- t.expect_output_line("description: /a1-b1-c0/", False)
- t.expect_output_line("description: /a1-b1-c1/" )
-
- t.run_build_system("aaa=0 bbb=0 ccc=1")
- t.expect_output_line("description: /d/" )
- t.expect_output_line("description: /a0/" )
- t.expect_output_line("description: /a1/" , False)
- t.expect_output_line("description: /a0-b0/" )
- t.expect_output_line("description: /a0-b1/" , False)
- t.expect_output_line("description: /a1-b0/" , False)
- t.expect_output_line("description: /a1-b1/" , False)
- t.expect_output_line("description: /a0-b0-c0/", False)
- t.expect_output_line("description: /a0-b0-c1/" )
- t.expect_output_line("description: /a0-b1-c1/", False)
- t.expect_output_line("description: /a1-b0-c1/", False)
- t.expect_output_line("description: /a1-b1-c0/", False)
- t.expect_output_line("description: /a1-b1-c1/", False)
-
- t.run_build_system("aaa=0 bbb=0 ccc=0")
- t.expect_output_line("description: /d/" )
- t.expect_output_line("description: /a0/" )
- t.expect_output_line("description: /a1/" , False)
- t.expect_output_line("description: /a0-b0/" )
- t.expect_output_line("description: /a0-b1/" , False)
- t.expect_output_line("description: /a1-b0/" , False)
- t.expect_output_line("description: /a1-b1/" , False)
- t.expect_output_line("description: /a0-b0-c0/" )
- t.expect_output_line("description: /a0-b0-c1/", False)
- t.expect_output_line("description: /a0-b1-c1/", False)
- t.expect_output_line("description: /a1-b0-c1/", False)
- t.expect_output_line("description: /a1-b1-c0/", False)
- t.expect_output_line("description: /a1-b1-c1/", False)
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_multiple_conditions_with_toolset_version()
-# -----------------------------------------------
-#
-################################################################################
-
-def test_multiple_conditions_with_toolset_version():
- """Regression tests for properties conditioned on the toolset version
- subfeature and some additional properties.
- """
-
- toolset = "testToolset" ;
-
- t = BoostBuild.Tester("--user-config= --ignore-site-config", pass_toolset=False, use_test_config=False)
-
- t.write( toolset + ".jam", """
-import feature ;
-feature.extend toolset : %(toolset)s ;
-feature.subfeature toolset %(toolset)s : version : 0 1 ;
-rule init ( version ? ) { }
-""" % {"toolset": toolset})
-
- t.write( "testToolset.py", """
-from b2.build import feature
-feature.extend('toolset', ["testToolset"])
-feature.subfeature('toolset',"testToolset","version",['0','1'])
-def init ( version ):
- pass
- """)
-
- t.write("jamroot.jam", """
-import feature ;
-import notfile ;
-import toolset ;
-
-toolset.using testToolset ;
-
-feature.feature description : : free incidental ;
-feature.feature aaa : 0 1 : incidental ;
-feature.feature bbb : 0 1 : incidental ;
-feature.feature ccc : 0 1 : incidental ;
-
-rule buildRule ( name : targets ? : properties * )
-{
- local ttt = [ feature.get-values toolset : $(properties) ] ;
- local vvv = [ feature.get-values toolset-testToolset:version : $(properties) ] ;
- local aaa = [ feature.get-values aaa : $(properties) ] ;
- local bbb = [ feature.get-values bbb : $(properties) ] ;
- local ccc = [ feature.get-values ccc : $(properties) ] ;
- ECHO "toolset:" /$(ttt)/ "version:" /$(vvv)/ "aaa/bbb/ccc:" /$(aaa)/$(bbb)/$(ccc)/ ;
- for local description in [ feature.get-values description : $(properties) ]
- {
- ECHO "description:" /$(description)/ ;
- }
-}
-
-notfile testTarget1 : @buildRule : :
- <toolset>testToolset,<aaa>0:<description>t-a0
- <toolset>testToolset,<aaa>1:<description>t-a1
-
- <toolset>testToolset-0,<aaa>0:<description>t0-a0
- <toolset>testToolset-0,<aaa>1:<description>t0-a1
- <toolset>testToolset-1,<aaa>0:<description>t1-a0
- <toolset>testToolset-1,<aaa>1:<description>t1-a1
-
- <toolset>testToolset,<aaa>0,<bbb>0:<description>t-a0-b0
- <toolset>testToolset,<aaa>0,<bbb>1:<description>t-a0-b1
- <toolset>testToolset,<aaa>1,<bbb>0:<description>t-a1-b0
- <toolset>testToolset,<aaa>1,<bbb>1:<description>t-a1-b1
-
- <aaa>0,<toolset>testToolset,<bbb>0:<description>a0-t-b0
- <aaa>0,<toolset>testToolset,<bbb>1:<description>a0-t-b1
- <aaa>1,<toolset>testToolset,<bbb>0:<description>a1-t-b0
- <aaa>1,<toolset>testToolset,<bbb>1:<description>a1-t-b1
-
- <aaa>0,<bbb>0,<toolset>testToolset:<description>a0-b0-t
- <aaa>0,<bbb>1,<toolset>testToolset:<description>a0-b1-t
- <aaa>1,<bbb>0,<toolset>testToolset:<description>a1-b0-t
- <aaa>1,<bbb>1,<toolset>testToolset:<description>a1-b1-t
-
- <toolset>testToolset-0,<aaa>0,<bbb>0:<description>t0-a0-b0
- <toolset>testToolset-0,<aaa>0,<bbb>1:<description>t0-a0-b1
- <toolset>testToolset-0,<aaa>1,<bbb>0:<description>t0-a1-b0
- <toolset>testToolset-0,<aaa>1,<bbb>1:<description>t0-a1-b1
- <toolset>testToolset-1,<aaa>0,<bbb>0:<description>t1-a0-b0
- <toolset>testToolset-1,<aaa>0,<bbb>1:<description>t1-a0-b1
- <toolset>testToolset-1,<aaa>1,<bbb>0:<description>t1-a1-b0
- <toolset>testToolset-1,<aaa>1,<bbb>1:<description>t1-a1-b1
-
- <aaa>0,<toolset>testToolset-1,<bbb>0:<description>a0-t1-b0
- <aaa>0,<toolset>testToolset-1,<bbb>1:<description>a0-t1-b1
- <aaa>1,<toolset>testToolset-0,<bbb>0:<description>a1-t0-b0
- <aaa>1,<toolset>testToolset-0,<bbb>1:<description>a1-t0-b1
-
- <bbb>0,<aaa>1,<toolset>testToolset-0:<description>b0-a1-t0
- <bbb>0,<aaa>0,<toolset>testToolset-1:<description>b0-a0-t1
- <bbb>0,<aaa>1,<toolset>testToolset-1:<description>b0-a1-t1
- <bbb>1,<aaa>0,<toolset>testToolset-1:<description>b1-a0-t1
- <bbb>1,<aaa>1,<toolset>testToolset-0:<description>b1-a1-t0
- <bbb>1,<aaa>1,<toolset>testToolset-1:<description>b1-a1-t1 ;
-""")
-
- t.run_build_system("aaa=1 bbb=1 ccc=1 toolset=%s-0" % toolset)
- t.expect_output_line("description: /t-a0/" , False)
- t.expect_output_line("description: /t-a1/" )
- t.expect_output_line("description: /t0-a0/" , False)
- t.expect_output_line("description: /t0-a1/" )
- t.expect_output_line("description: /t1-a0/" , False)
- t.expect_output_line("description: /t1-a1/" , False)
- t.expect_output_line("description: /t-a0-b0/" , False)
- t.expect_output_line("description: /t-a0-b1/" , False)
- t.expect_output_line("description: /t-a1-b0/" , False)
- t.expect_output_line("description: /t-a1-b1/" )
- t.expect_output_line("description: /a0-t-b0/" , False)
- t.expect_output_line("description: /a0-t-b1/" , False)
- t.expect_output_line("description: /a1-t-b0/" , False)
- t.expect_output_line("description: /a1-t-b1/" )
- t.expect_output_line("description: /a0-b0-t/" , False)
- t.expect_output_line("description: /a0-b1-t/" , False)
- t.expect_output_line("description: /a1-b0-t/" , False)
- t.expect_output_line("description: /a1-b1-t/" )
- t.expect_output_line("description: /t0-a0-b0/", False)
- t.expect_output_line("description: /t0-a0-b1/", False)
- t.expect_output_line("description: /t0-a1-b0/", False)
- t.expect_output_line("description: /t0-a1-b1/" )
- t.expect_output_line("description: /t1-a0-b0/", False)
- t.expect_output_line("description: /t1-a0-b1/", False)
- t.expect_output_line("description: /t1-a1-b0/", False)
- t.expect_output_line("description: /t1-a1-b1/", False)
- t.expect_output_line("description: /a0-t1-b0/", False)
- t.expect_output_line("description: /a0-t1-b1/", False)
- t.expect_output_line("description: /a1-t0-b0/", False)
- t.expect_output_line("description: /a1-t0-b1/" )
- t.expect_output_line("description: /b0-a1-t0/", False)
- t.expect_output_line("description: /b0-a0-t1/", False)
- t.expect_output_line("description: /b0-a1-t1/", False)
- t.expect_output_line("description: /b1-a0-t1/", False)
- t.expect_output_line("description: /b1-a1-t0/" )
- t.expect_output_line("description: /b1-a1-t1/", False)
-
- t.run_build_system("aaa=1 bbb=1 ccc=1 toolset=%s-1" % toolset)
- t.expect_output_line("description: /t-a0/" , False)
- t.expect_output_line("description: /t-a1/" )
- t.expect_output_line("description: /t0-a0/" , False)
- t.expect_output_line("description: /t0-a1/" , False)
- t.expect_output_line("description: /t1-a0/" , False)
- t.expect_output_line("description: /t1-a1/" )
- t.expect_output_line("description: /t-a0-b0/" , False)
- t.expect_output_line("description: /t-a0-b1/" , False)
- t.expect_output_line("description: /t-a1-b0/" , False)
- t.expect_output_line("description: /t-a1-b1/" )
- t.expect_output_line("description: /a0-t-b0/" , False)
- t.expect_output_line("description: /a0-t-b1/" , False)
- t.expect_output_line("description: /a1-t-b0/" , False)
- t.expect_output_line("description: /a1-t-b1/" )
- t.expect_output_line("description: /a0-b0-t/" , False)
- t.expect_output_line("description: /a0-b1-t/" , False)
- t.expect_output_line("description: /a1-b0-t/" , False)
- t.expect_output_line("description: /a1-b1-t/" )
- t.expect_output_line("description: /t0-a0-b0/", False)
- t.expect_output_line("description: /t0-a0-b1/", False)
- t.expect_output_line("description: /t0-a1-b0/", False)
- t.expect_output_line("description: /t0-a1-b1/", False)
- t.expect_output_line("description: /t1-a0-b0/", False)
- t.expect_output_line("description: /t1-a0-b1/", False)
- t.expect_output_line("description: /t1-a1-b0/", False)
- t.expect_output_line("description: /t1-a1-b1/" )
- t.expect_output_line("description: /a0-t1-b0/", False)
- t.expect_output_line("description: /a0-t1-b1/", False)
- t.expect_output_line("description: /a1-t0-b0/", False)
- t.expect_output_line("description: /a1-t0-b1/", False)
- t.expect_output_line("description: /b0-a1-t0/", False)
- t.expect_output_line("description: /b0-a0-t1/", False)
- t.expect_output_line("description: /b0-a1-t1/", False)
- t.expect_output_line("description: /b1-a0-t1/", False)
- t.expect_output_line("description: /b1-a1-t0/", False)
- t.expect_output_line("description: /b1-a1-t1/" )
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-test_multiple_conditions()
-test_multiple_conditions_with_toolset_version()
diff --git a/tools/build/v2/test/configuration.py b/tools/build/v2/test/configuration.py
deleted file mode 100755
index 1125db36bd..0000000000
--- a/tools/build/v2/test/configuration.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test Boost Build configuration file handling.
-
-import BoostBuild
-import os.path
-import string
-
-
-################################################################################
-#
-# test_user_configuration()
-# -------------------------
-#
-################################################################################
-
-def test_user_configuration():
- """Test Boost Build user configuration handling. Both relative and absolute
- path handling is tested.
- """
-
- t = BoostBuild.Tester("--debug-configuration", pass_toolset=False,
- use_test_config=False)
-
- implicitConfigLoadMessage = "notice: Loading user-config configuration file: *"
- explicitConfigLoadMessage = "notice: Loading explicitly specified user configuration file:"
- testMessage = "_!_!_!_!_!_!_!_!_ %s _!_!_!_!_!_!_!_!_"
- toolsetName = "__myDummyToolset__"
- subdirName = "ASubDirectory"
- configFileNames = ["ups_lala_1.jam", "ups_lala_2.jam",
- os.path.join(subdirName, "ups_lala_3.jam")]
-
- for configFileName in configFileNames:
- message = "ECHO \"%s\" ;" % testMessage % configFileName
- # We need to double any backslashes in the message or Jam will interpret
- # them as escape characters.
- t.write(configFileName, message.replace("\\", "\\\\"))
-
- # Prepare a dummy toolset so we do not get errors in case the default one is
- # not found.
- t.write(toolsetName + ".jam", """
-import feature ;
-feature.extend toolset : %s ;
-rule init ( ) { }
-""" % toolsetName )
-
- # Python version of same dummy toolset.
- t.write(toolsetName + ".py", """
-from b2.build import feature
-feature.extend('toolset', ['%s'])
-def init(): pass
-""" % toolsetName )
-
- t.write("jamroot.jam", "using %s ;" % toolsetName)
-
- t.run_build_system()
- t.expect_output_line(explicitConfigLoadMessage, False)
- t.expect_output_line(testMessage % configFileNames[0], False)
- t.expect_output_line(testMessage % configFileNames[1], False)
- t.expect_output_line(testMessage % configFileNames[2], False)
-
- t.run_build_system("--user-config=")
- t.expect_output_line(implicitConfigLoadMessage, False)
- t.expect_output_line(explicitConfigLoadMessage, False)
- t.expect_output_line(testMessage % configFileNames[0], False)
- t.expect_output_line(testMessage % configFileNames[1], False)
- t.expect_output_line(testMessage % configFileNames[2], False)
-
- t.run_build_system('--user-config=""')
- t.expect_output_line(implicitConfigLoadMessage, False)
- t.expect_output_line(explicitConfigLoadMessage, False)
- t.expect_output_line(testMessage % configFileNames[0], False)
- t.expect_output_line(testMessage % configFileNames[1], False)
- t.expect_output_line(testMessage % configFileNames[2], False)
-
- t.run_build_system('--user-config="%s"' % configFileNames[0])
- t.expect_output_line(implicitConfigLoadMessage, False)
- t.expect_output_line(explicitConfigLoadMessage)
- t.expect_output_line(testMessage % configFileNames[0] )
- t.expect_output_line(testMessage % configFileNames[1], False)
- t.expect_output_line(testMessage % configFileNames[2], False)
-
- t.run_build_system('--user-config="%s"' % configFileNames[2])
- t.expect_output_line(implicitConfigLoadMessage, False)
- t.expect_output_line(explicitConfigLoadMessage)
- t.expect_output_line(testMessage % configFileNames[0], False)
- t.expect_output_line(testMessage % configFileNames[1], False)
- t.expect_output_line(testMessage % configFileNames[2] )
-
- t.run_build_system('--user-config="%s"' % os.path.abspath(configFileNames[1]))
- t.expect_output_line(implicitConfigLoadMessage, False)
- t.expect_output_line(explicitConfigLoadMessage)
- t.expect_output_line(testMessage % configFileNames[0], False)
- t.expect_output_line(testMessage % configFileNames[1] )
- t.expect_output_line(testMessage % configFileNames[2], False)
-
- t.run_build_system('--user-config="%s"' % os.path.abspath(configFileNames[2]))
- t.expect_output_line(implicitConfigLoadMessage, False)
- t.expect_output_line(explicitConfigLoadMessage)
- t.expect_output_line(testMessage % configFileNames[0], False)
- t.expect_output_line(testMessage % configFileNames[1], False)
- t.expect_output_line(testMessage % configFileNames[2] )
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-test_user_configuration()
diff --git a/tools/build/v2/test/copy_time.py b/tools/build/v2/test/copy_time.py
deleted file mode 100755
index 8943d28575..0000000000
--- a/tools/build/v2/test/copy_time.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright (c) 2008
-# Steven Watanabe
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that the common.copy rule sets the modification
-# date of the new file to the current time.
-
-import BoostBuild
-
-tester = BoostBuild.Tester()
-
-tester.write("test1.cpp", """
-#include <iostream>
-template<bool, int M, class Next>
-struct time_waster {
- typedef typename time_waster<true, M-1, time_waster>::type type1;
- typedef typename time_waster<false, M-1, time_waster>::type type2;
- typedef void type;
-};
-template<bool B, class Next>
-struct time_waster<B, 0, Next> {
- typedef void type;
-};
-typedef time_waster<true, 10, void>::type type;
-int f() { return 0; }
-""")
-
-tester.write("test2.cpp", """
-#include <iostream>
-template<bool, int M, class Next>
-struct time_waster {
- typedef typename time_waster<true, M-1, time_waster>::type type1;
- typedef typename time_waster<false, M-1, time_waster>::type type2;
- typedef void type;
-};
-template<bool B, class Next>
-struct time_waster<B, 0, Next> {
- typedef void type;
-};
-typedef time_waster<true, 10, void>::type type;
-int g() { return 0; }
-""")
-
-tester.write("jamroot.jam", """
-obj test2 : test2.cpp ;
-obj test1 : test1.cpp : <dependency>test2 ;
-install test2i : test2 : <dependency>test1 ;
-""")
-
-tester.run_build_system()
-
-tester.expect_addition("bin/$toolset/debug/test2.obj")
-tester.expect_addition("bin/$toolset/debug/test1.obj")
-tester.expect_addition("test2i/test2.obj")
-tester.expect_nothing_more()
-
-test2src = tester.read("test2i/test2.obj")
-test2dest = tester.read("bin/$toolset/debug/test2.obj")
-
-if test2src != test2dest:
- BoostBuild.annotation("failure", "The object file was not copied correctly")
- tester.fail_test(1)
-
-del test2src
-del test2dest
-
-tester.run_build_system("-d1")
-tester.expect_output_line("common.copy*", expected_to_exist=False)
-tester.expect_nothing_more()
-
-tester.cleanup()
diff --git a/tools/build/v2/test/core-language/test.jam b/tools/build/v2/test/core-language/test.jam
deleted file mode 100644
index 24dcc2924d..0000000000
--- a/tools/build/v2/test/core-language/test.jam
+++ /dev/null
@@ -1,1353 +0,0 @@
-# Copyright 2011 Steven Watanabe.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tools
-
-passed = 0 ;
-failed = 0 ;
-
-rule show-result ( id : test-result )
-{
- if ! ( --quiet in $(ARGV) )
- {
- ECHO $(test-result): $(id) ;
- }
- $(test-result) = [ CALC $($(test-result)) + 1 ] ;
-}
-
-rule check-equal ( id : values * : expected * )
-{
- local test-result ;
- if x$(values) = x$(expected)
- {
- test-result = passed ;
- }
- else
- {
- ECHO error: "[" $(values) "] != [" $(expected) "]" ;
- test-result = failed ;
- }
- show-result $(id) : $(test-result) ;
-}
-
-rule mark-order ( id : result * )
-{
- order += $(id) ;
- return $(result) ;
-}
-
-rule check-order ( id : expected * )
-{
- check-equal $(id) : $(order) : $(expected) ;
- order = ;
-}
-
-# Check variable expansion
-
-{
-
-local v1 = 1 2 3 ;
-local v2 = 4 5 6 ;
-local v3 = 0 1 2 3 4 5 6 7 8 9 10 ;
-local g = g1 g2 ;
-local v4 = String/With/Mixed/Case ;
-local v5 = path\\with\\backslashes ;
-local v6 = <grist>generic/path.txt(member.txt) ;
-local v7 = <Grist1>Dir1/File1.cpp(M1.c) <Grist2>Dir2/File2.hpp(M2.c) ;
-local v8 = <Grist3>Dir3/File3.c(M3.c) <Grist4>Dir4/File4.h(M4.c) ;
-local select1 = GU BL DBST ;
-local case1 = L U ;
-local vars = 7 8 ;
-local sub = 2 1 ;
-local p0 = name ;
-local p1 = dir/name ;
-local p2 = dir/sub/name ;
-local j1 = , - ;
-
-check-equal var-product : $(v1)$(v2) : 14 15 16 24 25 26 34 35 36 ;
-
-check-equal var-set-grist : $(v1:G=grist) : <grist>1 <grist>2 <grist>3 ;
-check-equal var-set-grist-multi : $(v1:G=$(g)) : <g1>1 <g1>2 <g1>3 <g2>1 <g2>2 <g2>3 ;
-
-check-equal var-lower : $(v4:L) : string/with/mixed/case ;
-check-equal var-upper : $(v4:U) : STRING/WITH/MIXED/CASE ;
-check-equal var-LU : $(v4:LU) : STRING/WITH/MIXED/CASE ;
-check-equal var-slashes : $(v5:T) : path/with/backslashes ;
-check-equal var-grist : $(v6:G) : <grist> ;
-check-equal var-base : $(v6:B) : path ;
-check-equal var-suffix : $(v6:S) : .txt ;
-check-equal var-dir : $(v6:D) : generic ;
-check-equal var-member : $(v6:M) : (member.txt) ;
-check-equal var-multi : $(v6:$(select1)) : <GRIST> path generic/path.txt ;
-
-check-equal var-join-0 : $(:J=,) : ;
-check-equal var-join-1 : $(p0:J=,) : name ;
-check-equal var-join-3 : $(v1:J=,) : 1,2,3 ;
-check-equal var-set-grist-join : $(v1:G=grist:J=,) : <grist>1,<grist>2,<grist>3 ;
-# behavior change. In the past, a J= modifier would
-# cause only the last element of the other modifiers
-# to take effect.
-check-equal var-set-grist-multi-join : $(v1:G=$(g):J=,) : <g1>1,<g1>2,<g1>3 <g2>1,<g2>2,<g2>3 ;
-check-equal var-set-grist-multi-join-multi : $(v1:G=$(g):J=$(j1)) : <g1>1,<g1>2,<g1>3 <g1>1-<g1>2-<g1>3 <g2>1,<g2>2,<g2>3 <g2>1-<g2>2-<g2>3 ;
-
-check-equal var-D=-0 : name : $(p0:D=) ;
-check-equal var-D=-1 : name : $(p1:D=) ;
-check-equal var-D=-2 : name : $(p2:D=) ;
-check-equal var-D-0 : "" : $(p0:D) ;
-check-equal var-D-1 : dir : $(p1:D) ;
-check-equal var-D-2 : dir/sub : $(p2:D) ;
-check-equal var-S-1 : "" : $(p0:S) ;
-check-equal var-no-at-file-0 : ($(p0)) : [ MATCH ^@(.*) : "@($(p0))" ] ;
-check-equal var-no-at-file-1 : ($(p0)) : [ MATCH @(.*) : "--@($(p0))" ] ;
-
-if $(OS) = CYGWIN
-{
- local cyg-root = $(:WE=/) ;
- local cyg1 = /cygdrive/c/path1.txt ;
- check-equal cygwin-to-cygdrive : $(cyg1:W) : C:\\path1.txt ;
- local cyg2 = /bin/bash ;
- check-equal cygwin-to-windows : $(cyg2:W) : $(cyg-root)\\bin\\bash ;
- check-equal cygwin-combine-WT : $(cyg2:WT) : $(cyg-root)\\bin\\bash ;
-
- local cyg3 = /home/boost/devel/trunk/bin.v2/ ; # exactly 31 characters
- local win3 = $(cyg-root)\\home\\boost\\devel\\trunk\\bin.v2\\ ;
-    # This is the easiest way to demonstrate a bug
- # that used to cause undefined behavior. Longer paths
- # resulted in a use-after-free error, which happened
- # to work most of the time.
- check-equal cygwin-long-WU : $(cyg3:WU) : $(win3:U) ;
-
- local cyg-grist = <grist>$(cyg1) ;
- check-equal cygwin-grist : $(cyg-grist:W) : <grist>\\cygdrive\\c\\path1.txt ;
-
- check-equal cygwin-WU : $(cyg2:WU) : $(cyg-root:U)\\BIN\\BASH ;
-    # behavior change: L is now consistently applied after W;
-    # it used to affect all except the drive letter.
- check-equal cygwin-WL : $(cyg2:WL) : $(cyg-root:L)\\bin\\bash ;
-}
-
-# behavior change
-check-equal var-test1 : $(v7[2]:G:L) : <grist2> ;
-
-check-equal var-multi-product-smm : $(v$(vars)[$(sub)]:G=$(g):$(case1)) :
- <g1>dir2/file2.hpp(m2.c) <G1>DIR2/FILE2.HPP(M2.C)
- <g2>dir2/file2.hpp(m2.c) <G2>DIR2/FILE2.HPP(M2.C)
- <g1>dir1/file1.cpp(m1.c) <G1>DIR1/FILE1.CPP(M1.C)
- <g2>dir1/file1.cpp(m1.c) <G2>DIR1/FILE1.CPP(M1.C)
- <g1>dir4/file4.h(m4.c) <G1>DIR4/FILE4.H(M4.C)
- <g2>dir4/file4.h(m4.c) <G2>DIR4/FILE4.H(M4.C)
- <g1>dir3/file3.c(m3.c) <G1>DIR3/FILE3.C(M3.C)
- <g2>dir3/file3.c(m3.c) <G2>DIR3/FILE3.C(M3.C)
-;
-check-equal var-nopathmods : $(:E=//) : // ;
-
-# showcases all the idiosyncrasies of indexing
-# key: h = high, l = low, p = positive, m = minus, e = end.
-
-check-equal var-subscript-one-p : $(v3[3]) : 2 ;
-check-equal var-subscript-one-m : $(v3[-3]) : 8 ;
-check-equal var-subscript-one-0 : $(v3[0]) : 0 ;
-check-equal var-subscript-one-h : $(v3[20]) : ;
-check-equal var-subscript-one-l : $(v3[-20]) : 0 ;
-check-equal var-subscript-range-pp : $(v3[2-4]) : 1 2 3 ;
-check-equal var-subscript-range-pm : $(v3[2--3]) : 1 2 3 4 5 6 7 8 ;
-check-equal var-subscript-range-pe : $(v3[2-]) : 1 2 3 4 5 6 7 8 9 10 ;
-check-equal var-subscript-range-ph : $(v3[2-20]) : 1 2 3 4 5 6 7 8 9 10 ;
-check-equal var-subscript-range-pl : $(v3[2--20]) : ;
-check-equal var-subscript-range-mp : $(v3[-3-10]) : 8 9 ;
-check-equal var-subscript-range-mm : $(v3[-4--2]) : 7 8 9 ;
-check-equal var-subscript-range-me : $(v3[-4-]) : 7 8 9 10 ;
-check-equal var-subscript-range-mh : $(v3[-4-20]) : 7 8 9 10 ;
-check-equal var-subscript-range-ml : $(v3[-4--20]) : ;
-check-equal var-subscript-range-0p : $(v3[0-2]) : 0 1 2 ;
-check-equal var-subscript-range-0m : $(v3[0--4]) : 0 1 2 3 4 5 6 7 8 ;
-check-equal var-subscript-range-0e : $(v3[0-]) : 0 1 2 3 4 5 6 7 8 9 10 ;
-check-equal var-subscript-range-0h : $(v3[0-20]) : 0 1 2 3 4 5 6 7 8 9 10 ;
-check-equal var-subscript-range-0l : $(v3[0--20]) : ;
-check-equal var-subscript-range-hp : $(v3[20-4]) : ;
-check-equal var-subscript-range-hm : $(v3[20--4]) : ;
-check-equal var-subscript-range-he : $(v3[20-]) : ;
-check-equal var-subscript-range-hh : $(v3[20-20]) : ;
-check-equal var-subscript-range-hl : $(v3[20--20]) : ;
-check-equal var-subscript-range-lp : $(v3[-13-4]) : 0 1 2 3 4 5 ;
-check-equal var-subscript-range-lm : $(v3[-13--4]) : 0 1 2 3 4 5 6 7 8 9 ;
-check-equal var-subscript-range-le : $(v3[-13-]) : 0 1 2 3 4 5 6 7 8 9 10 ;
-check-equal var-subscript-range-lh : $(v3[-13-20]) : 0 1 2 3 4 5 6 7 8 9 10 ;
-check-equal var-subscript-range-ll : $(v3[-13--13]) : 0 ;
-check-equal var-subscript-range-empty : $(v3[4-3]) : ;
-
-}
-
-# Check rules
-
-{
-
-rule test-rule
-{
- return $(<) - $(>) - $(1) - $(2) - $(3) - $(4) - $(5) - $(6) - $(7) - $(8) - $(9) - $(10) - $(11) - $(12) - $(13) - $(14) - $(15) - $(16) - $(17) - $(18) - $(19) ;
-}
-
-check-equal rule-arguments-numbered :
- [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
- a1 - a2 - a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
-
-rule test-rule
-{
- return $(<:L) - $(>:L) - $(1:L) - $(2:L) - $(3:L) - $(4:L) - $(5:L) - $(6:L) - $(7:L) - $(8:L) - $(9:L) - $(10:L) - $(11:L) - $(12:L) - $(13:L) - $(14:L) - $(15:L) - $(16:L) - $(17:L) - $(18:L) - $(19:L) ;
-}
-
-# behavior change
-check-equal rule-arguments-numbered-lower :
- [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
- a1 - a2 - a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
-
-
-rule test-rule ( p1 : p2 : p3 : p4 : p5 : p6 : p7 : p8 : p9 :
- p10 : p11 : p12 : p13 : p14 : p15 : p16 : p17 : p18 : p19 )
-
-
-{
- return $(p1) - $(p2) - $(p3) - $(p4) - $(p5) - $(p6) - $(p7) - $(p8) - $(p9) - $(p10) - $(p11) - $(p12) - $(p13) - $(p14) - $(p15) - $(p16) - $(p17) - $(p18) - $(p19) ;
-}
-
-check-equal rule-arguments-named :
- [ test-rule a1 : a2 : a3 : a4 : a5 : a6 : a7 : a8 : a9 : a10 : a11 : a12 : a13 : a14 : a15 : a16 : a17 : a18 : a19 ] :
- a1 - a2 - a3 - a4 - a5 - a6 - a7 - a8 - a9 - a10 - a11 - a12 - a13 - a14 - a15 - a16 - a17 - a18 - a19 ;
-
-#
-# test rule indirection
-#
-rule select ( n list * )
-{
- return $(list[$(n)]) ;
-}
-
-rule indirect1 ( rule + : args * )
-{
- return [ $(rule) $(args) ] ;
-}
-
-check-equal rule-indirect-1 : [ indirect1 select 1 : a b c d e ] : a ;
-check-equal rule-indirect-2 : [ indirect1 select 2 : a b c d e ] : b ;
-
-x = reset ;
-rule reset-x ( new-value )
-{
- x = $(new-value) ;
-}
-$(x)-x bar ; # invokes reset-x...
-check-equal rule-reset : $(x) : bar ; # which changes x
-
-rule bar-x ( new-value )
-{
- mark-order r3 ;
-}
-
-# The arguments are evaluated in forward order
-# before the rule name
-$(x)-x [ mark-order r1 : [ reset-x reset ] ] : [ mark-order r2 ] ;
-check-order rule-order : r1 r2 ;
-
-}
-
-# Check append
-
-{
-
-local value = [ mark-order r1 : v1 v2 ] [ mark-order r2 : v3 v4 ] ;
-check-equal append : $(value) : v1 v2 v3 v4 ;
-check-order append-order : r1 r2 ;
-
-}
-
-# Check foreach
-
-{
-
-local v1 = 1 2 3 ;
-local x = old ;
-local result ;
-
-for local x in $(v1)
-{
- result += $(x) + ;
-}
-
-check-equal foreach-local-item : $(result) : 1 + 2 + 3 + ;
-check-equal foreach-local : $(x) : old ;
-
-result = ;
-
-for x in $(v1)
-{
- result += $(x) + ;
-}
-
-check-equal foreach-nonlocal-item : $(result) : 1 + 2 + 3 + ;
-check-equal foreach-nonlocal : $(x) : 3 ;
-
-rule call-foreach ( values * )
-{
- for local x in $(values)
- {
- return $(x) ;
- }
-}
-
-check-equal foreach-result : [ call-foreach 1 2 3 ] : ;
-
-result = ;
-local varname = x ;
-x = old ;
-
-for local $(varname) in $(v1)
-{
- result += $(x) + ;
-}
-
-check-equal foreach-no-expand : $(result) : old + old + old + ;
-
-result = ;
-
-for local v1 in $(v1)
-{
- result += $(v1) + ;
-}
-
-check-equal foreach-order : $(result) : 1 + 2 + 3 + ;
-
-}
-
-# Check if
-
-{
-
-if true
-{
- mark-order r1 ;
-}
-
-check-order if-true : r1 ;
-
-if $(false)
-{
- mark-order r1 ;
-}
-
-check-order if-false : ;
-
-if true
-{
- mark-order r1 ;
-}
-else
-{
- mark-order r2 ;
-}
-
-check-order if-else-true : r1 ;
-
-if $(false)
-{
- mark-order r1 ;
-}
-else
-{
- mark-order r2 ;
-}
-
-check-order if-else-false : r2 ;
-
-rule test-rule
-{
- if true
- {
- return result ;
- }
-}
-
-check-equal if-true-result : [ test-rule ] : result ;
-
-rule test-rule
-{
- local idx = 1 2 ;
- local values = true ;
- while $(idx)
- {
- local v = $(values[$(idx[1])]) ;
- idx = $(idx[2-]) ;
- if $(v)
- {
- return result ;
- }
- }
-}
-
-check-equal if-false-result : [ test-rule ] : ;
-
-rule test-rule
-{
- if true
- {
- return r1 ;
- }
- else
- {
- return r2 ;
- }
-}
-
-check-equal if-else-true-result : [ test-rule ] : r1 ;
-
-rule test-rule
-{
- if $(false)
- {
- return r1 ;
- }
- else
- {
- return r2 ;
- }
-}
-
-check-equal if-else-false-result : [ test-rule ] : r2 ;
-
-}
-
-# Check the evaluation of conditions
-
-{
-
-local test-result ;
-local v1 = "" "" "" ;
-local v2 = ;
-local v3 = a b c ;
-local v4 = a b c d ;
-local v5 = a b d ;
-local v6 = "" "" "" d ;
-
-rule test-comparison ( id : equal less greater )
-{
- check-equal $(id)-empty-1 : [ eval-$(id) $(v1) : $(v2) ] : $(equal) ;
- check-equal $(id)-empty-2 : [ eval-$(id) $(v1) : $(v2) ] : $(equal) ;
- check-equal $(id)-equal : [ eval-$(id) $(v3) : $(v3) ] : $(equal) ;
- check-equal $(id)-less-1 : [ eval-$(id) $(v3) : $(v4) ] : $(less) ;
- check-equal $(id)-less-2 : [ eval-$(id) $(v3) : $(v5) ] : $(less) ;
- check-equal $(id)-less-3 : [ eval-$(id) $(v4) : $(v5) ] : $(less) ;
- check-equal $(id)-greater-1 : [ eval-$(id) $(v4) : $(v3) ] : $(greater) ;
- check-equal $(id)-greater-2 : [ eval-$(id) $(v5) : $(v3) ] : $(greater) ;
- check-equal $(id)-greater-3 : [ eval-$(id) $(v5) : $(v4) ] : $(greater) ;
-}
-
-rule eval-lt ( lhs * : rhs * )
-{
- if $(lhs) < $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-comparison lt : false true false ;
-
-rule eval-gt ( lhs * : rhs * )
-{
- if $(lhs) > $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-comparison gt : false false true ;
-
-rule eval-le ( lhs * : rhs * )
-{
- if $(lhs) <= $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-comparison le : true true false ;
-
-rule eval-ge ( lhs * : rhs * )
-{
- if $(lhs) >= $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-comparison ge : true false true ;
-
-rule eval-eq ( lhs * : rhs * )
-{
- if $(lhs) = $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-comparison eq : true false false ;
-
-rule eval-ne ( lhs * : rhs * )
-{
- if $(lhs) != $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-comparison ne : false true true ;
-
-rule eval-not-lt ( lhs * : rhs * )
-{
- if ! ( $(lhs) < $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-comparison not-lt : true false true ;
-
-rule eval-not-gt ( lhs * : rhs * )
-{
- if ! ( $(lhs) > $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-comparison not-gt : true true false ;
-
-rule eval-not-le ( lhs * : rhs * )
-{
- if ! ( $(lhs) <= $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-comparison not-le : false false true ;
-
-rule eval-not-ge ( lhs * : rhs * )
-{
- if ! ( $(lhs) >= $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-comparison not-ge : false true false ;
-
-rule eval-not-eq ( lhs * : rhs * )
-{
- if ! ( $(lhs) = $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-comparison not-eq : false true true ;
-
-rule eval-not-ne ( lhs * : rhs * )
-{
- if ! ( $(lhs) != $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-comparison not-ne : true false false ;
-
-local v7 = a a a a a a ;
-local v8 = c b ;
-local v9 = c d b ;
-local v10 = c a b c c b a a a ;
-
-rule test-in ( id : subset not-subset )
-{
- check-equal $(id)-0-0 : [ eval-$(id) $(v2) : $(v2) ] : $(subset) ;
- check-equal $(id)-0-empty : [ eval-$(id) $(v2) : $(v1) ] : $(subset) ;
- check-equal $(id)-empty-0 : [ eval-$(id) $(v1) : $(v2) ] : $(not-subset) ;
- check-equal $(id)-equal : [ eval-$(id) $(v3) : $(v3) ] : $(subset) ;
- check-equal $(id)-simple : [ eval-$(id) $(v3) : $(v4) ] : $(subset) ;
- check-equal $(id)-extra : [ eval-$(id) $(v4) : $(v3) ] : $(not-subset) ;
- check-equal $(id)-multiple : [ eval-$(id) $(v7) : $(v3) ] : $(subset) ;
- check-equal $(id)-unordered : [ eval-$(id) $(v8) : $(v3) ] : $(subset) ;
- check-equal $(id)-unordered-extra : [ eval-$(id) $(v9) : $(v3) ] : $(not-subset) ;
- check-equal $(id)-unordered-multiple : [ eval-$(id) $(v10) : $(v3) ] : $(subset) ;
-}
-
-rule eval-in ( lhs * : rhs * )
-{
- if $(lhs) in $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-in "in" : true false ;
-
-rule eval-not-in ( lhs * : rhs * )
-{
- if ! ( $(lhs) in $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-in not-in : false true ;
-
-rule test-truth-table ( id : tt tf ft ff )
-{
- check-equal $(id)-tt : [ eval-$(id) 1 : 1 ] : $(tt) ;
- check-equal $(id)-tf : [ eval-$(id) 1 : ] : $(tf) ;
- check-equal $(id)-ft : [ eval-$(id) : 1 ] : $(ft) ;
- check-equal $(id)-ff : [ eval-$(id) : ] : $(ff) ;
-}
-
-rule eval-and ( lhs ? : rhs ? )
-{
- if $(lhs) && $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-truth-table and : true false false false ;
-
-rule eval-or ( lhs ? : rhs ? )
-{
- if $(lhs) || $(rhs) { return true ; }
- else { return false ; }
-}
-
-test-truth-table or : true true true false ;
-
-rule eval-not-and ( lhs ? : rhs ? )
-{
- if ! ( $(lhs) && $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-truth-table not-and : false true true true ;
-
-rule eval-not-or ( lhs ? : rhs ? )
-{
- if ! ( $(lhs) || $(rhs) ) { return true ; }
- else { return false ; }
-}
-
-test-truth-table not-or : false false false true ;
-
-if [ mark-order r1 : test1 ] < [ mark-order r2 : test2 ] { }
-check-order lt-order : r1 r2 ;
-if [ mark-order r1 : test1 ] > [ mark-order r2 : test2 ] { }
-check-order gt-order : r1 r2 ;
-if [ mark-order r1 : test1 ] <= [ mark-order r2 : test2 ] { }
-check-order le-order : r1 r2 ;
-if [ mark-order r1 : test1 ] >= [ mark-order r2 : test2 ] { }
-check-order ge-order : r1 r2 ;
-if [ mark-order r1 : test1 ] = [ mark-order r2 : test2 ] { }
-check-order eq-order : r1 r2 ;
-if [ mark-order r1 : test1 ] != [ mark-order r2 : test2 ] { }
-check-order ne-order : r1 r2 ;
-if [ mark-order r1 : test1 ] in [ mark-order r2 : test2 ] { }
-check-order in-order : r1 r2 ;
-
-if [ mark-order r1 : test1 ] && [ mark-order r2 : test2 ] { }
-check-order and-order : r1 r2 ;
-if [ mark-order r1 ] && [ mark-order r2 : test2 ] { }
-check-order and-order-short-circuit : r1 ;
-
-if [ mark-order r1 ] || [ mark-order r2 : test2 ] { }
-check-order or-order : r1 r2 ;
-if [ mark-order r1 : test1 ] || [ mark-order r2 : test2 ] { }
-check-order or-order-short-circuit : r1 ;
-
-}
-
-# Check include
-
-{
-#FIXME:
-# plain include
-# include in module
-# include returns an empty list
-# rule arguments are available inside include
-}
-
-# Check local
-
-{
-
-local v1 = a b c ;
-local v2 = f g h ;
-
-{
- local v1 ;
- check-equal local-no-init : $(v1) : ;
-}
-
-check-equal local-restore : $(v1) : a b c ;
-
-{
- local v1 = d e f ;
- check-equal local-init : $(v1) : d e f ;
-}
-
-check-equal local-restore-init : $(v1) : a b c ;
-
-{
- local v1 v2 ;
- check-equal local-multiple-no-init : $(v1) - $(v2) : - ;
-}
-
-check-equal local-multiple-restore : $(v1) - $(v2) : a b c - f g h ;
-
-{
- local v1 v2 = d e f ;
- check-equal local-multiple-init : $(v1) - $(v2) : d e f - d e f ;
-}
-
-{
- local v1 v1 = d e f ;
- check-equal local-duplicate : $(v1) - $(v1) : d e f - d e f ;
-}
-
-check-equal local-duplicate-restore : $(v1) : a b c ;
-
-{
- local [ mark-order r1 : v1 ] = [ mark-order r2 : d e f ] ;
- check-order local-order : r1 r2 ;
-}
-
-}
-
-# Check module
-
-{
- local var1 = root-module-var ;
- module my_module
- {
- var1 = module-var ;
- rule get ( )
- {
- return $(var1) ;
- }
- local rule not_really ( ) { return nothing ; }
- }
-
- check-equal module-var-not-root : $(var1) : root-module-var ;
-
- check-equal module-rulenames : [ RULENAMES my_module ] : get ;
-
- IMPORT_MODULE my_module ;
- check-equal module-rule-import-module : [ my_module.get ] : module-var ;
-
- IMPORT my_module : get : : module-get ;
- check-equal module-rule-imort : [ module-get ] : module-var ;
-
- IMPORT my_module : get : : module-get : LOCALIZE ;
- check-equal module-rule-imort-localize : [ module-get ] : root-module-var ;
-
-}
-
-# Check class
-{
-#FIXME:
-# ...
-}
-
-# Check on
-
-{
-
-local target1 = test-on-target1 ;
-local target2 = test-on-target2 ;
-local targets = $(target1) $(target2) ;
-local v1 v2 v3 ;
-
-VAR on $(target1) = value1 ;
-V2 on $(target2) = value2 ;
-
-check-equal on-return : [ on $(target1) return $(VAR) ] : value1 ;
-
-rule test-rule
-{
- return $(VAR) ;
-}
-
-check-equal on-rule : [ on $(target1) test-rule ] : value1 ;
-
-check-equal on-multiple : [ on $(targets) return $(V2) ] : ;
-
-rule test-rule
-{
- on $(target1)
- {
- return $(VAR) ;
- }
-}
-
-check-equal on-block : [ test-rule ] : value1 ;
-
-# FIXME: crazy implementation artifacts:
-
-v1 on test-on-target3 = x1 ;
-on test-on-target3
-{
- v1 on test-on-target3 += x1 ;
- v1 = y1 ;
- v2 on test-on-target3 += x2 ;
- v2 = y2 ;
- v3 = y3 ;
-}
-
-check-equal on-swap-old1 : $(v1) : x1 ;
-check-equal on-swap-old2 : [ on test-on-target3 return $(v1) ] : y1 ;
-check-equal on-swap-new1 : $(v2) : x2 ;
-check-equal on-swap-new2 : [ on test-on-target3 return $(v2) ] : y2 ;
-check-equal on-no-swap : $(v3) : y3 ;
-
-}
-
-# Check rule
-
-{
-#FIXME:
-# argument order
-# expand rule name
-}
-
-# Check rules
-
-{
-#FIXME:
-}
-
-# Check set
-
-{
-local v1 ;
-local v2 ;
-local v3 ;
-local vars = v1 v2 v3 ;
-
-v1 = x1 ;
-check-equal set-set-empty : $(v1) : x1 ;
-v2 += x2 ;
-check-equal set-append-empty : $(v2) : x2 ;
-v3 ?= x3 ;
-check-equal set-default-empty : $(v3) : x3 ;
-
-v1 = y1 ;
-check-equal set-set-non-empty : $(v1) : y1 ;
-v2 += y2 ;
-check-equal set-append-non-empty : $(v2) : x2 y2 ;
-v3 ?= y3 ;
-check-equal set-default-non-empty : $(v3) : x3 ;
-
-v1 = ;
-v2 = ;
-v3 = ;
-$(vars) = z ;
-check-equal set-set-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
-
-v1 = ;
-v2 = ;
-v3 = ;
-$(vars) += z ;
-check-equal set-append-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
-
-v1 = ;
-v2 = ;
-v3 = ;
-$(vars) ?= z ;
-check-equal set-default-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
-
-v1 = x1 ;
-v2 = x2 ;
-v3 = x3 ;
-$(vars) = z ;
-check-equal set-set-non-empty-group : $(v1) - $(v2) - $(v3) : z - z - z ;
-
-v1 = x1 ;
-v2 = x2 ;
-v3 = x3 ;
-$(vars) += z ;
-check-equal set-append-non-empty-group : $(v1) - $(v2) - $(v3) : x1 z - x2 z - x3 z ;
-
-v1 = x1 ;
-v2 = x2 ;
-v3 = x3 ;
-$(vars) ?= z ;
-check-equal set-default-non-empty-group : $(v1) - $(v2) - $(v3) : x1 - x2 - x3 ;
-
-v1 = x1 ;
-v2 = ;
-v3 = x3 ;
-$(vars) = z ;
-check-equal set-set-mixed-group : $(v1) - $(v2) - $(v3) : z - z - z ;
-
-v1 = x1 ;
-v2 = ;
-v3 = x3 ;
-$(vars) += z ;
-check-equal set-append-mixed-group : $(v1) - $(v2) - $(v3) : x1 z - z - x3 z ;
-
-v1 = x1 ;
-v2 = ;
-v3 = x3 ;
-$(vars) ?= z ;
-check-equal set-default-mixed-group : $(v1) - $(v2) - $(v3) : x1 - z - x3 ;
-
-vars = v1 v1 ;
-
-v1 = ;
-$(vars) = z ;
-check-equal set-set-duplicate-empty : $(v1) : z ;
-v1 = ;
-$(vars) += z ;
-check-equal set-append-duplicate-empty : $(v1) : z z ;
-v1 = ;
-$(vars) ?= z ;
-check-equal set-default-duplicate-empty : $(v1) : z ;
-
-v1 = x1 ;
-$(vars) = z ;
-check-equal set-set-duplicate-non-empty : $(v1) : z ;
-v1 = x1 ;
-$(vars) += z ;
-check-equal set-append-duplicate-non-empty : $(v1) : x1 z z ;
-v1 = x1 ;
-$(vars) ?= z ;
-check-equal set-default-duplicate-non-empty : $(v1) : x1 ;
-
-rule test-rule { v1 = x1 ; }
-check-equal set-set-result : [ test-rule ] : x1 ;
-rule test-rule { v1 += x1 ; }
-check-equal set-append-result : [ test-rule ] : x1 ;
-rule test-rule { v1 ?= x1 ; }
-check-equal set-default-result : [ test-rule ] : x1 ;
-
-[ mark-order r1 ] = [ mark-order r2 ] ;
-check-order set-set-order : r1 r2 ;
-[ mark-order r1 ] += [ mark-order r2 ] ;
-check-order set-append-order : r1 r2 ;
-[ mark-order r1 ] ?= [ mark-order r2 ] ;
-check-order set-default-order : r1 r2 ;
-
-}
-
-# Check setcomp
-
-{
-#FIXME
-# Expand arguments
-# Don't expand name
-}
-
-# Check setexec
-
-{
-#FIXME:
-# Don't expand name
-# Evaluate bindlist
-}
-
-# Check settings ;
-
-{
-
-local target1 = test-settings-target1 ;
-local target2 = test-settings-target2 ;
-local target3 = test-settings-target3 ;
-local targets = $(target2) $(target3) ;
-
-local vars = v1 v2 v3 ;
-
-v1 on $(target1) = x1 ;
-check-equal settings-set-empty : [ on $(target1) return $(v1) ] : x1 ;
-v2 on $(target1) += x2 ;
-check-equal settings-append-empty : [ on $(target1) return $(v2) ] : x2 ;
-v3 on $(target1) ?= x3 ;
-check-equal settings-default-empty : [ on $(target1) return $(v3) ] : x3 ;
-
-v1 on $(target1) = y1 ;
-check-equal settings-set-non-empty : [ on $(target1) return $(v1) ] : y1 ;
-v2 on $(target1) += y2 ;
-check-equal settings-append-non-empty : [ on $(target1) return $(v2) ] : x2 y2 ;
-v3 on $(target1) ?= y3 ;
-check-equal settings-default-non-empty : [ on $(target1) return $(v3) ] : x3 ;
-
-$(vars) on setting-target2 = z ;
-check-equal settings-set-empty-group : [ on setting-target2 return $(v1) ] - [ on setting-target2 return $(v2) ] - [ on setting-target2 return $(v3) ] : z - z - z ;
-
-$(vars) on setting-target3 += z ;
-check-equal settings-append-empty-group : [ on setting-target3 return $(v1) ] - [ on setting-target3 return $(v2) ] - [ on setting-target3 return $(v3) ] : z - z - z ;
-
-$(vars) on setting-target4 ?= z ;
-check-equal settings-default-empty-group : [ on setting-target4 return $(v1) ] - [ on setting-target4 return $(v2) ] - [ on setting-target4 return $(v3) ] : z - z - z ;
-
-v1 on $(target1) = x1 ;
-v2 on $(target1) = x2 ;
-v3 on $(target1) = x3 ;
-$(vars) on $(target1) = z ;
-check-equal settings-set-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : z - z - z ;
-
-v1 on $(target1) = x1 ;
-v2 on $(target1) = x2 ;
-v3 on $(target1) = x3 ;
-$(vars) on $(target1) += z ;
-check-equal settings-append-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : x1 z - x2 z - x3 z ;
-
-v1 on $(target1) = x1 ;
-v2 on $(target1) = x2 ;
-v3 on $(target1) = x3 ;
-$(vars) on $(target1) ?= z ;
-check-equal settings-default-non-empty-group : [ on $(target1) return $(v1) ] - [ on $(target1) return $(v2) ] - [ on $(target1) return $(v3) ] : x1 - x2 - x3 ;
-
-v1 on setting-target5 = x1 ;
-v3 on setting-target5 = x3 ;
-$(vars) on setting-target5 = z ;
-check-equal settings-set-mixed-group : [ on setting-target5 return $(v1) ] - [ on setting-target5 return $(v2) ] - [ on setting-target5 return $(v3) ] : z - z - z ;
-
-v1 on setting-target6 = x1 ;
-v3 on setting-target6 = x3 ;
-$(vars) on setting-target6 += z ;
-check-equal settings-append-mixed-group : [ on setting-target6 return $(v1) ] - [ on setting-target6 return $(v2) ] - [ on setting-target6 return $(v3) ] : x1 z - z - x3 z ;
-
-v1 on setting-target7 = x1 ;
-v3 on setting-target7 = x3 ;
-$(vars) on setting-target7 ?= z ;
-check-equal settings-default-mixed-group : [ on setting-target7 return $(v1) ] - [ on setting-target7 return $(v2) ] - [ on setting-target7 return $(v3) ] : x1 - z - x3 ;
-
-vars = v1 v1 ;
-
-$(vars) on setting-target8 = z ;
-check-equal settings-set-duplicate-empty : [ on setting-target8 return $(v1) ] : z ;
-$(vars) on setting-target9 += z ;
-check-equal settings-append-duplicate-empty : [ on setting-target9 return $(v1) ] : z z ;
-$(vars) on setting-target10 ?= z ;
-check-equal settings-default-duplicate-empty : [ on setting-target10 return $(v1) ] : z ;
-
-v1 on $(target1) = x1 ;
-$(vars) on $(target1) = z ;
-check-equal settings-set-duplicate-non-empty : [ on $(target1) return $(v1) ] : z ;
-v1 on $(target1) = x1 ;
-$(vars) on $(target1) += z ;
-check-equal settings-append-duplicate-non-empty : [ on $(target1) return $(v1) ] : x1 z z ;
-v1 on $(target1) = x1 ;
-$(vars) on $(target1) ?= z ;
-check-equal settings-default-duplicate-non-empty : [ on $(target1) return $(v1) ] : x1 ;
-
-v1 on $(target1) = ;
-v1 on $(target1) ?= z ;
-check-equal settings-default-set-but-empty : [ on $(target1) return $(v1) ] : ;
-
-v1 on $(targets) = multi ;
-check-equal settings-set-multi-empty : [ on $(target2) return $(v1) ] - [ on $(target3) return $(v1) ] : multi - multi ;
-v2 on $(targets) += multi ;
-check-equal settings-append-multi-empty : [ on $(target2) return $(v2) ] - [ on $(target3) return $(v2) ] : multi - multi ;
-v3 on $(targets) ?= multi ;
-check-equal settings-default-multi-empty : [ on $(target2) return $(v3) ] - [ on $(target3) return $(v3) ] : multi - multi ;
-
-v1 on $(targets) = multi2 ;
-check-equal settings-set-multi-empty : [ on $(target2) return $(v1) ] - [ on $(target3) return $(v1) ] : multi2 - multi2 ;
-v2 on $(targets) += multi2 ;
-check-equal settings-append-multi-empty : [ on $(target2) return $(v2) ] - [ on $(target3) return $(v2) ] : multi multi2 - multi multi2 ;
-v3 on $(targets) ?= multi2 ;
-check-equal settings-default-multi-empty : [ on $(target2) return $(v3) ] - [ on $(target3) return $(v3) ] : multi - multi ;
-
-rule test-rule { v1 on $(target1) = x1 ; }
-check-equal settings-set-result : [ test-rule ] : x1 ;
-rule test-rule { v1 on $(target1) += x1 ; }
-check-equal settings-append-result : [ test-rule ] : x1 ;
-rule test-rule { v1 on $(target1) ?= x1 ; }
-check-equal settings-default-result : [ test-rule ] : x1 ;
-
-[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] = [ mark-order r2 : value ] ;
-check-order settings-set-order : r1 r2 r3 ;
-[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] += [ mark-order r2 : value ] ;
-check-order settings-append-order : r1 r2 r3 ;
-[ mark-order r1 : var ] on [ mark-order r3 : $(target1) ] ?= [ mark-order r2 : value ] ;
-check-order settings-default-order : r1 r2 r3 ;
-
-}
-
-# Check switch
-
-{
-
-local pattern = * ;
-
-switch value
-{
- case * : mark-order r1 ;
-}
-
-check-order switch-match-any : r1 ;
-
-switch value
-{
- case v2 : mark-order r1 ;
-}
-
-check-order switch-no-match : ;
-
-switch value
-{
- case $(pattern) : mark-order r1 ;
-}
-
-check-order switch-no-expand : ;
-
-switch value
-{
- case value : mark-order r1 ;
- case * : mark-order r2 ;
-}
-
-check-order switch-match-several : r1 ;
-
-rule test-rule ( value )
-{
- switch $(value)
- {
- case value : return 1 ;
- }
-}
-
-check-equal switch-result-match : [ test-rule value ] : 1 ;
-check-equal switch-result-match : [ test-rule v1 ] : ;
-
-switch $()
-{
- case "" : mark-order r1 ;
- case * : mark-order r2 ;
-}
-
-check-order switch-empty : r1 ;
-
-local values = v1 v2 v3 ;
-switch $(values)
-{
- case v1 : mark-order r1 ;
- case v2 : mark-order r2 ;
- case v3 : mark-order r3 ;
-}
-
-check-order switch-multiple : r1 ;
-
-# Test glob matching
-
-switch value { case * : mark-order r1 ; }
-check-order switch-glob-star : r1 ;
-
-switch value { case va*e : mark-order r1 ; }
-check-order switch-glob-star-1 : r1 ;
-
-switch value { case *a* : mark-order r1 ; }
-check-order switch-glob-star-2 : r1 ;
-
-switch value { case *a*ue* : mark-order r1 ; }
-check-order switch-glob-star-3 : r1 ;
-
-switch value { case *[eaiou]*ue : mark-order r1 ; }
-check-order switch-glob-group : r1 ;
-
-switch value { case *[eaiou]ue : mark-order r1 ; }
-check-order switch-glob-group-fail : ;
-
-switch value { case ?a?ue : mark-order r1 ; }
-check-order switch-glob-any : r1 ;
-
-switch value { case ?lue : mark-order r1 ; }
-check-order switch-glob-any-fail : ;
-
-}
-
-# Test while
-
-{
-
-local value = 1 2 3 ;
-
-while $(value)
-{
- mark-order r$(value[1]) ;
- value = $(value[2-]) ;
-}
-
-check-order while-exec : r1 r2 r3 ;
-
-rule test-rule
-{
- local value = 1 2 3 ;
- while $(value)
- {
- value = $(value[2-]) ;
- return x ;
- }
-}
-
-check-equal while-result : [ test-rule ] : x ;
-
-rule test-rule
-{
- local value = 1 2 ;
- while $(value)
- {
- value = $(value[2-]) ;
- local inner = $(value) ;
- while $(inner)
- {
- inner = $(inner[2-]) ;
- return x ;
- }
- }
-}
-
-check-equal while-result-2 : [ test-rule ] : ;
-
-}
-
-#
-# test CALLER_MODULE and backtrace
-#
-
-{
- local base = [ BACKTRACE ] ;
- base = $(base[2]) ;
- rule backtrace ( )
- {
- local bt = [ BACKTRACE ] ;
- check-equal backtrace-1-file : $(bt) :
- test.jam [ CALC $(base) + 4 ] "" backtrace
- test.jam [ CALC $(base) + 28 ] module2. module2.f
- test.jam [ CALC $(base) + 19 ] module1. module1.f
- test.jam [ CALC $(base) + 32 ] "" "module scope"
- ;
- }
- module module1
- {
- IMPORT_MODULE module2 : module1 ;
- rule f ( )
- {
- local m = [ CALLER_MODULE ] ;
- check-equal caller-module-root : $(m) ;
- module2.f ;
- }
- }
- module module2
- {
- rule f ( )
- {
- local m = [ CALLER_MODULE ] ;
- check-equal caller-module : module1 : $(m) ;
- backtrace ;
- }
- }
- IMPORT_MODULE module1 ;
- module1.f ;
-}
-
-
-# Test NORMALIZE_PATH
-
-{
-check-equal normalize-path : "." : [ NORMALIZE_PATH ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "" ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "." ] ;
-check-equal normalize-path : ".." : [ NORMALIZE_PATH ".." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "/" ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\" ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "//" ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\" ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "//\\\\//\\\\" ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "/." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "/./" ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\///.///\\\\\\" ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "./././././." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "/./././././." ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo" ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/" ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\" ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////" ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////././." ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo\\\\/////./././" ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/.." ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "foo////.." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "///foo/\\\\/.." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "\\\\\\foo\\//\\.." ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/./.." ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/././././.." ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/./././bar/./././.././././baz/./././.." ] ;
-check-equal normalize-path : "/foo" : [ NORMALIZE_PATH "/foo/./././bar/./././.././././baz/./././.." ] ;
-check-equal normalize-path : "foo" : [ NORMALIZE_PATH "foo/./././bar/./././////.././././baz/./././.." ] ;
-check-equal normalize-path : "/foo" : [ NORMALIZE_PATH "/foo/./././bar/./././////.././././baz/./././.." ] ;
-check-equal normalize-path : ".." : [ NORMALIZE_PATH "./.." ] ;
-check-equal normalize-path : ".." : [ NORMALIZE_PATH "././././.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "../.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "./../.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "././././../.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "./.././././.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "././././.././././.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "..//\\\\\\//.." ] ;
-check-equal normalize-path : "../.." : [ NORMALIZE_PATH "../..\\\\/\\\\" ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "foo/../bar/../baz/.." ] ;
-check-equal normalize-path : "." : [ NORMALIZE_PATH "foo////..////bar////.//////.////../baz/.." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "/foo/../bar/../baz/.." ] ;
-check-equal normalize-path : "/" : [ NORMALIZE_PATH "/foo////..////bar////.//////.////../baz/.." ] ;
-
-# Invalid rooted paths with leading dotdots.
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../" ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "//\\\\//\\\\/.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "\\\\//\\\\//\\.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../../.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/foo/bar/../baz/../../.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../for/././../././bar/././../././.." ] ;
-check-equal normalize-path-invalid : : [ NORMALIZE_PATH "/../foo/bar" ] ;
-
-}
-
-# Test W32_GETREGNAMES
-
-{
-
-if $(NT)
-{
- local sound = "Beep" "ExtendedSounds" ;
- local r1 = [ W32_GETREGNAMES "HKEY_CURRENT_USER\\Control Panel\\Sound" : values ] ;
- check-equal w32_getregnames : $(sound:L) : $(r1:L) ;
- local r2 = [ W32_GETREGNAMES "HKCU\\Control Panel\\Sound" : values ] ;
- check-equal w32_getregnames : $(sound:L) : $(r2:L) ;
-
- local CurrentControlSet = "Control" "Enum" "Hardware Profiles" "Services" ;
- local r3 = [ W32_GETREGNAMES "HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet" : subkeys ] ;
- check-equal w32_getregnames : $(CurrentControlSet:L) : $(r3:L) ;
- local r4 = [ W32_GETREGNAMES "HKLM\\SYSTEM\\CurrentControlSet" : subkeys ] ;
- check-equal w32_getregnames : $(CurrentControlSet:L) : $(r4:L) ;
-}
-
-}
-
-# Test SHELL
-
-{
-
-local c = "echo value" ;
-
-check-equal shell : "value\n" : [ SHELL $(c) ] ;
-check-equal shell : "" : [ SHELL $(c) : no-output ] ;
-check-equal shell : "value\n" 0 : [ SHELL $(c) : exit-status ] ;
-check-equal shell : "" 0 : [ SHELL $(c) : no-output : exit-status ] ;
-check-equal command : "value\n" : [ COMMAND $(c) ] ;
-check-equal command : "" : [ COMMAND $(c) : no-output ] ;
-check-equal command : "value\n" 0 : [ COMMAND $(c) : exit-status ] ;
-check-equal command : "" 0 : [ COMMAND $(c) : no-output : exit-status ] ;
-
-}
-
-# Test SUBST
-
-{
-
-# Check that unmatched subst returns an empty list
-check-equal subst-nomatch : [ SUBST "abc" "d+" x ] : ;
-
-# Check that a matched subst works
-check-equal subst-match : [ SUBST "ddd" "d+" x ] : x ;
-
-# Check that we can get multiple substitutions from a single invocation
-check-equal subst-multiple : [ SUBST "x/y/z" "([^/]*)/([^/]*).*" "\\1" "\\2" "\\1-\\2" ] : x y x-y ;
-
-}
-
-# Test summary
-
-if $(failed) = 0
-{
- status = 0 ;
-}
-else
-{
- status = 1 ;
-}
-
-EXIT $(passed) passed $(failed) failed : $(status) ;
diff --git a/tools/build/v2/test/core_action_status.py b/tools/build/v2/test/core_action_status.py
deleted file mode 100755
index 75dbdf5391..0000000000
--- a/tools/build/v2/test/core_action_status.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2007 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
- actions quietly .a. { $(ACTION) }
-
- rule .a.
- {
- DEPENDS $(<) : $(>) ;
- }
-
- NOTFILE subtest ;
- .a. subtest_a : subtest ;
- DEPENDS all : subtest_a ;
-""")
-
-t.run_build_system("-ffile.jam -sACTION=invalid", status=1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_actions_quietly.py b/tools/build/v2/test/core_actions_quietly.py
deleted file mode 100755
index e8d5d43413..0000000000
--- a/tools/build/v2/test/core_actions_quietly.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2007 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
- actions quietly .a.
- {
-echo [$(<:B)] 0
-echo [$(<:B)] 1
-echo [$(<:B)] 2
- }
-
- rule .a.
- {
- DEPENDS $(<) : $(>) ;
- }
-
- NOTFILE subtest ;
- .a. subtest_a : subtest ;
- .a. subtest_b : subtest ;
- DEPENDS all : subtest_a subtest_b ;
-""")
-
-t.run_build_system("-ffile.jam -d2", stdout="""...found 4 targets...
-...updating 2 targets...
-.a. subtest_a
-
-echo [subtest_a] 0
-echo [subtest_a] 1
-echo [subtest_a] 2
-
-[subtest_a] 0
-[subtest_a] 1
-[subtest_a] 2
-.a. subtest_b
-
-echo [subtest_b] 0
-echo [subtest_b] 1
-echo [subtest_b] 2
-
-[subtest_b] 0
-[subtest_b] 1
-[subtest_b] 2
-...updated 2 targets...
-""")
-
-t.run_build_system("-ffile.jam -d1", stdout="""...found 4 targets...
-...updating 2 targets...
-...updated 2 targets...
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_arguments.py b/tools/build/v2/test/core_arguments.py
deleted file mode 100755
index 5a1bc9b655..0000000000
--- a/tools/build/v2/test/core_arguments.py
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2001 Dave Abrahams
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("echo_args.jam", """
-rule echo_args ( a b ? c ? : d + : e * )
-{
- ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ;
-}
-
-rule echo_varargs ( a b ? c ? : d + : e * : * )
-{
- ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e)
- ": rest= "$(4[1]) $(4[2])
- ": "$(5[1]) $(5[2])
- ": "$(6[1]) $(6[2])
- ": "$(7[1]) $(7[2])
- ": "$(8[1]) $(8[2])
- ": "$(9[1]) $(9[2]) ;
-}
-""")
-
-t.write("file.jam", "include echo_args.jam ; echo_args ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("* missing argument a");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 2 : 3 : 4 : 5 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("* extra argument 5");
-
-t.write("file.jam", "include echo_args.jam ; echo_args a b c1 c2 : d ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("* extra argument c2");
-
-# Check modifier '?'
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 2 3 : 4 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= 2 c= 3 : d= 4 : e=");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 2 : 3 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= 2 c= : d= 3 : e=");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 : 2 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e=");
-
-# Check modifier '+'
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("* missing argument d");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 : 2 3 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 3 : e=");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 : 2 3 4 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 3 4 : e=");
-
-# Check modifier '*'
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 : 2 : 3 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 : 2 : 3 4 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4");
-
-t.write("file.jam", "include echo_args.jam ; echo_args 1 : 2 : 3 4 5 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4 5");
-
-#
-# Check varargs
-#
-
-t.write("file.jam", "include echo_args.jam ; echo_varargs 1 : 2 : 3 4 5 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4 5");
-
-t.write("file.jam", "include echo_args.jam ; echo_varargs 1 : 2 : 3 4 5 : 6 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6");
-
-t.write("file.jam", "include echo_args.jam ; echo_varargs 1 : 2 : 3 4 5 : 6 7 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7");
-
-t.write("file.jam", "include echo_args.jam ; echo_varargs 1 : 2 : 3 4 5 : 6 7 : 8 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8");
-
-t.write("file.jam", "include echo_args.jam ; echo_varargs 1 : 2 : 3 4 5 : 6 7 : 8 : 9 ;")
-t.run_build_system("-ffile.jam", status=1)
-t.expect_output_line("a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : 9");
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_at_file.py b/tools/build/v2/test/core_at_file.py
deleted file mode 100755
index 71f1bbcebe..0000000000
--- a/tools/build/v2/test/core_at_file.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import BoostBuild
-import os
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
-name = n1 n2 ;
-contents = M1 M2 ;
-EXIT file: "@(o$(name) .txt:E= test -D$(contents))" : 0 ;
-""")
-
-t.run_build_system("-ffile.jam")
-t.expect_output_line("file: on1 on2 .txt");
-t.expect_addition("on1 on2 .txt")
-t.expect_content("on1 on2 .txt", " test -DM1 -DM2", True)
-
-t.rm(".")
-
-t.write("file.jam", """
-name = n1 n2 ;
-contents = M1 M2 ;
-actions run {
-echo file: "@(o$(name) .txt:E= test -D$(contents))"
-}
-
-run all ;
-
-""")
-
-t.run_build_system("-ffile.jam -d2")
-t.expect_output_line('echo file: "on1 on2 .txt"');
-t.expect_addition("on1 on2 .txt")
-t.expect_content("on1 on2 .txt", " test -DM1 -DM2", True)
-
-t.rm(".")
-
-t.write("file.jam", """
-name = n1 n2 ;
-contents = M1 M2 ;
-file = "@($(STDOUT):E= test -D$(contents)\n)" ;
-
-actions run {
-$(file)
-}
-
-run all ;
-""")
-
-t.run_build_system("-ffile.jam -d1")
-t.expect_output_line(" test -DM1 -DM2")
-
-t.rm(".")
-
-t.write("file.jam", """
-name = n1 n2 ;
-contents = M1 M2 ;
-actions run {
-@($(STDOUT):E= test -D$(contents)\n)
-}
-
-run all ;
-
-""")
-
-t.run_build_system("-ffile.jam -d1")
-t.expect_output_line(" test -DM1 -DM2")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_bindrule.py b/tools/build/v2/test/core_bindrule.py
deleted file mode 100755
index d5aaa7fc53..0000000000
--- a/tools/build/v2/test/core_bindrule.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2001 Dave Abrahams
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("subdir1/file-to-bind", "# This file intentionally left blank")
-
-t.write("file.jam", """
-rule do-nothing ( target : source )
-{
- DEPENDS $(target) : $(source) ;
-}
-actions quietly do-nothing
-{
-}
-
-# Make a non-file target which depends on a file that exists
-NOTFILE fake-target ;
-SEARCH on file-to-bind = subdir1 ;
-
-do-nothing fake-target
- : file-to-bind ;
-
-# Set jam up to call our bind-rule
-BINDRULE = bind-rule ;
-
-rule bind-rule ( target : path )
-{
- ECHO found: $(target) at $(path) ;
-}
-
-DEPENDS all : fake-target ;
-""")
-
-t.run_build_system("-ffile.jam", stdout="""found: all at all
-found: file-to-bind at subdir1%sfile-to-bind
-...found 3 targets...
-""" % os.sep)
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_d12.py b/tools/build/v2/test/core_d12.py
deleted file mode 100644
index 83076350af..0000000000
--- a/tools/build/v2/test/core_d12.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests correct handling of "-d1" and "-d2" options.
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
-actions a { }
-actions quietly b { }
-ALWAYS all ;
-a all ;
-b all ;
-""")
-
-t.run_build_system("-ffile.jam -d0", stdout="")
-
-t.run_build_system("-ffile.jam -d1", stdout=
-"""...found 1 target...
-...updating 1 target...
-a all
-...updated 1 target...
-""")
-
-t.run_build_system("-ffile.jam -d2")
-
-t.fail_test(t.stdout().find("a all") == -1)
-t.fail_test(t.stdout().find("b all") == -1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_delete_module.py b/tools/build/v2/test/core_delete_module.py
deleted file mode 100644
index b02fd95a39..0000000000
--- a/tools/build/v2/test/core_delete_module.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests the facilities for deleting modules.
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
-module foo
-{
- rule bar { }
- var = x y ;
-}
-DELETE_MODULE foo ;
-if [ RULENAMES foo ]
-{
- EXIT DELETE_MODULE failed to kill foo's rules: [ RULENAMES foo ] ;
-}
-
-module foo
-{
- if $(var)
- {
- EXIT DELETE_MODULE failed to kill foo's variables ;
- }
-
- rule bar { }
- var = x y ;
-
- DELETE_MODULE foo ;
-
- if $(var)
- {
- EXIT internal DELETE_MODULE failed to kill foo's variables ;
- }
- if [ RULENAMES foo ]
- {
- EXIT internal DELETE_MODULE failed to kill foo's rules: [ RULENAMES foo ] ;
- }
-}
-DEPENDS all : xx ;
-NOTFILE xx ;
-""")
-
-t.run_build_system("-ffile.jam", status=0)
-t.cleanup()
diff --git a/tools/build/v2/test/core_import_module.py b/tools/build/v2/test/core_import_module.py
deleted file mode 100644
index 2ab5942415..0000000000
--- a/tools/build/v2/test/core_import_module.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("code", """
-module a
-{
- rule r1 ( )
- {
- ECHO R1 ;
- }
-
- local rule l1 ( )
- {
- ECHO A.L1 ;
- }
-}
-module a2
-{
- rule r2 ( )
- {
- ECHO R2 ;
- }
-}
-IMPORT a2 : r2 : : a2.r2 ;
-
-rule a.l1 ( )
-{
- ECHO L1 ;
-}
-
-module b
-{
- IMPORT_MODULE a : b ;
- rule test
- {
- # Call rule visible via IMPORT_MODULE
- a.r1 ;
- # Call rule in global scope
- a2.r2 ;
- # Call rule in global scope. Doesn't find local rule
- a.l1 ;
- # Make l1 visible
- EXPORT a : l1 ;
- a.l1 ;
- }
-}
-
-IMPORT b : test : : test ;
-test ;
-
-module c
-{
- rule test
- {
- ECHO CTEST ;
- }
-}
-
-IMPORT_MODULE c : ;
-c.test ;
-
-actions do-nothing { }
-do-nothing all ;
-""")
-
-t.run_build_system("-fcode", stdout="""R1
-R2
-L1
-A.L1
-CTEST
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_language.py b/tools/build/v2/test/core_language.py
deleted file mode 100755
index 42b4366894..0000000000
--- a/tools/build/v2/test/core_language.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.set_tree("core-language")
-t.run_build_system(extra_args="-ftest.jam")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_nt_line_length.py b/tools/build/v2/test/core_nt_line_length.py
deleted file mode 100755
index 809c5e1ec5..0000000000
--- a/tools/build/v2/test/core_nt_line_length.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2001 Dave Abrahams
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("file.jam", """
-if $(NT)
-{
- #
- # Build a really long commandline. (> 10K characters).
- #
- ten = 0 1 2 3 4 5 6 7 8 9 ;
- 1x7chars = 0_____ ;
- # add a digit and multiply by 10
- 10x8chars = $(ten)$(1x7chars) ;
- # add a digit to each of 10 strings and multiply by 10
- 100x9chars = $(ten)$(10x8chars) ;
- # add a digit to each of 100 strings and multiply by 10
- 1000x10chars = $(ten)$(100x9chars) ;
-
- #
- # Cause line_length_test to be built
- #
- actions do_echo
- {
- echo $(text)
- }
-
- 400x10chars = $(ten[1-4])$(100x9chars) ;
-
- text on line_length_test = $(400x10chars) 40$(10x8chars[1-9]) 01234 ;
- text on line_length_test = $(1000x10chars) $(1000x10chars) ;
- JAMSHELL on line_length_test = % ;
- DEPENDS all : line_length_test ;
-
- do_echo line_length_test ;
-}
-else
-{
- NOCARE all ;
-}
-""")
-t.run_build_system("-ffile.jam")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_option_d2.py b/tools/build/v2/test/core_option_d2.py
deleted file mode 100755
index 0784954020..0000000000
--- a/tools/build/v2/test/core_option_d2.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2007 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("sleep.bat","""@setlocal
-@echo off
-timeout /T %1 /NOBREAK >nul
-""")
-
-t.write("file.jam", """
- actions .a.
- {
-echo [$(<:B)] 0
-echo [$(<:B)] 1
-echo [$(<:B)] 2
- }
-
- rule .a.
- {
- DEPENDS $(<) : $(>) ;
- }
-
- NOTFILE subtest ;
- .a. subtest_a : subtest ;
- .a. subtest_b : subtest ;
- DEPENDS all : subtest_a subtest_b ;
-""")
-
-t.run_build_system("-ffile.jam -d2", stdout="""...found 4 targets...
-...updating 2 targets...
-.a. subtest_a
-
-echo [subtest_a] 0
-echo [subtest_a] 1
-echo [subtest_a] 2
-
-[subtest_a] 0
-[subtest_a] 1
-[subtest_a] 2
-.a. subtest_b
-
-echo [subtest_b] 0
-echo [subtest_b] 1
-echo [subtest_b] 2
-
-[subtest_b] 0
-[subtest_b] 1
-[subtest_b] 2
-...updated 2 targets...
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_option_l.py b/tools/build/v2/test/core_option_l.py
deleted file mode 100755
index e05e9a9517..0000000000
--- a/tools/build/v2/test/core_option_l.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2007 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("sleep.bat","""@setlocal
-@echo off
-@REM timeout /T %1 /NOBREAK >nul
-ping 127.0.0.1 -n 2 -w 1000 >nul
-ping 127.0.0.1 -n %1 -w 1000 >nul
-@endlocal
-@exit /B 0
-""")
-
-t.write("file.jam", """
-
-if $(NT)
-{
- SLEEP = @call sleep.bat ;
-}
-else
-{
- SLEEP = sleep ;
-}
-
-actions .a. {
-echo 001
-$(SLEEP) 4
-echo 002
-}
-
-.a. sleeper ;
-
-DEPENDS all : sleeper ;
-""")
-
-t.run_build_system("-ffile.jam -d1 -l2", status=1)
-
-t.expect_output_line("2 second time limit exceeded")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_option_n.py b/tools/build/v2/test/core_option_n.py
deleted file mode 100755
index 3b91d8e632..0000000000
--- a/tools/build/v2/test/core_option_n.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2007 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("file.jam", """
- actions .a.
- {
-echo [$(<:B)] 0
-echo [$(<:B)] 1
-echo [$(<:B)] 2
- }
-
- rule .a.
- {
- DEPENDS $(<) : $(>) ;
- }
-
- NOTFILE subtest ;
- .a. subtest_a : subtest ;
- .a. subtest_b : subtest ;
- FAIL_EXPECTED subtest_b ;
- DEPENDS all : subtest_a subtest_b ;
-""")
-
-t.run_build_system("-ffile.jam -n", stdout="""...found 4 targets...
-...updating 2 targets...
-.a. subtest_a
-
-echo [subtest_a] 0
-echo [subtest_a] 1
-echo [subtest_a] 2
-
-.a. subtest_b
-
-echo [subtest_b] 0
-echo [subtest_b] 1
-echo [subtest_b] 2
-
-...updated 2 targets...
-""")
-
-t.expect_nothing_more()
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_parallel_actions.py b/tools/build/v2/test/core_parallel_actions.py
deleted file mode 100755
index dedf9b1183..0000000000
--- a/tools/build/v2/test/core_parallel_actions.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2006 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("sleep.bat","""@setlocal
-@echo off
-@REM timeout /T %1 /NOBREAK >nul
-ping 127.0.0.1 -n 2 -w 1000 >nul
-ping 127.0.0.1 -n %1 -w 1000 >nul
-@endlocal
-@exit /B 0
-""")
-
-t.write("file.jam", """
- if $(NT)
- {
- actions sleeper
- {
-echo [$(<:S)] 0
-@call sleep.bat 1
-echo [$(<:S)] 1
-@call sleep.bat 1
-echo [$(<:S)] 2
-@call sleep.bat $(<:B)
- }
- }
- else
- {
- actions sleeper
- {
-echo "[$(<:S)] 0" 1>&2
-sleep 1
-echo "[$(<:S)] 1"
-sleep 1
-echo "[$(<:S)] 2" 1>&2
-sleep $(<:B)
- }
- }
-
- rule sleeper
- {
- DEPENDS $(<) : $(>) ;
- }
-
- NOTFILE front ;
- sleeper 1.a : front ;
- sleeper 2.a : front ;
- sleeper 3.a : front ;
- sleeper 4.a : front ;
- NOTFILE choke ;
- DEPENDS choke : 1.a 2.a 3.a 4.a ;
- sleeper 1.b : choke ;
- sleeper 2.b : choke ;
- sleeper 3.b : choke ;
- sleeper 4.b : choke ;
- DEPENDS bottom : 1.b 2.b 3.b 4.b ;
- DEPENDS all : bottom ;
-""")
-
-t.run_build_system("-ffile.jam -j4", stdout="""...found 12 targets...
-...updating 8 targets...
-sleeper 1.a
-[.a] 0
-[.a] 1
-[.a] 2
-sleeper 2.a
-[.a] 0
-[.a] 1
-[.a] 2
-sleeper 3.a
-[.a] 0
-[.a] 1
-[.a] 2
-sleeper 4.a
-[.a] 0
-[.a] 1
-[.a] 2
-sleeper 1.b
-[.b] 0
-[.b] 1
-[.b] 2
-sleeper 2.b
-[.b] 0
-[.b] 1
-[.b] 2
-sleeper 3.b
-[.b] 0
-[.b] 1
-[.b] 2
-sleeper 4.b
-[.b] 0
-[.b] 1
-[.b] 2
-...updated 8 targets...
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_parallel_multifile_actions_1.py b/tools/build/v2/test/core_parallel_multifile_actions_1.py
deleted file mode 100755
index 9d995dbca4..0000000000
--- a/tools/build/v2/test/core_parallel_multifile_actions_1.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2007 Rene Rivera.
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("sleep.bat","""@setlocal
-@echo off
-@REM timeout /T %1 /NOBREAK >nul
-ping 127.0.0.1 -n 2 -w 1000 >nul
-ping 127.0.0.1 -n %1 -w 1000 >nul
-@endlocal
-@exit /B 0
-""")
-
-t.write("file.jam", """
-
- if $(NT)
- {
- SLEEP = @call sleep.bat ;
- }
- else
- {
- SLEEP = sleep ;
- }
-
- actions .gen. {
-echo 001
-$(SLEEP) 4
-echo 002
-}
- rule .use.1 { DEPENDS $(<) : $(>) ; }
- actions .use.1 {
-echo 003
-}
- rule .use.2 { DEPENDS $(<) : $(>) ; }
- actions .use.2 {
-$(SLEEP) 1
-echo 004
-}
-
- .gen. g1.generated g2.generated ;
- .use.1 u1.user : g1.generated ;
- .use.2 u2.user : g2.generated ;
-
- NOTFILE root ;
- DEPENDS g1.generated g2.generated : root ;
- DEPENDS all : u1.user u2.user ;
-""")
-
-t.run_build_system("-ffile.jam -j2", stdout="""...found 6 targets...
-...updating 4 targets...
-.gen. g1.generated
-001
-002
-.use.1 u1.user
-003
-.use.2 u2.user
-004
-...updated 4 targets...
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_parallel_multifile_actions_2.py b/tools/build/v2/test/core_parallel_multifile_actions_2.py
deleted file mode 100755
index aae0fe26e6..0000000000
--- a/tools/build/v2/test/core_parallel_multifile_actions_2.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic, Vladimir Prus
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Added to guard against a bug causing targets to be used before they
-# themselves have finished building. This used to happen for targets built by a
-# multi-file action that got triggered by another target, except when the target
-# triggering the action was the first one in the list of targets produced by
-# that action.
-#
-# Example:
-# When targets A and B were declared as created by a single action, with A
-# being the first one listed, and target B triggered running that action, then,
-# while the action was still running, target A was already reported as built,
-# causing other targets depending on target A to be built prematurely.
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
-t.write("sleep.bat","""@setlocal
-@echo off
-@REM timeout /T %1 /NOBREAK >nul
-ping 127.0.0.1 -n 2 -w 1000 >nul
-ping 127.0.0.1 -n %1 -w 1000 >nul
-@endlocal
-@exit /B 0
-""")
-
-t.write("file.jam", """
-
- if $(NT)
- {
- SLEEP = @call sleep.bat ;
- }
- else
- {
- SLEEP = sleep ;
- }
-
- actions link
- {
- $(SLEEP) 1
- echo 001 - linked
- }
-
- link dll lib ;
-
- actions install
- {
- echo 002 - installed
- }
-
- install installed_dll : dll ;
- DEPENDS installed_dll : dll ;
-
- DEPENDS all : lib installed_dll ;
-""")
-
-t.run_build_system("-ffile.jam -j2", stdout="""...found 4 targets...
-...updating 3 targets...
-link dll
-001 - linked
-install installed_dll
-002 - installed
-...updated 3 targets...
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_typecheck.py b/tools/build/v2/test/core_typecheck.py
deleted file mode 100644
index 31f408356d..0000000000
--- a/tools/build/v2/test/core_typecheck.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests the typechecking facilities.
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
-module .typecheck
-{
- rule [path] ( x )
- {
- if ! [ MATCH "^(::)" : $(x) ]
- {
- ECHO "Error: $(x) is not a path" ;
- return true ;
- }
- }
-}
-
-rule do ( [path] a )
-{
-}
-
-do $(ARGUMENT) ;
-
-actions dummy { }
-dummy all ;
-""")
-
-t.run_build_system("-ffile.jam -sARGUMENT=::a/b/c")
-t.run_build_system("-ffile.jam -sARGUMENT=a/b/c", status=1,
- stdout="""Error: a/b/c is not a path
-file.jam:18: in module scope
-*** argument error
-* rule do ( [path] a )
-* called with: ( a/b/c )
-* true a
-file.jam:16:see definition of rule 'do' being called
-""")
-
-t.cleanup()
diff --git a/tools/build/v2/test/core_update_now.py b/tools/build/v2/test/core_update_now.py
deleted file mode 100755
index d31a8c5607..0000000000
--- a/tools/build/v2/test/core_update_now.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2011 Steven Watanabe
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-
-def basic():
- # Basic test
-
- t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
- t.write("file.jam", """
-
-actions do-print
-{
- echo updating $(<)
-}
-
-NOTFILE target1 ;
-ALWAYS target1 ;
-do-print target1 ;
-
-UPDATE_NOW target1 ;
-
-DEPENDS all : target1 ;
-""")
-
- t.run_build_system("-ffile.jam", stdout="""...found 1 target...
-...updating 1 target...
-do-print target1
-updating target1
-...updated 1 target...
-...found 1 target...
-""")
-
- t.cleanup()
-
-def ignore_minus_n():
- # ignore-minus-n
-
- t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
- t.write("file.jam", """
-
-actions do-print
-{
- echo updating $(<)
-}
-
-NOTFILE target1 ;
-ALWAYS target1 ;
-do-print target1 ;
-
-UPDATE_NOW target1 : : ignore-minus-n ;
-
-DEPENDS all : target1 ;
-""")
-
- t.run_build_system("-ffile.jam -n", stdout="""...found 1 target...
-...updating 1 target...
-do-print target1
-
- echo updating target1
-
-updating target1
-...updated 1 target...
-...found 1 target...
-""")
-
- t.cleanup()
-
-def failed_target():
-
- t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
- t.write("file.jam", """
-
-actions fail
-{
- exit 1
-}
-
-NOTFILE target1 ;
-ALWAYS target1 ;
-fail target1 ;
-
-actions do-print
-{
- echo updating $(<)
-}
-
-NOTFILE target2 ;
-do-print target2 ;
-DEPENDS target2 : target1 ;
-
-UPDATE_NOW target1 : : ignore-minus-n ;
-
-DEPENDS all : target1 target2 ;
-""")
-
- t.run_build_system("-ffile.jam -n", stdout="""...found 1 target...
-...updating 1 target...
-fail target1
-
- exit 1
-
-...failed fail target1...
-...failed updating 1 target...
-...found 2 targets...
-...updating 1 target...
-do-print target2
-
- echo updating target2
-
-...updated 1 target...
-""")
-
- t.cleanup()
-
-def missing_target():
- t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
- t.write("file.jam", """
-
-actions do-print
-{
- echo updating $(<)
-}
-
-NOTFILE target2 ;
-do-print target2 ;
-DEPENDS target2 : target1 ;
-
-UPDATE_NOW target1 : : ignore-minus-n ;
-
-DEPENDS all : target1 target2 ;
-""")
-
- t.run_build_system("-ffile.jam -n", status=1, stdout="""don't know how to make target1
-...found 1 target...
-...can't find 1 target...
-...found 2 targets...
-...can't make 1 target...
-""")
-
- t.cleanup()
-
-# Make sure that if we call UPDATE_NOW with ignore-minus-n,
-# the target gets updated exactly once regardless of previous
-# calls to UPDATE_NOW with -n in effect.
-
-def build_once():
- t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
-
- t.write("file.jam", """
-
-actions do-print
-{
- echo updating $(<)
-}
-
-NOTFILE target1 ;
-ALWAYS target1 ;
-do-print target1 ;
-
-UPDATE_NOW target1 ;
-UPDATE_NOW target1 : : ignore-minus-n ;
-UPDATE_NOW target1 : : ignore-minus-n ;
-
-DEPENDS all : target1 ;
-""")
-
- t.run_build_system("-ffile.jam -n", stdout="""...found 1 target...
-...updating 1 target...
-do-print target1
-
- echo updating target1
-
-...updated 1 target...
-do-print target1
-
- echo updating target1
-
-updating target1
-...updated 1 target...
-...found 1 target...
-""")
-
- t.cleanup()
-
-basic()
-ignore_minus_n()
-failed_target()
-missing_target()
-build_once()
diff --git a/tools/build/v2/test/core_varnames.py b/tools/build/v2/test/core_varnames.py
deleted file mode 100644
index a94ed8fa58..0000000000
--- a/tools/build/v2/test/core_varnames.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This tests the core rule for enumerating the variable names in a module.
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("file.jam", """
-module foo
-{
- rule bar { }
- var1 = x y ;
- var2 = fubar ;
-}
-
-expected = var1 var2 ;
-names = [ VARNAMES foo ] ;
-if $(names) in $(expected) && $(expected) in $(names)
-{
- # everything OK
-}
-else
-{
- EXIT expected to find variables $(expected:J=", ") in module foo,
- but found $(names:J=", ") instead. ;
-}
-DEPENDS all : xx ;
-NOTFILE xx ;
-""")
-
-t.run_build_system("-ffile.jam", status=0)
-
-t.cleanup()
diff --git a/tools/build/v2/test/default_build.py b/tools/build/v2/test/default_build.py
deleted file mode 100644
index 33f2b5bbbe..0000000000
--- a/tools/build/v2/test/default_build.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that the default-build clause actually has an effect.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", "import gcc ;")
-t.write("jamfile.jam", "exe a : a.cpp : : debug release ;")
-t.write("a.cpp", "int main() {}\n")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.exe")
-t.expect_addition("bin/$toolset/release/a.exe")
-
-# Check that an explicitly-specified build variant suppresses default-build.
-t.rm("bin")
-t.run_build_system("release")
-t.expect_addition(BoostBuild.List("bin/$toolset/release/") * "a.exe a.obj")
-t.expect_nothing_more()
-
-# Now check that we can specify an explicit build request and that default-build
-# will be combined with it.
-t.run_build_system("optimization=space")
-t.expect_addition("bin/$toolset/debug/optimization-space/a.exe")
-t.expect_addition("bin/$toolset/release/optimization-space/a.exe")
-
-# Test that default-build must be identical in all alternatives. Error case.
-t.write("jamfile.jam", """
-exe a : a.cpp : : debug ;
-exe a : b.cpp : : ;
-""")
-expected="""error: default build must be identical in all alternatives
-main target is ./a
-with
-differing from previous default build <variant>debug
-
-"""
-t.run_build_system("-n --no-error-backtrace", status=1)
-t.fail_test(t.stdout().find("default build must be identical in all alternatives") == -1)
-
-# Test that default-build must be identical in all alternatives. No-error case:
-# empty default-build.
-t.write("jamfile.jam", """
-exe a : a.cpp : <variant>debug ;
-exe a : b.cpp : <variant>release ;
-""")
-t.run_build_system("-n --no-error-backtrace", status=0)
-
-
-# Now try a harder example: a default build which contains <define> should cause
-# <define> to be present when "b" is compiled. This happens only if
-# "build-project b" is placed first.
-t.write("jamfile.jam", """
-project : default-build <define>FOO ;
-build-project a ;
-build-project b ;
-""")
-
-t.write("a/jamfile.jam", """
-exe a : a.cpp ../b//b ;
-""")
-
-t.write("a/a.cpp", """
-#ifdef _WIN32
-__declspec(dllimport)
-#endif
-void foo();
-int main() { foo(); }
-""")
-
-t.write("b/jamfile.jam", """
-lib b : b.cpp ;
-""")
-
-t.write("b/b.cpp", """
-#ifdef FOO
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void foo() {}
-#endif
-""")
-
-t.run_build_system()
-
-t.cleanup()
diff --git a/tools/build/v2/test/default_features.py b/tools/build/v2/test/default_features.py
deleted file mode 100644
index 4d810caa71..0000000000
--- a/tools/build/v2/test/default_features.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that features with default values are always present in build properties
-# of any target.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# Declare *non-propagated* feature foo.
-t.write("jamroot.jam", """
-import feature : feature ;
-feature foo : on off ;
-""")
-
-# Note that '<foo>on' will not be propagated to 'd/l'.
-t.write("jamfile.jam", """
-exe hello : hello.cpp d//l ;
-""")
-
-t.write("hello.cpp", """
-#ifdef _WIN32
-__declspec(dllimport)
-#endif
-void foo();
-int main() { foo(); }
-""")
-
-t.write("d/jamfile.jam", """
-lib l : l.cpp : <foo>on:<define>FOO ;
-""")
-
-t.write("d/l.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-#ifdef FOO
-void foo() {}
-#endif
-""")
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/default_toolset.py b/tools/build/v2/test/default_toolset.py
deleted file mode 100755
index e430c68a7e..0000000000
--- a/tools/build/v2/test/default_toolset.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that the expected default toolset is used when no toolset is explicitly
-# specified on the command line or used from code via the using rule. Test that
-# the default toolset is correctly used just like any other explicitly used
-# toolset (e.g. toolset prerequisites, properties conditioned on toolset related
-# features, etc.).
-#
-# Note that we need to ignore regular site/user/test configuration files to
-# avoid them marking any toolsets not under our control as used.
-
-import BoostBuild
-
-
-# Line displayed by Boost Build when using the default toolset.
-configuring_default_toolset_message = \
- 'warning: Configuring default toolset "%s".'
-
-
-################################################################################
-#
-# test_conditions_on_default_toolset()
-# ------------------------------------
-#
-################################################################################
-
-def test_conditions_on_default_toolset():
- """Test that toolset and toolset subfeature conditioned properties get
- applied correctly when the toolset is selected by default. Implicitly tests
- that we can use the set-default-toolset rule to set the default toolset to
- be used by Boost Build.
- """
-
- t = BoostBuild.Tester("--user-config= --ignore-site-config",
- pass_toolset=False, use_test_config=False)
-
- toolset_name = "myCustomTestToolset"
- toolset_version = "v"
- toolset_version_unused = "v_unused"
- message_loaded = "Toolset '%s' loaded." % toolset_name
- message_initialized = "Toolset '%s' initialized." % toolset_name ;
-
- # Custom toolset.
- t.write(toolset_name + ".jam", """
-import feature ;
-ECHO "%(message_loaded)s" ;
-feature.extend toolset : %(toolset_name)s ;
-feature.subfeature toolset %(toolset_name)s : version : %(toolset_version)s %(toolset_version_unused)s ;
-rule init ( version ) { ECHO "%(message_initialized)s" ; }
-""" % {'message_loaded' : message_loaded ,
- 'message_initialized' : message_initialized,
- 'toolset_name' : toolset_name ,
- 'toolset_version' : toolset_version ,
- 'toolset_version_unused': toolset_version_unused})
-
- # Main Boost Build project script.
- t.write("jamroot.jam", """
-import build-system ;
-import errors ;
-import feature ;
-import notfile ;
-
-build-system.set-default-toolset %(toolset_name)s : %(toolset_version)s ;
-
-feature.feature description : : free incidental ;
-
-# We use a rule instead of an action to avoid problems with action output not
-# getting piped to stdout by the testing system.
-rule buildRule ( names : targets ? : properties * )
-{
- local descriptions = [ feature.get-values description : $(properties) ] ;
- ECHO "descriptions:" /$(descriptions)/ ;
- local toolset = [ feature.get-values toolset : $(properties) ] ;
- ECHO "toolset:" /$(toolset)/ ;
- local toolset-version = [ feature.get-values "toolset-$(toolset):version" : $(properties) ] ;
- ECHO "toolset-version:" /$(toolset-version)/ ;
-}
-
-notfile testTarget
- : @buildRule
- :
- :
- <description>stand-alone
- <toolset>%(toolset_name)s:<description>toolset
- <toolset>%(toolset_name)s-%(toolset_version)s:<description>toolset-version
- <toolset>%(toolset_name)s-%(toolset_version_unused)s:<description>toolset-version-unused ;
-""" % {'toolset_name' : toolset_name ,
- 'toolset_version' : toolset_version,
- 'toolset_version_unused': toolset_version_unused})
-
- t.run_build_system()
- t.expect_output_line(configuring_default_toolset_message % toolset_name)
- t.expect_output_line(message_loaded)
- t.expect_output_line(message_initialized)
- t.expect_output_line("descriptions: /stand-alone/ /toolset/ /toolset-version/")
- t.expect_output_line("toolset: /%s/" % toolset_name)
- t.expect_output_line("toolset-version: /%s/" % toolset_version)
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_default_toolset_on_os()
-# ----------------------------
-#
-################################################################################
-
-def test_default_toolset_on_os( os, expected_toolset ):
- """Test that the given toolset is used as the default toolset on the given
- os. Uses hardcoded knowledge of how Boost Build decides on which host OS it
- is currently running. Note that we must not do much after tricking Boost
- Build into believing it has a specific host OS as this might mess up other
- important internal Boost Build state.
- """
-
- t = BoostBuild.Tester("--user-config= --ignore-site-config",
- pass_toolset=False, use_test_config=False)
-
- t.write("jamroot.jam", "modules.poke os : .name : %s ;" % os)
-
- # We need to tell the test system to ignore stderr output as attempting to
- # load missing toolsets might cause random failures with which we are not
- # concerned in this test.
- t.run_build_system(stderr=None)
- t.expect_output_line(configuring_default_toolset_message % expected_toolset)
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_default_toolset_requirements()
-# -----------------------------------
-#
-################################################################################
-
-def test_default_toolset_requirements():
- """Test that default toolset's requirements get applied correctly.
- """
-
- t = BoostBuild.Tester("--user-config= --ignore-site-config",
- pass_toolset=False, use_test_config=False,
- ignore_toolset_requirements=False)
-
- toolset_name = "customTestToolsetWithRequirements"
-
- # Custom toolset.
- t.write(toolset_name + ".jam", """
-import feature ;
-import toolset ;
-feature.extend toolset : %(toolset_name)s ;
-toolset.add-requirements <description>toolset-requirement ;
-rule init ( ) { }
-""" % {'toolset_name': toolset_name})
-
- # Main Boost Build project script.
- t.write("jamroot.jam", """
-import build-system ;
-import errors ;
-import feature ;
-import notfile ;
-
-build-system.set-default-toolset %(toolset_name)s ;
-
-feature.feature description : : free incidental ;
-
-# We use a rule instead of an action to avoid problems with action output not
-# getting piped to stdout by the testing system.
-rule buildRule ( names : targets ? : properties * )
-{
- local descriptions = [ feature.get-values description : $(properties) ] ;
- ECHO "descriptions:" /$(descriptions)/ ;
- local toolset = [ feature.get-values toolset : $(properties) ] ;
- ECHO "toolset:" /$(toolset)/ ;
-}
-
-notfile testTarget
- : @buildRule
- :
- :
- <description>target-requirement
- <description>toolset-requirement:<description>conditioned-requirement
- <description>unrelated-condition:<description>unrelated-description ;
-""" % {'toolset_name': toolset_name})
-
- t.run_build_system()
- t.expect_output_line(configuring_default_toolset_message % toolset_name)
- t.expect_output_line("descriptions: /conditioned-requirement/ /target-requirement/ /toolset-requirement/")
- t.expect_output_line("toolset: /%s/" % toolset_name)
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-test_default_toolset_on_os("NT" , "msvc")
-test_default_toolset_on_os("LINUX" , "gcc" )
-test_default_toolset_on_os("CYGWIN" , "gcc" )
-test_default_toolset_on_os("SomeOtherOS", "gcc" )
-test_default_toolset_requirements()
-test_conditions_on_default_toolset()
diff --git a/tools/build/v2/test/dependency-test/a.cpp b/tools/build/v2/test/dependency-test/a.cpp
deleted file mode 100644
index e77a4308d3..0000000000
--- a/tools/build/v2/test/dependency-test/a.cpp
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include <a.h>
-# include "a.h"
-#include <x.h>
-
-int main()
-{
- return 0;
-}
diff --git a/tools/build/v2/test/dependency-test/a.h b/tools/build/v2/test/dependency-test/a.h
deleted file mode 100644
index ccecbb414f..0000000000
--- a/tools/build/v2/test/dependency-test/a.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
diff --git a/tools/build/v2/test/dependency-test/a_c.c b/tools/build/v2/test/dependency-test/a_c.c
deleted file mode 100644
index 18be81e274..0000000000
--- a/tools/build/v2/test/dependency-test/a_c.c
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include <a.h>
-# include "a.h"
-#include <x.h>
diff --git a/tools/build/v2/test/dependency-test/b.cpp b/tools/build/v2/test/dependency-test/b.cpp
deleted file mode 100644
index 0af83d3aa2..0000000000
--- a/tools/build/v2/test/dependency-test/b.cpp
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include "a.h"
-
-int main()
-{
-}
diff --git a/tools/build/v2/test/dependency-test/b.h b/tools/build/v2/test/dependency-test/b.h
deleted file mode 100644
index ccecbb414f..0000000000
--- a/tools/build/v2/test/dependency-test/b.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
diff --git a/tools/build/v2/test/dependency-test/c.cpp b/tools/build/v2/test/dependency-test/c.cpp
deleted file mode 100644
index 6d4170ff3c..0000000000
--- a/tools/build/v2/test/dependency-test/c.cpp
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include "x.h"
-
-int main()
-{
-}
diff --git a/tools/build/v2/test/dependency-test/e.cpp b/tools/build/v2/test/dependency-test/e.cpp
deleted file mode 100644
index 43814b98e3..0000000000
--- a/tools/build/v2/test/dependency-test/e.cpp
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include "x.h"
-
-int main()
-{
- return 0;
-}
diff --git a/tools/build/v2/test/dependency-test/foo.jam b/tools/build/v2/test/dependency-test/foo.jam
deleted file mode 100644
index 1cf44681da..0000000000
--- a/tools/build/v2/test/dependency-test/foo.jam
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-import generators ;
-import os ;
-import print ;
-
-type.register FOO : foo ;
-
-generators.register-standard foo.foo : FOO : CPP H ;
-
-nl = "
-" ;
-
-rule foo ( targets * : sources * : properties * )
-{
- # On NT, you need an exported symbol in order to have an
- # import lib generated
- # We won't really use the symbol defined here, just force
- # lib creation.
- if ( [ os.name ] = NT || [ modules.peek : OS ] in CYGWIN )
- && <main-target-type>LIB in $(properties)
- {
- .decl = "void __declspec(dllexport) foo(){}" ;
- }
-
- print.output $(<[1]) ;
- print.text $(.decl:E="//")$(nl) ;
- print.output $(<[2]) ;
- print.text "#include <z.h>"$(nl) ;
-}
diff --git a/tools/build/v2/test/dependency-test/foo.py b/tools/build/v2/test/dependency-test/foo.py
deleted file mode 100644
index f807bf4fa9..0000000000
--- a/tools/build/v2/test/dependency-test/foo.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2005, 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import bjam
-import b2.build.type as type
-import b2.build.generators as generators
-
-from b2.manager import get_manager
-
-type.register("FOO", ["foo"])
-generators.register_standard("foo.foo", ["FOO"], ["CPP", "H"])
-
-def prepare_foo(targets, sources, properties):
-
- if properties.get('os') in ['windows', 'cygwin']:
- bjam.call('set-target-variable', targets, "DECL",
- "void __declspec(dllexport) foo(){}")
-
- pass
-
-get_manager().engine().register_action("foo.foo",\
-"""echo -e $(DECL:E="//")\\n > $(<[1])
-echo -e "#include <z.h>\\n" > $(<[2])
-""", function=prepare_foo)
diff --git a/tools/build/v2/test/dependency-test/jamfile.jam b/tools/build/v2/test/dependency-test/jamfile.jam
deleted file mode 100644
index 590d0865ca..0000000000
--- a/tools/build/v2/test/dependency-test/jamfile.jam
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2002, 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-project test
- : requirements <include>src1
- ;
-
-exe a
- : x.foo a.cpp a_c.c
- ;
-
-exe b
- : b.cpp
- ;
-
-# Because of <define>, c.cpp will be compiled to a different
-# directory than everything for main target "a". Therefore,
-# without <implicit-dependency>, it won't find "x.h", which is part
-# of "a"'s dependency graph.
-exe c
- : c.cpp
- : <define>FOO <implicit-dependency>a
- ;
diff --git a/tools/build/v2/test/dependency-test/jamroot.jam b/tools/build/v2/test/dependency-test/jamroot.jam
deleted file mode 100644
index e779ecc913..0000000000
--- a/tools/build/v2/test/dependency-test/jamroot.jam
+++ /dev/null
@@ -1,7 +0,0 @@
-# Copyright 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import gcc ;
-import foo ;
diff --git a/tools/build/v2/test/dependency-test/src1/a.h b/tools/build/v2/test/dependency-test/src1/a.h
deleted file mode 100644
index d8b04ea0ad..0000000000
--- a/tools/build/v2/test/dependency-test/src1/a.h
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include "b.h"
diff --git a/tools/build/v2/test/dependency-test/src1/b.h b/tools/build/v2/test/dependency-test/src1/b.h
deleted file mode 100644
index 016b031374..0000000000
--- a/tools/build/v2/test/dependency-test/src1/b.h
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#include "c.h"
diff --git a/tools/build/v2/test/dependency-test/src1/c.h b/tools/build/v2/test/dependency-test/src1/c.h
deleted file mode 100644
index ccecbb414f..0000000000
--- a/tools/build/v2/test/dependency-test/src1/c.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
diff --git a/tools/build/v2/test/dependency-test/src1/z.h b/tools/build/v2/test/dependency-test/src1/z.h
deleted file mode 100644
index 7b8ca34e6d..0000000000
--- a/tools/build/v2/test/dependency-test/src1/z.h
+++ /dev/null
@@ -1,5 +0,0 @@
-/* Copyright 2003, 2004, 2006 Vladimir Prus */
-/* Distributed under the Boost Software License, Version 1.0. */
-/* (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) */
-
-extern int dummy_variabled_need_to_suppress_empty_file_warning_on_hp_cxx_compiler;
diff --git a/tools/build/v2/test/dependency-test/src2/b.h b/tools/build/v2/test/dependency-test/src2/b.h
deleted file mode 100644
index ccecbb414f..0000000000
--- a/tools/build/v2/test/dependency-test/src2/b.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
diff --git a/tools/build/v2/test/dependency-test/y.foo b/tools/build/v2/test/dependency-test/y.foo
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tools/build/v2/test/dependency-test/y.foo
+++ /dev/null
diff --git a/tools/build/v2/test/dependency_property.py b/tools/build/v2/test/dependency_property.py
deleted file mode 100644
index 2f19ba3f8d..0000000000
--- a/tools/build/v2/test/dependency_property.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Regression test: virtual targets with different dependency properties were
-# considered different by 'virtual-target.register', but the code which
-# determined target paths ignored dependency properties --- so both targets used
-# to be placed in the same location.
-
-import BoostBuild
-import string
-
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-lib foo : foo.cpp ;
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp : <library>foo ;
-""")
-
-t.write("hello.cpp", "int main() {}\n")
-
-t.write("foo.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void foo() {}
-""")
-
-t.run_build_system("--no-error-backtrace", status=1)
-t.fail_test(string.find(t.stdout(), "Duplicate name of actual target") == -1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/dependency_test.py b/tools/build/v2/test/dependency_test.py
deleted file mode 100644
index 7da20712ac..0000000000
--- a/tools/build/v2/test/dependency_test.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.set_tree("dependency-test")
-
-t.run_build_system()
-
-# Check that main target 'c' was able to find 'x.h' from 'a's dependency graph.
-t.expect_addition("bin/$toolset/debug/c.exe")
-
-# Check handling of first level includes.
-
-# Both 'a' and 'b' include "a.h" and should be updated.
-t.touch("a.h")
-t.run_build_system()
-
-t.expect_touch("bin/$toolset/debug/a.exe")
-t.expect_touch("bin/$toolset/debug/a.obj")
-t.expect_touch("bin/$toolset/debug/a_c.obj")
-t.expect_touch("bin/$toolset/debug/b.exe")
-t.expect_touch("bin/$toolset/debug/b.obj")
-# Now, <dependency> does not add a dependency. It sounds weird, but is
-# intentional. Need to rename <dependency> eventually.
-#t.expect_touch("bin/$toolset/debug/main-target-c/c.exe")
-t.ignore("*.tds")
-t.expect_nothing_more()
-
-# Only 'a' includes <a.h> and should be updated.
-t.touch("src1/a.h")
-t.run_build_system()
-
-t.expect_touch("bin/$toolset/debug/a.exe")
-t.expect_touch("bin/$toolset/debug/a.obj")
-t.expect_touch("bin/$toolset/debug/a_c.obj")
-t.ignore("*.tds")
-t.expect_nothing_more()
-
-# "src/a.h" includes "b.h" (in the same dir).
-t.touch("src1/b.h")
-t.run_build_system()
-t.expect_touch("bin/$toolset/debug/a.exe")
-t.expect_touch("bin/$toolset/debug/a.obj")
-t.expect_touch("bin/$toolset/debug/a_c.obj")
-t.ignore("*.tds")
-t.expect_nothing_more()
-
-# Included by "src/b.h". We had a bug: file included via "", like "b.h" is in
-# this case was not scanned at all.
-t.touch("src1/c.h")
-t.run_build_system()
-t.expect_touch("bin/$toolset/debug/a.exe")
-
-t.touch("b.h")
-t.run_build_system()
-t.expect_nothing_more()
-
-# Test dependency on a generated header.
-#
-# TODO: we also have to check that the generated header is found correctly if it
-# is different for different subvariants. Lacking any toolset support, this
-# check will be implemented later.
-t.touch("x.foo")
-t.run_build_system()
-t.expect_touch("bin/$toolset/debug/a.obj")
-t.expect_touch("bin/$toolset/debug/a_c.obj")
-
-# Check that generated headers are scanned for dependencies as well.
-t.touch("src1/z.h")
-t.run_build_system()
-t.expect_touch("bin/$toolset/debug/a.obj")
-t.expect_touch("bin/$toolset/debug/a_c.obj")
-
-# Regression test: on Windows, <includes> with absolute paths were not
-# considered when scanning dependencies.
-t.rm(".")
-
-t.write("jamroot.jam", """
-path-constant TOP : . ;
-exe app : main.cpp : <include>$(TOP)/include ;
-""");
-
-t.write("main.cpp", """
-#include <dir/header.h>
-int main() {}
-""")
-
-t.write("include/dir/header.h", "")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/main.obj")
-
-t.touch("include/dir/header.h")
-t.run_build_system()
-t.expect_touch("bin/$toolset/debug/main.obj")
-
-t.cleanup()
diff --git a/tools/build/v2/test/direct_request_test.py b/tools/build/v2/test/direct_request_test.py
deleted file mode 100644
index 32121f1d75..0000000000
--- a/tools/build/v2/test/direct_request_test.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/python
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# First check some startup.
-
-t.write("jamroot.jam", "")
-
-t.write("jamfile.jam", """
-exe a : a.cpp b ;
-lib b : b.cpp ;
-""")
-
-t.write("a.cpp", """
-void
-# ifdef _WIN32
-__declspec(dllimport)
-# endif
-foo();
-
-int main()
-{
- foo();
-}
-""")
-
-t.write("b.cpp", """
-#ifdef MACROS
-void
-# ifdef _WIN32
-__declspec(dllexport)
-# endif
-foo() {}
-#endif
-
-# ifdef _WIN32
-int __declspec(dllexport) force_implib_creation;
-# endif
-""")
-
-t.run_build_system(extra_args="define=MACROS")
-t.expect_addition("bin/$toolset/debug/"
- * (BoostBuild.List("a.obj b.obj b.dll a.exe")))
-
-
-# When building a debug version, the 'define' still applies.
-t.rm("bin")
-t.run_build_system(extra_args="debug define=MACROS")
-t.expect_addition("bin/$toolset/debug/"
- * (BoostBuild.List("a.obj b.obj b.dll a.exe")))
-
-
-# When building a release version, the 'define' still applies.
-t.write("jamfile.jam", """
-exe a : a.cpp b : <variant>debug ;
-lib b : b.cpp ;
-""")
-t.rm("bin")
-t.run_build_system(extra_args="release define=MACROS")
-
-
-# Regression test: a direct build request was not working when there was more
-# than one level of 'build-project'.
-t.rm(".")
-t.write('jamroot.jam', '')
-t.write('jamfile.jam', 'build-project a ;')
-t.write('a/jamfile.jam', 'build-project b ;')
-t.write('a/b/jamfile.jam', '')
-t.run_build_system("release")
-
-t.cleanup()
diff --git a/tools/build/v2/test/disambiguation.py b/tools/build/v2/test/disambiguation.py
deleted file mode 100644
index 91b14c5c17..0000000000
--- a/tools/build/v2/test/disambiguation.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that it is possible to add a suffix to a main target name to disambiguate
-# that main target from another, and that this does not affect the names of the
-# generated targets.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello.exe : hello.obj ;
-obj hello.obj : hello.cpp : <variant>debug ;
-obj hello.obj2 : hello.cpp : <variant>release ;
-""")
-
-t.write("hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/hello.exe")
-t.expect_addition("bin/$toolset/debug/hello.obj")
-t.expect_addition("bin/$toolset/release/hello.obj")
-
-t.cleanup()
diff --git a/tools/build/v2/test/dll_path.py b/tools/build/v2/test/dll_path.py
deleted file mode 100644
index f88e06168b..0000000000
--- a/tools/build/v2/test/dll_path.py
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2003. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# Test that the <dll-path> property is correctly set when using
-# <hardcode-dll-paths>true.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# The point of this test is to have exe "main" which uses library "b", which
-# uses library "a". When "main" is built with <hardcode-dll-paths>true, paths to
-# both libraries should be present as values of the <dll-path> feature. We
-# create a special target type which reports <dll-path> values on its sources
-# and compare the list of found values with our expectations.
-
-t.write("jamfile.jam", """
-exe main : main.cpp b//b ;
-explicit main ;
-path-list mp : main ;
-""")
-
-t.write("main.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-using dll_paths ;
-""")
-
-t.write("dll_paths.jam", """
-import type ;
-import generators ;
-import feature ;
-import sequence ;
-import print ;
-import "class" : new ;
-
-rule init ( )
-{
- type.register PATH_LIST : pathlist ;
-
- class dll-paths-list-generator : generator
- {
- rule __init__ ( )
- {
- generator.__init__ dll_paths.list : EXE : PATH_LIST ;
- }
-
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local dll-paths ;
- for local s in $(sources)
- {
- local a = [ $(s).action ] ;
- if $(a)
- {
- local p = [ $(a).properties ] ;
- dll-paths += [ $(p).get <dll-path> ] ;
- }
- }
- return [ generator.generated-targets $(sources) :
- [ $(property-set).add-raw $(dll-paths:G=<dll-path>) ] :
- $(project) $(name) ] ;
-
- }
- }
- generators.register [ new dll-paths-list-generator ] ;
-}
-
-rule list ( target : sources * : properties * )
-{
- local paths = [ feature.get-values <dll-path> : $(properties) ] ;
- paths = [ sequence.insertion-sort $(paths) ] ;
- print.output $(target) ;
- print.text $(paths) ;
-}
-""")
-
-t.write("dll_paths.py", """
-import bjam
-
-import b2.build.type as type
-import b2.build.generators as generators
-
-from b2.manager import get_manager
-
-def init():
- type.register("PATH_LIST", ["pathlist"])
-
- class DllPathsListGenerator(generators.Generator):
-
- def __init__(self):
- generators.Generator.__init__(self, "dll_paths.list", False, ["EXE"], ["PATH_LIST"])
-
- def generated_targets(self, sources, ps, project, name):
-
- dll_paths = []
- for s in sources:
- a = s.action()
- if a:
- p = a.properties()
- dll_paths += p.get('dll-path')
- dll_paths.sort()
- return generators.Generator.generated_targets(self,
- sources, ps.add_raw(["<dll-path>" + p for p in dll_paths]),
- project, name)
-
- generators.register(DllPathsListGenerator())
-
-command = \"\"\"
-echo $(PATHS) > $(<[1])
-\"\"\"
-def function(target, sources, ps):
- bjam.call('set-target-variable', target, "PATHS", ps.get('dll-path'))
-
-get_manager().engine().register_action("dll_paths.list", command, function=function)
-""")
-
-t.write("a/a.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-foo() {}
-""")
-
-t.write("a/jamfile.jam", """
-lib a : a.cpp ;
-""")
-
-t.write("b/b.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-bar() {}
-""")
-
-t.write("b/jamfile.jam", """
-lib b : b.cpp ../a//a ;
-""")
-
-t.run_build_system("hardcode-dll-paths=true")
-
-t.expect_addition("bin/$toolset/debug/mp.pathlist")
-
-es1 = t.adjust_names(["a/bin/$toolset/debug"])[0]
-es2 = t.adjust_names(["b/bin/$toolset/debug"])[0]
-
-t.expect_content_line("bin/$toolset/debug/mp.pathlist", "*" + es1);
-t.expect_content_line("bin/$toolset/debug/mp.pathlist", "*" + es2);
-
-t.cleanup()
diff --git a/tools/build/v2/test/double_loading.py b/tools/build/v2/test/double_loading.py
deleted file mode 100644
index 27004a1767..0000000000
--- a/tools/build/v2/test/double_loading.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# Regression test for double loading of the same Jamfile.
-t.write("jamroot.jam", "" )
-t.write("jamfile.jam", "build-project subdir ;")
-t.write("subdir/jamfile.jam", 'ECHO "Loaded subdir" ;')
-
-t.run_build_system(subdir="subdir")
-t.expect_output_line("Loaded subdir")
-
-
-# Regression test for a more contrived case. The top-level Jamfile refers to
-# subdir via use-project, while subdir's Jamfile is being loaded. The motivation
-# for why use-project references to subprojects are useful can be found at
-# http://article.gmane.org/gmane.comp.lib.boost.build/3906/
-t.write("jamroot.jam", "" )
-t.write("jamfile.jam", "use-project /subdir : subdir ;")
-t.write("subdir/jamfile.jam", "project subdir ;")
-
-t.run_build_system(subdir="subdir");
-
-t.cleanup()
diff --git a/tools/build/v2/test/example_libraries.py b/tools/build/v2/test/example_libraries.py
deleted file mode 100644
index c40af74de1..0000000000
--- a/tools/build/v2/test/example_libraries.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test the 'libraries' example.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.set_tree("../example/libraries")
-
-t.run_build_system()
-
-t.expect_addition(["app/bin/$toolset/debug/app.exe",
- "util/foo/bin/$toolset/debug/bar.dll"])
-
-t.cleanup()
diff --git a/tools/build/v2/test/example_make.py b/tools/build/v2/test/example_make.py
deleted file mode 100644
index 1e62702eff..0000000000
--- a/tools/build/v2/test/example_make.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test the 'make' example.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.set_tree("../example/make")
-
-t.run_build_system()
-
-t.expect_addition(["bin/$toolset/debug/main.cpp"])
-
-t.cleanup()
diff --git a/tools/build/v2/test/expansion.py b/tools/build/v2/test/expansion.py
deleted file mode 100644
index a9f6653563..0000000000
--- a/tools/build/v2/test/expansion.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("a.cpp", """
-#ifdef CF_IS_OFF
-int main() {}
-#endif
-""")
-
-t.write("b.cpp", """
-#ifdef CF_1
-int main() {}
-#endif
-""")
-
-t.write("c.cpp", """
-#ifdef FOO
-int main() {}
-#endif
-""")
-
-t.write("jamfile.jam", """
-# See if default value of composite feature 'cf' will be expanded to
-# <define>CF_IS_OFF.
-exe a : a.cpp ;
-
-# See if a subfeature in requirements is expanded.
-exe b : b.cpp : <cf>on-1 ;
-
-# See if conditional requirements are recursively expanded.
-exe c : c.cpp : <toolset>$toolset:<variant>release <variant>release:<define>FOO
- ;
-""")
-
-t.write("jamroot.jam", """
-import feature ;
-feature.feature cf : off on : composite incidental ;
-feature.compose <cf>off : <define>CF_IS_OFF ;
-feature.subfeature cf on : version : 1 2 : composite optional incidental ;
-feature.compose <cf-on:version>1 : <define>CF_1 ;
-""")
-
-t.expand_toolset("jamfile.jam")
-
-t.run_build_system()
-t.expect_addition(["bin/$toolset/debug/a.exe",
- "bin/$toolset/debug/b.exe",
- "bin/$toolset/release/c.exe"])
-
-t.rm("bin")
-
-
-# Test for issue BB60.
-
-t.write("test.cpp", """
-#include "header.h"
-int main() {}
-""")
-
-t.write("jamfile.jam", """
-project : requirements <toolset>$toolset:<include>foo ;
-exe test : test.cpp : <toolset>$toolset ;
-""")
-
-t.expand_toolset("jamfile.jam")
-t.write("foo/header.h", "\n")
-t.write("jamroot.jam", "")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/test.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/explicit.py b/tools/build/v2/test/explicit.py
deleted file mode 100644
index 43137402b4..0000000000
--- a/tools/build/v2/test/explicit.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello : hello.cpp ;
-exe hello2 : hello.cpp ;
-explicit hello2 ;
-""")
-
-t.write("hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system()
-t.ignore("*.tds")
-t.expect_addition(BoostBuild.List("bin/$toolset/debug/hello") * \
- [".exe", ".obj"])
-t.expect_nothing_more()
-
-t.run_build_system("hello2")
-t.expect_addition("bin/$toolset/debug/hello2.exe")
-
-t.rm(".")
-
-
-# Test that 'explicit' used in a helper rule applies to the current project, and
-# not to the Jamfile where the helper rule is defined.
-t.write("jamroot.jam", """
-rule myinstall ( name : target )
-{
- install $(name)-bin : $(target) ;
- explicit $(name)-bin ;
- alias $(name) : $(name)-bin ;
-}
-""")
-
-t.write("sub/a.cpp", """
-""")
-
-t.write("sub/jamfile.jam", """
-myinstall dist : a.cpp ;
-""")
-
-t.run_build_system(subdir="sub")
-t.expect_addition("sub/dist-bin/a.cpp")
-
-t.rm("sub/dist-bin")
-
-t.write("sub/jamfile.jam", """
-myinstall dist : a.cpp ;
-explicit dist ;
-""")
-
-t.run_build_system(subdir="sub")
-t.expect_nothing_more()
-
-t.cleanup()
diff --git a/tools/build/v2/test/free_features_request.py b/tools/build/v2/test/free_features_request.py
deleted file mode 100644
index 108072758a..0000000000
--- a/tools/build/v2/test/free_features_request.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2007.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that a free feature specified on the command line applies to all targets
-# ever built.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello : hello.cpp foo ;
-lib foo : foo.cpp ;
-""")
-
-t.write("hello.cpp", """
-extern void foo();
-#ifdef FOO
-int main() { foo(); }
-#endif
-""")
-
-t.write("foo.cpp", """
-#ifdef FOO
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void foo() {}
-#endif
-""")
-
-# If FOO is not defined when compiling the 'foo' target, we will get a link
-# error at this point.
-t.run_build_system("hello define=FOO")
-
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/gcc_runtime.py b/tools/build/v2/test/gcc_runtime.py
deleted file mode 100644
index 696b6a5fda..0000000000
--- a/tools/build/v2/test/gcc_runtime.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that on gcc, we correctly report a problem when a static runtime is
-# requested for building a shared library.
-
-import BoostBuild
-import string
-
-t = BoostBuild.Tester()
-
-# Create the needed files.
-t.write("jamroot.jam", "")
-
-t.write("jamfile.jam", """
-lib hello : hello.cpp ;
-""")
-
-t.write("hello.cpp", """
-int main() { }
-""")
-
-t.run_build_system("runtime-link=static")
-t.fail_test(string.find(t.stdout(),
- "On gcc, DLL can't be build with '<runtime-link>static'") == -1)
-
-t.run_build_system("link=static runtime-link=static")
-t.expect_addition("bin/$toolset/debug/link-static/runtime-link-static/hello.lib")
-
-t.cleanup()
diff --git a/tools/build/v2/test/generator_selection.py b/tools/build/v2/test/generator_selection.py
deleted file mode 100755
index e10cb0d3b2..0000000000
--- a/tools/build/v2/test/generator_selection.py
+++ /dev/null
@@ -1,140 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that generators get selected correctly.
-
-import BoostBuild
-
-
-################################################################################
-#
-# test_generator_added_after_already_building_a_target_of_its_target_type()
-# -------------------------------------------------------------------------
-#
-################################################################################
-
-def test_generator_added_after_already_building_a_target_of_its_target_type():
- """Regression test for a Boost Build bug causing it to not use a generator
- if it got added after already building a target of its target type.
- """
-
- t = BoostBuild.Tester()
-
- t.write("dummy.cpp", "void f() {}\n")
-
- t.write("jamroot.jam", """
-# Building this dummy target must not cause a later defined CPP target type
-# generator not to be recognized as viable.
-obj dummy : dummy.cpp ;
-alias the-other-obj : Other//other-obj ;
-""")
-
- t.write("Other/source.extension", "A dummy source file.")
-
- t.write("Other/mygen.jam", """
-import generators ;
-import os ;
-import type ;
-type.register MY_TYPE : extension ;
-generators.register-standard mygen.generate-a-cpp-file : MY_TYPE : CPP ;
-rule generate-a-cpp-file { ECHO Generating a CPP file... ; }
-if [ os.name ] = NT
-{
- actions generate-a-cpp-file { echo void g() {} > "$(<)" }
-}
-else
-{
- actions generate-a-cpp-file { echo "void g() {}" > "$(<)" }
-}
-""")
-
- t.write("Other/mygen.py", """
-import b2.build.generators as generators
-import b2.build.type as type
-
-from b2.manager import get_manager
-
-import os
-
-
-type.register('MY_TYPE', ['extension'])
-generators.register_standard('mygen.generate-a-cpp-file', ['MY_TYPE'], ['CPP'])
-if os.name == 'nt':
- action = 'echo void g() {} > "$(<)"'
-else:
- action = 'echo "void g() {}" > "$(<)"'
-def f(*args):
- print "Generating a CPP file..."
-
-get_manager().engine().register_action("mygen.generate-a-cpp-file",
- action, function=f)
-""")
-
- t.write("Other/jamfile.jam", """
-import mygen ;
-obj other-obj : source.extension ;
-""")
-
- t.run_build_system()
- t.expect_output_line("Generating a CPP file...")
- t.expect_addition("bin/$toolset/debug/dummy.obj")
- t.expect_addition("Other/bin/$toolset/debug/other-obj.obj")
-
- t.cleanup()
-
-
-################################################################################
-#
-# test_using_a_derived_source_type_created_after_generator_already_used()
-# -----------------------------------------------------------------------
-#
-################################################################################
-
-def test_using_a_derived_source_type_created_after_generator_already_used():
- """Regression test for a Boost Build bug causing it to not use a generator
- with a source type derived from one of the generator's sources but created
- only after already using the generator.
- """
-
- t = BoostBuild.Tester()
-
- t.write("dummy.cpp", "void f() {}\n")
-
- t.write("jamroot.jam", """
-# Building this dummy target must not cause a later defined UNGA_CPP target type
-# not to be recognized as a viable source type for building OBJ targets.
-obj dummy : dummy.cpp ;
-alias the-test-output : Other//other-obj ;
-""")
-
- t.write("Other/source.unga_cpp", "void g() {}\n")
-
- t.write("Other/jamfile.jam", """
-import type ;
-type.register UNGA_CPP : unga_cpp : CPP ;
-# We are careful not to do anything between defining our new UNGA_CPP target
-# type and using the CPP --> OBJ generator that could potentially cover the
-# Boost Build bug by clearing its internal viable source target type state.
-obj other-obj : source.unga_cpp ;
-""")
-
- t.run_build_system()
- t.expect_addition("bin/$toolset/debug/dummy.obj")
- t.expect_addition("Other/bin/$toolset/debug/other-obj.obj")
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-test_generator_added_after_already_building_a_target_of_its_target_type()
-test_using_a_derived_source_type_created_after_generator_already_used()
diff --git a/tools/build/v2/test/generators-test/a.cpp b/tools/build/v2/test/generators-test/a.cpp
deleted file mode 100644
index 7ef7f6872e..0000000000
--- a/tools/build/v2/test/generators-test/a.cpp
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-int foo();
-int bar();
-
-void z1(), z2();
-
-int main()
-{
- foo();
- bar();
- z1();
- z2();
- return 0;
-}
diff --git a/tools/build/v2/test/generators-test/b.cxx b/tools/build/v2/test/generators-test/b.cxx
deleted file mode 100644
index 85b41d3f9d..0000000000
--- a/tools/build/v2/test/generators-test/b.cxx
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-int foo() { return 0; }
diff --git a/tools/build/v2/test/generators-test/c.tui b/tools/build/v2/test/generators-test/c.tui
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tools/build/v2/test/generators-test/c.tui
+++ /dev/null
diff --git a/tools/build/v2/test/generators-test/d.wd b/tools/build/v2/test/generators-test/d.wd
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tools/build/v2/test/generators-test/d.wd
+++ /dev/null
diff --git a/tools/build/v2/test/generators-test/extra.jam b/tools/build/v2/test/generators-test/extra.jam
deleted file mode 100644
index d3500c73ca..0000000000
--- a/tools/build/v2/test/generators-test/extra.jam
+++ /dev/null
@@ -1,120 +0,0 @@
-# Copyright 2002, 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import type ;
-import generators ;
-import "class" : new ;
-import common ;
-
-type.register WHL : whl ;
-type.register DLP : dlp ;
-type.register WHL_LR0 : lr0 ;
-type.register WD : wd ;
-
-generators.register-standard extra.whale : WHL : CPP WHL_LR0 H H(%_symbols) ;
-generators.register-standard extra.dolphin : DLP : CPP ;
-generators.register-standard extra.wd : WD : WHL(%_parser) DLP(%_lexer) ;
-
-class wd-to-cpp : generator
-{
- rule __init__ ( * : * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) ;
- }
-
- rule run ( project name ? : property-set : source )
- {
- local new-sources ;
- if ! [ $(source).type ] in WHL DLP
- {
- local r1 = [ generators.construct $(project) $(name)
- : WHL : $(property-set) : $(source) ] ;
- local r2 = [ generators.construct $(project) $(name)
- : DLP : $(property-set) : $(source) ] ;
-
- new-sources = [ sequence.unique $(r1[2-]) $(r2[2-]) ] ;
- }
- else
- {
- new-sources = $(source) ;
- }
-
- local result ;
- for local i in $(new-sources)
- {
- local t = [ generators.construct $(project) $(name) : CPP
- : $(property-set) : $(i) ] ;
- result += $(t[2-]) ;
- }
- return $(result) ;
- }
-}
-generators.override extra.wd-to-cpp : extra.whale ;
-generators.override extra.wd-to-cpp : extra.dolphin ;
-
-generators.register [ new wd-to-cpp extra.wd-to-cpp : : CPP ] ;
-
-rule whale ( targets * : sources * : properties * )
-{
-}
-
-TOUCH = [ common.file-touch-command ] ;
-
-actions whale
-{
- echo "Whale consuming " $(>)
- $(TOUCH) $(<[1])
- $(TOUCH) $(<[2])
- $(TOUCH) $(<[3])
- $(TOUCH) $(<[4])
-}
-
-rule dolphin ( targets * : source * : properties * )
-{
-}
-
-actions dolphin
-{
- echo "Dolphin consuming" $(>)
- $(TOUCH) $(<)
-}
-
-rule wd ( targets * : source * : properties * )
-{
-}
-
-actions wd
-{
- echo "WD consuming" $(>)
- $(TOUCH) $(<[1])
- $(TOUCH) $(<[2])
-}
-
-rule x ( target * : source * : properties * )
-{
-}
-
-
-actions x
-{
- echo "X: source is " $(>)
- $(TOUCH) $(<[1])
-}
-
-rule x_pro ( target * : source * : properties * )
-{
-}
-
-
-actions x_pro
-{
- echo "X_PRO: source is " $(>)
- $(TOUCH) $(<[1])
- $(TOUCH) $(<[2])
-}
-
-
-
-
diff --git a/tools/build/v2/test/generators-test/jamfile.jam b/tools/build/v2/test/generators-test/jamfile.jam
deleted file mode 100644
index e5d9242d35..0000000000
--- a/tools/build/v2/test/generators-test/jamfile.jam
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2002, 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-project
- # This is needed to suppress a gcc warning on flex output, which otherwise
- # results in a test failure
- : requirements <define>YY_NO_UNPUT
- ;
-
-exe a : a.cpp b.cxx obj_1 obj_2 c.tui d.wd x.l y.x_pro lib//auxilliary ;
-# This should not cause second compilation of a.cpp
-exe f : a.cpp b.cxx obj_1 obj_2 lib//auxilliary ;
-
-obj obj_1 : z.cpp : <define>SELECT=1 ;
-obj obj_2 : z.cpp : <define>SELECT=2 ;
-
-nm-exe e : e.cpp ;
diff --git a/tools/build/v2/test/generators-test/jamroot.jam b/tools/build/v2/test/generators-test/jamroot.jam
deleted file mode 100644
index abe08bc438..0000000000
--- a/tools/build/v2/test/generators-test/jamroot.jam
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import "class" : new ;
-
-import lex ;
-import qt ;
-import extra ;
-
-import type ;
-
-type.register tUI : tui ;
-type.register tUIC_H ;
-type.set-generated-target-suffix tUIC_H : : h ;
-
-type.register X1 : x1 ;
-type.register X2 : x2 ;
-type.register X_PRO : x_pro ;
-
-import generators ;
-
-generators.register-standard qt.uic : tUI tUIC_H : CPP ;
-generators.register-standard qt.uic-h : tUI : tUIC_H ;
-
-# That's an interesting example. Currently, X_PRO will be processed
-# twice.
-generators.register-standard extra.x : X1 X2 : CPP ;
-generators.register-standard extra.x_pro : X_PRO : X1 X2 ;
-
-# The point of this setup is to implement this functionality:
-# "When main target type is EST_EXE, build OBJ from CPP-MARKED, not
-# for anything else (like CPP)."
-# Unfortunately, this does not really work.
-
-#if $(no-var) {
-import nm ;
-
-type.register CPP_MARKED : marked_cpp : CPP ;
-type.register POSITIONS : positions ;
-type.register NM.TARGET.CPP : target_cpp : CPP ;
-type.register NM_EXE : : EXE ;
-
-generators.register-standard nm.target-source : CPP_MARKED : NM.TARGET.CPP ;
-generators.register-standard nm.cpp-mark : CPP : CPP_MARKED POSITIONS ;
-
-class nm::target::cpp-obj-generator : generator
-{
- rule __init__ ( )
- {
- generator.__init__ nm.target-obj : NM.TARGET.CPP : OBJ ;
- }
-
- rule requirements ( )
- {
- return <main-target-type>NM_EXE ;
- }
-
- # Consider: is it OK to ignore all other generated targets except for the first?
- rule run ( project name ? : properties * : source : multiple ? )
- {
- if [ $(source).type ] = CPP {
- local converted = [ generators.construct $(project) : NM.TARGET.CPP : $(properties) : $(source) ] ;
- if $(converted[1])
- {
- local result = [ generators.construct $(project) : OBJ : $(properties) : $(converted[2]) ] ;
- return $(result) ;
- }
- else
- {
- return ;
- }
- }
- else
- {
- return ;
- }
- }
-}
-
-generators.register [ new nm::target::cpp-obj-generator ] ;
-
-generators.override nm.target-obj : all ;
-
-#}
-
-
-
-
-
-
-
-
-
diff --git a/tools/build/v2/test/generators-test/lex.jam b/tools/build/v2/test/generators-test/lex.jam
deleted file mode 100644
index 4ae5422e9b..0000000000
--- a/tools/build/v2/test/generators-test/lex.jam
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-import generators ;
-import feature ;
-import property ;
-import print ;
-
-type.register LEX : l ;
-
-generators.register-standard lex.lex : LEX : C ;
-
-rule lex ( targets * : sources * : properties * )
-{
- print.output $(<) ;
- # Need to suppress SunCC's warning about an empty source
- # file.
- print.text "void foo() {}
-" ;
-}
-
-actions lex
-{
-}
diff --git a/tools/build/v2/test/generators-test/lib/c.cpp b/tools/build/v2/test/generators-test/lib/c.cpp
deleted file mode 100644
index f32b624048..0000000000
--- a/tools/build/v2/test/generators-test/lib/c.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-int bar() { return 0; }
diff --git a/tools/build/v2/test/generators-test/lib/jamfile.jam b/tools/build/v2/test/generators-test/lib/jamfile.jam
deleted file mode 100644
index 48ff90fd21..0000000000
--- a/tools/build/v2/test/generators-test/lib/jamfile.jam
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright 2002 Dave Abrahams
-# Copyright 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-static-lib auxilliary : c.cpp ;
-
-lib auxilliary2 : c.cpp ;
diff --git a/tools/build/v2/test/generators-test/nm.jam b/tools/build/v2/test/generators-test/nm.jam
deleted file mode 100644
index 0b1ad984a0..0000000000
--- a/tools/build/v2/test/generators-test/nm.jam
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import modules ;
-import common ;
-
-TOUCH = [ common.file-touch-command ] ;
-
-rule target-source ( targets * : sources * : properties * )
-{
- if [ modules.peek : NT ]
- {
- main on $(<) = "int main() { return 0; }" ;
- }
- else
- {
- main on $(<) = "\"int main() { return 0; }\"" ;
- }
-}
-
-actions target-source
-{
- echo "NM target source consuming " $(>)
- echo $(main) > $(<)
-}
-
-rule cpp-mark ( targets * : sources * : properties * )
-{
-}
-
-actions cpp-mark
-{
- echo "CPP-MARK consuming " $(>)
- $(TOUCH) $(<[1])
- $(TOUCH) $(<[2])
-}
-
-
-
-
-
diff --git a/tools/build/v2/test/generators-test/qt.jam b/tools/build/v2/test/generators-test/qt.jam
deleted file mode 100644
index ec0ee33749..0000000000
--- a/tools/build/v2/test/generators-test/qt.jam
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2002 Dave Abrahams
-# Copyright 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import modules ;
-
-if [ modules.peek : NT ]
-{
- comment = // ;
-}
-else
-{
- comment = \"//\" ;
-}
-
-rule uic ( target : sources * : properties * )
-{
- comment on $(<) = $(comment) ;
-}
-rule uic-h ( target : sources * : properties * )
-{
- comment on $(<) = $(comment) ;
-}
-
-actions uic
-{
- echo $(comment) $(>) > $(<)
-}
-
-actions uic-h
-{
- echo $(comment) $(>) > $(<)
-}
diff --git a/tools/build/v2/test/generators-test/x.l b/tools/build/v2/test/generators-test/x.l
deleted file mode 100644
index 16beb4d9b8..0000000000
--- a/tools/build/v2/test/generators-test/x.l
+++ /dev/null
@@ -1,5 +0,0 @@
-%option noyywrap
-
-%%
-
-%% \ No newline at end of file
diff --git a/tools/build/v2/test/generators-test/y.x_pro b/tools/build/v2/test/generators-test/y.x_pro
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tools/build/v2/test/generators-test/y.x_pro
+++ /dev/null
diff --git a/tools/build/v2/test/generators-test/z.cpp b/tools/build/v2/test/generators-test/z.cpp
deleted file mode 100644
index 7826c1ca77..0000000000
--- a/tools/build/v2/test/generators-test/z.cpp
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
-
-#if SELECT == 1
-void z1() {}
-#elif SELECT == 2
-void z2() {}
-#else
-#error Invalid value of SELECT
-#endif
diff --git a/tools/build/v2/test/generators_test.py b/tools/build/v2/test/generators_test.py
deleted file mode 100644
index 549a8e0ace..0000000000
--- a/tools/build/v2/test/generators_test.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.set_tree("generators-test")
-
-t.run_build_system()
-
-t.expect_addition( "bin/$toolset/debug/" * BoostBuild.List( "a.obj b.obj c.h "
- + "c.cpp c.obj d_parser.whl d_lexer.dlp d_parser.cpp d_lexer.cpp "
- + "d_parser.lr0 d_parser.h d_parser_symbols.h x.c x.obj y.x1 y.x2 y.cpp "
- + "y.obj e.marked_cpp e.positions e.target_cpp e.obj"))
-t.expect_addition("bin/$toolset/debug/a.exe")
-t.expect_addition(["lib/bin/$toolset/debug/c.obj",
- "lib/bin/$toolset/debug/auxilliary.lib"])
-
-t.run_build_system(subdir='lib')
-t.expect_addition(["lib/bin/$toolset/debug/auxilliary2.dll"])
-
-t.run_build_system(subdir='lib', extra_args="link=static")
-t.expect_addition(["lib/bin/$toolset/debug/link-static/auxilliary2.lib"])
-
-t.cleanup()
diff --git a/tools/build/v2/test/implicit_dependency.py b/tools/build/v2/test/implicit_dependency.py
deleted file mode 100644
index 91e2644c8c..0000000000
--- a/tools/build/v2/test/implicit_dependency.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that <implicit-dependency> is respected even if the target referred to is
-# not built itself, but only referred to by <implicit-dependency>.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-make a.h : : gen-header ;
-explicit a.h ;
-
-exe hello : hello.cpp : <implicit-dependency>a.h ;
-
-import os ;
-if [ os.name ] = NT
-{
- actions gen-header
- {
- echo int i; > $(<)
- }
-}
-else
-{
- actions gen-header
- {
- echo "int i;" > $(<)
- }
-}
-""")
-
-t.write("hello.cpp", """
-#include "a.h"
-int main() { return i; }
-""")
-
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-t.rm("bin")
-
-t.write("jamroot.jam", """
-make dir/a.h : : gen-header ;
-explicit dir/a.h ;
-
-exe hello : hello.cpp : <implicit-dependency>dir/a.h ;
-
-import os ;
-if [ os.name ] = NT
-{
- actions gen-header
- {
- echo int i; > $(<)
- }
-}
-else
-{
- actions gen-header
- {
- echo "int i;" > $(<)
- }
-}
-""")
-
-t.write("hello.cpp", """
-#include "dir/a.h"
-int main() { return i; }
-""")
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/indirect_conditional.py b/tools/build/v2/test/indirect_conditional.py
deleted file mode 100644
index 6e9c181417..0000000000
--- a/tools/build/v2/test/indirect_conditional.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe a1 : a1.cpp : <conditional>@a1-rule ;
-
-rule a1-rule ( properties * )
-{
- if <variant>debug in $(properties)
- {
- return <define>OK ;
- }
-}
-
-exe a2 : a2.cpp : <conditional>@$(__name__).a2-rule
- <variant>debug:<optimization>speed ;
-
-rule a2-rule ( properties * )
-{
- if <optimization>speed in $(properties)
- {
- return <define>OK ;
- }
-}
-
-exe a3 : a3.cpp : <conditional>@$(__name__).a3-rule-1
- <conditional>@$(__name__).a3-rule-2 ;
-
-rule a3-rule-1 ( properties * )
-{
- if <optimization>speed in $(properties)
- {
- return <define>OK ;
- }
-}
-
-rule a3-rule-2 ( properties * )
-{
- if <variant>debug in $(properties)
- {
- return <optimization>speed ;
- }
-}
-""")
-
-t.write("a1.cpp", """
-#ifdef OK
-int main() {}
-#endif
-""")
-
-t.write("a2.cpp", """
-#ifdef OK
-int main() {}
-#endif
-""")
-
-t.write("a3.cpp", """
-#ifdef OK
-int main() {}
-#endif
-""")
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/a1.exe")
-t.expect_addition("bin/$toolset/debug/optimization-speed/a2.exe")
-t.expect_addition("bin/$toolset/debug/optimization-speed/a3.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/inherit_toolset.py b/tools/build/v2/test/inherit_toolset.py
deleted file mode 100644
index ca050d2c4f..0000000000
--- a/tools/build/v2/test/inherit_toolset.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import string
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write("a.cpp", """
-""")
-
-t.write("yfc1.jam", """
-import feature ;
-import generators ;
-
-feature.extend toolset : yfc1 ;
-rule init ( ) { }
-
-generators.register-standard yfc1.compile : CPP : OBJ : <toolset>yfc1 ;
-generators.register-standard yfc1.link : OBJ : EXE : <toolset>yfc1 ;
-
-actions compile { yfc1-compile }
-actions link { yfc1-link }
-""")
-
-t.write("yfc2.jam", """
-import feature ;
-import toolset ;
-
-feature.extend toolset : yfc2 ;
-toolset.inherit yfc2 : yfc1 ;
-rule init ( ) { }
-
-actions link { yfc2-link }
-""")
-
-t.write("jamfile.jam", """
-exe a : a.cpp ;
-""")
-
-t.write("jamroot.jam", """
-using yfc1 ;
-""")
-
-t.run_build_system("-n -d2 yfc1")
-t.fail_test(string.find(t.stdout(), "yfc1-link") == -1)
-
-# Make sure we do not have to explicitly 'use' yfc1.
-t.write("jamroot.jam", """
-using yfc2 ;
-""")
-
-t.run_build_system("-n -d2 yfc2")
-t.fail_test(string.find(t.stdout(), "yfc2-link") == -1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/inherited_dependency.py b/tools/build/v2/test/inherited_dependency.py
deleted file mode 100755
index 7d4895e07a..0000000000
--- a/tools/build/v2/test/inherited_dependency.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright (c) 2008 Steven Watanabe
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt) or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-tester = BoostBuild.Tester()
-
-
-################################################################################
-#
-# Test without giving the project an explicit id.
-#
-################################################################################
-
-tester.write("jamroot.jam", """
-lib test : test.cpp ;
-project : requirements <library>test ;
-build-project a ;
-""")
-
-tester.write("test.cpp", """
-#ifdef _WIN32
- __declspec(dllexport)
-#endif
-void foo() {}
-""")
-
-tester.write("a/test1.cpp", """
-int main() {}
-""")
-
-tester.write("a/jamfile.jam", """
-exe test1 : test1.cpp ;
-""")
-
-tester.run_build_system()
-
-tester.expect_addition("bin/$toolset/debug/test.obj")
-tester.expect_addition("a/bin/$toolset/debug/test1.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# Run the same test from the "a" directory.
-#
-################################################################################
-
-tester.run_build_system(subdir="a")
-
-tester.expect_addition("bin/$toolset/debug/test.obj")
-tester.expect_addition("a/bin/$toolset/debug/test1.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# This time, do give the project an id.
-#
-################################################################################
-
-tester.write("jamroot.jam", """
-lib test : test.cpp ;
-project test_project : requirements <library>test ;
-build-project a ;
-""")
-
-tester.run_build_system()
-
-tester.expect_addition("bin/$toolset/debug/test.obj")
-tester.expect_addition("a/bin/$toolset/debug/test1.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# Now, give the project an id in its attributes.
-#
-################################################################################
-
-tester.write("jamroot.jam", """
-lib test : test.cpp ;
-project : id test_project : requirements <library>test ;
-build-project a ;
-""")
-
-tester.run_build_system()
-
-tester.expect_addition("bin/$toolset/debug/test.obj")
-tester.expect_addition("a/bin/$toolset/debug/test1.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# Give the project an id in both ways at once.
-#
-################################################################################
-
-tester.write("jamroot.jam", """
-lib test : test.cpp ;
-project test_project1 : id test_project : requirements <library>test ;
-build-project a ;
-""")
-
-tester.run_build_system()
-
-tester.expect_addition("bin/$toolset/debug/test.obj")
-tester.expect_addition("a/bin/$toolset/debug/test1.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# Test an absolute path in native format.
-#
-################################################################################
-
-tester.write("jamroot.jam", """
-import path ;
-path-constant here : . ;
-current-location = [ path.native [ path.root [ path.make $(here) ] [ path.pwd ]
- ] ] ;
-project test : requirements <source>$(current-location)/a/test1.cpp ;
-exe test : test.cpp ;
-""")
-
-tester.run_build_system()
-tester.expect_addition("bin/$toolset/debug/test.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# Test an absolute path in canonical format.
-#
-################################################################################
-
-tester.write("jamroot.jam", """
-import path ;
-path-constant here : . ;
-current-location = [ path.root [ path.make $(here) ] [ path.pwd ] ] ;
-project test : requirements <source>$(current-location)/a/test1.cpp ;
-exe test : test.cpp ;
-""")
-
-tester.run_build_system()
-tester.expect_addition("bin/$toolset/debug/test.exe")
-
-tester.rm("bin")
-tester.rm("a/bin")
-
-
-################################################################################
-#
-# Test dependency properties (e.g. <source>) whose targets are specified using a
-# relative path.
-#
-################################################################################
-
-# Use jamroot.jam rather than jamfile.jam to avoid inheriting the <source> from
-# the parent, as that would make test3 a source of itself.
-tester.write("b/jamroot.jam", """
-obj test3 : test3.cpp ;
-""")
-
-tester.write("b/test3.cpp", """
-void bar() {}
-""")
-
-tester.write("jamroot.jam", """
-project test : requirements <source>b//test3 ;
-build-project a ;
-""")
-
-tester.write("a/jamfile.jam", """
-exe test : test1.cpp ;
-""")
-
-tester.write("a/test1.cpp", """
-void bar();
-int main() { bar(); }
-""")
-
-tester.run_build_system()
-tester.expect_addition("b/bin/$toolset/debug/test3.obj")
-tester.expect_addition("a/bin/$toolset/debug/test.exe")
-
-tester.rm("bin")
-tester.rm("a")
-tester.rm("jamroot.jam")
-tester.rm("test.cpp")
-
-
-################################################################################
-#
-# Test that source-location is respected.
-#
-################################################################################
-
-tester.write("build/jamroot.jam", """
-project : requirements <source>test.cpp : source-location ../src ;
-""")
-
-tester.write("src/test.cpp", """
-int main() {}
-""")
-
-tester.write("build/a/jamfile.jam", """
-project : source-location ../../a_src ;
-exe test : test1.cpp ;
-""")
-
-tester.write("a_src/test1.cpp", """
-""")
-
-tester.run_build_system(subdir="build/a")
-tester.expect_addition("build/a/bin/$toolset/debug/test.exe")
-
-tester.cleanup()
diff --git a/tools/build/v2/test/inline.py b/tools/build/v2/test/inline.py
deleted file mode 100644
index df6c098929..0000000000
--- a/tools/build/v2/test/inline.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-project : requirements <link>static ;
-exe a : a.cpp [ lib helper : helper.cpp ] ;
-""")
-
-t.write("a.cpp", """
-extern void helper();
-int main() {}
-""")
-
-t.write("helper.cpp", """
-void helper() {}
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/link-static/a__helper.lib")
-t.rm("bin/$toolset/debug/link-static/a__helper.lib")
-
-t.run_build_system("a__helper")
-t.expect_addition("bin/$toolset/debug/link-static/a__helper.lib")
-
-t.rm("bin")
-
-
-# Now check that inline targets with the same name but present in different
-# places are not confused with each other, nor with top-level targets.
-t.write("jamroot.jam", """
-project : requirements <link>static ;
-exe a : a.cpp [ lib helper : helper.cpp ] ;
-exe a2 : a.cpp [ lib helper : helper.cpp ] ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/link-static/a.exe")
-t.expect_addition("bin/$toolset/debug/link-static/a__helper.lib")
-t.expect_addition("bin/$toolset/debug/link-static/a2__helper.lib")
-
-
-# Check that the 'alias' target does not change the name of inline targets, and
-# that inline targets are explicit.
-t.write("jamroot.jam", """
-project : requirements <link>static ;
-alias a : [ lib helper : helper.cpp ] ;
-explicit a ;
-""")
-t.rm("bin")
-
-t.run_build_system()
-t.expect_nothing_more()
-
-t.run_build_system("a")
-t.expect_addition("bin/$toolset/debug/link-static/helper.lib")
-
-t.cleanup()
diff --git a/tools/build/v2/test/lib_source_property.py b/tools/build/v2/test/lib_source_property.py
deleted file mode 100644
index 2290c1fbf2..0000000000
--- a/tools/build/v2/test/lib_source_property.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Regression test: if a library had no explicit sources, but only <source>
-# properties, it was built as if it were a searched library, and the specified
-# sources were not compiled.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-lib a : : <source>a.cpp ;
-""")
-
-t.write("a.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void foo() {}
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.obj")
-
-t.rm("bin")
-
-
-# Now try with <conditional>.
-t.write("jamroot.jam", """
-rule test ( properties * )
-{
- return <source>a.cpp ;
-}
-lib a : : <conditional>@test ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.obj")
-
-t.cleanup()
diff --git a/tools/build/v2/test/library_chain.py b/tools/build/v2/test/library_chain.py
deleted file mode 100644
index a51056ef95..0000000000
--- a/tools/build/v2/test/library_chain.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that a chain of libraries works ok, no matter if we use static or shared
-# linking.
-
-import BoostBuild
-import string
-import os
-
-t = BoostBuild.Tester()
-
-t.write("jamfile.jam", """
-# Stage the binary, so that it will be relinked without hardcode-dll-paths. That
-# will check that we pass the correct -rpath-link, even if not passing -rpath.
-stage dist : main ;
-exe main : main.cpp b ;
-""")
-
-t.write("main.cpp", """
-void foo();
-int main() { foo(); }
-""")
-
-t.write("jamroot.jam", """
-""")
-
-t.write("a/a.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-gee() {}
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-geek() {}
-""")
-
-t.write("a/jamfile.jam", """
-lib a : a.cpp ;
-""")
-
-t.write("b/b.cpp", """
-void geek();
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-foo() { geek(); }
-""")
-
-t.write("b/jamfile.jam", """
-lib b : b.cpp ../a//a ;
-""")
-
-t.run_build_system("-d2", stderr=None)
-t.expect_addition("bin/$toolset/debug/main.exe")
-t.rm(["bin", "a/bin", "b/bin"])
-
-t.run_build_system("link=static")
-t.expect_addition("bin/$toolset/debug/link-static/main.exe")
-t.rm(["bin", "a/bin", "b/bin"])
-
-
-# Check that <library> works for static linking.
-t.write("b/jamfile.jam", """
-lib b : b.cpp : <library>../a//a ;
-""")
-
-t.run_build_system("link=static")
-t.expect_addition("bin/$toolset/debug/link-static/main.exe")
-
-t.rm(["bin", "a/bin", "b/bin"])
-
-t.write("b/jamfile.jam", """
-lib b : b.cpp ../a//a/<link>shared : <link>static ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/main.exe")
-
-t.rm(["bin", "a/bin", "b/bin"])
-
-
-# Test that putting a library in sources of a searched library works.
-t.write("jamfile.jam", """
-exe main : main.cpp png ;
-lib png : z : <name>png ;
-lib z : : <name>zzz ;
-""")
-
-t.run_build_system("-a -d+2", status=None, stderr=None)
-# Try to find the "zzz" string either in response file (for Windows compilers),
-# or in the standard output.
-rsp = t.adjust_names("bin/$toolset/debug/main.exe.rsp")[0]
-if os.path.exists(rsp) and ( string.find(open(rsp).read(), "zzz") != -1 ):
- pass
-elif string.find(t.stdout(), "zzz") != -1:
- pass
-else:
- t.fail_test(1)
-
-# Test the main -> libb -> liba chain in the case where liba is a file and not a
-# Boost.Build target.
-t.rm(".")
-
-t.write("jamroot.jam", "")
-
-t.write("a/jamfile.jam", """
-lib a : a.cpp ;
-install dist : a ;
-""")
-
-t.write("a/a.cpp", """
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-void a() {}
-""")
-
-t.run_build_system(subdir="a")
-t.expect_addition("a/dist/a.dll")
-
-if ( ( os.name == 'nt' ) or os.uname()[0].lower().startswith('cygwin') ) and \
- ( BoostBuild.get_toolset() != 'gcc' ):
- # This is a Windows import library -- we know the exact name.
- file = "a/dist/a.lib"
-else:
- file = t.adjust_names(["a/dist/a.dll"])[0]
-
-t.write("b/jamfile.jam", """
-lib b : b.cpp ../%s ;
-""" % file)
-
-t.write("b/b.cpp", """
-#if defined(_WIN32)
-__declspec(dllimport)
-#endif
-void a();
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-void b() { a(); }
-""")
-
-t.write("jamroot.jam", """
-exe main : main.cpp b//b ;
-""")
-
-t.write("main.cpp", """
-#if defined(_WIN32)
-__declspec(dllimport)
-#endif
-void b();
-int main() { b(); }
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/main.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/library_order.py b/tools/build/v2/test/library_order.py
deleted file mode 100644
index 7baecf93e4..0000000000
--- a/tools/build/v2/test/library_order.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that on compilers sensitive to library order on the linker's command line,
-# we generate the correct order.
-
-import BoostBuild
-import string
-
-
-t = BoostBuild.Tester()
-
-t.write("a.cpp", """
-void b();
-void a() { b(); }
-""")
-
-t.write("b.cpp", """
-void c();
-void b() { c(); }
-""")
-
-t.write("c.cpp", """
-void d();
-void c() { d(); }
-""")
-
-t.write("d.cpp", """
-void d() {}
-""")
-
-# The order of libraries in 'main' is crafted so that we get an error unless we do
-# something about the order ourselves.
-t.write("jamfile.jam", """
-exe main : main.cpp libd libc libb liba ;
-lib libd : d.cpp ;
-lib libc : c.cpp : <link>static <use>libd ;
-lib libb : b.cpp : <use>libc ;
-lib liba : a.cpp : <use>libb ;
-""")
-
-t.write("main.cpp", """
-void a();
-int main() { a(); }
-""")
-
-t.write("jamroot.jam", """
-""")
-
-t.run_build_system("-d2")
-t.expect_addition("bin/$toolset/debug/main.exe")
-
-
-# Test the order between searched libraries.
-t.write("jamfile.jam", """
-exe main : main.cpp png z ;
-lib png : z : <name>png ;
-lib z : : <name>zzz ;
-""")
-
-t.run_build_system("-a -n -d+2")
-t.fail_test(string.find(t.stdout(), "png") > string.find(t.stdout(), "zzz"))
-
-t.write("jamfile.jam", """
-exe main : main.cpp png z ;
-lib png : : <name>png ;
-lib z : png : <name>zzz ;
-""")
-
-t.run_build_system("-a -n -d+2")
-t.fail_test(string.find(t.stdout(), "png") < string.find(t.stdout(), "zzz"))
-
-
-# Test the order between prebuilt libraries.
-t.write("first.a", "")
-
-t.write("second.a", "")
-
-t.write("jamfile.jam", """
-exe main : main.cpp first second ;
-lib first : second : <file>first.a ;
-lib second : : <file>second.a ;
-""")
-
-t.run_build_system("-a -n -d+2")
-t.fail_test(string.find(t.stdout(), "first") > string.find(t.stdout(), "second"))
-
-t.write("jamfile.jam", """
-exe main : main.cpp first second ;
-lib first : : <file>first.a ;
-lib second : first : <file>second.a ;
-""")
-
-t.run_build_system("-a -n -d+2")
-t.fail_test(string.find(t.stdout(), "first") < string.find(t.stdout(), "second"))
-
-t.cleanup()
diff --git a/tools/build/v2/test/library_property.py b/tools/build/v2/test/library_property.py
deleted file mode 100644
index 9430bb7054..0000000000
--- a/tools/build/v2/test/library_property.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that the <library> property has no effect on "obj" targets. Previously,
-# it affected all targets, so
-#
-# project : requirements <library>foo ;
-# exe a : a.cpp helper ;
-# obj helper : helper.cpp : <optimization>off ;
-#
-# caused 'foo' to be built with and without optimization.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-project : requirements <library>lib//x ;
-exe a : a.cpp foo ;
-obj foo : foo.cpp : <variant>release ;
-""")
-
-t.write("a.cpp", """
-void aux();
-int main() { aux(); }
-""")
-
-t.write("foo.cpp", """
-void gee();
-void aux() { gee(); }
-""")
-
-t.write("lib/x.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-gee() {}
-""")
-
-t.write("lib/jamfile.jam", """
-lib x : x.cpp ;
-""")
-
-t.write("lib/jamroot.jam", """
-""")
-
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.exe")
-t.expect_nothing("lib/bin/$toolset/release/x.obj")
-
-t.cleanup()
diff --git a/tools/build/v2/test/load_order.py b/tools/build/v2/test/load_order.py
deleted file mode 100644
index 183bf2440b..0000000000
--- a/tools/build/v2/test/load_order.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2004 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that we load parent projects before loading children.
-
-import BoostBuild
-import string
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-use-project /child : child ;
-ECHO "Setting parent requirements" ;
-project : requirements <define>PASS_THE_TEST ;
-alias x : child//main ;
-""")
-
-t.write("child/jamfile.jam", """
-ECHO "Setting child requirements" ;
-project /child ;
-exe main : main.cpp ;
-""")
-
-t.write("child/main.cpp", """
-#if defined(PASS_THE_TEST)
-int main() {}
-#endif
-""")
-
-t.run_build_system()
-
-t.expect_addition("child/bin/$toolset/debug/main.exe")
-t.fail_test(string.find(t.stdout(), "Setting child requirements") <
- string.find(t.stdout(), "Setting parent requirements"))
-
-
-# Regression test: parent requirements were ignored in some cases.
-t.rm(".")
-t.write("jamroot.jam", """
-build-project src ;
-""")
-
-t.write("src/jamfile.jam", """
-project : requirements <define>EVERYTHING_OK ;
-""")
-
-t.write("src/app/jamfile.jam", """
-exe test : test.cpp ;
-""")
-
-t.write("src/app/test.cpp", """
-#ifdef EVERYTHING_OK
-int main() {}
-#endif
-""")
-
-t.run_build_system(subdir="src/app")
-t.expect_addition("src/app/bin/$toolset/debug/test.exe")
-
-# child/child2 used to be loaded before child
-t.rm(".")
-t.write("jamroot.jam", """
-use-project /child/child2 : child/child2 ;
-rule parent-rule ( )
-{
- ECHO "Running parent-rule" ;
-}
-""")
-
-t.write("child/jamfile.jam", """
-""")
-
-t.write("child/child1/jamfile.jam", """
-""")
-
-t.write("child/child2/jamfile.jam", """
-parent-rule ;
-""")
-
-
-t.run_build_system(subdir="child/child1")
-t.expect_output_line("Running parent-rule")
-
-t.cleanup()
diff --git a/tools/build/v2/test/loop.py b/tools/build/v2/test/loop.py
deleted file mode 100644
index f01b959735..0000000000
--- a/tools/build/v2/test/loop.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import string
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-lib main : main.cpp l ;
-lib l : l.cpp main ;
-""")
-
-t.write("main.cpp", "")
-
-t.write("l.cpp", "")
-
-t.run_build_system("--no-error-backtrace", status=1)
-t.fail_test(string.find(t.stdout(),
- "error: Recursion in main target references") == -1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/make_rule.py b/tools/build/v2/test/make_rule.py
deleted file mode 100644
index 3892009057..0000000000
--- a/tools/build/v2/test/make_rule.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test the 'make' rule.
-
-import BoostBuild
-import string
-
-t = BoostBuild.Tester(pass_toolset=1)
-
-t.write("jamroot.jam", """
-import feature ;
-feature.feature test_feature : : free ;
-
-import toolset ;
-toolset.flags creator STRING : <test_feature> ;
-
-actions creator
-{
- echo $(STRING) > $(<)
-}
-
-make foo.bar : : creator : <test_feature>12345678 ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/foo.bar")
-t.fail_test(string.find(t.read("bin/$toolset/debug/foo.bar"), "12345678") == -1)
-
-
-# Regression test. Make sure that if a main target is requested two times, and
-# build requests differ only in incidental properties, the main target is
-# created only once. The bug was discovered by Kirill Lapshin.
-t.write("jamroot.jam", """
-# Make sure that an incidental property does not cause a second creation of
-# 'hello1.cpp'.
-exe a : dir//hello1.cpp ;
-exe b : dir//hello1.cpp/<hardcode-dll-paths>true ;
-""")
-
-t.write("dir/jamfile.jam", """
-import common ;
-make hello1.cpp : hello.cpp : common.copy ;
-""")
-
-t.write("dir/hello.cpp", """
-int main() {}
-""")
-
-# Show only action names.
-t.run_build_system("-d1 -n")
-t.fail_test(t.stdout().count("copy") != 1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/module-actions/boost-build.jam b/tools/build/v2/test/module-actions/boost-build.jam
deleted file mode 100644
index 377f6ec023..0000000000
--- a/tools/build/v2/test/module-actions/boost-build.jam
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-boost-build . ;
diff --git a/tools/build/v2/test/module-actions/bootstrap.jam b/tools/build/v2/test/module-actions/bootstrap.jam
deleted file mode 100644
index 6b024cac98..0000000000
--- a/tools/build/v2/test/module-actions/bootstrap.jam
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Demonstration that module variables have the right effect in actions.
-
-
-# Top-level rule that causes a target to be built by invoking the specified
-# action.
-rule make ( target : sources * : act )
-{
- DEPENDS all : $(target) ;
- DEPENDS $(target) : $(sources) ;
- $(act) $(target) : $(sources) ;
-}
-
-
-X1 = X1-global ;
-X2 = X2-global ;
-X3 = X3-global ;
-
-module A
-{
- X1 = X1-A ;
-
- rule act ( target )
- {
- NOTFILE $(target) ;
- ALWAYS $(target) ;
- }
-
- actions act { echo A.act $(<): $(X1) $(X2) $(X3) }
-
- make t1 : : A.act ;
- make t2 : : A.act ;
- make t3 : : A.act ;
-}
-
-module B
-{
- X2 = X2-B ;
-
- actions act { echo B.act $(<): $(X1) $(X2) $(X3) }
-
- make t1 : : B.act ;
- make t2 : : B.act ;
- make t3 : : B.act ;
-}
-
-actions act { echo act $(<): $(X1) $(X2) $(X3) }
-
-make t1 : : act ;
-make t2 : : act ;
-make t3 : : act ;
-
-X1 on t1 = X1-t1 ;
-X2 on t2 = X2-t2 ;
-X3 on t3 = X3-t3 ;
-
-DEPENDS all : t1 t2 t3 ;
diff --git a/tools/build/v2/test/module_actions.py b/tools/build/v2/test/module_actions.py
deleted file mode 100644
index 9f6da7ebc8..0000000000
--- a/tools/build/v2/test/module_actions.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-import re
-
-spaces_re = re.compile("\ \ +")
-trailing_spaces_re = re.compile("\ +\n")
-
-t = BoostBuild.Tester("-d+1", pass_toolset=0)
-
-t.set_tree('module-actions')
-
-# Note that the following string contains some trailing spaces that should not
-# be removed.
-expected_output = """...found 4 targets...
-...updating 3 targets...
-A.act t1
-A.act t1: X1-t1
-B.act t1
-B.act t1: X1-t1 X2-B
-act t1
-act t1: X1-t1 X2-global X3-global
-A.act t2
-A.act t2: X1-A X2-t2
-B.act t2
-B.act t2: X2-t2
-act t2
-act t2: X1-global X2-t2 X3-global
-A.act t3
-A.act t3: X1-A X3-t3
-B.act t3
-B.act t3: X2-B X3-t3
-act t3
-act t3: X1-global X2-global X3-t3
-...updated 3 targets...
-"""
-
-# On Unixes, call to 'echo 1 2 3' produces '1 2 3' (note the spacing)
-# Accommodate that fact.
-if os.name != 'nt':
- expected_output = re.sub(spaces_re, " ", expected_output)
- expected_output = re.sub(trailing_spaces_re, "\n", expected_output)
-
-t.run_build_system(stdout=expected_output)
-t.expect_nothing_more()
-t.cleanup()
diff --git a/tools/build/v2/test/ndebug.py b/tools/build/v2/test/ndebug.py
deleted file mode 100644
index 123576f316..0000000000
--- a/tools/build/v2/test/ndebug.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that building with optimization brings the NDEBUG define, and, more
-# importantly, that dependency targets are built with NDEBUG as well, even if
-# they are not directly requested.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-exe hello : hello.cpp lib//lib1 ;
-""")
-
-t.write("hello.cpp", """
-#ifdef NDEBUG
-void foo();
-int main() { foo(); }
-#endif
-""")
-
-t.write("lib/jamfile.jam", """
-lib lib1 : lib1.cpp ;
-""")
-t.write("lib/lib1.cpp", """
-#ifdef NDEBUG
-void foo() {}
-#endif
-""")
-
-# 'release' builds should get the NDEBUG define. We use static linking to avoid
-# messing with imports/exports on Windows.
-t.run_build_system("link=static release")
-
-t.cleanup()
diff --git a/tools/build/v2/test/no_type.py b/tools/build/v2/test/no_type.py
deleted file mode 100644
index 0668e53ef5..0000000000
--- a/tools/build/v2/test/no_type.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that we cannot specify targets of unknown type as sources. This is based
-# on the fact that Unix 'ar' will happily consume just about anything.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-import gcc ;
-static-lib a : a.foo ;
-""")
-
-t.write("a.foo", "")
-
-t.run_build_system(status=1)
-
-t.cleanup()
diff --git a/tools/build/v2/test/notfile.py b/tools/build/v2/test/notfile.py
deleted file mode 100644
index f4db796654..0000000000
--- a/tools/build/v2/test/notfile.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2005.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Basic tests for the 'notfile' rule.
-
-import BoostBuild
-import string
-import os
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-import notfile ;
-notfile say : "echo hi" ;
-
-exe hello : hello.cpp ;
-notfile hello_valgrind : @valgrind : hello ;
-
-actions valgrind
-{
- valgrind $(>[1])
-}
-""")
-
-t.write("hello.cpp", """
-#include <iostream>
-int main() { std::cout << "Hello!\\n"; }
-""")
-
-
-t.run_build_system("-n -d+2")
-
-t.fail_test(string.find(t.stdout(), "echo hi") == -1)
-
-name = t.adjust_names(["bin/$toolset/debug/hello.exe"])[0]
-name = apply(os.path.join, string.split(name, "/"));
-c = "valgrind *" + name
-t.expect_output_line(c)
-
-t.cleanup()
diff --git a/tools/build/v2/test/ordered_include.py b/tools/build/v2/test/ordered_include.py
deleted file mode 100644
index 3de7295328..0000000000
--- a/tools/build/v2/test/ordered_include.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright (c) 2008 Steven Watanabe
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt) or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-tester = BoostBuild.Tester()
-
-tester.write("jamroot.jam", """
-obj test : test.cpp : <include>a&&b ;
-""")
-
-tester.write("test.cpp", """
-#include <test1.hpp>
-#include <test2.hpp>
-int main() {}
-""")
-
-tester.write("a/test1.hpp", """
-""")
-
-tester.write("b/test2.hpp", """
-""")
-
-tester.run_build_system()
-
-tester.expect_addition("bin/$toolset/debug/test.obj")
-
-tester.touch("a/test1.hpp")
-tester.run_build_system()
-tester.expect_touch("bin/$toolset/debug/test.obj")
-
-tester.touch("b/test2.hpp")
-tester.run_build_system()
-tester.expect_touch("bin/$toolset/debug/test.obj")
-
-tester.cleanup()
diff --git a/tools/build/v2/test/out_of_tree.py b/tools/build/v2/test/out_of_tree.py
deleted file mode 100644
index 9e89cefbe1..0000000000
--- a/tools/build/v2/test/out_of_tree.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) FILL SOMETHING HERE 2005.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that we can build a project when the current directory is outside of
-# that project tree, that is, 'bjam some_dir' works.
-
-import BoostBuild
-
-# Create a temporary working directory.
-t = BoostBuild.Tester()
-
-# Create the needed files.
-t.write("p1/jamroot.jam", """
-exe hello : hello.cpp ;
-""")
-
-t.write("p1/hello.cpp", """
-int main() {}
-""")
-
-t.write("p2/jamroot.jam", """
-exe hello2 : hello.cpp ;
-exe hello3 : hello.cpp ;
-""")
-
-t.write("p2/hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system("p1 p2//hello3")
-
-t.expect_addition("p1/bin/$toolset/debug/hello.exe")
-t.expect_addition("p2/bin/$toolset/debug/hello3.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/path_features.py b/tools/build/v2/test/path_features.py
deleted file mode 100644
index 231030be86..0000000000
--- a/tools/build/v2/test/path_features.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", "import gcc ;")
-t.write("jamfile.jam", "lib a : a.cpp : <include>. ;")
-t.write("a.cpp", """
-#include <a.h>
-void
-# ifdef _WIN32
-__declspec(dllexport)
-# endif
-foo() {}
-""")
-t.write("a.h", "//empty file\n")
-
-t.write("d/jamfile.jam", "exe b : b.cpp ..//a ; ")
-t.write("d/b.cpp", """
-void foo();
-int main() { foo(); }
-""")
-
-t.run_build_system(subdir="d")
-
-# Now test that path features with a condition work as well.
-t.write("jamfile.jam", "lib a : a.cpp : <variant>debug:<include>. ;")
-t.rm("bin")
-t.run_build_system(subdir="d")
-
-# Test path features with condition in usage requirements.
-t.write("jamfile.jam", """
-lib a : a.cpp : <include>. : : <variant>debug:<include>. ;
-""")
-t.write("d/b.cpp", """
-#include <a.h>
-void foo();
-int main() { foo(); }
-""")
-t.rm("d/bin")
-t.run_build_system(subdir="d")
-
-# Test that absolute paths inside requirements work. The problem appeared
-# only when building targets in subprojects.
-t.write("jamroot.jam", "")
-t.write("jamfile.jam", "build-project x ; ")
-t.write("x/jamfile.jam", """
-local pwd = [ PWD ] ;
-project : requirements <include>$(pwd)/x/include ;
-exe m : m.cpp : <include>$(pwd)/x/include2 ;
-""")
-t.write("x/m.cpp", """
-#include <h1.hpp>
-#include <h2.hpp>
-int main() {}
-""")
-t.write("x/include/h1.hpp", "\n")
-t.write("x/include2/h2.hpp", "\n")
-
-t.run_build_system()
-t.expect_addition("x/bin/$toolset/debug/m.exe")
-
-# Test that "&&" in path features is handled correctly.
-t.rm("bin")
-t.write("jamfile.jam", "build-project sub ;")
-t.write("sub/jamfile.jam", """
-exe a : a.cpp : <include>../h1&&../h2 ;
-""")
-t.write("sub/a.cpp", """
-#include <header.h>
-int main() { return OK; }
-""")
-t.write("h2/header.h", """
-const int OK = 0;
-""")
-t.run_build_system()
-t.expect_addition("sub/bin/$toolset/debug/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/prebuilt.py b/tools/build/v2/test/prebuilt.py
deleted file mode 100644
index 250cdc8201..0000000000
--- a/tools/build/v2/test/prebuilt.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002, 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that we can use already built sources
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.set_tree('prebuilt')
-
-t.expand_toolset("ext/jamroot.jam")
-t.expand_toolset("jamroot.jam")
-
-# First, build the external project.
-t.run_build_system("debug release", subdir="ext")
-
-# Then pretend that we do not have the sources for the external project, and can
-# only use compiled binaries.
-t.copy("ext/jamfile2.jam", "ext/jamfile.jam")
-t.expand_toolset("ext/jamfile.jam")
-
-# Now check that we can build the main project and that the correct prebuilt
-# file is picked depending on the variant. This also checks that the correct
-# includes for prebuilt libraries are used.
-t.run_build_system("debug release")
-t.expect_addition("bin/$toolset/debug/hello.exe")
-t.expect_addition("bin/$toolset/release/hello.exe")
-
-t.rm("bin")
-
-
-# Now test that prebuilt file specified by absolute name works too.
-t.copy("ext/jamfile3.jam", "ext/jamfile.jam")
-t.expand_toolset("ext/jamfile.jam")
-t.run_build_system("debug release")
-t.expect_addition("bin/$toolset/debug/hello.exe")
-t.expect_addition("bin/$toolset/release/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/prebuilt/ext/jamfile2.jam b/tools/build/v2/test/prebuilt/ext/jamfile2.jam
deleted file mode 100644
index cd59663d6d..0000000000
--- a/tools/build/v2/test/prebuilt/ext/jamfile2.jam
+++ /dev/null
@@ -1,39 +0,0 @@
-
-import os ;
-
-local dll-suffix = so ;
-local prefix = "" ;
-if [ os.name ] in CYGWIN NT
-{
- if [ MATCH ^(gcc) : $toolset ]
- {
- dll-suffix = dll ;
- }
- else
- {
- dll-suffix = lib ;
- }
-}
-else
-{
- prefix = "lib" ;
-}
-if [ MATCH ^(darwin) : $toolset ]
-{
- dll-suffix = dylib ;
-}
-
-project ext ;
-
-lib a :
- : <file>debug/$(prefix)a.$(dll-suffix) <variant>debug
- :
- : <include>debug
- ;
-
-lib a :
- : <file>release/$(prefix)a.$(dll-suffix) <variant>release
- :
- : <include>release
- ;
-
diff --git a/tools/build/v2/test/prebuilt/ext/jamfile3.jam b/tools/build/v2/test/prebuilt/ext/jamfile3.jam
deleted file mode 100644
index a373a5fb21..0000000000
--- a/tools/build/v2/test/prebuilt/ext/jamfile3.jam
+++ /dev/null
@@ -1,46 +0,0 @@
-
-# This Jamfile is the same as Jamfile2, except that
-# it tries to access prebuilt targets using absolute
-# paths. It used to be broken on Windows.
-
-import os ;
-
-local dll-suffix = so ;
-local prefix = "" ;
-if [ os.name ] in CYGWIN NT
-{
- if [ MATCH ^(gcc) : $toolset ]
- {
- dll-suffix = dll ;
- }
- else
- {
- dll-suffix = lib ;
- }
-}
-else
-{
- prefix = "lib" ;
-}
-if [ MATCH ^(darwin) : $toolset ]
-{
- dll-suffix = dylib ;
-}
-
-project ext ;
-
-# Assumes bjam was invoked from the project root.
-local pwd = [ PWD ] ;
-
-lib a :
- : <file>$(pwd)/ext/debug/$(prefix)a.$(dll-suffix) <variant>debug
- :
- : <include>debug
- ;
-
-lib a :
- : <file>$(pwd)/ext/release/$(prefix)a.$(dll-suffix) <variant>release
- :
- : <include>release
- ;
-
diff --git a/tools/build/v2/test/project-test3/lib2/helper/e.cpp b/tools/build/v2/test/project-test3/lib2/helper/e.cpp
deleted file mode 100644
index ccecbb414f..0000000000
--- a/tools/build/v2/test/project-test3/lib2/helper/e.cpp
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) 2003 Vladimir Prus
-//
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-//
-// http://www.boost.org
-//
diff --git a/tools/build/v2/test/project_dependencies.py b/tools/build/v2/test/project_dependencies.py
deleted file mode 100644
index 96df9e250f..0000000000
--- a/tools/build/v2/test/project_dependencies.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that we can specify a dependency property in project requirements, and
-# that it will not cause every main target in the project to be generated in its
-# own subdirectory.
-
-# The whole test is somewhat moot now.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", "build-project src ;")
-
-t.write("lib/jamfile.jam", "lib lib1 : lib1.cpp ;")
-
-t.write("lib/lib1.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void foo() {}\n
-""")
-
-t.write("src/jamfile.jam", """
-project : requirements <library>../lib//lib1 ;
-exe a : a.cpp ;
-exe b : b.cpp ;
-""")
-
-t.write("src/a.cpp", """
-#ifdef _WIN32
-__declspec(dllimport)
-#endif
-void foo();
-int main() { foo(); }
-""")
-
-t.copy("src/a.cpp", "src/b.cpp")
-
-t.run_build_system()
-
-# Test that there is no "main-target-a" part.
-# t.expect_addition("src/bin/$toolset/debug/a.exe")
-# t.expect_addition("src/bin/$toolset/debug/b.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/project_glob.py b/tools/build/v2/test/project_glob.py
deleted file mode 100644
index 1445eb2a69..0000000000
--- a/tools/build/v2/test/project_glob.py
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2003.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test the 'glob' rule in Jamfile context.
-
-import BoostBuild
-
-# Create a temporary working directory.
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-""")
-
-t.write("d1/a.cpp", """
-int main() {}
-""")
-
-t.write("d1/jamfile.jam", """
-exe a : [ glob *.cpp ] ../d2/d//l ;
-""")
-
-t.write("d2/d/l.cpp", """
-#if defined(_WIN32)
-__declspec(dllexport)
-void force_import_lib_creation() {}
-#endif
-""")
-
-t.write("d2/d/jamfile.jam", """
-lib l : [ glob *.cpp ] ;
-""")
-
-t.write("d3/d/jamfile.jam", """
-exe a : [ glob ../*.cpp ] ;
-""")
-
-t.write("d3/a.cpp", """
-int main() {}
-""")
-
-t.run_build_system(subdir="d1")
-t.expect_addition("d1/bin/$toolset/debug/a.exe")
-
-t.run_build_system(subdir="d3/d")
-t.expect_addition("d3/d/bin/$toolset/debug/a.exe")
-
-t.rm("d2/d/bin")
-
-t.run_build_system(subdir="d2/d")
-t.expect_addition("d2/d/bin/$toolset/debug/l.dll")
-
-
-# Test that when 'source-location' is explicitly specified, glob works relative
-# to the source location.
-t.rm("d1")
-
-t.write("d1/src/a.cpp", """
-int main() {}
-""")
-
-t.write("d1/jamfile.jam", """
-project : source-location src ;
-exe a : [ glob *.cpp ] ../d2/d//l ;
-""")
-
-t.run_build_system(subdir="d1")
-t.expect_addition("d1/bin/$toolset/debug/a.exe")
-
-# Test that wildcards can include directories. Also test exclusion patterns.
-t.rm("d1")
-
-t.write("d1/src/foo/a.cpp", """
-void bar();
-int main() { bar(); }
-""")
-
-t.write("d1/src/bar/b.cpp", """
-void bar() {}
-""")
-
-t.write("d1/src/bar/bad.cpp", """
-very bad non-compilable file
-""")
-
-t.write("d1/jamfile.jam", """
-project : source-location src ;
-exe a : [ glob foo/*.cpp bar/*.cpp : bar/bad* ] ../d2/d//l ;
-""")
-
-t.run_build_system(subdir="d1")
-t.expect_addition("d1/bin/$toolset/debug/a.exe")
-
-
-# Test that 'glob-tree' works.
-t.rm("d1/bin/$toolset/debug/a.exe")
-
-t.write("d1/jamfile.jam", """
-project : source-location src ;
-exe a : [ glob-tree *.cpp : bad* ] ../d2/d//l ;
-""")
-
-t.run_build_system(subdir="d1")
-t.expect_addition("d1/bin/$toolset/debug/a.exe")
-
-
-# Test that directory names in patterns for 'glob-tree' are rejected.
-t.write("d1/jamfile.jam", """
-project : source-location src ;
-exe a : [ glob-tree foo/*.cpp bar/*.cpp : bad* ] ../d2/d//l ;
-""")
-
-t.run_build_system(subdir="d1", status=1)
-t.expect_output_line("error: The patterns * may not include directory")
-
-
-t.rm("d1/src/bar/bad.cpp")
-
-# Test that 'glob' works with absolute names.
-t.rm("d1/bin")
-
-# Note that to get the current dir we use bjam's PWD, not Python's os.getcwd(),
-# because the former always returns a long path while the latter might return a
-# short path, which would confuse path.glob.
-t.write("d1/jamfile.jam", """
-project : source-location src ;
-local pwd = [ PWD ] ; # Always absolute
-exe a : [ glob $(pwd)/src/foo/*.cpp $(pwd)/src/bar/*.cpp ] ../d2/d//l ;
-""")
-
-t.run_build_system(subdir="d1")
-t.expect_addition("d1/bin/$toolset/debug/a.exe")
-
-
-# Regression test: glob excludes used to be broken when building from a
-# subdirectory.
-t.rm(".")
-
-t.write("jamroot.jam", """
-build-project p ;
-""")
-
-t.write("p/p.c", """
-int main() {}
-""")
-
-t.write("p/p_x.c", """
-int main() {}
-""")
-
-t.write("p/jamfile.jam", """
-exe p : [ glob *.c : p_x.c ] ;
-""")
-
-t.run_build_system(subdir="p")
-t.expect_addition("p/bin/$toolset/debug/p.exe")
-
-t.cleanup()
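Aside: the exclusion form of 'glob' exercised several times above can be reduced to a very small script. The following is a minimal sketch reusing the same BoostBuild.Tester harness as these tests (file names are invented for illustration); patterns before ':' select sources, patterns after ':' exclude matches.

    import BoostBuild

    t = BoostBuild.Tester()

    t.write("jamroot.jam", "")
    # Patterns before ':' select sources; patterns after ':' exclude matches.
    t.write("jamfile.jam", "exe app : [ glob *.cpp : *_broken.cpp ] ;")
    t.write("main.cpp", "int main() {}\n")
    t.write("extra_broken.cpp", "deliberately not compilable\n")

    t.run_build_system()
    t.expect_addition("bin/$toolset/debug/app.exe")
    t.cleanup()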
diff --git a/tools/build/v2/test/project_root_constants.py b/tools/build/v2/test/project_root_constants.py
deleted file mode 100644
index 400c78b4ab..0000000000
--- a/tools/build/v2/test/project_root_constants.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import string
-
-# Create a temporary working directory.
-t = BoostBuild.Tester()
-
-# Create the needed files.
-t.write("jamroot.jam", """
-constant FOO : foobar gee ;
-ECHO $(FOO) ;
-""")
-
-t.run_build_system()
-t.fail_test(string.find(t.stdout(), "foobar gee") == -1)
-
-# Regression test: when absolute paths were passed to path-constant rule,
-# Boost.Build failed to recognize path as absolute and prepended the current
-# dir.
-t.write("jamroot.jam", """
-import path ;
-local here = [ path.native [ path.pwd ] ] ;
-path-constant HERE : $(here) ;
-if $(HERE) != $(here)
-{
- ECHO "PWD =" $(here) ;
- ECHO "path constant =" $(HERE) ;
- EXIT ;
-}
-""")
-t.write("jamfile.jam", "")
-
-t.run_build_system()
-
-t.write("jamfile.jam", """
-# This tests that rule 'hello' will be imported into children unlocalized, and
-# will still access variables in this Jamfile.
-x = 10 ;
-constant FOO : foo ;
-rule hello ( ) { ECHO "Hello $(x)" ; }
-""")
-
-t.write("d/jamfile.jam", """
-ECHO "d: $(FOO)" ;
-constant BAR : bar ;
-""")
-
-t.write("d/d2/jamfile.jam", """
-ECHO "d2: $(FOO)" ;
-ECHO "d2: $(BAR)" ;
-hello ;
-""")
-
-t.run_build_system(subdir="d/d2")
-t.fail_test(t.stdout().find("""d: foo
-d2: foo
-d2: bar
-Hello 10
-""") == -1)
-
-t.cleanup()
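Aside: the 'constant' and 'path-constant' rules used above are the usual way to share values with child Jamfiles. A minimal sketch in the same Tester style (names are invented; 'path-constant' resolves a relative path against the project root, so TOP comes out absolute):

    import BoostBuild

    t = BoostBuild.Tester()

    # Values declared in jamroot.jam are visible in every child Jamfile.
    t.write("jamroot.jam", """
    constant FLAVOUR : vanilla ;
    path-constant TOP : . ;
    build-project child ;
    """)
    t.write("child/jamfile.jam", 'ECHO "flavour:" $(FLAVOUR) ;')

    t.run_build_system(subdir="child")
    t.expect_output_line("flavour: vanilla")
    t.cleanup()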
diff --git a/tools/build/v2/test/project_root_rule.py b/tools/build/v2/test/project_root_rule.py
deleted file mode 100644
index 29fdcff4ca..0000000000
--- a/tools/build/v2/test/project_root_rule.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2005.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests that we can declare a rule in Jamroot that can be called in a child
-# Jamfile to declare a target. Specifically tests the use of 'glob' in that
-# rule.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-
-t.write("jamroot.jam", """
-project : requirements <link>static ;
-rule my-lib ( name ) { lib $(name) : [ glob *.cpp ] ; }
-""")
-
-t.write("sub/a.cpp", """
-""")
-
-t.write("sub/jamfile.jam", """
-my-lib foo ;
-""")
-
-
-t.run_build_system(subdir="sub")
-
-t.expect_addition("sub/bin/$toolset/debug/link-static/foo.lib")
-
-t.cleanup()
diff --git a/tools/build/v2/test/project_test3.py b/tools/build/v2/test/project_test3.py
deleted file mode 100644
index 8a4421604a..0000000000
--- a/tools/build/v2/test/project_test3.py
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002, 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-
-t = BoostBuild.Tester(translate_suffixes=0)
-
-# First check some startup.
-t.set_tree("project-test3")
-os.remove("jamroot.jam")
-t.run_build_system(status=1, stdout=
-"""error: Could not find parent for project at '.'
-error: Did not find Jamfile.jam or Jamroot.jam in any parent directory.
-""")
-
-t.set_tree("project-test3")
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/a.obj")
-t.expect_content("bin/$toolset/debug/a.obj",
-"""$toolset/debug
-a.cpp
-""")
-
-t.expect_addition("bin/$toolset/debug/a.exe")
-t.expect_content("bin/$toolset/debug/a.exe",
-"$toolset/debug\n" +
-"bin/$toolset/debug/a.obj lib/bin/$toolset/debug/b.obj " +
-"lib2/bin/$toolset/debug/c.obj lib2/bin/$toolset/debug/d.obj " +
-"lib2/helper/bin/$toolset/debug/e.obj " +
-"lib3/bin/$toolset/debug/f.obj\n"
-)
-
-t.expect_addition("lib/bin/$toolset/debug/b.obj")
-t.expect_content("lib/bin/$toolset/debug/b.obj",
-"""$toolset/debug
-lib/b.cpp
-""")
-
-t.expect_addition("lib/bin/$toolset/debug/m.exe")
-t.expect_content("lib/bin/$toolset/debug/m.exe",
-"""$toolset/debug
-lib/bin/$toolset/debug/b.obj lib2/bin/$toolset/debug/c.obj
-""")
-
-t.expect_addition("lib2/bin/$toolset/debug/c.obj")
-t.expect_content("lib2/bin/$toolset/debug/c.obj",
-"""$toolset/debug
-lib2/c.cpp
-""")
-
-t.expect_addition("lib2/bin/$toolset/debug/d.obj")
-t.expect_content("lib2/bin/$toolset/debug/d.obj",
-"""$toolset/debug
-lib2/d.cpp
-""")
-
-t.expect_addition("lib2/bin/$toolset/debug/l.exe")
-t.expect_content("lib2/bin/$toolset/debug/l.exe",
-"""$toolset/debug
-lib2/bin/$toolset/debug/c.obj bin/$toolset/debug/a.obj
-""")
-
-t.expect_addition("lib2/helper/bin/$toolset/debug/e.obj")
-t.expect_content("lib2/helper/bin/$toolset/debug/e.obj",
-"""$toolset/debug
-lib2/helper/e.cpp
-""")
-
-t.expect_addition("lib3/bin/$toolset/debug/f.obj")
-t.expect_content("lib3/bin/$toolset/debug/f.obj",
-"""$toolset/debug
-lib3/f.cpp lib2/helper/bin/$toolset/debug/e.obj
-""")
-
-t.touch("a.cpp")
-t.run_build_system()
-t.expect_touch(["bin/$toolset/debug/a.obj",
- "bin/$toolset/debug/a.exe",
- "lib2/bin/$toolset/debug/l.exe"])
-
-t.run_build_system(extra_args="release optimization=off,speed")
-t.expect_addition(["bin/$toolset/release/a.exe",
- "bin/$toolset/release/a.obj",
- "bin/$toolset/release/optimization-off/a.exe",
- "bin/$toolset/release/optimization-off/a.obj"])
-
-t.run_build_system(extra_args='--clean-all')
-t.expect_removal(["bin/$toolset/debug/a.obj",
- "bin/$toolset/debug/a.exe",
- "lib/bin/$toolset/debug/b.obj",
- "lib/bin/$toolset/debug/m.exe",
- "lib2/bin/$toolset/debug/c.obj",
- "lib2/bin/$toolset/debug/d.obj",
- "lib2/bin/$toolset/debug/l.exe",
- "lib3/bin/$toolset/debug/f.obj"])
-
-# Now test target ids in command line.
-t.set_tree("project-test3")
-t.run_build_system("lib//b.obj")
-t.expect_addition("lib/bin/$toolset/debug/b.obj")
-t.expect_nothing_more()
-
-t.run_build_system("--clean lib//b.obj")
-t.expect_removal("lib/bin/$toolset/debug/b.obj")
-t.expect_nothing_more()
-
-t.run_build_system("lib//b.obj")
-t.expect_addition("lib/bin/$toolset/debug/b.obj")
-t.expect_nothing_more()
-
-t.run_build_system("release lib2/helper//e.obj /lib3//f.obj")
-t.expect_addition("lib2/helper/bin/$toolset/release/e.obj")
-t.expect_addition("lib3/bin/$toolset/release/f.obj")
-t.expect_nothing_more()
-
-# Test project ids in command line work as well.
-t.set_tree("project-test3")
-t.run_build_system("/lib2")
-t.expect_addition("lib2/bin/$toolset/debug/" * BoostBuild.List("c.obj d.obj l.exe"))
-t.expect_addition("bin/$toolset/debug/a.obj")
-t.expect_nothing_more()
-
-t.run_build_system("lib")
-t.expect_addition("lib/bin/$toolset/debug/" * BoostBuild.List("b.obj m.exe"))
-t.expect_nothing_more()
-
-t.cleanup()
diff --git a/tools/build/v2/test/property_expansion.py b/tools/build/v2/test/property_expansion.py
deleted file mode 100644
index 280f862085..0000000000
--- a/tools/build/v2/test/property_expansion.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that a free property (<define>) used inside a custom variant is expanded.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-project ;
-variant debug-AA : debug : <define>AA ;
-alias all : hello ;
-exe hello : hello.cpp ;
-explicit hello ;
-""")
-
-t.write("hello.cpp", """
-#ifdef AA
-int main() {}
-#endif
-""")
-
-t.run_build_system("debug-AA")
-
-t.cleanup()
diff --git a/tools/build/v2/test/qt4/jamroot.jam b/tools/build/v2/test/qt4/jamroot.jam
deleted file mode 100644
index eeddc586f0..0000000000
--- a/tools/build/v2/test/qt4/jamroot.jam
+++ /dev/null
@@ -1,79 +0,0 @@
-# (c) Copyright Juergen Hunold 2008
-# Use, modification, and distribution are subject to the
-# Boost Software License, Version 1.0. (See accompanying file
-# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import qt4 ;
-import testing ;
-import cast ;
-
-
-
-if [ qt4.initialized ]
-{
- use-project /boost : ../../../../.. ;
-
- project qttest
- : requirements
- <library>/boost/test//boost_unit_test_framework
- ;
-
- alias qt-tests :
- # Check for explicit libraries, <use>/qt should not link any lib
- [ link-fail qtcorefail.cpp : <use>/qt ]
-
- [ run qtcore.cpp /qt//QtCore ]
- [ run qtsql.cpp /qt//QtSql ]
- [ run qtxml.cpp /qt//QtXml ]
- [ run qtnetwork.cpp /qt//QtNetwork ]
- [ run qtscript.cpp /qt//QtScript ]
- [ run qtscripttools.cpp /qt//QtScriptTools ]
- [ run qtxmlpatterns.cpp /qt//QtXmlPatterns ]
-
- # ToDo: runnable example code
- [ link qtsvg.cpp /qt//QtSvg ]
- [ link qtgui.cpp /qt//QtGui ]
-
- # Multimedia toolkits.
- [ link qtwebkit.cpp /qt//QtWebKit ]
- [ link phonon.cpp /qt//phonon ]
- [ link qtmultimedia.cpp /qt//QtMultimedia ]
-
- # QML
- [ link qtdeclarative.cpp /qt//QtDeclarative ]
-
- # Help systems.
- [ link qthelp.cpp /qt//QtHelp ]
- [ link qtassistant.cpp /qt//QtAssistantClient : <conditional>@check_for_assistant ]
-
- # Check working and disabled Qt3Support
- [ link qt3support.cpp /qt//Qt3Support : <qt3support>on ]
- [ compile-fail qt3support.cpp /qt//Qt3Support : <qt3support>off ]
-
- # Testing using QtTest. Simple sample
- # ToDo: better support for "automoc" aka '#include "qttest.moc"'
- [ run qttest.cpp [ cast _ moccable-cpp : qttest.cpp ] /qt//QtTest : : : <define>TEST_MOCK ]
-
- # Test moc rule
- [ run mock.cpp mock.h /qt//QtCore : : : <define>TEST_MOCK ]
-
- : # requirements
- : # default-build
- : # usage-requirements
- ;
-}
-
-# QtAssistant is removed from Qt >= 4.7
-rule check_for_assistant ( properties * )
-{
- # Extract the Qt version number from the build properties
- local version = [ MATCH "<qt>([0-9.]+).*"
- : $(properties) ] ;
-
- if $(version) > "4.6.99"
- {
- result += <build>no ;
- }
-}
-
-
diff --git a/tools/build/v2/test/railsys/program/jamfile.jam b/tools/build/v2/test/railsys/program/jamfile.jam
deleted file mode 100644
index 9d66f2dbb5..0000000000
--- a/tools/build/v2/test/railsys/program/jamfile.jam
+++ /dev/null
@@ -1,45 +0,0 @@
-# ================================================================
-#
-# Railsys
-# --------------
-#
-# Copyright (c) 2002 Institute of Transport,
-# Railway Construction and Operation,
-# University of Hanover, Germany
-# Copyright (c) 2006 Jürgen Hunold
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-#
-# 02/21/02! Jürgen Hunold
-#
-# $Id: jamfile.jam 55188 2009-07-26 20:11:03Z danieljames $
-#
-# ================================================================
-
-local BOOST_ROOT = [ modules.peek : BOOST_ROOT ] ;
-
-use-project /libx : ../libx/src ;
-
-project program
- : requirements
- <include>$(BOOST_ROOT)
- <threading>multi
- <library>/qt3//qt
- <hardcode-dll-paths>true
- <stdlib>stlport
- <use>/libx
- <library>/libx//libx
-
- : usage-requirements
- <include>$(BOOST_ROOT)
- :
- default-build release
- <threading>multi
- <library>/qt3//qt
- <hardcode-dll-paths>true
- ;
-
-build-project main ;
-
diff --git a/tools/build/v2/test/rebuilds.py b/tools/build/v2/test/rebuilds.py
deleted file mode 100644
index da64eada3a..0000000000
--- a/tools/build/v2/test/rebuilds.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2005 Dave Abrahams
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.write('file.jam', '''
-rule make
-{
- DEPENDS $(<) : $(>) ;
- DEPENDS all : $(<) ;
-}
-actions make
-{
- echo "******" making $(<) from $(>) "******"
- echo made from $(>) > $(<)
-}
-
-make aux1 : bar ;
-make foo : bar ;
-REBUILDS foo : bar ;
-make bar : baz ;
-make aux2 : bar ;
-''')
-
-t.write('baz', 'nothing\n')
-
-t.run_build_system('-ffile.jam bar')
-t.expect_addition('bar')
-t.expect_nothing_more()
-
-t.wait_for_time_change_since_last_build()
-t.run_build_system('-ffile.jam foo')
-t.expect_touch('bar')
-t.expect_addition('foo')
-t.expect_nothing_more()
-
-t.run_build_system('-ffile.jam')
-t.expect_addition(['aux1', 'aux2'])
-t.expect_nothing_more()
-
-t.touch('bar')
-t.run_build_system('-ffile.jam')
-t.expect_touch(['foo', 'aux1', 'aux2'])
-t.expect_nothing_more()
-
-t.cleanup()
diff --git a/tools/build/v2/test/regression.py b/tools/build/v2/test/regression.py
deleted file mode 100644
index c65082810e..0000000000
--- a/tools/build/v2/test/regression.py
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2003.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test for the regression testing framework.
-
-import BoostBuild
-
-# Create a temporary working directory.
-t = BoostBuild.Tester()
-
-t.write("c.cpp", "\n")
-
-t.write("r.cpp", """
-void helper();
-
-#include <iostream>
-int main( int ac, char * av[] )
-{
- helper();
- for ( int i = 1; i < ac; ++i )
- std::cout << av[ i ] << '\\n';
-}
-""")
-
-t.write("c-f.cpp", """
-int
-""")
-
-t.write("r-f.cpp", """
-int main() { return 1; }
-""")
-
-
-t.write("jamfile.jam", """
-import testing ;
-compile c.cpp ;
-compile-fail c-f.cpp ;
-run r.cpp libs//helper : foo bar ;
-run-fail r-f.cpp ;
-""")
-
-t.write("libs/jamfile.jam", """
-lib helper : helper.cpp ;
-""")
-
-t.write("libs/helper.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-helper() {}
-""")
-
-t.write("jamroot.jam", "")
-
-# First test that when outcomes are expected, all .test files are created.
-t.run_build_system("hardcode-dll-paths=false", stderr=None, status=None)
-t.expect_addition("bin/c.test/$toolset/debug/c.test")
-t.expect_addition("bin/c-f.test/$toolset/debug/c-f.test")
-t.expect_addition("bin/r.test/$toolset/debug/r.test")
-t.expect_addition("bin/r-f.test/$toolset/debug/r-f.test")
-
-# Make sure args are handled.
-t.expect_content("bin/r.test/$toolset/debug/r.output",
- "foo\nbar\n*\nEXIT STATUS: 0*\n", True)
-
-# Test that input file is handled as well.
-t.write("r.cpp", """
-#include <iostream>
-#include <fstream>
-int main( int ac, char * av[] )
-{
- for ( int i = 1; i < ac; ++i )
- {
- std::ifstream ifs( av[ i ] );
- std::cout << ifs.rdbuf();
- }
-}
-""")
-
-t.write("dir/input.txt", "test input")
-
-t.write("jamfile.jam", """
-import testing ;
-compile c.cpp ;
-obj c-obj : c.cpp ;
-compile-fail c-f.cpp ;
-run r.cpp : : dir/input.txt ;
-run-fail r-f.cpp ;
-time execution : r ;
-time compilation : c-obj ;
-""")
-
-t.run_build_system('hardcode-dll-paths=false')
-t.expect_content("bin/r.test/$toolset/debug/r.output",
- "test input\nEXIT STATUS: 0\n")
-
-t.expect_addition('bin/$toolset/debug/execution.time')
-t.expect_addition('bin/$toolset/debug/compilation.time')
-
-# Make sure test failures are detected. Reverse expectation and see if .test
-# files are created or not.
-t.write("jamfile.jam", """
-import testing ;
-
-compile-fail c.cpp ;
-compile c-f.cpp ;
-run-fail r.cpp : : dir/input.txt ;
-run r-f.cpp ;
-""")
-
-t.touch(BoostBuild.List("c.cpp c-f.cpp r.cpp r-f.cpp"))
-
-t.run_build_system("hardcode-dll-paths=false", stderr=None, status=1)
-t.expect_removal("bin/c.test/$toolset/debug/c.test")
-t.expect_removal("bin/c-f.test/$toolset/debug/c-f.test")
-t.expect_removal("bin/r.test/$toolset/debug/r.test")
-t.expect_removal("bin/r-f.test/$toolset/debug/r-f.test")
-
-t.cleanup()
diff --git a/tools/build/v2/test/relative_sources.py b/tools/build/v2/test/relative_sources.py
deleted file mode 100644
index 021d5975aa..0000000000
--- a/tools/build/v2/test/relative_sources.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that we can specify sources using relative names.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# Test that relative path to source, 'src', is preserved.
-t.write("jamroot.jam", "exe a : src/a.cpp ;")
-t.write("src/a.cpp", "int main() {}\n")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/src/a.obj")
-
-# Test that the relative path to source is preserved
-# when using 'glob'.
-t.rm("bin")
-t.write("jamroot.jam", "exe a : [ glob src/*.cpp ] ;")
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/src/a.obj")
-
-
-# Test that relative path with ".." is *not* added to
-# target path.
-t.rm(".")
-t.write("jamroot.jam", "")
-t.write("a.cpp", "int main() { return 0; }\n")
-t.write("build/Jamfile", "exe a : ../a.cpp ; ")
-t.run_build_system(subdir="build")
-t.expect_addition("build/bin/$toolset/debug/a.obj")
-
-t.cleanup()
diff --git a/tools/build/v2/test/remove_requirement.py b/tools/build/v2/test/remove_requirement.py
deleted file mode 100644
index b060a2ab79..0000000000
--- a/tools/build/v2/test/remove_requirement.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-
-t.write("jamroot.jam", """
-project : requirements <threading>multi <variant>debug:<link>static ;
-
-build-project sub ;
-build-project sub2 ;
-build-project sub3 ;
-build-project sub4 ;
-""")
-
-t.write("sub/jamfile.jam", """
-exe hello : hello.cpp : -<threading>multi ;
-""")
-
-t.write("sub/hello.cpp", """
-int main() {}
-""")
-
-t.write("sub2/jamfile.jam", """
-project : requirements -<threading>multi ;
-exe hello : hello.cpp ;
-""")
-
-t.write("sub2/hello.cpp", """
-int main() {}
-""")
-
-t.write("sub3/hello.cpp", """
-int main() {}
-""")
-
-t.write("sub3/jamfile.jam", """
-exe hello : hello.cpp : -<variant>debug:<link>static ;
-""")
-
-t.write("sub4/hello.cpp", """
-int main() {}
-""")
-
-t.write("sub4/jamfile.jam", """
-project : requirements -<variant>debug:<link>static ;
-exe hello : hello.cpp ;
-""")
-
-t.run_build_system()
-
-t.expect_addition("sub/bin/$toolset/debug/link-static/hello.exe")
-t.expect_addition("sub2/bin/$toolset/debug/link-static/hello.exe")
-t.expect_addition("sub3/bin/$toolset/debug/threading-multi/hello.exe")
-t.expect_addition("sub4/bin/$toolset/debug/threading-multi/hello.exe")
-
-t.rm(".")
-
-# Now test that path requirements can be removed as well.
-t.write("jamroot.jam", """
-build-project sub ;
-""")
-
-t.write("sub/jamfile.jam", """
-project : requirements <include>broken ;
-exe hello : hello.cpp : -<include>broken ;
-""")
-
-t.write("sub/hello.cpp", """
-#include "math.h"
-int main() {}
-""")
-
-t.write("sub/broken/math.h", """
-Broken
-""")
-
-
-t.run_build_system()
-
-t.expect_addition("sub/bin/$toolset/debug/hello.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/resolution.py b/tools/build/v2/test/resolution.py
deleted file mode 100644
index 31c122e197..0000000000
--- a/tools/build/v2/test/resolution.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2006.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests for the target id resolution process.
-
-import BoostBuild
-
-# Create a temporary working directory.
-t = BoostBuild.Tester()
-
-# Create the needed files
-t.write("jamroot.jam", """
-exe hello : hello.cpp ;
-# This should use the 'hello' target, even if there is a 'hello' file in the
-# current dir.
-install s : hello : <location>. ;
-""")
-
-t.write("hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system()
-
-t.expect_addition("bin/$toolset/debug/hello.obj")
-
-t.touch("hello.cpp")
-t.run_build_system("s")
-# If 'hello' in the 's' target resolved to the file in the current dir, nothing
-# would be rebuilt.
-t.expect_touch("bin/$toolset/debug/hello.obj")
-
-t.cleanup()
diff --git a/tools/build/v2/test/searched_lib.py b/tools/build/v2/test/searched_lib.py
deleted file mode 100644
index efbca36ca2..0000000000
--- a/tools/build/v2/test/searched_lib.py
+++ /dev/null
@@ -1,187 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test usage of searched libs: ones that are found via the -l
-# switch passed to the linker/compiler.
-
-import BoostBuild
-import os
-import string
-
-t = BoostBuild.Tester()
-
-
-# To start with, we have to prepare a library to link with.
-t.write("lib/jamroot.jam", "")
-t.write("lib/jamfile.jam", "lib test_lib : test_lib.cpp ;")
-t.write("lib/test_lib.cpp", """
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-void foo() {}
-""");
-
-t.run_build_system(subdir="lib")
-t.expect_addition("lib/bin/$toolset/debug/test_lib.dll")
-
-
-# Automatic adjustment of suffixes does not work here, since we need to
-# change dll to lib.
-if ( ( os.name == "nt" ) or os.uname()[0].lower().startswith("cygwin") ) and \
- ( BoostBuild.get_toolset() != "gcc" ):
- t.copy("lib/bin/$toolset/debug/test_lib.implib", "lib/test_lib.implib")
- t.copy("lib/bin/$toolset/debug/test_lib.dll", "lib/test_lib.dll")
-else:
- t.copy("lib/bin/$toolset/debug/test_lib.dll", "lib/test_lib.dll")
-
-
-# Test that the simplest usage of searched library works.
-t.write("jamroot.jam", "")
-
-t.write("jamfile.jam", """
-import path ;
-import project ;
-
-exe main : main.cpp helper ;
-lib helper : helper.cpp test_lib ;
-lib test_lib : : <name>test_lib <search>lib ;
-""")
-
-t.write("main.cpp", """
-void helper();
-int main() { helper(); }
-""")
-
-t.write("helper.cpp", """
-void foo();
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-helper() { foo(); }
-""")
-
-t.run_build_system("-d2")
-t.expect_addition("bin/$toolset/debug/main.exe")
-t.rm("bin/$toolset/debug/main.exe")
-
-
-# Test that 'unit-test' will correctly add runtime paths to searched libraries.
-t.write("jamfile.jam", """
-
-import path ;
-import project ;
-import testing ;
-
-project : requirements <hardcode-dll-paths>false ;
-
-unit-test main : main.cpp helper ;
-lib helper : helper.cpp test_lib ;
-lib test_lib : : <name>test_lib <search>lib ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/main.passed")
-t.rm("bin/$toolset/debug/main.exe")
-
-
-# Now try using a searched lib from a static lib. Request the shared version of
-# the searched lib, since we do not have a static one handy.
-t.write("jamfile.jam", """
-exe main : main.cpp helper ;
-lib helper : helper.cpp test_lib/<link>shared : <link>static ;
-lib test_lib : : <name>test_lib <search>lib ;
-""")
-
-t.run_build_system(stderr=None)
-t.expect_addition("bin/$toolset/debug/main.exe")
-t.expect_addition("bin/$toolset/debug/link-static/helper.lib")
-t.rm("bin/$toolset/debug/main.exe")
-
-# A regression test: a <library> property referring to a searched lib was being
-# mishandled. As a result, we were putting the target name on the command line!
-# Note that
-# g++ ...... <.>z
-# works nicely in some cases, sending the compiler's output to file 'z'. This
-# problem shows up when searched libs appear in usage requirements.
-t.write("jamfile.jam", "exe main : main.cpp d/d2//a ;")
-t.write("main.cpp", """
-void foo();
-int main() { foo(); }
-""")
-
-t.write("d/d2/jamfile.jam", """
-lib test_lib : : <name>test_lib <search>../../lib ;
-lib a : a.cpp : : : <library>test_lib ;
-""")
-
-t.write("d/d2/a.cpp", """
-#ifdef _WIN32
-__declspec(dllexport) int force_library_creation_for_a;
-#endif
-""")
-
-t.run_build_system()
-
-
-# A regression test. Searched targets were not associated with any properties.
-# For that reason, if the same searched lib was generated with two different
-# properties, we got an error saying they are actualized to the same Jam target
-# name.
-t.write("jamroot.jam", "")
-
-t.write("a.cpp", "")
-
-# The 'l' library will be built in two variants: 'debug' (directly requested)
-# and 'release' (requested from 'a').
-t.write("jamfile.jam", """
-exe a : a.cpp l/<variant>release ;
-lib l : : <name>l_d <variant>debug ;
-lib l : : <name>l_r <variant>release ;
-""")
-
-t.run_build_system("-n")
-
-
-# A regression test. Two virtual targets with the same properties were created
-# for the 'l' target, which caused an error to be reported when actualizing
-# targets. The final error is correct, but we should not create two duplicate
-# targets. Thanks to Andre Hentz for finding this bug.
-t.write("jamroot.jam", "")
-
-t.write("a.cpp", "")
-
-t.write("jamfile.jam", """
-project a : requirements <runtime-link>static ;
-static-lib a : a.cpp l ;
-lib l : : <name>l_f ;
-""")
-
-t.run_build_system("-n")
-
-
-# Make sure plain "lib foobar ; " works.
-t.write("jamfile.jam", """
-exe a : a.cpp foobar ;
-lib foobar ;
-""")
-
-t.run_build_system("-n -d2")
-t.fail_test(string.find(t.stdout(), "foobar") == -1)
-
-
-# Make sure plain "lib foo bar ; " works.
-t.write("jamfile.jam", """
-exe a : a.cpp foo bar ;
-lib foo bar ;
-""")
-
-t.run_build_system("-n -d2")
-t.fail_test(string.find(t.stdout(), "foo") == -1)
-t.fail_test(string.find(t.stdout(), "bar") == -1)
-
-t.cleanup()
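Aside: outside the test harness, this same 'searched library' mechanism is what you would use to link against a library already installed on the system. A minimal sketch (assuming a Unix-like toolset where the math library 'm' exists; on other platforms the name would differ):

    import BoostBuild

    t = BoostBuild.Tester()

    t.write("jamroot.jam", "")
    # 'm' has no sources: <name>m tells the toolset to pass the library to the
    # linker (e.g. -lm); an optional <search> feature would add a library path.
    t.write("jamfile.jam", """
    lib m : : <name>m ;
    exe hello : hello.cpp m ;
    """)
    t.write("hello.cpp", """
    #include <cmath>
    int main() { return static_cast<int>(std::sin(0.0)); }
    """)

    t.run_build_system()
    t.expect_addition("bin/$toolset/debug/hello.exe")
    t.cleanup()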
diff --git a/tools/build/v2/test/skipping.py b/tools/build/v2/test/skipping.py
deleted file mode 100644
index b3575eafd3..0000000000
--- a/tools/build/v2/test/skipping.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that V2 does not fail gracelessly when a target is skipped.
-
-import BoostBuild
-
-# Create a temporary working directory.
-t = BoostBuild.Tester()
-
-t.write("a.cpp", """
-int main() {}
-""")
-
-t.write("b.cpp", """
-int main() {}
-""")
-
-t.write("c.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-import feature ;
-feature.feature foo : 1 2 : link-incompatible ;
-exe a : a.cpp : <foo>1 ;
-exe b : b.cpp : <foo>2 ;
-exe c : c.cpp ;
-""")
-
-t.run_build_system("foo=1")
-
-t.cleanup()
diff --git a/tools/build/v2/test/sort_rule.py b/tools/build/v2/test/sort_rule.py
deleted file mode 100755
index f4a4acda13..0000000000
--- a/tools/build/v2/test/sort_rule.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Jurko Gospodnetic 2008.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests for the Boost Jam builtin SORT rule.
-
-import BoostBuild
-
-
-################################################################################
-#
-# testSORTCorrectness()
-# ---------------------
-#
-################################################################################
-
-def testSORTCorrectness():
- """Testing that Boost Jam's SORT builtin rule actually sorts correctly.
- """
- t = BoostBuild.Tester("-f test.jam -d1", pass_toolset=False,
- use_test_config=False)
-
- t.write("test.jam", """
-NOCARE all ;
-source-data = 1 8 9 2 7 3 4 7 1 27 27 9 98 98 1 1 4 5 6 2 3 4 8 1 -2 -2 0 0 0 ;
-target-data = -2 -2 0 0 0 1 1 1 1 1 2 2 27 27 3 3 4 4 4 5 6 7 7 8 8 9 9 98 98 ;
-ECHO "starting up" ;
-sorted-data = [ SORT $(source-data) ] ;
-ECHO "done" ;
-if $(sorted-data) != $(target-data)
-{
- ECHO "Source :" $(source-data) ;
- ECHO "Expected :" $(target-data) ;
- ECHO "SORT returned:" $(sorted-data) ;
- EXIT "SORT error" : -2 ;
-}
-""")
-
- t.run_build_system()
- t.expect_output_line("starting up")
- t.expect_output_line("done")
- t.expect_output_line("SORT error", False)
-
- t.cleanup()
-
-
-################################################################################
-#
-# testSORTDuration()
-# ------------------
-#
-################################################################################
-
-def testSORTDuration():
- """Regression test making sure Boost Jam's SORT builtin rule does not get
- quadratic behaviour again in this use case.
- """
- t = BoostBuild.Tester("-f test.jam -d1", pass_toolset=False,
- use_test_config=False)
-
- f = open(t.workpath("test.jam"), "w")
- print >> f, "data = "
- for i in range(0, 20000):
- if i % 2 != 0:
- print >> f, '"aaa"'
- else:
- print >> f, '"bbb"'
- print >> f, """;
-
-ECHO "starting up" ;
-sorted = [ SORT $(data) ] ;
-ECHO "done" ;
-NOCARE all ;
-"""
- f.close()
-
- t.run_build_system(expected_duration=1)
- t.expect_output_line("starting up")
- t.expect_output_line("done")
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-testSORTCorrectness()
-testSORTDuration()
diff --git a/tools/build/v2/test/source_locations.py b/tools/build/v2/test/source_locations.py
deleted file mode 100644
index 7133ab044e..0000000000
--- a/tools/build/v2/test/source_locations.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Craig Rodrigues 2005.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that projects with multiple source-location directories are handled OK.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", """
-path-constant SRC1 : "./src1" ;
-path-constant SRC2 : "./src2" ;
-path-constant SRC3 : "./src3" ;
-path-constant BUILD : "build" ;
-
-project : requirements <include>$(SRC1)/include <threading>multi
- : build-dir $(BUILD) ;
-
-build-project project1 ;
-""")
-
-t.write("project1/jamfile.jam", """
-project project1 : source-location $(SRC1) $(SRC2) $(SRC3) ;
-SRCS = s1.cpp s2.cpp testfoo.cpp ;
-exe test : $(SRCS) ;
-""")
-
-t.write("src1/s1.cpp", "int main() {}\n")
-t.write("src2/s2.cpp", "void hello() {}\n")
-t.write("src3/testfoo.cpp", "void testfoo() {}\n")
-
-# This file should not be picked up, because "src2" is before "src3" in the list
-# of source directories.
-t.write("src3/s2.cpp", "void hello() {}\n")
-
-t.run_build_system()
-
-t.cleanup()
diff --git a/tools/build/v2/test/stage.py b/tools/build/v2/test/stage.py
deleted file mode 100644
index 9862138b0d..0000000000
--- a/tools/build/v2/test/stage.py
+++ /dev/null
@@ -1,258 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test staging.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", "import gcc ;")
-
-t.write("jamfile.jam", """
-lib a : a.cpp ;
-stage dist : a a.h auxilliary/1 ;
-""")
-
-t.write(
- "a.cpp",
-"""
-int
-#ifdef _WIN32
-__declspec(dllexport)
-#endif
-must_export_something;
-""")
-
-t.write("a.h", "")
-t.write("auxilliary/1", "")
-
-t.run_build_system()
-t.expect_addition(["dist/a.dll", "dist/a.h", "dist/1"])
-
-
-# Regression test: the following was causing the "duplicate target name" error.
-t.write("jamfile.jam", """
-project : requirements <hardcode-dll-paths>true ;
-lib a : a.cpp ;
-stage dist : a a.h auxilliary/1 ;
-alias dist-alias : dist ;
-""")
-
-t.run_build_system()
-
-
-# Test the <location> property.
-t.write("jamfile.jam", """
-lib a : a.cpp ;
-stage dist : a : <variant>debug:<location>ds <variant>release:<location>rs ;
-""")
-
-t.run_build_system()
-t.expect_addition("ds/a.dll")
-
-t.run_build_system("release")
-t.expect_addition("rs/a.dll")
-
-
-# Test the <location> property in subprojects. Thanks to Kirill Lapshin for the
-# bug report.
-
-t.write("jamroot.jam", """
-path-constant DIST : dist ;
-""")
-
-t.write("jamfile.jam", "build-project d ;")
-
-t.write("d/jamfile.jam", """
-exe a : a.cpp ;
-stage dist : a : <location>$(DIST) ;
-""")
-
-t.write("d/a.cpp", "int main() {}\n")
-
-t.run_build_system()
-t.expect_addition("dist/a.exe")
-
-t.rm("dist")
-
-# Work around a BIG BUG: the response file is not deleted, even if the
-# application *is* deleted. We would try to use the same response file when
-# building from the subdir, with very bad results.
-t.rm("d/bin")
-t.run_build_system(subdir="d")
-t.expect_addition("dist/a.exe")
-
-
-# Check that 'stage' does not incorrectly reset target suffixes.
-t.write("a.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-import type ;
-type.register MYEXE : : EXE ;
-type.set-generated-target-suffix MYEXE : <optimization>off : myexe ;
-""")
-
-# Since <optimization>off is in the properties when 'a' is built and staged,
-# its suffix should be "myexe".
-t.write("jamfile.jam", """
-stage dist : a ;
-myexe a : a.cpp ;
-""")
-
-t.run_build_system()
-t.expect_addition("dist/a.myexe")
-
-# Test 'stage's ability to traverse dependencies.
-t.write("a.cpp", """
-int main() {}
-""")
-
-t.write("l.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-foo() {}
-""")
-
-t.write("jamfile.jam", """
-lib l : l.cpp ;
-exe a : a.cpp l ;
-stage dist : a : <install-dependencies>on <install-type>EXE <install-type>LIB ;
-""")
-
-t.write("jamroot.jam", "")
-
-t.rm("dist")
-
-t.run_build_system()
-t.expect_addition("dist/a.exe")
-t.expect_addition("dist/l.dll")
-
-# Check that <use> properties are ignored when traversing targets for staging.
-t.copy("l.cpp", "l2.cpp")
-
-t.copy("l.cpp", "l3.cpp")
-
-t.write("jamfile.jam", """
-lib l2 : l2.cpp ;
-lib l3 : l3.cpp ;
-lib l : l.cpp : <use>l2 <dependency>l3 ;
-exe a : a.cpp l ;
-stage dist : a : <install-dependencies>on <install-type>EXE <install-type>LIB ;
-""")
-
-t.rm("dist")
-
-t.run_build_system()
-t.expect_addition("dist/l3.dll")
-t.expect_nothing("dist/l2.dll")
-
-# Check if <dependency> on 'stage' works.
-t.rm(".")
-t.write("jamroot.jam", """
-stage a1 : a1.txt : <location>dist ;
-stage a2 : a2.txt : <location>dist <dependency>a1 ;
-""")
-t.write("a1.txt", "")
-t.write("a2.txt", "")
-t.run_build_system("a2")
-t.expect_addition(["dist/a1.txt", "dist/a2.txt"])
-
-# Regression test: check that <location>. works.
-t.rm(".")
-
-t.write("jamroot.jam", """
-stage a1 : d/a1.txt : <location>. ;
-""")
-
-t.write("d/a1.txt", "")
-
-t.run_build_system()
-t.expect_addition("a1.txt")
-
-# Test that relative paths of sources can be preserved.
-t.rm(".")
-
-t.write("jamroot.jam", """
-install dist : a/b/c.h : <install-source-root>. ;
-""")
-
-t.write("a/b/c.h", "")
-
-t.run_build_system()
-t.expect_addition("dist/a/b/c.h")
-
-t.write("jamroot.jam", """
-install dist : a/b/c.h : <install-source-root>a ;
-""")
-
-t.write("a/b/c.h", "")
-
-t.run_build_system()
-t.expect_addition("dist/b/c.h")
-
-t.rm(".")
-t.write("build/jamroot.jam", """
-install dist : ../a/b/c.h : <location>../dist <install-source-root>../a ;
-""")
-
-t.write("a/b/c.h", "")
-
-t.run_build_system(subdir="build")
-t.expect_addition("dist/b/c.h")
-
-t.write("jamroot.jam", """
-install dist2 : a/b/c.h : <install-source-root>a ;
-""")
-
-t.write("a/b/c.h", "")
-
-t.write("sub/jamfile.jam", """
-alias h : ..//dist2 ;
-""")
-
-t.run_build_system(subdir="sub")
-t.expect_addition("dist2/b/c.h")
-
-# Test that when installing .cpp files, we do not scan include dependencies.
-t.rm(".")
-
-t.write("jamroot.jam", """
-install dist : a.cpp ;
-""")
-
-t.write("a.cpp", """
-#include "a.h"
-""")
-
-t.write("a.h", "")
-
-t.run_build_system()
-t.expect_addition("dist/a.cpp")
-
-t.touch("a.h")
-
-t.run_build_system()
-t.expect_nothing("dist/a.cpp")
-
-# Test that <name> property works, when there is just one file in sources.
-t.rm(".")
-
-t.write("jamroot.jam", """
-install dist : a.cpp : <name>b.cpp ;
-""")
-
-t.write("a.cpp", "test file")
-
-t.run_build_system()
-t.expect_addition("dist/b.cpp")
-
-t.cleanup()
diff --git a/tools/build/v2/test/standalone.py b/tools/build/v2/test/standalone.py
deleted file mode 100644
index 31603fc974..0000000000
--- a/tools/build/v2/test/standalone.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-
-# Regression tests: standalone projects were not able to refer to targets
-# declared in themselves.
-
-t.write("a.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-import standalone ;
-""")
-
-t.write("standalone.jam", """
-import project ;
-
-project.initialize $(__name__) ;
-project standalone ;
-
-local pwd = [ PWD ] ;
-
-alias x : $(pwd)/../a.cpp ;
-alias runtime : x ;
-""")
-
-t.write("standalone.py", """
-from b2.manager import get_manager
-
-# FIXME: this is ugly as death
-get_manager().projects().initialize(__name__)
-
-import os ;
-
-# This use of list as parameter is also ugly.
-project(['standalone'])
-
-pwd = os.getcwd()
-alias('x', [os.path.join(pwd, '../a.cpp')])
-alias('runtime', ['x'])
-""")
-
-
-t.write("sub/jamfile.jam", """
-stage bin : /standalone//runtime ;
-""")
-
-t.run_build_system(subdir="sub")
-t.expect_addition("sub/bin/a.cpp")
-
-t.cleanup()
diff --git a/tools/build/v2/test/startup_v2.py b/tools/build/v2/test/startup_v2.py
deleted file mode 100644
index a4faf56ded..0000000000
--- a/tools/build/v2/test/startup_v2.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002 Dave Abrahams
-# Copyright 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-import os
-import re
-
-
-def match_re(actual,expected):
- return re.match(expected,actual,re.DOTALL) != None
-
-# Test the Boost.Build startup behavior.
-t = BoostBuild.Tester(match=match_re, boost_build_path='', pass_toolset=0)
-
-t.set_tree('startup')
-
-t.run_build_system(
- status=1, stdout=r'''Unable to load Boost\.Build: could not find "boost-build.jam"
-.*Attempted search from .* up to the root''', match = match_re)
-
-os.chdir('no-bootstrap1')
-
-t.run_build_system(
- status=1
- , stdout=r'''Unable to load Boost\.Build: could not find build system\.'''
- + r'''.*attempted to load the build system by invoking'''
- + r'''.*'boost-build ;'.*'''
- + r'''but we were unable to find "bootstrap\.jam"'''
- )
-
-# Descend to a subdirectory which /doesn't/ contain a boost-build.jam file, and
-# try again to test the crawl-up behavior.
-os.chdir('subdir')
-
-t.run_build_system(
- status=1
- , stdout=r'''Unable to load Boost\.Build: could not find build system\.'''
- + r'''.*attempted to load the build system by invoking'''
- + r'''.*'boost-build ;'.*'''
- + r'''but we were unable to find "bootstrap\.jam"'''
- )
-
-os.chdir('../../no-bootstrap2')
-
-t.run_build_system(
- status=1
- , stdout=r'''Unable to load Boost\.Build: could not find build system\.'''
- + r'''.*attempted to load the build system by invoking'''
- + r'''.*'boost-build \. ;'.*'''
- + r'''but we were unable to find "bootstrap\.jam"'''
- )
-
-os.chdir('../no-bootstrap3')
-
-t.run_build_system(
- status=1
- , stdout=r'''Unable to load Boost.Build
-.*boost-build.jam" was found.*
-However, it failed to call the "boost-build" rule'''
- )
-
-# Test bootstrapping based on BOOST_BUILD_PATH.
-os.chdir('../bootstrap-env')
-t.run_build_system(
- extra_args = '-sBOOST_BUILD_PATH=../boost-root/build'
- , stdout = 'build system bootstrapped'
- )
-
-# Test bootstrapping based on an explicit path in boost-build.jam.
-os.chdir('../bootstrap-explicit')
-t.run_build_system(
- stdout = 'build system bootstrapped'
- )
-
-t.cleanup()
diff --git a/tools/build/v2/test/svn_tree.py b/tools/build/v2/test/svn_tree.py
deleted file mode 100644
index 74bceb7f57..0000000000
--- a/tools/build/v2/test/svn_tree.py
+++ /dev/null
@@ -1,668 +0,0 @@
-#!/usr/bin/env python
-#
-# tree.py: tools for comparing directory trees
-#
-# Subversion is a tool for revision control.
-# See http://subversion.tigris.org for more information.
-#
-# ====================================================================
-# Copyright (c) 2001 Sam Tobin-Hochstadt. All rights reserved.
-#
-# This software is licensed as described in the file COPYING, which
-# you should have received as part of this distribution. The terms
-# are also available at http://subversion.tigris.org/license-1.html.
-# If newer versions of this license are posted there, you may use a
-# newer version instead, at your option.
-#
-######################################################################
-
-# This file was modified by Vladimir Prus to store modification times in tree
-# nodes.
-
-import re
-import string
-import os.path
-import os
-import stat
-
-
-#========================================================================
-
-# ===> Overview of our Datastructures <===
-
-# The general idea here is that many, many things can be represented by
-# a tree structure:
-
-# - a working copy's structure and contents
-# - the output of 'svn status'
-# - the output of 'svn checkout/update'
-# - the output of 'svn commit'
-
-# The idea is that a test function creates an "expected" tree of some
-# kind, and is then able to compare it to an "actual" tree that comes
-# from running the Subversion client. This is what makes a test
-# automated; if an actual and expected tree match exactly, then the test
-# has passed. (See compare_trees() below.)
-
-# The SVNTreeNode class is the fundamental data type used to build tree
-# structures. The class contains a method for "dropping" a new node
-# into an ever-growing tree structure. (See also create_from_path()).
-
-# We have four parsers in this file for the four use cases listed above:
-# each parser examines some kind of input and returns a tree of
-# SVNTreeNode objects. (See build_tree_from_checkout(),
-# build_tree_from_commit(), build_tree_from_status(), and
-# build_tree_from_wc()). These trees are the "actual" trees that result
-# from running the Subversion client.
-
-# Also necessary, of course, is a convenient way for a test to create an
-# "expected" tree. The test *could* manually construct and link a bunch
-# of SVNTreeNodes, certainly. But instead, all the tests use the
-# build_generic_tree() routine.
-
-# build_generic_tree() takes a specially-formatted list of lists as
-# input, and returns a tree of SVNTreeNodes. The list of lists has this
-# structure:
-
-# [ ['/full/path/to/item', 'text contents', {prop-hash}, {att-hash}],
-# [...],
-# [...],
-# ... ]
-
-# You can see that each item in the list essentially defines an
-# SVNTreeNode. build_generic_tree() instantiates a SVNTreeNode for each
-# item, and then drops it into a tree by parsing each item's full path.
-
-# So a typical test routine spends most of its time preparing lists of
-# this format and sending them to build_generic_tree(), rather than
-# building the "expected" trees directly.
-
-# ### Note: in the future, we'd like to remove this extra layer of
-# ### abstraction. We'd like the SVNTreeNode class to be more
-# ### directly programmer-friendly, providing a number of accessor
-# ### routines, so that tests can construct trees directly.
-
-# The first three fields of each list-item are self-explanatory. It's
-# the fourth field, the "attribute" hash, that needs some explanation.
-# The att-hash is used to place extra information about the node itself,
-# depending on the parsing context:
-
-# - in the 'svn co/up' use-case, each line of output starts with two
-# characters from the set of (A, D, G, U, C, _). This status code
-# is stored in a attribute named 'status'.
-
-# - in the 'svn ci/im' use-case, each line of output starts with one
-# of the words (Adding, Deleting, Sending). This verb is stored in
-# an attribute named 'verb'.
-
-# - in the 'svn status' use-case (which is always run with the -v
-# (--verbose) flag), each line of output contains a working revision
-# number and a two-letter status code similar to the 'svn co/up'
-# case. The repository revision is also printed. All of this
-# information is stored in attributes named 'wc_rev', 'status', and
-# 'repos_rev', respectively.
-
-# - in the working-copy use-case, the att-hash is ignored.
-
-
-# Finally, one last explanation: the file 'actions.py' contains a number
-# of helper routines named 'run_and_verify_FOO'. These routines take
-# one or more "expected" trees as input, then run some svn subcommand,
-# then push the output through an appropriate parser to derive an
-# "actual" tree. Then it runs compare_trees() and returns the result.
-# This is why most tests typically end with a call to
-# run_and_verify_FOO().
-
-
-
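As a concrete illustration of the list-of-lists format described above (the paths, contents and attribute values here are invented for illustration), a test would build its expected tree roughly like this:

    expected_output = [
        ["wc/iota",   "This is iota.\n", {}, {"status": "A "}],
        ["wc/A",      None,              {}, {"status": "A "}],
        ["wc/A/mu",   "This is mu.\n",   {}, {"status": "A "}],
    ]
    # expected_tree = build_generic_tree(expected_output)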
-
-# A node in a tree.
-#
-# If CHILDREN is None, then the node is a file. Otherwise, CHILDREN
-# is a list of the nodes making up that directory's children.
-#
-# NAME is simply the name of the file or directory. CONTENTS is a
-# string that contains the file's contents (if a file), PROPS are
-# properties attached to files or dirs, and ATTS is a dictionary of
-# other metadata attached to the node.
-
-class SVNTreeNode:
-
- def __init__(self, name, children=None, contents=None, props={}, atts={}):
- self.name = name
- self.mtime = 0
- self.children = children
- self.contents = contents
- self.props = props
- self.atts = atts
- self.path = name
-
-# TODO: Check to make sure contents and children are mutually exclusive
-
- def add_child(self, newchild):
- if self.children is None: # if you're a file,
- self.children = [] # become an empty dir.
- child_already_exists = 0
- for a in self.children:
- if a.name == newchild.name:
- child_already_exists = 1
- break
- if child_already_exists == 0:
- self.children.append(newchild)
- newchild.path = os.path.join (self.path, newchild.name)
-
- # If you already have the node,
- else:
- if newchild.children is None:
- # this is the 'end' of the chain, so copy any content here.
- a.contents = newchild.contents
- a.props = newchild.props
- a.atts = newchild.atts
- a.path = os.path.join (self.path, newchild.name)
- else:
- # try to add dangling children to your matching node
- for i in newchild.children:
- a.add_child(i)
-
-
- def pprint(self):
- print " * Node name: ", self.name
- print " Path: ", self.path
- print " Contents: ", self.contents
- print " Properties:", self.props
- print " Attributes:", self.atts
- ### FIXME: I'd like to be able to tell the difference between
- ### self.children is None (file) and self.children == [] (empty
-    ### directory), but it seems that most places that construct
- ### SVNTreeNode objects don't even try to do that. --xbc
- if self.children is not None:
- print " Children: ", len(self.children)
- else:
- print " Children: is a file."
-
-# reserved name of the root of the tree
-
-root_node_name = "__SVN_ROOT_NODE"
-
-# Exception raised if you screw up in this module.
-
-class SVNTreeError(Exception): pass
-
-# Exception raised if two trees are unequal
-
-class SVNTreeUnequal(Exception): pass
-
-# Exception raised if one node is file and other is dir
-
-class SVNTypeMismatch(Exception): pass
-
-# Exception raised if get_child is passed a file.
-
-class SVNTreeIsNotDirectory(Exception): pass
-
-
-# Some attributes 'stack' on each other if the same node is added
-# twice to a tree. Place all such special cases in here.
-def attribute_merge(orighash, newhash):
- "Merge the attributes in NEWHASH into ORIGHASH."
-
- if orighash.has_key('verb') and newhash.has_key('verb'):
- # Special case: if a commit reports a node as "deleted", then
-    # "added", it's a replacement.
- if orighash['verb'] == "Deleting":
- if newhash['verb'] == "Adding":
- orighash['verb'] = "Replacing"
-
- # Add future stackable attributes here...
-
- return orighash
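For instance, merging the attributes of a node that a commit first reported as "Deleting" and then as "Adding" yields a "Replacing" verb (illustrative call):

    atts = attribute_merge({'verb': 'Deleting'}, {'verb': 'Adding'})
    # atts == {'verb': 'Replacing'}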
-
-
-# helper func
-def add_elements_as_path(top_node, element_list):
- """Add the elements in ELEMENT_LIST as if they were a single path
- below TOP_NODE."""
-
- # The idea of this function is to take a list like so:
- # ['A', 'B', 'C'] and a top node, say 'Z', and generate a tree
- # like this:
- #
- # Z -> A -> B -> C
- #
- # where 1 -> 2 means 2 is a child of 1.
- #
-
- prev_node = top_node
- for i in element_list:
- new_node = SVNTreeNode(i, None)
- prev_node.add_child(new_node)
- prev_node = new_node
-
-
-# Sorting function -- sort 2 nodes by their names.
-def node_is_greater(a, b):
-  "Compare two nodes by name, for use as a sort comparator."
-  # Internal use only
- if a.name == b.name:
- return 0
- if a.name > b.name:
- return 1
- else:
- return -1
-
-
-# Helper for compare_trees
-def compare_file_nodes(a, b):
-  """Compare two nodes' names, contents, properties, and attributes, ignoring
-  children. Return 0 if they are the same, 1 otherwise."""
- if a.name != b.name:
- return 1
- if a.contents != b.contents:
- return 1
- if a.props != b.props:
- return 1
-  if a.atts != b.atts:
-    return 1
-  return 0
-
-
-# Internal utility used by most build_tree_from_foo() routines.
-#
-# (Take the output and .add_child() it to a root node.)
-
-def create_from_path(path, contents=None, props={}, atts={}):
- """Create and return a linked list of treenodes, given a PATH
-  representing a single entry into that tree. CONTENTS, PROPS, and ATTS
-  are optional arguments that will be deposited in the tail node."""
-
- # get a list of all the names in the path
- # each of these will be a child of the former
- elements = path.split("/")
- if len(elements) == 0:
- raise SVNTreeError
-
- root_node = SVNTreeNode(elements[0], None)
-
- add_elements_as_path(root_node, elements[1:])
-
- # deposit contents in the very last node.
- node = root_node
- while 1:
- if node.children is None:
- node.contents = contents
- node.props = props
- node.atts = atts
- break
- node = node.children[0]
-
- return root_node
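A small illustration with a hypothetical path and contents:

    node = create_from_path('A/B/mu', contents="hello\n", atts={'verb': 'Adding'})
    # node is the head of the chain A -> B -> mu; the leaf 'mu' holds the
    # contents and atts, while the intermediate nodes get empty defaults.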
-
-
-# helper for handle_dir(), which is a helper for build_tree_from_wc()
-def get_props(path):
- "Return a hash of props for PATH, using the svn client."
-
- # It's not kosher to look inside SVN/ and try to read the internal
- # property storage format. Instead, we use 'svn proplist'. After
- # all, this is the only way the user can retrieve them, so we're
- # respecting the black-box paradigm.
-
- props = {}
- output, errput = main.run_svn(1, "proplist", path, "--verbose")
-
- for line in output:
- name, value = line.split(' : ')
- name = string.strip(name)
- value = string.strip(value)
- props[name] = value
-
- return props
-
-
-# helper for handle_dir(), which helps build_tree_from_wc()
-def get_text(path):
- "Return a string with the textual contents of a file at PATH."
-
- # sanity check
- if not os.path.isfile(path):
- return None
-
- fp = open(path, 'r')
- contents = fp.read()
- fp.close()
- return contents
-
-
-# main recursive helper for build_tree_from_wc()
-def handle_dir(path, current_parent, load_props, ignore_svn):
-
- # get a list of all the files
- all_files = os.listdir(path)
- files = []
- dirs = []
-
- # put dirs and files in their own lists, and remove SVN dirs
- for f in all_files:
- f = os.path.join(path, f)
- if (os.path.isdir(f) and os.path.basename(f) != 'SVN'):
- dirs.append(f)
- elif os.path.isfile(f):
- files.append(f)
-
- # add each file as a child of CURRENT_PARENT
- for f in files:
- fcontents = get_text(f)
- if load_props:
- fprops = get_props(f)
- else:
- fprops = {}
- c = SVNTreeNode(os.path.basename(f), None,
- fcontents, fprops)
- c.mtime = os.stat(f)[stat.ST_MTIME]
- current_parent.add_child(c)
-
- # for each subdir, create a node, walk its tree, add it as a child
- for d in dirs:
- if load_props:
- dprops = get_props(d)
- else:
- dprops = {}
- new_dir_node = SVNTreeNode(os.path.basename(d), [], None, dprops)
- handle_dir(d, new_dir_node, load_props, ignore_svn)
-    new_dir_node.mtime = os.stat(d)[stat.ST_MTIME]
- current_parent.add_child(new_dir_node)
-
-def get_child(node, name):
-  """If SVNTreeNode NODE contains a child named NAME, return that child;
-  else, return None. If NODE is not a directory, raise an
-  SVNTreeIsNotDirectory exception."""
- if node.children == None:
- raise SVNTreeIsNotDirectory
- for n in node.children:
- if (name == n.name):
- return n
- return None
-
-
-# Helper for compare_trees
-def default_singleton_handler(a, baton):
-  "Print SVNTreeNode A's name, then raise SVNTreeUnequal."
- print "Got singleton", a.name
- a.pprint()
- raise SVNTreeUnequal
-
-
-###########################################################################
-###########################################################################
-# EXPORTED ROUTINES ARE BELOW
-
-
-# Main tree comparison routine!
-
-def compare_trees(a, b,
- singleton_handler_a = None,
- a_baton = None,
- singleton_handler_b = None,
- b_baton = None):
-  """Compare SVNTreeNodes A and B, expressing differences using
-  SINGLETON_HANDLER_A and SINGLETON_HANDLER_B. Each handler is a function
-  of two arguments (an SVNTreeNode and a context baton) and may raise
-  SVNTreeUnequal. Their return values are ignored.
-
-  If A and B are both files, return 0 if their contents, properties, and
-  names are all the same; otherwise raise SVNTreeUnequal. If one of A and
-  B is a file and the other is a directory, raise SVNTypeMismatch. If both
-  are directories, then for each entry that exists in both, call
-  compare_trees on the two entries; otherwise, if the entry exists only in
-  A, invoke SINGLETON_HANDLER_A on it, and likewise for B with
-  SINGLETON_HANDLER_B."""
-
- def display_nodes(a, b):
- 'Display two nodes, expected and actual.'
- print "============================================================="
- print "Expected", b.name, "and actual", a.name, "are different!"
- print "============================================================="
- print "EXPECTED NODE TO BE:"
- print "============================================================="
- b.pprint()
- print "============================================================="
- print "ACTUAL NODE FOUND:"
- print "============================================================="
- a.pprint()
-
- # Setup singleton handlers
- if (singleton_handler_a is None):
- singleton_handler_a = default_singleton_handler
- if (singleton_handler_b is None):
- singleton_handler_b = default_singleton_handler
-
- try:
- # A and B are both files.
- if ((a.children is None) and (b.children is None)):
- if compare_file_nodes(a, b):
- display_nodes(a, b)
-        raise SVNTreeUnequal
- # One is a file, one is a directory.
- elif (((a.children is None) and (b.children is not None))
- or ((a.children is not None) and (b.children is None))):
- display_nodes(a, b)
-      raise SVNTypeMismatch
- # They're both directories.
- else:
- # First, compare the directories' two hashes.
- if (a.props != b.props) or (a.atts != b.atts):
- display_nodes(a, b)
-        raise SVNTreeUnequal
-
- accounted_for = []
- # For each child of A, check and see if it's in B. If so, run
- # compare_trees on the two children and add b's child to
- # accounted_for. If not, run FUNC_A on the child. Next, for each
- # child of B, check and see if it's in accounted_for. If it is,
- # do nothing. If not, run FUNC_B on it.
- for a_child in a.children:
- b_child = get_child(b, a_child.name)
- if b_child:
- accounted_for.append(b_child)
- compare_trees(a_child, b_child,
- singleton_handler_a, a_baton,
- singleton_handler_b, b_baton)
- else:
- singleton_handler_a(a_child, a_baton)
- for b_child in b.children:
- if (b_child not in accounted_for):
- singleton_handler_b(b_child, b_baton)
- return 0
- except SVNTypeMismatch:
- print 'Unequal Types: one Node is a file, the other is a directory'
- raise SVNTreeUnequal
- except SVNTreeIsNotDirectory:
- print "Error: Foolish call to get_child."
- sys.exit(1)
- except IndexError:
- print "Error: unequal number of children"
- raise SVNTreeUnequal
- except SVNTreeUnequal:
- if a.name == root_node_name:
- return 1
- else:
- print "Unequal at node %s" % a.name
- raise SVNTreeUnequal
- return 0
-
-
-
-
-# Visually show a tree's structure
-
-def dump_tree(n,indent=""):
- "Print out a nice representation of the tree's structure."
-
- # Code partially stolen from Dave Beazley.
- if n.children is None:
- tmp_children = []
- else:
- tmp_children = n.children
-
- if n.name == root_node_name:
- print "%s%s" % (indent, "ROOT")
- else:
- print "%s%s" % (indent, n.name)
-
- indent = indent.replace("-", " ")
- indent = indent.replace("+", " ")
- for i in range(len(tmp_children)):
- c = tmp_children[i]
- if i == len(tmp_children) - 1:
- dump_tree(c,indent + " +-- ")
- else:
- dump_tree(c,indent + " |-- ")
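For example, for a tree ROOT -> { A -> { B, mu }, iota } (hypothetical node names), dump_tree prints roughly:

    ROOT
     |-- A
     |    |-- B
     |    +-- mu
     +-- iota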
-
-
-###################################################################
-###################################################################
-# PARSERS that return trees made of SVNTreeNodes....
-
-
-###################################################################
-# Build an "expected" static tree from a list of lists
-
-
-# Create a list of lists, of the form:
-#
-# [ [path, contents, props, atts], ... ]
-#
-# and run it through this parser. PATH is a string, a path to the
-# object. CONTENTS is either a string or None, and PROPS and ATTS are
-# populated dictionaries or {}. Each CONTENTS/PROPS/ATTS will be
-# attached to the basename-node of the associated PATH.
-
-def build_generic_tree(nodelist):
- "Given a list of lists of a specific format, return a tree."
-
- root = SVNTreeNode(root_node_name)
-
- for list in nodelist:
- new_branch = create_from_path(list[0], list[1], list[2], list[3])
- root.add_child(new_branch)
-
- return root
-
-
-####################################################################
-# Build trees from different kinds of subcommand output.
-
-
-# Parse co/up output into a tree.
-#
-# Tree nodes will contain no contents, and only one 'status' att.
-
-def build_tree_from_checkout(lines):
- "Return a tree derived by parsing the output LINES from 'co' or 'up'."
-
- root = SVNTreeNode(root_node_name)
- rm = re.compile ('^([MAGCUD_ ][MAGCUD_ ]) (.+)')
-
- for line in lines:
- match = rm.search(line)
- if match and match.groups():
- new_branch = create_from_path(match.group(2), None, {},
- {'status' : match.group(1)})
- root.add_child(new_branch)
-
- return root
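A hypothetical fragment of 'svn up' output and the tree it produces:

    lines = ['A  wc/newdir/newfile\n', 'U  wc/iota\n']
    tree = build_tree_from_checkout(lines)
    # Each leaf carries a single two-character 'status' attribute,
    # e.g. {'status': 'A '} for wc/newdir/newfile.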
-
-
-# Parse ci/im output into a tree.
-#
-# Tree nodes will contain no contents, and only one 'verb' att.
-
-def build_tree_from_commit(lines):
- "Return a tree derived by parsing the output LINES from 'ci' or 'im'."
-
- # Lines typically have a verb followed by whitespace then a path.
- root = SVNTreeNode(root_node_name)
- rm1 = re.compile ('^(\w+)\s+(.+)')
- rm2 = re.compile ('^Transmitting')
-
- for line in lines:
- match = rm2.search(line)
- if not match:
- match = rm1.search(line)
- if match and match.groups():
- new_branch = create_from_path(match.group(2), None, {},
- {'verb' : match.group(1)})
- root.add_child(new_branch)
-
- return root
-
-
-# Parse status output into a tree.
-#
-# Tree nodes will contain no contents, and these atts:
-#
-# 'status', 'wc_rev', 'repos_rev'
-#   ... and possibly 'locked' and 'copied', iff those columns are non-empty.
-#
-
-def build_tree_from_status(lines):
- "Return a tree derived by parsing the output LINES from 'st'."
-
- root = SVNTreeNode(root_node_name)
- rm = re.compile ('^.+\:.+(\d+)')
- lastline = string.strip(lines.pop())
- match = rm.search(lastline)
- if match and match.groups():
- repos_rev = match.group(1)
- else:
- repos_rev = '?'
-
- # Try http://www.wordsmith.org/anagram/anagram.cgi?anagram=ACDRMGU
- rm = re.compile ('^([MACDRUG_ ][MACDRUG_ ])(.)(.) . [^0-9-]+(\d+|-)(.{23})(.+)')
- for line in lines:
- match = rm.search(line)
- if match and match.groups():
- if match.group(5) != '-': # ignore items that only exist on repos
- atthash = {'status' : match.group(1),
- 'wc_rev' : match.group(4),
- 'repos_rev' : repos_rev}
- if match.group(2) != ' ':
- atthash['locked'] = match.group(2)
- if match.group(3) != ' ':
- atthash['copied'] = match.group(3)
- new_branch = create_from_path(match.group(6), None, {}, atthash)
-
- root.add_child(new_branch)
-
- return root
-
-
-####################################################################
-# Build trees by looking at the working copy
-
-
-# The reason the 'load_props' flag is off by default is that it
-# causes a drastic slowdown -- we spawn a new 'svn proplist'
-# process for every file and dir in the working copy!
-
-
-def build_tree_from_wc(wc_path, load_props=0, ignore_svn=1):
-  """Take WC_PATH as the path to a working copy. Walk the tree below
-  that path and create the tree based on the files actually found.
-  If IGNORE_SVN is true, exclude SVN dirs from the tree. If LOAD_PROPS
-  is true, the props will be added to the tree."""
-
- root = SVNTreeNode(root_node_name, None)
-
- # if necessary, store the root dir's props in the root node.
- if load_props:
- root.props = get_props(wc_path)
-
- # Walk the tree recursively
- handle_dir(os.path.normpath(wc_path), root, load_props, ignore_svn)
-
- return root
-
-### End of file.
-# local variables:
-# eval: (load-file "../../../../../tools/dev/svn-dev.el")
-# end:
diff --git a/tools/build/v2/test/symlink.py b/tools/build/v2/test/symlink.py
deleted file mode 100644
index d78e96c15f..0000000000
--- a/tools/build/v2/test/symlink.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test the 'symlink' rule.
-
-import os
-import BoostBuild
-
-
-if os.name != 'posix':
- print "The symlink tests can be run on posix only."
- import sys
- sys.exit(1)
-
-
-t = BoostBuild.Tester()
-
-t.write("jamroot.jam", "import gcc ;")
-
-t.write("jamfile.jam", """
-exe hello : hello.cpp ;
-symlink hello_release : hello/<variant>release ;
-symlink hello_debug : hello/<variant>debug ;
-symlink links/hello_release : hello/<variant>release ;
-""")
-
-t.write("hello.cpp", """
-int main() {}
-""")
-
-t.run_build_system()
-t.expect_addition([
- 'hello_debug.exe',
- 'hello_release.exe',
- 'links/hello_release.exe'])
-
-t.cleanup()
diff --git a/tools/build/v2/test/tag.py b/tools/build/v2/test/tag.py
deleted file mode 100644
index aef31e8081..0000000000
--- a/tools/build/v2/test/tag.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Pedro Ferreira 2003. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-import BoostBuild
-
-
-################################################################################
-#
-# test_folder_with_dot_in_name()
-# ------------------------------
-#
-################################################################################
-
-def test_folder_with_dot_in_name(t):
- """ Regression test: the 'tag' feature did not work in directories that had
- a dot in their name.
- """
-
- t.write("version-1.32.0/jamroot.jam", """
-project test : requirements <tag>@$(__name__).tag ;
-
-rule tag ( name : type ? : property-set )
-{
- # Do nothing, just make sure the rule is invoked OK.
- ECHO "The tag rule has been invoked." ;
-}
-exe a : a.cpp ;
-""")
- t.write("version-1.32.0/a.cpp", "int main() {}\n")
-
- t.run_build_system(subdir="version-1.32.0")
- t.expect_addition("version-1.32.0/bin/$toolset/debug/a.exe")
- t.expect_output_line("The tag rule has been invoked.")
-
-
-################################################################################
-#
-# test_tag_property()
-# -------------------
-#
-################################################################################
-
-def test_tag_property(t):
- """Basic tag property test.
- """
-
- t.write("jamroot.jam", """
-import virtual-target ;
-
-rule tag ( name : type ? : property-set )
-{
- local tags ;
- switch [ $(property-set).get <variant> ]
- {
- case debug : tags += d ;
- case release : tags += r ;
- }
- switch [ $(property-set).get <link> ]
- {
- case shared : tags += s ;
- case static : tags += t ;
- }
- if $(tags)
- {
- return [ virtual-target.add-prefix-and-suffix $(name)_$(tags:J="")
- : $(type) : $(property-set) ] ;
- }
-}
-
-# Test both fully-qualified and local name of the rule
-exe a : a.cpp : <tag>@$(__name__).tag ;
-lib b : a.cpp : <tag>@tag ;
-stage c : a ;
-""")
-
- t.write("a.cpp", """
-int main() {}
-#ifdef _MSC_VER
-__declspec (dllexport) void x () {}
-#endif
-""")
-
- file_list = \
- BoostBuild.List("bin/$toolset/debug/a_ds.exe") + \
- BoostBuild.List("bin/$toolset/debug/b_ds.dll") + \
- BoostBuild.List("c/a_ds.exe") + \
- BoostBuild.List("bin/$toolset/release/a_rs.exe") + \
- BoostBuild.List("bin/$toolset/release/b_rs.dll") + \
- BoostBuild.List("c/a_rs.exe") + \
- BoostBuild.List("bin/$toolset/debug/link-static/a_dt.exe") + \
- BoostBuild.List("bin/$toolset/debug/link-static/b_dt.lib") + \
- BoostBuild.List("c/a_dt.exe") + \
- BoostBuild.List("bin/$toolset/release/link-static/a_rt.exe") + \
- BoostBuild.List("bin/$toolset/release/link-static/b_rt.lib") + \
- BoostBuild.List("c/a_rt.exe")
-
- variants = "debug release link=static,shared"
-
- t.run_build_system(variants)
- t.expect_addition(file_list)
-
- t.run_build_system(variants + " clean")
- t.expect_removal(file_list)
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-t = BoostBuild.Tester()
-
-test_tag_property(t)
-test_folder_with_dot_in_name(t)
-
-t.cleanup()
diff --git a/tools/build/v2/test/test_all.py b/tools/build/v2/test/test_all.py
deleted file mode 100644
index b6aad705d9..0000000000
--- a/tools/build/v2/test/test_all.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002-2005 Dave Abrahams.
-# Copyright 2002-2006 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import os
-import sys
-import string
-import BoostBuild
-
-xml = "--xml" in sys.argv
-toolset = BoostBuild.get_toolset()
-
-
-# Clear environment for testing.
-#
-for s in ('BOOST_ROOT', 'BOOST_BUILD_PATH', 'JAM_TOOLSET', 'BCCROOT', 'MSVCDir',
- 'MSVC', 'MSVCNT', 'MINGW', 'watcom' ):
- try:
- del os.environ[s]
- except:
- pass
-
-BoostBuild.set_defer_annotations(1)
-
-
-def run_tests(critical_tests, other_tests):
-    """Runs the critical tests first and then other_tests.
-
-    Stops on the first error, and writes the name of the failed test to
-    test_results.txt. Critical tests are run in the specified order; other
-    tests are run starting with the one that failed the last time.
- """
- last_failed = last_failed_test()
- other_tests = reorder_tests(other_tests, last_failed)
- all_tests = critical_tests + other_tests
-
- invocation_dir = os.getcwd()
-
- pass_count = 0
- failures_count = 0
-
- for i in all_tests:
- passed = 1
- if not xml:
- print ("%-25s : " %(i)),
- try:
- __import__(i)
- except SystemExit:
- passed = 0;
- if failures_count == 0:
- f = open(os.path.join(invocation_dir, 'test_results.txt'), 'w')
- f.write(i)
- f.close()
- failures_count = failures_count + 1
- # Restore the current directory, which might be changed by the test.
- os.chdir(invocation_dir)
-
- if not xml:
- if passed:
- print "PASSED"
- else:
- print "FAILED"
-
- if i == "regression":
- BoostBuild.flush_annotations()
- BoostBuild.clear_annotations()
- else:
- rs = "succeed"
- if not passed:
- rs = "fail"
- print """
-<test-log library="build" test-name="%s" test-type="run" toolset="%s" test-program="%s" target-directory="%s">
-<run result="%s">""" % (i, toolset, "tools/build/v2/test/" + i + ".py",
- "boost/bin.v2/boost.build.tests/" + toolset + "/" + i, rs)
-
- if not passed:
- BoostBuild.flush_annotations(1)
-
- print """
-</run>
-</test-log>
-"""
- if passed:
- pass_count = pass_count + 1
- sys.stdout.flush() # Makes testing under emacs more entertaining.
-
- # Erase the file on success.
- if failures_count == 0:
- open('test_results.txt', 'w')
-
- if not xml:
- print """
- === Test summary ===
- PASS: %d
- FAIL: %d
- """ % (pass_count, failures_count)
-
-
-def last_failed_test():
-    "Returns the name of the last failed test, or None."
- try:
- f = open("test_results.txt")
- s = string.strip(f.read())
- return s
- except:
- return None
-
-
-def reorder_tests(tests, first_test):
- try:
- n = tests.index(first_test)
- return [first_test] + tests[:n] + tests[n+1:]
- except ValueError:
- return tests
-
-
-critical_tests = ["unit_tests", "module_actions", "startup_v2"]
-
-critical_tests += ["core_d12", "core_typecheck", "core_delete_module",
- "core_language", "core_arguments", "core_varnames", "core_import_module"]
-
-tests = [ "absolute_sources",
- "alias",
- "alternatives",
- "bad_dirname",
- "build_dir",
- "build_file",
- "build_no",
- "builtin_echo",
- "builtin_exit",
- "c_file",
- "chain",
- "clean",
- "composite",
- "conditionals",
- "conditionals2",
- "conditionals3",
- "conditionals_multiple",
- "configuration",
- "copy_time",
- "core_action_status",
- "core_actions_quietly",
- "core_at_file",
- "core_bindrule",
- "core_nt_line_length",
- "core_option_d2",
- "core_option_l",
- "core_option_n",
- "core_parallel_actions",
- "core_parallel_multifile_actions_1",
- "core_parallel_multifile_actions_2",
- "core_update_now",
- "custom_generator",
- "default_build",
- "default_features",
-# This test is known to be broken itself.
-# "default_toolset",
- "dependency_property",
- "dependency_test",
- "direct_request_test",
- "disambiguation",
- "dll_path",
- "double_loading",
- "duplicate",
- "example_libraries",
- "example_make",
- "expansion",
- "explicit",
- "free_features_request",
- "generator_selection",
- "generators_test",
- "implicit_dependency",
- "indirect_conditional",
- "inherit_toolset",
- "inherited_dependency",
- "inline",
- "lib_source_property",
- "library_chain",
- "library_property",
- "load_order",
- "loop",
- "make_rule",
- "ndebug",
- "no_type",
- "notfile",
- "ordered_include",
- "out_of_tree",
- "path_features",
- "prebuilt",
- "print",
- "project_dependencies",
- "project_glob",
- "project_root_constants",
- "project_root_rule",
- "project_test3",
- "project_test4",
- "property_expansion",
- "rebuilds",
- "regression",
- "relative_sources",
- "remove_requirement",
- "resolution",
- "searched_lib",
- "skipping",
- "sort_rule",
- "source_locations",
- "stage",
- "standalone",
- "suffix",
- "tag",
- "test_result_dumping",
- "testing_support",
- "timedata",
- "unit_test",
- "unused",
- "use_requirements",
- "using",
- "wrapper",
- "wrong_project",
- "exit_status",
- ]
-
-if os.name == 'posix':
- tests.append("symlink")
-    # On Windows, library order is not important, so skip this test. Besides, it
-    # fails ;-). Further, the test relies on the fact that on Linux, one can
-    # build a shared library with unresolved symbols. This is not true on
-    # Windows (even with Cygwin gcc).
- if string.find(os.uname()[0], "CYGWIN") == -1:
- tests.append("library_order")
-
-if string.find(BoostBuild.get_toolset(), 'gcc') == 0:
- tests.append("gcc_runtime")
-
-if ( string.find(BoostBuild.get_toolset(), 'gcc') == 0 )or \
- ( string.find(BoostBuild.get_toolset(), 'msvc') == 0 ):
- tests.append("pch")
-
-if "--extras" in sys.argv:
- tests.append("boostbook")
- tests.append("qt4")
- tests.append("example_qt4")
-    # Requires ./whatever.py to work, so is not guaranteed to work everywhere.
- tests.append("example_customization")
- # Requires gettext tools.
- tests.append("example_gettext")
-
-elif not xml:
- print 'Note: skipping extra tests'
-
-run_tests(critical_tests, tests)
diff --git a/tools/build/v2/test/test_result_dumping.py b/tools/build/v2/test/test_result_dumping.py
deleted file mode 100755
index 07eb594808..0000000000
--- a/tools/build/v2/test/test_result_dumping.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests dumping Boost Build based testing results.
-
-import BoostBuild
-
-
-################################################################################
-#
-# Test that dumping Boost Build based testing results works when the test code
-# is not located in a folder under the Jamroot folder.
-#
-################################################################################
-
-t = BoostBuild.Tester("--dump-tests")
-
-t.write("TestBuild/jamroot.jam", """
-import testing ;
-test-suite testit : [ run ../TestSource/test.cpp ] ;
-""")
-
-t.write("TestSource/test.cpp", """
-int main() {}
-""")
-
-t.run_build_system("", subdir="TestBuild")
-t.expect_output_line('boost-test(RUN) "*/TestBuild/test" : "../TestSource/test.cpp"')
-
-t.cleanup()
diff --git a/tools/build/v2/test/testing_support.py b/tools/build/v2/test/testing_support.py
deleted file mode 100755
index eee4345c5a..0000000000
--- a/tools/build/v2/test/testing_support.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests different aspects of Boost Build's automated testing support.
-
-import BoostBuild
-
-
-################################################################################
-#
-# test_files_with_spaces_in_their_name()
-# --------------------------------------
-#
-################################################################################
-
-def test_files_with_spaces_in_their_name():
- """Regression test making sure test result files get created correctly when
- testing files with spaces in their name.
- """
-
- t = BoostBuild.Tester()
-
- t.write("valid source.cpp", "int main() {}\n");
-
- t.write("invalid source.cpp", "this is not valid source code");
-
- t.write("jamroot.jam", """
-import testing ;
-testing.compile "valid source.cpp" ;
-testing.compile-fail "invalid source.cpp" ;
-""")
-
- t.run_build_system(status=0)
- t.expect_addition("bin/invalid source.test/$toolset/debug/invalid source.obj")
- t.expect_addition("bin/invalid source.test/$toolset/debug/invalid source.test")
- t.expect_addition("bin/valid source.test/$toolset/debug/valid source.obj")
- t.expect_addition("bin/valid source.test/$toolset/debug/valid source.test")
-
- t.expect_content("bin/valid source.test/$toolset/debug/valid source.test", \
- "passed" )
- t.expect_content( \
- "bin/invalid source.test/$toolset/debug/invalid source.test", \
- "passed" )
- t.expect_content( \
- "bin/invalid source.test/$toolset/debug/invalid source.obj", \
- "failed as expected" )
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-test_files_with_spaces_in_their_name()
diff --git a/tools/build/v2/test/timedata.py b/tools/build/v2/test/timedata.py
deleted file mode 100644
index 04ec99f98a..0000000000
--- a/tools/build/v2/test/timedata.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2005 David Abrahams
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Tests the build step timing facilities.
-
-
-import BoostBuild
-import re
-
-
-################################################################################
-#
-# basic_jam_action_test()
-# -----------------------
-#
-################################################################################
-
-def basic_jam_action_test():
- """Tests basic Jam action timing support."""
-
- t = BoostBuild.Tester(pass_toolset=0)
-
- t.write("file.jam", """
-rule time
-{
- DEPENDS $(<) : $(>) ;
- __TIMING_RULE__ on $(>) = record_time $(<) ;
- DEPENDS all : $(<) ;
-}
-
-actions time
-{
- echo $(>) user: $(__USER_TIME__) system: $(__SYSTEM_TIME__)
- echo timed from $(>) >> $(<)
-}
-
-rule record_time ( target : source : start end user system )
-{
- __USER_TIME__ on $(target) = $(user) ;
- __SYSTEM_TIME__ on $(target) = $(system) ;
-}
-
-rule make
-{
- DEPENDS $(<) : $(>) ;
-}
-
-actions make
-{
- echo made from $(>) >> $(<)
-}
-
-time foo : bar ;
-make bar : baz ;
-""")
-
- t.write("baz", "nothing\n")
-
- expected_output = """\.\.\.found 4 targets\.\.\.
-\.\.\.updating 2 targets\.\.\.
-make bar
-time foo
-bar +user: [0-9\.]+ +system: +[0-9\.]+ *
-\.\.\.updated 2 targets\.\.\.$
-"""
-
- t.run_build_system("-ffile.jam -d+1", stdout=expected_output, match=lambda
- actual, expected: re.search(expected, actual, re.DOTALL))
- t.expect_addition("foo")
- t.expect_addition("bar")
- t.expect_nothing_more()
-
- t.cleanup()
-
-
-################################################################################
-#
-# boost_build_testing_support_timing_rule():
-# ------------------------------------------
-#
-################################################################################
-
-def boost_build_testing_support_timing_rule():
- """Tests the target build timing rule provided by the Boost Build testing
- support system.
- """
-
- t = BoostBuild.Tester()
-
- t.write("aaa.cpp", "int main() {}\n")
-
- t.write("jamroot.jam", """
-import testing ;
-exe my-exe : aaa.cpp ;
-time my-time : my-exe ;
-""")
-
- t.run_build_system()
- t.expect_addition("bin/$toolset/debug/aaa.obj")
- t.expect_addition("bin/$toolset/debug/my-exe.exe")
- t.expect_addition("bin/$toolset/debug/my-time.time")
-
- t.expect_content_line("bin/$toolset/debug/my-time.time", "user: *")
- t.expect_content_line("bin/$toolset/debug/my-time.time", "system: *")
-
- t.cleanup()
-
-
-################################################################################
-#
-# boost_build_testing_support_timing_rule_with_spaces_in_names()
-# --------------------------------------------------------------
-#
-################################################################################
-
-def boost_build_testing_support_timing_rule_with_spaces_in_names():
- """Tests the target build timing rule provided by the Boost Build testing
-    support system when used with targets containing spaces in their names.
- """
-
- t = BoostBuild.Tester()
-
- t.write("aaa bbb.cpp", "int main() {}\n")
-
- t.write("jamroot.jam", """
-import testing ;
-exe "my exe" : "aaa bbb.cpp" ;
-time "my time" : "my exe" ;
-""")
-
- t.run_build_system()
- t.expect_addition("bin/$toolset/debug/aaa bbb.obj")
- t.expect_addition("bin/$toolset/debug/my exe.exe")
- t.expect_addition("bin/$toolset/debug/my time.time")
-
- t.expect_content_line("bin/$toolset/debug/my time.time", "user: *")
- t.expect_content_line("bin/$toolset/debug/my time.time", "system: *")
-
- t.cleanup()
-
-
-################################################################################
-#
-# main()
-# ------
-#
-################################################################################
-
-basic_jam_action_test()
-boost_build_testing_support_timing_rule()
-boost_build_testing_support_timing_rule_with_spaces_in_names()
\ No newline at end of file
diff --git a/tools/build/v2/test/tree.py b/tools/build/v2/test/tree.py
deleted file mode 100644
index 89f8ad4ff1..0000000000
--- a/tools/build/v2/test/tree.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2001, 2002 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This file is based in part on the content of svn_tree.py.
-
-import sys
-import svn_tree
-
-class Trees_difference:
-
- def __init__(self):
- self.added_files = []
- self.removed_files = []
- self.modified_files = []
- self.touched_files = []
-
- def append(self, other):
- self.added_files.extend(other.added_files)
- self.removed_files.extend(other.removed_files)
- self.modified_files.extend(other.modified_files)
- self.touched_files.extend(other.touched_files)
-
- def ignore_directories(self):
-        "Removes directories from the list of found differences."
-
- def not_dir(x):
- return x[-1] != "/"
- self.added_files = filter(not_dir, self.added_files)
- self.removed_files = filter(not_dir, self.removed_files)
- self.modified_files = filter(not_dir, self.modified_files)
- self.touched_files = filter(not_dir, self.touched_files)
-
- def pprint(self, f=None):
- print >> f, "Added files :", self.added_files
- print >> f, "Removed files :", self.removed_files
- print >> f, "Modified files:", self.modified_files
- print >> f, "Touched files :", self.touched_files
-
- def empty(self):
- return ( len(self.added_files) == 0 ) and \
- ( len(self.removed_files) == 0 ) and \
- ( len(self.modified_files) == 0 ) and \
- ( len(self.touched_files) == 0 )
-
-def build_tree(dir):
- return svn_tree.build_tree_from_wc(dir, load_props=0, ignore_svn=1)
-
-def trees_difference(a, b, current_name=""):
-    """Compare SVNTreeNodes A and B, and return a Trees_difference instance."""
-
- assert a.name == b.name
-
- result = Trees_difference()
- try:
- # A and B are both files.
- if ((a.children is None) and (b.children is None)):
- assert a.name == b.name
- if svn_tree.compare_file_nodes(a, b):
- result.modified_files.append(current_name)
- elif (a.mtime != b.mtime):
- result.touched_files.append(current_name)
-
- # One is a file, one is a directory.
- # this case is disabled because svn_tree doesn't distinguish
- # empty directories from files, at least on Cygwin.
- elif 0 and (((a.children is None) and (b.children is not None))
- or ((a.children is not None) and (b.children is None))):
- a.pprint()
- b.pprint()
- raise svn_tree.SVNTypeMismatch
- # They're both directories.
- else:
-            # accounted_for holds children present in both trees
- accounted_for = []
- for a_child in (a.children or []):
- b_child = svn_tree.get_child(b, a_child.name)
- if b_child:
- accounted_for.append(b_child)
- if current_name:
- result.append(trees_difference(a_child, b_child, current_name + "/" + a_child.name))
- else:
- result.append(trees_difference(a_child, b_child, a_child.name))
- else:
- if current_name:
- result.removed_files.append(current_name + "/" + a_child.name)
- else:
- result.removed_files.append(a_child.name)
- for b_child in (b.children or []):
- if (b_child not in accounted_for):
- result.added_files.extend(traverse_tree(b_child, current_name))
-
- except svn_tree.SVNTypeMismatch:
- print 'Unequal Types: one Node is a file, the other is a directory'
- raise svn_tree.SVNTreeUnequal
- except svn_tree.SVNTreeIsNotDirectory:
- print "Error: Foolish call to get_child."
- sys.exit(1)
- except IndexError:
- print "Error: unequal number of children"
- raise svn_tree.SVNTreeUnequal
- return result
-
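An illustrative (hypothetical) use of this module to check what a build run changed under a directory:

    before = build_tree('project-dir')
    # ... run the build system ...
    after = build_tree('project-dir')
    diff = trees_difference(before, after)
    diff.ignore_directories()
    if not diff.empty():
        diff.pprint()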
-def dump_tree(t):
- svn_tree.dump_tree(t)
-
-def traverse_tree(t, parent_name=""):
-    """ Returns the list of all names in the tree. """
- if parent_name:
- full_node_name = parent_name + "/" + t.name
- else:
- full_node_name = t.name
-
- if (t.children is None):
- result = [full_node_name]
- else:
- result = [full_node_name + "/"]
- for i in t.children:
- result.extend(traverse_tree(i, full_node_name))
- return result
diff --git a/tools/build/v2/test/unit_test.py b/tools/build/v2/test/unit_test.py
deleted file mode 100644
index 94575ad319..0000000000
--- a/tools/build/v2/test/unit_test.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003, 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test the unit_test rule.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-# Create the needed files.
-t.write("jamroot.jam", """
-using testing ;
-lib helper : helper.cpp ;
-unit-test test : test.cpp : <library>helper ;
-""")
-
-t.write("test.cpp", """
-void helper();
-int main() { helper(); }
-""")
-
-t.write("helper.cpp", """
-void
-#if defined(_WIN32)
-__declspec(dllexport)
-#endif
-helper() {}
-""")
-
-t.run_build_system("link=static")
-t.expect_addition("bin/$toolset/debug/link-static/test.passed")
-
-t.cleanup()
diff --git a/tools/build/v2/test/unit_tests.py b/tools/build/v2/test/unit_tests.py
deleted file mode 100644
index f306a90570..0000000000
--- a/tools/build/v2/test/unit_tests.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester(pass_toolset=0)
-
-t.run_build_system(extra_args="--debug --build-system=test/test")
-
-t.cleanup()
diff --git a/tools/build/v2/test/unused.py b/tools/build/v2/test/unused.py
deleted file mode 100644
index b44320c52e..0000000000
--- a/tools/build/v2/test/unused.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that unused sources are at least reported.
-
-import BoostBuild
-from string import find
-
-t = BoostBuild.Tester()
-
-t.set_tree("unused")
-
-t.run_build_system()
-# The second invocation should do nothing, and produce no warning. The previous
-# invocation might have printed executed actions and other things, so it is not
-# easy to check whether a warning was issued or not.
-t.run_build_system(stdout="")
-
-t.run_build_system("-sGENERATE_ONLY_UNUSABLE=1", stdout="")
-
-# Now check that even if main target generates nothing, its usage requirements
-# are still propagated to dependants.
-t.write("a.cpp", """
-#ifdef FOO
-int main() {}
-#endif
-""")
-t.run_build_system("-sGENERATE_NOTHING=1")
-
-t.cleanup()
diff --git a/tools/build/v2/test/unused/a.cpp b/tools/build/v2/test/unused/a.cpp
deleted file mode 100644
index c4a7ae8926..0000000000
--- a/tools/build/v2/test/unused/a.cpp
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright Vladimir Prus 2003.
-// Distributed under the Boost Software License, Version 1.0.
-// (See accompanying file LICENSE_1_0.txt
-// or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-int main()
-{
- return 0;
-}
diff --git a/tools/build/v2/test/unused/b.cpp b/tools/build/v2/test/unused/b.cpp
deleted file mode 100644
index 5551e35f65..0000000000
--- a/tools/build/v2/test/unused/b.cpp
+++ /dev/null
@@ -1,4 +0,0 @@
-/* Copyright 2003 Vladimir Prus */
-/* Distributed under the Boost Software License, Version 1.0. */
-/* (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) */
-
diff --git a/tools/build/v2/test/unused/b.x b/tools/build/v2/test/unused/b.x
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tools/build/v2/test/unused/b.x
+++ /dev/null
diff --git a/tools/build/v2/test/unused/jamfile.jam b/tools/build/v2/test/unused/jamfile.jam
deleted file mode 100644
index 58ef45605a..0000000000
--- a/tools/build/v2/test/unused/jamfile.jam
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-exe a : a.cpp b c ;
-
-make-b-main-target ;
-
-# Expands to nothing, intentionally.
-alias c ;
diff --git a/tools/build/v2/test/unused/jamroot.jam b/tools/build/v2/test/unused/jamroot.jam
deleted file mode 100644
index bc97c2b197..0000000000
--- a/tools/build/v2/test/unused/jamroot.jam
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-
-import type ;
-import generators ;
-import print ;
-import virtual-target ;
-import "class" : new ;
-import modules ;
-import targets ;
-import project ;
-
-
-type.register X : x ;
-
-class test-target-class : basic-target
-{
- rule __init__ ( name : project )
- {
- basic-target.__init__ $(name) : $(project) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- if [ modules.peek : GENERATE_NOTHING ]
- {
- return [ property-set.empty ] ;
- }
- else if [ modules.peek : GENERATE_ONLY_UNUSABLE ]
- {
- return [ property-set.empty ]
- [ virtual-target.from-file b.x : . : $(self.project) ]
- ;
- }
- else
- {
- return [ property-set.empty ]
- [ virtual-target.from-file b.x : . : $(self.project) ]
- [ virtual-target.from-file b.cpp : . : $(self.project) ]
- ;
- }
- }
-
- rule compute-usage-requirements ( rproperties : targets * )
- {
- return [ property-set.create <define>FOO ] ;
- }
-}
-
-rule make-b-main-target
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new test-target-class b : $(project) ] ;
-}
-
-IMPORT $(__name__) : make-b-main-target : : make-b-main-target ;
diff --git a/tools/build/v2/test/use_requirements.py b/tools/build/v2/test/use_requirements.py
deleted file mode 100644
index a73c01f1f5..0000000000
--- a/tools/build/v2/test/use_requirements.py
+++ /dev/null
@@ -1,295 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-
-# Test that use requirements on a main target work (and are propagated all the
-# way up, not only to direct dependants).
-t.write("jamroot.jam", "import gcc ;")
-
-# Note: 'lib cc ...', not 'lib c'. If 'lib c : ...' is used, the HP-CXX linker will
-# confuse it with the system C runtime.
-t.write("jamfile.jam", """
-lib b : b.cpp : <link>shared:<define>SHARED_B : :
- <define>FOO <link>shared:<define>SHARED_B ;
-lib cc : c.cpp b ;
-exe a : a.cpp cc ;
-""")
-
-t.write("b.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-#endif
-foo() {}\n
-""")
-
-t.write("c.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-#endif
-create_lib_please() {}\n
-""")
-
-t.write("a.cpp", """
-#ifdef FOO
-void
-# if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-# endif
-foo() {}
-#endif
-int main() { foo(); }
-""")
-
-t.run_build_system()
-t.run_build_system("--clean")
-
-
-# Test that use requirements on a main target work when they are referred to
-# using 'dependency' features.
-t.write("jamroot.jam", "import gcc ;")
-
-t.write("jamfile.jam", """
-lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
- <link>shared:<define>SHARED_B ;
-exe a : a.cpp : <use>b ;
-""")
-
-t.write("b.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-#endif
-foo() {}
-""")
-
-t.write("a.cpp", """
-#ifdef FOO
-int main() {}
-#endif
-""")
-
-t.run_build_system()
-
-t.run_build_system("--clean")
-
-
-# Test that usage requirements on a project work.
-t.write("jamfile.jam", "exe a : a.cpp lib//b ;")
-
-t.write("lib/jamfile.jam", """
-project
- : requirements <link>shared:<define>SHARED_B
- : usage-requirements <define>FOO <link>shared:<define>SHARED_B ;
-lib b : b.cpp ;
-""")
-
-t.write("lib/b.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-#endif
-foo() {}\n
-""")
-
-t.run_build_system()
-
-
-# Test that use requirements are inherited correctly.
-t.write("jamfile.jam", "exe a : a.cpp lib/1//b ;")
-
-t.write("a.cpp", """
-#if defined(FOO) && defined(ZOO)
-void foo() {}
-#endif
-int main() { foo(); }
-""")
-
-t.write("lib/jamfile.jam", """
-project : requirements : usage-requirements <define>FOO ;
-""")
-
-t.write("lib/1/jamfile.jam", """
-project
- : requirements <link>shared:<define>SHARED_B
- : usage-requirements <define>ZOO <link>shared:<define>SHARED_B ;
-lib b : b.cpp ;
-""")
-
-t.write("lib/1/b.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-#endif
-foo() {}\n
-""")
-
-t.run_build_system()
-t.run_build_system("--clean")
-
-
-# Test that we correctly handle dependency features in use requirements on
-# a target.
-t.write("jamfile.jam", """
-lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
- <link>shared:<define>SHARED_B ;
-
-# Here's the test: we should correctly handle the dependency feature and get usage
-# requirements from 'b'.
-lib cc : c.cpp : <link>shared:<define>SHARED_C : : <library>b ;
-
-# This will build only if <define>FOO was propagated from 'c'.
-exe a : a.cpp cc ;
-""")
-
-t.write("a.cpp", """
-#ifdef FOO
-void
-# if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-# endif
-foo();
-#endif
-
-int main() { foo(); }
-""")
-
-t.write("c.cpp", """
-int
-#if defined(_WIN32) && defined(SHARED_C)
-__declspec(dllexport)
-#endif
-must_export_something;
-""")
-
-t.run_build_system()
-t.run_build_system("--clean")
-
-
-# Test correct handling of dependency features in project requirements.
-t.write("jamfile.jam", """
-exe a : a.cpp lib1//cc ;
-""")
-
-t.write("lib1/jamfile.jam", """
-project
- : requirements <link>shared:<define>SHARED_C
- : usage-requirements <library>../lib2//b <link>shared:<define>SHARED_C ;
-lib cc : c.cpp ;
-""")
-
-t.write("lib1/c.cpp", """
-int
-#if defined(_WIN32) && defined(SHARED_C)
-__declspec(dllexport)
-#endif
-must_export_something;
-""")
-
-t.write("lib2/jamfile.jam", """
-lib b : b.cpp : <link>shared:<define>SHARED_B : : <define>FOO
- <link>shared:<define>SHARED_B ;
-""")
-
-t.copy("b.cpp", "lib2/b.cpp")
-
-t.run_build_system()
-
-
-# Test that targets referenced via dependency features in use requirements are
-# built with the correct properties.
-t.rm(".")
-
-t.write("jamfile.jam", """
-lib main : main.cpp : <use>libs//lib1 : : <library>libs//lib1 ;
-exe hello : hello.cpp main : ;
-""")
-
-t.write("main.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_LIB1)
-__declspec(dllimport)
-#endif
-foo();
-
-int main() { foo(); }
-""")
-
-t.write("hello.cpp", "\n")
-
-t.write("jamroot.jam", """
-import gcc ;
-""")
-
-t.write("libs/a.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_LIB1)
-__declspec(dllexport)
-#endif
-foo() {}
-""")
-
-
-# This library should be built with the same properties as 'main'. This is a
-# regression test for a bug where they were generated with empty properties and
-# there was ambiguity between variants.
-t.write("libs/jamfile.jam", """
-lib lib1 : a_d.cpp : <variant>debug <link>shared:<define>SHARED_LIB1 : :
- <link>shared:<define>SHARED_LIB1 ;
-lib lib1 : a.cpp : <variant>release <link>shared:<define>SHARED_LIB1 : :
- <link>shared:<define>SHARED_LIB1 ;
-""")
-
-t.write("libs/a_d.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_LIB1)
-__declspec(dllexport)
-#endif
-foo() {}
-""")
-
-t.run_build_system("link=static")
-t.expect_addition("libs/bin/$toolset/debug/link-static/a_d.obj")
-
-
-# Test that indirect conditionals are respected in usage requirements.
-t.rm(".")
-
-t.write("jamroot.jam", """
-rule has-foo ( properties * )
-{
- return <define>HAS_FOO ;
-}
-
-exe a : a.cpp b ;
-lib b : b.cpp : <link>static : : <conditional>@has-foo ;
-""")
-
-t.write("a.cpp", """
-#ifdef HAS_FOO
-void foo();
-int main() { foo(); }
-#endif
-""")
-
-t.write("b.cpp", """
-void
-#if defined(_WIN32) && defined(SHARED_B)
-__declspec(dllexport)
-#endif
-foo() {}\n
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/using.py b/tools/build/v2/test/using.py
deleted file mode 100644
index 452e7363b2..0000000000
--- a/tools/build/v2/test/using.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (C) Vladimir Prus 2005.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("sub/a.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-using some_tool ;
-""")
-
-t.write("some_tool.jam", """
-import project ;
-project.initialize $(__name__) ;
-rule init ( ) { }
-""")
-
-t.write("some_tool.py", """
-from b2.manager import get_manager
-
-get_manager().projects().initialize(__name__)
-
-def init():
- pass
-""")
-
-t.write("sub/jamfile.jam", """
-exe a : a.cpp ;
-""")
-
-t.run_build_system(subdir="sub")
-t.expect_addition("sub/bin/$toolset/debug/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/test/wrapper.py b/tools/build/v2/test/wrapper.py
deleted file mode 100644
index 676892fa95..0000000000
--- a/tools/build/v2/test/wrapper.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/python
-
-# Copyright 2004 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Test that the user can define a custom rule that calls a built-in main target
-# rule, and that this works.
-
-import BoostBuild
-
-
-t = BoostBuild.Tester()
-
-t.write("jamfile.jam", """
-my-test : test.cpp ;
-""")
-
-t.write("test.cpp", """
-int main() {}
-""")
-
-t.write("jamroot.jam", """
-using testing ;
-
-rule my-test ( name ? : sources + )
-{
- name ?= test ;
- unit-test $(name) : $(sources) ; # /site-config//cppunit /util//testMain ;
-}
-
-IMPORT $(__name__) : my-test : : my-test ;
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/test.passed")
-
-t.cleanup()
diff --git a/tools/build/v2/test/wrong_project.py b/tools/build/v2/test/wrong_project.py
deleted file mode 100644
index 88315d64e9..0000000000
--- a/tools/build/v2/test/wrong_project.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/python
-
-# Copyright Vladimir Prus 2005.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# Regression test. When Jamfile contained "using whatever ; " and the 'whatever'
-# module declared a project, then all targets in Jamfile were considered to be
-# declared in the project associated with 'whatever', not with the Jamfile.
-
-import BoostBuild
-
-t = BoostBuild.Tester()
-
-t.write("a.cpp", "int main() {}\n")
-
-t.write("jamroot.jam", """
-using some_tool ;
-exe a : a.cpp ;
-""")
-
-t.write("some_tool.jam", """
-import project ;
-project.initialize $(__name__) ;
-rule init ( ) { }
-""")
-
-t.write("some_tool.py", """
-from b2.manager import get_manager
-
-get_manager().projects().initialize(__name__)
-
-def init():
- pass
-""")
-
-t.run_build_system()
-t.expect_addition("bin/$toolset/debug/a.exe")
-
-t.cleanup()
diff --git a/tools/build/v2/tools/auto-index.jam b/tools/build/v2/tools/auto-index.jam
deleted file mode 100644
index 5c5c1d06cf..0000000000
--- a/tools/build/v2/tools/auto-index.jam
+++ /dev/null
@@ -1,212 +0,0 @@
-
-import feature ;
-import generators ;
-import "class" ;
-import toolset ;
-import targets ;
-import "class" : new ;
-import project ;
-
-feature.feature auto-index : off "on" ;
-feature.feature auto-index-internal : off "on" ;
-feature.feature auto-index-verbose : off "on" ;
-feature.feature auto-index-no-duplicates : off "on" ;
-feature.feature auto-index-script : : free path ;
-feature.feature auto-index-prefix : : free path ;
-feature.feature auto-index-type : : free ;
-feature.feature auto-index-section-names : "on" off ;
-
-toolset.flags auto-index.auto-index FLAGS <auto-index-internal>on : --internal-index ;
-toolset.flags auto-index.auto-index SCRIPT <auto-index-script> ;
-toolset.flags auto-index.auto-index PREFIX <auto-index-prefix> ;
-toolset.flags auto-index.auto-index INDEX_TYPE <auto-index-type> ;
-toolset.flags auto-index.auto-index FLAGS <auto-index-verbose>on : --verbose ;
-toolset.flags auto-index.auto-index FLAGS <auto-index-no-duplicates>on : --no-duplicates ;
-toolset.flags auto-index.auto-index FLAGS <auto-index-section-names>off : --no-section-names ;
-
-# <auto-index-binary> shell command to run AutoIndex
-# <auto-index-binary-dependencies> targets to build AutoIndex from sources.
-feature.feature <auto-index-binary> : : free ;
-feature.feature <auto-index-binary-dependencies> : : free dependency ;
-
-class auto-index-generator : generator
-{
- import common modules path targets build-system ;
- rule run ( project name ? : property-set : sources * )
- {
- # AutoIndex invocation command and dependencies.
- local auto-index-binary = [ modules.peek auto-index : .command ] ;
- local auto-index-binary-dependencies ;
-
- if $(auto-index-binary)
- {
- # Use user-supplied command.
- auto-index-binary = [ common.get-invocation-command auto-index : auto-index : $(auto-index-binary) ] ;
- }
- else
- {
- # Search for AutoIndex sources in sensible places, like
- # $(BOOST_ROOT)/tools/auto_index
- # $(BOOST_BUILD_PATH)/../../auto_index
-
- # And build auto-index executable from sources.
-
- local boost-root = [ modules.peek : BOOST_ROOT ] ;
- local boost-build-path = [ build-system.location ] ;
- local boost-build-path2 = [ modules.peek : BOOST_BUILD_PATH ] ;
-
- local auto-index-dir ;
-
- if $(boost-root)
- {
- auto-index-dir += [ path.join $(boost-root) tools ] ;
- }
-
- if $(boost-build-path)
- {
- auto-index-dir += $(boost-build-path)/../.. ;
- }
- if $(boost-build-path2)
- {
- auto-index-dir += $(boost-build-path2)/.. ;
- }
-
- #ECHO $(auto-index-dir) ;
- auto-index-dir = [ path.glob $(auto-index-dir) : auto_index ] ;
- #ECHO $(auto-index-dir) ;
-
- # If the AutoIndex source directory was found, mark its main target
- # as a dependency for the current project. Otherwise, try to find
- # 'auto-index' in user's PATH
- if $(auto-index-dir)
- {
- auto-index-dir = [ path.make $(auto-index-dir[1]) ] ;
- auto-index-dir = $(auto-index-dir)/build ;
-
- #ECHO $(auto-index-dir) ;
-
- # Get the main-target in AutoIndex directory.
- local auto-index-main-target = [ targets.resolve-reference $(auto-index-dir) : $(project) ] ;
-
- #ECHO $(auto-index-main-target) ;
-
- # The first element are actual targets, the second are
- # properties found in target-id. We do not care about these
- # since we have passed the id ourselves.
- auto-index-main-target =
- [ $(auto-index-main-target[1]).main-target auto_index ] ;
-
- #ECHO $(auto-index-main-target) ;
-
- auto-index-binary-dependencies =
- [ $(auto-index-main-target).generate [ $(property-set).propagated ] ] ;
-
- # Ignore usage-requirements returned as first element.
- auto-index-binary-dependencies = $(auto-index-binary-dependencies[2-]) ;
-
- # Some toolsets generate extra targets (e.g. RSP). We must mark
- # all targets as dependencies for the project, but we will only
- # use the EXE target for auto-index-to-boostbook translation.
- for local target in $(auto-index-binary-dependencies)
- {
- if [ $(target).type ] = EXE
- {
- auto-index-binary =
- [ path.native
- [ path.join
- [ $(target).path ]
- [ $(target).name ]
- ]
- ] ;
- }
- }
- }
- else
- {
- ECHO "AutoIndex warning: The path to the auto-index executable was" ;
- ECHO " not provided. Additionally, couldn't find AutoIndex" ;
- ECHO " sources searching in" ;
- ECHO " * BOOST_ROOT/tools/auto-index" ;
- ECHO " * BOOST_BUILD_PATH/../../auto-index" ;
- ECHO " Will now try to find a precompiled executable by searching" ;
- ECHO " the PATH for 'auto-index'." ;
- ECHO " To disable this warning in the future, or to completely" ;
- ECHO " avoid compilation of auto-index, you can explicitly set the" ;
-                ECHO "    path to an auto-index executable command in user-config.jam" ;
- ECHO " or site-config.jam with the call" ;
- ECHO " using auto-index : /path/to/auto-index ;" ;
-
-                # As a last resort, search for the 'auto-index' command in the
-                # PATH. Note that even if the 'auto-index' command is not found,
- # get-invocation-command will still return 'auto-index' and might
- # generate an error while generating the virtual-target.
-
- auto-index-binary = [ common.get-invocation-command auto-index : auto-index ] ;
- }
- }
-
-    # Add $(auto-index-binary-dependencies) as a dependency of the current
-    # project and pass the binary and its dependencies on via the
-    # <auto-index-binary> and <auto-index-binary-dependencies> features to the
-    # auto-index rule, below.
- property-set = [ $(property-set).add-raw
- <dependency>$(auto-index-binary-dependencies)
- <auto-index-binary>$(auto-index-binary)
- <auto-index-binary-dependencies>$(auto-index-binary-dependencies)
- ] ;
-
- #ECHO "binary = " $(auto-index-binary) ;
- #ECHO "dependencies = " $(auto-index-binary-dependencies) ;
-
- if [ $(property-set).get <auto-index> ] = "on"
- {
- return [ generator.run $(project) $(name) : $(property-set) : $(sources) ] ;
- }
- else
- {
- return [ generators.construct $(project) $(name) : DOCBOOK : $(property-set)
- : $(sources) ] ;
- }
- }
-}
-
-# Initialization of toolset.
-#
-# Parameters:
-# command ? -> path to AutoIndex executable.
-#
-# When command is not supplied, the toolset will search for the AutoIndex
-# directory and compile the executable from source. If that fails, we still
-# search the PATH for 'auto-index'.
-#
-rule init (
- command ? # path to the AutoIndex executable.
- )
-{
- if ! $(.initialized)
- {
- .initialized = true ;
- .command = $(command) ;
- }
-}
-
-toolset.flags auto-index.auto-index AI-COMMAND <auto-index-binary> ;
-toolset.flags auto-index.auto-index AI-DEPENDENCIES <auto-index-binary-dependencies> ;
-
-generators.register [ class.new auto-index-generator auto-index.auto-index : DOCBOOK : DOCBOOK(%.auto_index) ] ;
-generators.override auto-index.auto-index : boostbook.boostbook-to-docbook ;
-
-rule auto-index ( target : source : properties * )
-{
-    # Signal the dependency of the auto-index sources on
-    # <auto-index-binary-dependencies> upon invocation of the auto-index action.
- #ECHO "AI-COMMAND= " $(AI-COMMAND) ;
- DEPENDS $(target) : [ on $(target) return $(AI-DEPENDENCIES) ] ;
- #DEPENDS $(target) : [ on $(target) return $(SCRIPT) ] ;
-}
-
-actions auto-index
-{
- $(AI-COMMAND) $(FLAGS) "--prefix="$(PREFIX) "--script="$(SCRIPT) "--index-type="$(INDEX_TYPE) "--in="$(>) "--out="$(<)
-}
-
-
diff --git a/tools/build/v2/tools/boostbook.jam b/tools/build/v2/tools/boostbook.jam
deleted file mode 100644
index 3ab0debdb0..0000000000
--- a/tools/build/v2/tools/boostbook.jam
+++ /dev/null
@@ -1,730 +0,0 @@
-# Copyright 2003, 2004, 2005 Dave Abrahams
-# Copyright 2003, 2004, 2005 Douglas Gregor
-# Copyright 2005, 2006, 2007 Rene Rivera
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines rules to handle generation of documentation
-# from BoostBook sources.
-#
-# The type of output is controlled by the <format> feature which can
-# have the following values::
-#
-# * html: Generates html documentation. This is the default.
-# * xhtml: Generates xhtml documentation
-# * htmlhelp: Generates html help output.
-# * onehtml: Generates a single html page.
-# * man: Generates man pages.
-# * pdf: Generates pdf documentation.
-# * ps: Generates postscript output.
-# * docbook: Generates docbook XML.
-# * fo: Generates XSL formatting objects.
-# * tests: Extracts test cases from the boostbook XML.
-#
-# format is an implicit feature, so typing pdf on the command
-# line (for example) is a short-cut for format=pdf.
-
-import "class" : new ;
-import common ;
-import errors ;
-import targets ;
-import feature ;
-import generators ;
-import print ;
-import property ;
-import project ;
-import property-set ;
-import regex ;
-import scanner ;
-import sequence ;
-import make ;
-import os ;
-import type ;
-import modules path project ;
-import build-system ;
-
-import xsltproc : xslt xslt-dir ;
-
-# Make this module into a project.
-project.initialize $(__name__) ;
-project boostbook ;
-
-
-feature.feature format : html xhtml htmlhelp onehtml man pdf ps docbook fo tests
- : incidental implicit composite propagated ;
-
-type.register DTDXML : dtdxml ;
-type.register XML : xml ;
-type.register BOOSTBOOK : boostbook : XML ;
-type.register DOCBOOK : docbook : XML ;
-type.register FO : fo : XML ;
-type.register PDF : pdf ;
-type.register PS : ps ;
-type.register XSLT : xsl : XML ;
-type.register HTMLDIR ;
-type.register XHTMLDIR ;
-type.register HTMLHELP ;
-type.register MANPAGES ;
-type.register TESTS : tests ;
-# Artificial target type, used to require invocation of top-level
-# BoostBook generator.
-type.register BOOSTBOOK_MAIN ;
-
-
-# Initialize BoostBook support.
-rule init (
- docbook-xsl-dir ? # The DocBook XSL stylesheet directory. If not
- # provided, we use DOCBOOK_XSL_DIR from the environment
- # (if available) or look in standard locations.
- # Otherwise, we let the XML processor load the
- # stylesheets remotely.
-
- : docbook-dtd-dir ? # The DocBook DTD directory. If not provided, we use
-                        # DOCBOOK_DTD_DIR from the environment (if available) or
- # look in standard locations. Otherwise, we let the XML
- # processor load the DTD remotely.
-
- : boostbook-dir ? # The BoostBook directory with the DTD and XSL subdirs.
-)
-{
-
- if ! $(.initialized)
- {
- .initialized = true ;
-
- check-boostbook-dir $(boostbook-dir) ;
- find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
-
-        # Register generators only if we were called via "using boostbook ; "
- generators.register-standard boostbook.dtdxml-to-boostbook : DTDXML : XML ;
- generators.register-standard boostbook.boostbook-to-docbook : XML : DOCBOOK ;
- generators.register-standard boostbook.boostbook-to-tests : XML : TESTS ;
- generators.register-standard boostbook.docbook-to-onehtml : DOCBOOK : HTML ;
- generators.register-standard boostbook.docbook-to-htmldir : DOCBOOK : HTMLDIR ;
- generators.register-standard boostbook.docbook-to-xhtmldir : DOCBOOK : XHTMLDIR ;
- generators.register-standard boostbook.docbook-to-htmlhelp : DOCBOOK : HTMLHELP ;
- generators.register-standard boostbook.docbook-to-manpages : DOCBOOK : MANPAGES ;
- generators.register-standard boostbook.docbook-to-fo : DOCBOOK : FO ;
-
-        # The same applies to the Jamfile main target rules.
- IMPORT $(__name__) : boostbook : : boostbook ;
- }
- else
- {
- if $(docbook-xsl-dir)
- {
- modify-config ;
- .docbook-xsl-dir = [ path.make $(docbook-xsl-dir) ] ;
- check-docbook-xsl-dir ;
- }
- if $(docbook-dtd-dir)
- {
- modify-config ;
- .docbook-dtd-dir = [ path.make $(docbook-dtd-dir) ] ;
- check-docbook-dtd-dir ;
- }
- if $(boostbook-dir)
- {
- modify-config ;
- check-boostbook-dir $(boostbook-dir) ;
- local boostbook-xsl-dir = [ path.glob $(boostbook-dir) : xsl ] ;
- local boostbook-dtd-dir = [ path.glob $(boostbook-dir) : dtd ] ;
- .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
- .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
- check-boostbook-xsl-dir ;
- check-boostbook-dtd-dir ;
- }
- }
-}
-
-rule lock-config ( )
-{
- if ! $(.initialized)
- {
- errors.user-error "BoostBook has not been configured." ;
- }
- if ! $(.config-locked)
- {
- .config-locked = true ;
- }
-}
-
-rule modify-config ( )
-{
- if $(.config-locked)
- {
- errors.user-error "BoostBook configuration cannot be changed after it has been used." ;
- }
-}
-
-rule find-boost-in-registry ( keys * )
-{
- local boost-root = ;
- for local R in $(keys)
- {
- local installed-boost = [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\$(R)"
- : "InstallRoot" ] ;
- if $(installed-boost)
- {
- boost-root += [ path.make $(installed-boost) ] ;
- }
- }
- return $(boost-root) ;
-}
-
-rule check-docbook-xsl-dir ( )
-{
- if $(.docbook-xsl-dir)
- {
- if ! [ path.glob $(.docbook-xsl-dir) : common/common.xsl ]
- {
- errors.user-error "BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found docbook XSL stylesheets in:" [ path.native $(.docbook-xsl-dir) ] ;
- }
- }
- }
-}
-
-rule check-docbook-dtd-dir ( )
-{
- if $(.docbook-dtd-dir)
- {
- if ! [ path.glob $(.docbook-dtd-dir) : docbookx.dtd ]
- {
- errors.user-error "error: BoostBook: could not find docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found docbook DTD in:" [ path.native $(.docbook-dtd-dir) ] ;
- }
- }
- }
-}
-
-rule check-boostbook-xsl-dir ( )
-{
- if ! $(.boostbook-xsl-dir)
- {
- errors.user-error "error: BoostBook: could not find boostbook XSL stylesheets." ;
- }
- else if ! [ path.glob $(.boostbook-xsl-dir) : docbook.xsl ]
- {
- errors.user-error "error: BoostBook: could not find docbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found boostbook XSL stylesheets in:" [ path.native $(.boostbook-xsl-dir) ] ;
- }
- }
-}
-
-rule check-boostbook-dtd-dir ( )
-{
- if ! $(.boostbook-dtd-dir)
- {
- errors.user-error "error: BoostBook: could not find boostbook DTD." ;
- }
- else if ! [ path.glob $(.boostbook-dtd-dir) : boostbook.dtd ]
- {
- errors.user-error "error: BoostBook: could not find boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
- }
- else
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice: BoostBook: found boostbook DTD in:" [ path.native $(.boostbook-dtd-dir) ] ;
- }
- }
-}
-
-rule check-boostbook-dir ( boostbook-dir ? )
-{
- if $(boostbook-dir) && ! [ path.glob $(boostbook-dir) : xsl ]
- {
- errors.user-error "error: BoostBook: could not find boostbook in:" [ path.native $(boostbook-dir) ] ;
- }
-}
-
-rule find-tools ( docbook-xsl-dir ? : docbook-dtd-dir ? : boostbook-dir ? )
-{
- docbook-xsl-dir ?= [ modules.peek : DOCBOOK_XSL_DIR ] ;
- docbook-dtd-dir ?= [ modules.peek : DOCBOOK_DTD_DIR ] ;
- boostbook-dir ?= [ modules.peek : BOOSTBOOK_DIR ] ;
-
- # Look for the boostbook stylesheets relative to BOOST_ROOT
- # and Boost.Build.
- local boost-build-root = [ path.make [ build-system.location ] ] ;
- local boostbook-search-dirs = [ path.join $(boost-build-root) .. .. ] ;
-
- local boost-root = [ modules.peek : BOOST_ROOT ] ;
- if $(boost-root)
- {
- boostbook-search-dirs += [ path.join [ path.make $(boost-root) ] tools ] ;
- }
- boostbook-dir ?= [ path.glob $(boostbook-search-dirs) : boostbook* ] ;
-
- # Try to find the tools in platform specific locations
- if [ os.name ] = NT
- {
- # If installed by the Boost installer.
- local boost-root = ;
-
- local boost-installer-versions = snapshot cvs 1.33.0 ;
- local boost-consulting-installer-versions = 1.33.1 1.34.0 1.34.1 ;
- local boostpro-installer-versions =
- 1.35.0 1.36.0 1.37.0 1.38.0 1.39.0 1.40.0 1.41.0 1.42.0
- 1.43.0 1.44.0 1.45.0 1.46.0 1.47.0 1.48.0 1.49.0 1.50.0 ;
-
- local old-installer-root = [ find-boost-in-registry Boost.org\\$(boost-installer-versions) ] ;
-
- # Make sure that the most recent version is searched for first
- boost-root += [ sequence.reverse
- [ find-boost-in-registry
- Boost-Consulting.com\\$(boost-consulting-installer-versions)
- boostpro.com\\$(boostpro-installer-versions) ] ] ;
-
- # Plausible locations.
- local root = [ PWD ] ;
- while $(root) != $(root:D) { root = $(root:D) ; }
- root = [ path.make $(root) ] ;
- local search-dirs = ;
- local docbook-search-dirs = ;
- for local p in $(boost-root) {
- search-dirs += [ path.join $(p) tools ] ;
- }
- for local p in $(old-installer-root)
- {
- search-dirs += [ path.join $(p) share ] ;
- docbook-search-dirs += [ path.join $(p) share ] ;
- }
- search-dirs += [ path.join $(root) Boost tools ] ;
- search-dirs += [ path.join $(root) Boost share ] ;
- docbook-search-dirs += [ path.join $(root) Boost share ] ;
-
- docbook-xsl-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xsl* ] ;
- docbook-dtd-dir ?= [ path.glob $(docbook-search-dirs) : docbook-xml* ] ;
- boostbook-dir ?= [ path.glob $(search-dirs) : boostbook* ] ;
- }
- else
- {
- # Plausible locations.
-
- local share = /usr/local/share /usr/share /opt/share /opt/local/share ;
- local dtd-versions = 4.2 ;
-
- docbook-xsl-dir ?= [ path.glob $(share) : docbook-xsl* ] ;
- docbook-xsl-dir ?= [ path.glob $(share)/sgml/docbook : xsl-stylesheets ] ;
- docbook-xsl-dir ?= [ path.glob $(share)/xsl : docbook* ] ;
-
- docbook-dtd-dir ?= [ path.glob $(share) : docbook-xml* ] ;
- docbook-dtd-dir ?= [ path.glob $(share)/sgml/docbook : xml-dtd-$(dtd-versions)* ] ;
- docbook-dtd-dir ?= [ path.glob $(share)/xml/docbook : $(dtd-versions) ] ;
-
- boostbook-dir ?= [ path.glob $(share) : boostbook* ] ;
-
- # Ubuntu Linux
- docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet : nwalsh ] ;
- docbook-dtd-dir ?= [ path.glob /usr/share/xml/docbook/schema/dtd : $(dtd-versions) ] ;
-
- # SUSE
- docbook-xsl-dir ?= [ path.glob /usr/share/xml/docbook/stylesheet/nwalsh : current ] ;
- }
-
- if $(docbook-xsl-dir)
- {
- .docbook-xsl-dir = [ path.make $(docbook-xsl-dir[1]) ] ;
- }
- if $(docbook-dtd-dir)
- {
- .docbook-dtd-dir = [ path.make $(docbook-dtd-dir[1]) ] ;
- }
-
- if --debug-configuration in [ modules.peek : ARGV ]
- {
-        ECHO "notice: BoostBook: searching for XSL/DTD in" ;
- ECHO "notice:" [ sequence.transform path.native : $(boostbook-dir) ] ;
- }
- local boostbook-xsl-dir ;
- for local dir in $(boostbook-dir) {
- boostbook-xsl-dir += [ path.glob $(dir) : xsl ] ;
- }
- local boostbook-dtd-dir ;
- for local dir in $(boostbook-dir) {
- boostbook-dtd-dir += [ path.glob $(dir) : dtd ] ;
- }
- .boostbook-xsl-dir = $(boostbook-xsl-dir[1]) ;
- .boostbook-dtd-dir = $(boostbook-dtd-dir[1]) ;
-
- check-docbook-xsl-dir ;
- check-docbook-dtd-dir ;
- check-boostbook-xsl-dir ;
- check-boostbook-dtd-dir ;
-}
-
-rule xsl-dir
-{
- lock-config ;
- return $(.boostbook-xsl-dir) ;
-}
-
-rule dtd-dir
-{
- lock-config ;
- return $(.boostbook-dtd-dir) ;
-}
-
-rule docbook-xsl-dir
-{
- lock-config ;
- return $(.docbook-xsl-dir) ;
-}
-
-rule docbook-dtd-dir
-{
- lock-config ;
- return $(.docbook-dtd-dir) ;
-}
-
-rule dtdxml-to-boostbook ( target : source : properties * )
-{
- lock-config ;
- xslt $(target) : $(source) "$(.boostbook-xsl-dir)/dtd/dtd2boostbook.xsl"
- : $(properties) ;
-}
-
-rule boostbook-to-docbook ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/docbook.xsl ] ;
- xslt $(target) : $(source) $(stylesheet) : $(properties) ;
-}
-
-rule docbook-to-onehtml ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-single.xsl ] ;
- xslt $(target) : $(source) $(stylesheet) : $(properties) ;
-}
-
-rule docbook-to-htmldir ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/html.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : html ;
-}
-
-rule docbook-to-xhtmldir ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/xhtml.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : xhtml ;
-}
-
-rule docbook-to-htmlhelp ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/html-help.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : htmlhelp ;
-}
-
-rule docbook-to-manpages ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/manpages.xsl ] ;
- xslt-dir $(target) : $(source) $(stylesheet) : $(properties) : man ;
-}
-
-rule docbook-to-fo ( target : source : properties * )
-{
- lock-config ;
- local stylesheet = [ path.native $(.boostbook-xsl-dir)/fo.xsl ] ;
- xslt $(target) : $(source) $(stylesheet) : $(properties) ;
-}
-
-rule format-catalog-path ( path )
-{
- local result = $(path) ;
- if [ xsltproc.is-cygwin ]
- {
- if [ os.name ] = NT
- {
- drive = [ MATCH ^/(.):(.*)$ : $(path) ] ;
- result = /cygdrive/$(drive[1])$(drive[2]) ;
- }
- }
- else
- {
- if [ os.name ] = CYGWIN
- {
- local native-path = [ path.native $(path) ] ;
- result = [ path.make $(native-path:W) ] ;
- }
- }
- return [ regex.replace $(result) " " "%20" ] ;
-}
-
-rule generate-xml-catalog ( target : sources * : properties * )
-{
- print.output $(target) ;
-
- # BoostBook DTD catalog entry
- local boostbook-dtd-dir = [ boostbook.dtd-dir ] ;
- if $(boostbook-dtd-dir)
- {
- boostbook-dtd-dir = [ format-catalog-path $(boostbook-dtd-dir) ] ;
- }
-
- print.text
- "<?xml version=\"1.0\"?>"
- "<!DOCTYPE catalog "
- " PUBLIC \"-//OASIS/DTD Entity Resolution XML Catalog V1.0//EN\""
- " \"http://www.oasis-open.org/committees/entity/release/1.0/catalog.dtd\">"
- "<catalog xmlns=\"urn:oasis:names:tc:entity:xmlns:xml:catalog\">"
- " <rewriteURI uriStartString=\"http://www.boost.org/tools/boostbook/dtd/\" rewritePrefix=\"file://$(boostbook-dtd-dir)/\"/>"
- : true ;
-
- local docbook-xsl-dir = [ boostbook.docbook-xsl-dir ] ;
- if ! $(docbook-xsl-dir)
- {
- ECHO "BoostBook warning: no DocBook XSL directory specified." ;
- ECHO " If you have the DocBook XSL stylesheets installed, please " ;
- ECHO " set DOCBOOK_XSL_DIR to the stylesheet directory on either " ;
- ECHO " the command line (via -sDOCBOOK_XSL_DIR=...) or in a " ;
- ECHO " Boost.Jam configuration file. The DocBook XSL stylesheets " ;
- ECHO " are available here: http://docbook.sourceforge.net/ " ;
- ECHO " Stylesheets will be downloaded on-the-fly (very slow!) " ;
- }
- else
- {
- docbook-xsl-dir = [ format-catalog-path $(docbook-xsl-dir) ] ;
- print.text " <rewriteURI uriStartString=\"http://docbook.sourceforge.net/release/xsl/current/\" rewritePrefix=\"file://$(docbook-xsl-dir)/\"/>" ;
- }
-
- local docbook-dtd-dir = [ boostbook.docbook-dtd-dir ] ;
- if ! $(docbook-dtd-dir)
- {
- ECHO "BoostBook warning: no DocBook DTD directory specified." ;
- ECHO " If you have the DocBook DTD installed, please set " ;
- ECHO " DOCBOOK_DTD_DIR to the DTD directory on either " ;
- ECHO " the command line (via -sDOCBOOK_DTD_DIR=...) or in a " ;
- ECHO " Boost.Jam configuration file. The DocBook DTD is available " ;
- ECHO " here: http://www.oasis-open.org/docbook/xml/4.2/index.shtml" ;
- ECHO " The DTD will be downloaded on-the-fly (very slow!) " ;
- }
- else
- {
- docbook-dtd-dir = [ format-catalog-path $(docbook-dtd-dir) ] ;
- print.text " <rewriteURI uriStartString=\"http://www.oasis-open.org/docbook/xml/4.2/\" rewritePrefix=\"file://$(docbook-dtd-dir)/\"/>" ;
- }
-
- print.text "</catalog>" ;
-}
-
-rule xml-catalog ( )
-{
- if ! $(.xml-catalog)
- {
-        # The target is created as part of the root project. But ideally
-        # it would be created as part of the boostbook project. This is not
-        # currently possible, as such global projects do not inherit things
-        # like the build directory.
-
- # Find the root project.
- local root-project = [ project.current ] ;
- root-project = [ $(root-project).project-module ] ;
- while
- [ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config &&
- [ project.attribute $(root-project) parent-module ] != project-config
- {
- root-project = [ project.attribute $(root-project) parent-module ] ;
- }
- .xml-catalog = [ new file-target boostbook_catalog
- : XML
- : [ project.target $(root-project) ]
- : [ new action : boostbook.generate-xml-catalog ]
- :
- ] ;
- .xml-catalog-file = [ $(.xml-catalog).path ] [ $(.xml-catalog).name ] ;
- .xml-catalog-file = $(.xml-catalog-file:J=/) ;
- }
- return $(.xml-catalog) $(.xml-catalog-file) ;
-}
-
-class boostbook-generator : generator
-{
- import feature ;
- import virtual-target ;
- import generators ;
- import boostbook ;
-
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- # Generate the catalog, but only once...
- local global-catalog = [ boostbook.xml-catalog ] ;
- local catalog = $(global-catalog[1]) ;
- local catalog-file = $(global-catalog[2]) ;
- local targets ;
-
- # Add the catalog to the property set
- property-set = [ $(property-set).add-raw <catalog>$(catalog-file) ] ;
-
- local type = none ;
- local manifest ;
- local format = [ $(property-set).get <format> ] ;
- switch $(format)
- {
- case html :
- {
- type = HTMLDIR ;
- manifest = HTML.manifest ;
- }
- case xhtml :
- {
- type = XHTMLDIR ;
- manifest = HTML.manifest ;
- }
- case htmlhelp :
- {
- type = HTMLHELP ;
- manifest = HTML.manifest ;
- }
-
- case onehtml : type = HTML ;
-
- case man :
- {
- type = MANPAGES ;
- manifest = man.manifest ;
- }
-
- case docbook : type = DOCBOOK ;
- case fo : type = FO ;
- case pdf : type = PDF ;
- case ps : type = PS ;
- case tests : type = TESTS ;
- }
-
- if $(manifest)
- {
- # Create DOCBOOK file from BOOSTBOOK sources.
- local base-target = [ generators.construct $(project)
- : DOCBOOK : $(property-set) : $(sources) ] ;
- base-target = $(base-target[2]) ;
- $(base-target).depends $(catalog) ;
-
- # Generate HTML/PDF/PS from DOCBOOK.
- local target = [ generators.construct $(project) $(name)_$(manifest)
- : $(type)
- : [ $(property-set).add-raw
- <xsl:param>manifest=$(name)_$(manifest) ]
- : $(base-target) ] ;
- local name = [ $(property-set).get <name> ] ;
- name ?= $(format) ;
- $(target[2]).set-path $(name) ;
- $(target[2]).depends $(catalog) ;
-
- targets += $(target[2]) ;
- }
- else {
- local target = [ generators.construct $(project)
- : $(type) : $(property-set) : $(sources) ] ;
-
- if ! $(target)
- {
- errors.error "Cannot build documentation type '$(format)'" ;
- }
- else
- {
- $(target[2]).depends $(catalog) ;
- targets += $(target[2]) ;
- }
- }
-
- return $(targets) ;
- }
-}
-
-generators.register [ new boostbook-generator boostbook.main : : BOOSTBOOK_MAIN ] ;
-
-# Creates a boostbook target.
-rule boostbook ( target-name : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new typed-target $(target-name) : $(project) : BOOSTBOOK_MAIN
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-#############################################################################
-# Dependency scanners
-#############################################################################
-# XInclude scanner. Mostly stolen from c-scanner :)
-# Note that this assumes an "xi" prefix for XIncludes. This isn't always the
-# case for XML documents, but we'll assume it's true for anything we encounter.
-class xinclude-scanner : scanner
-{
- import virtual-target ;
- import path ;
- import scanner ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
- self.includes = $(includes) ;
- }
-
- rule pattern ( )
- {
- return "xi:include[ ]*href=\"([^\"]*)\"" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local target_path = [ NORMALIZE_PATH $(binding:D) ] ;
-
- NOCARE $(matches) ;
- INCLUDES $(target) : $(matches) ;
- SEARCH on $(matches) = $(target_path) $(self.includes:G=) ;
-
- scanner.propagate $(__name__) : $(matches) : $(target) ;
- }
-}
-
-scanner.register xinclude-scanner : xsl:path ;
-type.set-scanner XML : xinclude-scanner ;
-
-rule boostbook-to-tests ( target : source : properties * )
-{
- lock-config ;
- local boost_root = [ modules.peek : BOOST_ROOT ] ;
- local native-path =
- [ path.native [ path.join $(.boostbook-xsl-dir) testing Jamfile ] ] ;
- local stylesheet = $(native-path:S=.xsl) ;
- xslt $(target) : $(source) $(stylesheet)
- : $(properties) <xsl:param>boost.root=$(boost_root)
- ;
-}
-
-
diff --git a/tools/build/v2/tools/borland.jam b/tools/build/v2/tools/borland.jam
deleted file mode 100644
index 6e43ca93a5..0000000000
--- a/tools/build/v2/tools/borland.jam
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright 2005 Dave Abrahams
-# Copyright 2003 Rene Rivera
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for Borland's command line compiler
-
-import property ;
-import generators ;
-import os ;
-import toolset : flags ;
-import feature : get-values ;
-import type ;
-import common ;
-
-feature.extend toolset : borland ;
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters borland :
- version $(version) ] ;
-
- local command = [ common.get-invocation-command borland : bcc32.exe
- : $(command) ] ;
-
- common.handle-options borland : $(condition) : $(command) : $(options) ;
-
- if $(command)
- {
- command = [ common.get-absolute-tool-path $(command[-1]) ] ;
- }
- root = $(command:D) ;
-
- flags borland.compile STDHDRS $(condition) : $(root)/include/ ;
- flags borland.link STDLIBPATH $(condition) : $(root)/lib ;
- flags borland.link RUN_PATH $(condition) : $(root)/bin ;
- flags borland .root $(condition) : $(root)/bin/ ;
-}
-
-
-# A borland-specific target type
-type.register BORLAND.TDS : tds ;
-
-# Declare generators
-
-generators.register-linker borland.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>borland ;
-generators.register-linker borland.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>borland ;
-
-generators.register-archiver borland.archive : OBJ : STATIC_LIB : <toolset>borland ;
-generators.register-c-compiler borland.compile.c++ : CPP : OBJ : <toolset>borland ;
-generators.register-c-compiler borland.compile.c : C : OBJ : <toolset>borland ;
-generators.register-standard borland.asm : ASM : OBJ : <toolset>borland ;
-
-# Declare flags
-
-flags borland.compile OPTIONS <debug-symbols>on : -v ;
-flags borland.link OPTIONS <debug-symbols>on : -v ;
-
-flags borland.compile OPTIONS <optimization>off : -Od ;
-flags borland.compile OPTIONS <optimization>speed : -O2 ;
-flags borland.compile OPTIONS <optimization>space : -O1 ;
-
-if $(.BORLAND_HAS_FIXED_INLINING_BUGS)
-{
- flags borland CFLAGS <inlining>off : -vi- ;
- flags borland CFLAGS <inlining>on : -vi -w-inl ;
- flags borland CFLAGS <inlining>full : -vi -w-inl ;
-}
-else
-{
- flags borland CFLAGS : -vi- ;
-}
-
-flags borland.compile OPTIONS <warnings>off : -w- ;
-flags borland.compile OPTIONS <warnings>all : -w ;
-flags borland.compile OPTIONS <warnings-as-errors>on : -w! ;
-
-
-# Deal with various runtime configs...
-
-# This should not be used for DLLs
-flags borland OPTIONS <user-interface>console : -tWC ;
-
-# -tWR sets -tW as well, so we turn it off here and then turn it
-# on again later if we need it:
-flags borland OPTIONS <runtime-link>shared : -tWR -tWC ;
-flags borland OPTIONS <user-interface>gui : -tW ;
-
-flags borland OPTIONS <main-target-type>LIB/<link>shared : -tWD ;
-# Hmm.. not sure what's going on here.
-flags borland OPTIONS : -WM- ;
-flags borland OPTIONS <threading>multi : -tWM ;
-
-
-
-flags borland.compile OPTIONS <cxxflags> ;
-flags borland.compile DEFINES <define> ;
-flags borland.compile INCLUDES <include> ;
-
-flags borland NEED_IMPLIB <main-target-type>LIB/<link>shared : "" ;
-
-#
-# For C++ compiles, the following options are turned on by default:
-#
-# -j5 stops after 5 errors
-# -g255 allow an unlimited number of warnings
-# -q no banner
-# -c compile to object
-#   -P C++ code regardless of file extension
-#   -a8 8 byte alignment, this option is on in the IDE by default
-#       and affects binary compatibility.
-#
-
-# -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
-
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -j5 -g255 -q -c -P -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
-}
-
-# For C, we don't pass -P flag
-actions compile.c
-{
- "$(CONFIG_COMMAND)" -j5 -g255 -q -c -a8 -Vx- -Ve- -b- $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -I"$(STDHDRS)" -o"$(<)" "$(>)"
-}
-
-
-# Declare flags and action for linking
-toolset.flags borland.link OPTIONS <debug-symbols>on : -v ;
-toolset.flags borland.link LIBRARY_PATH <library-path> ;
-toolset.flags borland.link FINDLIBS_ST <find-static-library> ;
-toolset.flags borland.link FINDLIBS_SA <find-shared-library> ;
-toolset.flags borland.link LIBRARIES <library-file> ;
-
-flags borland.link OPTIONS <linkflags> ;
-flags borland.link OPTIONS <link>shared : -tWD ;
-
-flags borland.link LIBRARY_PATH_OPTION <toolset>borland : -L : unchecked ;
-flags borland.link LIBRARY_OPTION <toolset>borland : "" : unchecked ;
-
-
-
-# bcc32 needs to have ilink32 in the path in order to invoke it, so explicitly
-# specifying $(BCC_TOOL_PATH)bcc32 doesn't help. You need to add
-# $(BCC_TOOL_PATH) to the path.
-# The NEED_IMPLIB variable controls whether we need to invoke implib.
-
-flags borland.archive AROPTIONS <archiveflags> ;
-
-# Declare action for archives. We don't use a response file
-# since it's hard to get "+-" there.
-# The /P256 increases the 'page' size -- with too low
-# values, tlib fails when building large applications.
-# CONSIDER: don't know what 'together' is for...
-actions updated together piecemeal archive
-{
- $(.set-path)$(.root:W)$(.old-path)
- tlib $(AROPTIONS) /P256 /u /a /C "$(<:W)" +-"$(>:W)"
-}
-
-
-if [ os.name ] = CYGWIN
-{
- .set-path = "cmd /S /C set \"PATH=" ;
- .old-path = ";%PATH%\" \"&&\"" ;
-
-
- # Couldn't get TLIB to stop being confused about pathnames
- # containing dashes (it seemed to treat them as option separators
- # when passed through from bash), so we explicitly write the
- # command into a .bat file and execute that. TLIB is also finicky
- # about pathname style! Forward slashes, too, are treated as
- # options.
- actions updated together piecemeal archive
- {
- chdir $(<:D)
- echo +-$(>:BS) > $(<:BS).rsp
- $(.set-path)$(.root)$(.old-path) "tlib.exe" $(AROPTIONS) /P256 /C $(<:BS) @$(<:BS).rsp && $(RM) $(<:BS).rsp
- }
-}
-else if [ os.name ] = NT
-{
- .set-path = "set \"PATH=" ;
- .old-path = ";%PATH%\"
- " ;
-}
-else
-{
- .set-path = "PATH=\"" ;
- .old-path = "\":$PATH
- export PATH
- " ;
-}
-
-RM = [ common.rm-command ] ;
-
-nl = "
-" ;
-
-actions link
-{
- $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")"
-}
-
-
-actions link.dll bind LIBRARIES RSP
-{
- $(.set-path)$(.root:W)$(.old-path) "$(CONFIG_COMMAND)" -v -q $(OPTIONS) -L"$(LIBRARY_PATH:W)" -L"$(STDLIBPATH:W)" -e"$(<[1]:W)" @"@($(<[1]:W).rsp:E=$(nl)"$(>)" $(nl)$(LIBRARIES) $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST:S=.lib)" $(nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA:S=.lib)")" && "$(.root)implib" "$(<[2]:W)" "$(<[1]:W)"
-}
-
-# It seems impossible to specify an output file with a directory when compiling
-# asm files using bcc32, so use tasm32 directly.
-# /ml makes all symbol names case-sensitive
-actions asm
-{
- $(.set-path)$(.root:W)$(.old-path) tasm32.exe /ml "$(>)" "$(<)"
-}
-
diff --git a/tools/build/v2/tools/builtin.jam b/tools/build/v2/tools/builtin.jam
deleted file mode 100644
index d1351a4231..0000000000
--- a/tools/build/v2/tools/builtin.jam
+++ /dev/null
@@ -1,960 +0,0 @@
-# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
-# Copyright 2002, 2005, 2006, 2007, 2010 Rene Rivera
-# Copyright 2006 Juergen Hunold
-# Copyright 2005 Toon Knapen
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines standard features and rules.
-
-import alias ;
-import "class" : new ;
-import errors ;
-import feature ;
-import generators ;
-import numbers ;
-import os ;
-import path ;
-import print ;
-import project ;
-import property ;
-import regex ;
-import scanner ;
-import sequence ;
-import stage ;
-import symlink ;
-import toolset ;
-import type ;
-import targets ;
-import types/register ;
-import utility ;
-import virtual-target ;
-import message ;
-import convert ;
-
-# FIXME: the following generate module import is not needed here but removing it
-# too hastily will break client code (e.g. the main Boost library Jamroot file)
-# that forgot to import the generate module before calling the generate rule.
-import generate ;
-
-
-.os-names = aix bsd cygwin darwin freebsd hpux iphone linux netbsd
- openbsd osf qnx qnxnto sgi solaris unix unixware windows
- elf # Not actually an OS -- used for targeting bare metal where
- # object format is ELF. This catches both -elf and -eabi gcc
-                        # targets as well as other compilers targeting ELF. It is not
-                        # clear how often we need to key off ELF specifically as opposed
- # to other bare metal targets, but let's stick with gcc naming.
- ;
-
-# Feature used to determine which OS we're on. New <target-os> and <host-os>
-# features should be used instead.
-local os = [ modules.peek : OS ] ;
-feature.feature os : $(os) : propagated link-incompatible ;
-
-
-# Translates from bjam's current OS to the os tags used in host-os and target-os,
-# i.e. returns the running host-os.
-#
-local rule default-host-os ( )
-{
- local host-os ;
- if [ os.name ] in $(.os-names:U)
- {
- host-os = [ os.name ] ;
- }
- else
- {
- switch [ os.name ]
- {
- case NT : host-os = windows ;
- case AS400 : host-os = unix ;
- case MINGW : host-os = windows ;
- case BSDI : host-os = bsd ;
- case COHERENT : host-os = unix ;
- case DRAGONFLYBSD : host-os = bsd ;
- case IRIX : host-os = sgi ;
- case MACOSX : host-os = darwin ;
- case KFREEBSD : host-os = freebsd ;
- case LINUX : host-os = linux ;
- case SUNOS :
- ECHO "SunOS is not a supported operating system." ;
-                ECHO "We believe the last version of SunOS was released in 1992, " ;
-                ECHO "so if you get this message, something is very wrong with the configuration logic. " ;
- ECHO "Please report this as a bug. " ;
- EXIT ;
- case * : host-os = unix ;
- }
- }
- return $(host-os:L) ;
-}
-
-
-# The two OS features define a known set of abstract OS names. The host-os is
-# the OS under which bjam is running. Even though this should really be a fixed
-# property we need to list all the values to prevent unknown value errors. Both
-# set the default value to the current OS to account for the default use case of
-# building on the target OS.
-feature.feature host-os : $(.os-names) ;
-feature.set-default host-os : [ default-host-os ] ;
-
-feature.feature target-os : $(.os-names) : propagated link-incompatible ;
-feature.set-default target-os : [ default-host-os ] ;
-
-
-feature.feature toolset : : implicit propagated symmetric ;
-feature.feature stdlib : native : propagated composite ;
-feature.feature link : shared static : propagated ;
-feature.feature runtime-link : shared static : propagated ;
-feature.feature runtime-debugging : on off : propagated ;
-feature.feature optimization : off speed space : propagated ;
-feature.feature profiling : off on : propagated ;
-feature.feature inlining : off on full : propagated ;
-feature.feature threading : single multi : propagated ;
-feature.feature rtti : on off : propagated ;
-feature.feature exception-handling : on off : propagated ;
-
-# Whether there is support for asynchronous EH (e.g. catching SEGVs).
-feature.feature asynch-exceptions : off on : propagated ;
-
-# Whether all extern "C" functions are considered nothrow by default.
-feature.feature extern-c-nothrow : off on : propagated ;
-
-feature.feature debug-symbols : on off : propagated ;
-# Controls whether the binary should be stripped -- that is, have
-# everything not necessary to running it removed. This option should
-# not be needed very often. Also, this feature will show up in
-# the target paths of everything, not just binaries. Should fix that
-# when implementing feature relevance.
-feature.feature strip : off on : propagated ;
-feature.feature define : : free ;
-feature.feature undef : : free ;
-feature.feature "include" : : free path ; #order-sensitive ;
-feature.feature cflags : : free ;
-feature.feature cxxflags : : free ;
-feature.feature fflags : : free ;
-feature.feature asmflags : : free ;
-feature.feature linkflags : : free ;
-feature.feature archiveflags : : free ;
-feature.feature version : : free ;
-
-# Generic, i.e. non-language specific, flags for tools.
-feature.feature flags : : free ;
-feature.feature location-prefix : : free ;
-
-
-# The following features are incidental since they have no effect on built
-# products. Not making them incidental will result in problems in corner cases,
-# e.g.:
-#
-# unit-test a : a.cpp : <use>b ;
-# lib b : a.cpp b ;
-#
-# Here, if <use> is not incidental, we would decide we have two targets for
-# a.obj with different properties and complain about it.
-#
-# Note that making a feature incidental does not mean it is ignored. It may be
-# ignored when creating a virtual target, but the rest of the build process will
-# still use it.
-feature.feature use : : free dependency incidental ;
-feature.feature dependency : : free dependency incidental ;
-feature.feature implicit-dependency : : free dependency incidental ;
-
-feature.feature warnings :
- on # Enable default/"reasonable" warning level for the tool.
- all # Enable all possible warnings issued by the tool.
- off # Disable all warnings issued by the tool.
- : incidental propagated ;
-
-feature.feature warnings-as-errors :
- off # Do not fail the compilation if there are warnings.
- on # Fail the compilation if there are warnings.
- : incidental propagated ;
-
-# Feature that allows us to configure the maximal template instantiation depth
-# level allowed by a C++ compiler. Applies only to C++ toolsets whose compilers
-# actually support this configuration setting.
-#
-# Note that Boost Build currently does not allow defining features that take any
-# positive integral value as a parameter, which is what we need here, so we just
-# define some of the values here and leave it up to the user to extend this set
-# as needed using the feature.extend rule.
-#
-# TODO: This should be upgraded as soon as Boost Build adds support for custom
-# validated feature values or at least features allowing any positive integral
-# value. See the related Boost Build trac ticket #194.
-#
-feature.feature c++-template-depth
- :
- [ numbers.range 64 1024 : 64 ]
- [ numbers.range 20 1000 : 10 ]
- # Maximum template instantiation depth guaranteed for ANSI/ISO C++
- # conforming programs.
- 17
- :
- incidental optional propagated ;
-
-feature.feature source : : free dependency incidental ;
-feature.feature library : : free dependency incidental ;
-feature.feature file : : free dependency incidental ;
-feature.feature find-shared-library : : free ; #order-sensitive ;
-feature.feature find-static-library : : free ; #order-sensitive ;
-feature.feature library-path : : free path ; #order-sensitive ;
-
-# Internal feature.
-feature.feature library-file : : free dependency ;
-
-feature.feature name : : free ;
-feature.feature tag : : free ;
-feature.feature search : : free path ; #order-sensitive ;
-feature.feature location : : free path ;
-feature.feature dll-path : : free path ;
-feature.feature hardcode-dll-paths : true false : incidental ;
-
-
-# An internal feature that holds the paths of all dependency shared libraries.
-# On Windows, it is needed so that we can add all those paths to PATH when
-# running applications. On Linux, it is needed to add proper -rpath-link command
-# line options.
-feature.feature xdll-path : : free path ;
-
-# Provides a means to specify a def-file for Windows DLLs.
-feature.feature def-file : : free dependency ;
-
-feature.feature suppress-import-lib : false true : incidental ;
-
-# Internal feature used to store the name of a bjam action to call when building
-# a target.
-feature.feature action : : free ;
-
-# This feature is used to allow specific generators to run. For example, QT
-# tools can only be invoked when the QT library is used. In that case, <allow>qt
-# will be in the usage requirements of the library.
-feature.feature allow : : free ;
-
-# The addressing model to generate code for. Currently a limited set only
-# specifying the bit size of pointers.
-feature.feature address-model : 16 32 64 32_64 : propagated optional ;
-
-# Type of CPU architecture to compile for.
-feature.feature architecture :
- # x86 and x86-64
- x86
-
- # ia64
- ia64
-
- # Sparc
- sparc
-
- # RS/6000 & PowerPC
- power
-
- # MIPS/SGI
- mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
-
- # HP/PA-RISC
- parisc
-
- # Advanced RISC Machines
- arm
-
- # Combined architectures for platforms/toolsets that support building for
- # multiple architectures at once. "combined" would be the default multi-arch
- # for the toolset.
- combined
- combined-x86-power
-
- : propagated optional ;
-
-# The specific instruction set in an architecture to compile.
-feature.feature instruction-set :
- # x86 and x86-64
- native i386 i486 i586 i686 pentium pentium-mmx pentiumpro pentium2 pentium3
- pentium3m pentium-m pentium4 pentium4m prescott nocona core2 conroe conroe-xe
- conroe-l allendale mermon mermon-xe kentsfield kentsfield-xe penryn wolfdale
- yorksfield nehalem k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp
- athlon-mp k8 opteron athlon64 athlon-fx winchip-c6 winchip2 c3 c3-2
-
- # ia64
- itanium itanium1 merced itanium2 mckinley
-
- # Sparc
- v7 cypress v8 supersparc sparclite hypersparc sparclite86x f930 f934
- sparclet tsc701 v9 ultrasparc ultrasparc3
-
- # RS/6000 & PowerPC
- 401 403 405 405fp 440 440fp 505 601 602 603 603e 604 604e 620 630 740 7400
- 7450 750 801 821 823 860 970 8540 power-common ec603e g3 g4 g5 power power2
- power3 power4 power5 powerpc powerpc64 rios rios1 rsc rios2 rs64a
-
- # MIPS
- 4kc 4kp 5kc 20kc m4k r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650
- r6000 r8000 rm7000 rm9000 orion sb1 vr4100 vr4111 vr4120 vr4130 vr4300
- vr5000 vr5400 vr5500
-
- # HP/PA-RISC
- 700 7100 7100lc 7200 7300 8000
-
- # Advanced RISC Machines
- armv2 armv2a armv3 armv3m armv4 armv4t armv5 armv5t armv5te armv6 armv6j iwmmxt ep9312
-
- : propagated optional ;
-
-# Used to select a specific variant of C++ ABI if the compiler supports several.
-feature.feature c++abi : : propagated optional ;
-
-feature.feature conditional : : incidental free ;
-
-# The value of 'no' prevents building of a target.
-feature.feature build : yes no : optional ;
-
-# Windows-specific features
-
-feature.feature user-interface : console gui wince native auto ;
-
-feature.feature variant : : implicit composite propagated symmetric ;
-
-
-# Declares a new variant.
-#
-# First determines explicit properties for this variant, by refining parents'
-# explicit properties with the passed explicit properties. The result is
-# remembered and will be used if this variant is used as a parent.
-#
-# Second, determines the full property set for this variant by adding to the
-# explicit properties default values for all missing non-symmetric properties.
-#
-# Lastly, makes the appropriate value of the 'variant' property expand to the
-# full property set.
-#
-rule variant ( name # Name of the variant
- : parents-or-properties * # Specifies parent variants, if
- # 'explicit-properties' are given, and
- # explicit-properties or parents otherwise.
- : explicit-properties * # Explicit properties.
- )
-{
- local parents ;
- if ! $(explicit-properties)
- {
- if $(parents-or-properties[1]:G)
- {
- explicit-properties = $(parents-or-properties) ;
- }
- else
- {
- parents = $(parents-or-properties) ;
- }
- }
- else
- {
- parents = $(parents-or-properties) ;
- }
-
- # The problem is that we have to check for conflicts between base variants.
- if $(parents[2])
- {
- errors.error "multiple base variants are not yet supported" ;
- }
-
- local inherited ;
- # Add explicitly specified properties for parents.
- for local p in $(parents)
- {
- # TODO: This check may be made stricter.
- if ! [ feature.is-implicit-value $(p) ]
- {
- errors.error "Invalid base variant" $(p) ;
- }
-
- inherited += $(.explicit-properties.$(p)) ;
- }
- property.validate $(explicit-properties) ;
- explicit-properties = [ property.refine $(inherited)
- : $(explicit-properties) ] ;
-
- # Record explicitly specified properties for this variant. We do this after
- # inheriting parents' properties so they affect other variants derived from
- # this one.
- .explicit-properties.$(name) = $(explicit-properties) ;
-
- feature.extend variant : $(name) ;
- feature.compose <variant>$(name) : $(explicit-properties) ;
-}
-IMPORT $(__name__) : variant : : variant ;
-
-
-variant debug : <optimization>off <debug-symbols>on <inlining>off
- <runtime-debugging>on ;
-variant release : <optimization>speed <debug-symbols>off <inlining>full
- <runtime-debugging>off <define>NDEBUG ;
-variant profile : release : <profiling>on <debug-symbols>on ;
-
-
-class searched-lib-target : abstract-file-target
-{
- rule __init__ ( name
- : project
- : shared ?
- : search *
- : action
- )
- {
- abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
- : $(action) : ;
-
- self.shared = $(shared) ;
- self.search = $(search) ;
- }
-
- rule shared ( )
- {
- return $(self.shared) ;
- }
-
- rule search ( )
- {
- return $(self.search) ;
- }
-
- rule actualize-location ( target )
- {
- NOTFILE $(target) ;
- }
-
- rule path ( )
- {
- }
-}
-
-
-# The generator class for libraries (target type LIB). Depending on properties
-# it will request building of the appropriate specific library type --
-# SHARED_LIB, STATIC_LIB or SEARCHED_LIB.
-#
-class lib-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
-        # The lib generator is composing, and can only be invoked with an
- # explicit name. This check is present in generator.run (and so in
- # builtin.linking-generator) but duplicated here to avoid doing extra
- # work.
- if $(name)
- {
- local properties = [ $(property-set).raw ] ;
- # Determine the needed target type.
- local actual-type ;
-            # <source> files can be generated by the <conditional>@rule feature
- # in which case we do not consider it a SEARCHED_LIB type.
- if ! <source> in $(properties:G) &&
- ( <search> in $(properties:G) || <name> in $(properties:G) )
- {
- actual-type = SEARCHED_LIB ;
- }
- else if <file> in $(properties:G)
- {
- actual-type = LIB ;
- }
- else if <link>shared in $(properties)
- {
- actual-type = SHARED_LIB ;
- }
- else
- {
- actual-type = STATIC_LIB ;
- }
- property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
- # Construct the target.
- return [ generators.construct $(project) $(name) : $(actual-type)
- : $(property-set) : $(sources) ] ;
- }
- }
-
- rule viable-source-types ( )
- {
- return * ;
- }
-}
-
-
-generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
-
-
-# The implementation of the 'lib' rule. Beyond the standard syntax, that rule
-# also allows a simplified form: "lib a b c ;".
-#
-rule lib ( names + : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- if $(names[2])
- {
- if <name> in $(requirements:G)
- {
- errors.user-error "When several names are given to the 'lib' rule" :
- "it is not allowed to specify the <name> feature." ;
- }
- if $(sources)
- {
- errors.user-error "When several names are given to the 'lib' rule" :
- "it is not allowed to specify sources." ;
- }
- }
-
- # This is a circular module dependency so it must be imported here.
- import targets ;
-
- local project = [ project.current ] ;
- local result ;
-
- for local name in $(names)
- {
- local r = $(requirements) ;
- # Support " lib a ; " and " lib a b c ; " syntax.
- if ! $(sources) && ! <name> in $(requirements:G)
- && ! <file> in $(requirements:G)
- {
- r += <name>$(name) ;
- }
- result += [ targets.main-target-alternative
- [ new typed-target $(name) : $(project) : LIB
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(r) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
- }
- return $(result) ;
-}
-IMPORT $(__name__) : lib : : lib ;
-
-
-class searched-lib-generator : generator
-{
- import property-set ;
-
- rule __init__ ( )
- {
- # The requirements cause the generators to be tried *only* when we're
- # building a lib target with a 'search' feature. This seems ugly --- all
- # we want is to make sure searched-lib-generator is not invoked deep
- # inside transformation search to produce intermediate targets.
- generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if $(name)
- {
- # If 'name' is empty, it means we have not been called to build a
- # top-level target. In this case, we just fail immediately, because
- # searched-lib-generator cannot be used to produce intermediate
- # targets.
-
- local properties = [ $(property-set).raw ] ;
- local shared ;
- if <link>shared in $(properties)
- {
- shared = true ;
- }
-
- local search = [ feature.get-values <search> : $(properties) ] ;
-
- local a = [ new null-action $(property-set) ] ;
- local lib-name = [ feature.get-values <name> : $(properties) ] ;
- lib-name ?= $(name) ;
- local t = [ new searched-lib-target $(lib-name) : $(project)
- : $(shared) : $(search) : $(a) ] ;
- # We return sources for a simple reason. If there is
- # lib png : z : <name>png ;
- # the 'z' target should be returned, so that apps linking to 'png'
- # will link to 'z', too.
- return [ property-set.create <xdll-path>$(search) ]
- [ virtual-target.register $(t) ] $(sources) ;
- }
- }
-}
-
-generators.register [ new searched-lib-generator ] ;
-
-
-class prebuilt-lib-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- local f = [ $(property-set).get <file> ] ;
- return $(f) $(sources) ;
- }
-}
-
-generators.register
- [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
-
-generators.override builtin.prebuilt : builtin.lib-generator ;
-
-class preprocessed-target-class : basic-target
-{
- import generators ;
- rule construct ( name : sources * : property-set )
- {
- local result = [ generators.construct [ project ]
- $(name) : PREPROCESSED_CPP : $(property-set) : $(sources) ] ;
- if ! $(result)
- {
- result = [ generators.construct [ project ]
- $(name) : PREPROCESSED_C : $(property-set) : $(sources) ] ;
- }
- if ! $(result)
- {
- local s ;
- for x in $(sources)
- {
- s += [ $(x).name ] ;
- }
- local p = [ project ] ;
- errors.user-error
- "In project" [ $(p).name ] :
- "Could not construct preprocessed file \"$(name)\" from $(s:J=, )." ;
- }
- return $(result) ;
- }
-}
-
-rule preprocessed ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
- return [ targets.main-target-alternative
- [ new preprocessed-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
-            : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
- ] ] ;
-}
-
-IMPORT $(__name__) : preprocessed : : preprocessed ;
-
-class compile-action : action
-{
- import sequence ;
-
- rule __init__ ( targets * : sources * : action-name : properties * )
- {
- action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
- }
-
- # For all virtual targets for the same dependency graph as self, i.e. which
- # belong to the same main target, add their directories to the include path.
- #
- rule adjust-properties ( property-set )
- {
- local s = [ $(self.targets[1]).creating-subvariant ] ;
- return [ $(property-set).add-raw
- [ $(s).implicit-includes "include" : H ] ] ;
- }
-}
-
-
-# Declare a special compiler generator. The only thing it does is changing the
-# type used to represent 'action' in the constructed dependency graph to
-# 'compile-action'. That class in turn adds additional include paths to handle
-# cases when a source file includes headers which are generated themselves.
-#
-class C-compiling-generator : generator
-{
- rule __init__ ( id : source-types + : target-types + : requirements *
- : optional-properties * )
- {
- generator.__init__ $(id) : $(source-types) : $(target-types) :
- $(requirements) : $(optional-properties) ;
- }
-
- rule action-class ( )
- {
- return compile-action ;
- }
-}
-
-
-rule register-c-compiler ( id : source-types + : target-types + : requirements *
- : optional-properties * )
-{
- generators.register [ new C-compiling-generator $(id) : $(source-types) :
- $(target-types) : $(requirements) : $(optional-properties) ] ;
-}
-
-# FIXME: this is ugly, should find a better way (we would like client code to
-# register all generators as "generators.some-rule" instead of
-# "some-module.some-rule".)
-#
-IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
-
-
-# The generator class for handling EXE and SHARED_LIB creation.
-#
-class linking-generator : generator
-{
- import path ;
- import project ;
- import property-set ;
- import type ;
-
- rule __init__ ( id
- composing ? : # The generator will be composing if a non-empty
- # string is passed or the parameter is not given. To
- # make the generator non-composing, pass an empty
- # string ("").
- source-types + :
- target-types + :
- requirements * )
- {
- composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types)
- : $(target-types) : $(requirements) ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- sources += [ $(property-set).get <library> ] ;
-
- # Add <library-path> properties for all searched libraries.
- local extra ;
- for local s in $(sources)
- {
- if [ $(s).type ] = SEARCHED_LIB
- {
- local search = [ $(s).search ] ;
- extra += <library-path>$(search) ;
- }
- }
-
-        # It is possible that sources include shared libraries that did not come
-        # from 'lib' targets, e.g. .so files specified as sources. In this case
-        # we have to add extra dll-path properties and propagate extra xdll-path
-        # properties so that applications linking to us will get the xdll-path to
-        # those libraries.
- local extra-xdll-paths ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
- {
- # Unfortunately, we do not have a good way to find the path to a
- # file, so use this nasty approach.
- #
- # TODO: This needs to be done better. One thing that is really
- # broken with this is that it does not work correctly with
- # projects having multiple source locations.
- local p = [ $(s).project ] ;
- local location = [ path.root [ $(s).name ]
- [ $(p).get source-location ] ] ;
- extra-xdll-paths += [ path.parent $(location) ] ;
- }
- }
-
- # Hardcode DLL paths only when linking executables.
- # Pros: do not need to relink libraries when installing.
- # Cons: "standalone" libraries (plugins, python extensions) can not
- # hardcode paths to dependent libraries.
- if [ $(property-set).get <hardcode-dll-paths> ] = true
- && [ type.is-derived $(self.target-types[1]) EXE ]
- {
- local xdll-path = [ $(property-set).get <xdll-path> ] ;
- extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
- }
-
- if $(extra)
- {
- property-set = [ $(property-set).add-raw $(extra) ] ;
- }
-
- local result = [ generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
- local ur ;
- if $(result)
- {
- ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
- ur = [ $(ur).add
- [ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
- }
- return $(ur) $(result) ;
- }
-
- rule extra-usage-requirements ( created-targets * : property-set )
- {
- local result = [ property-set.empty ] ;
- local extra ;
-
-        # Add appropriate <xdll-path> usage requirements.
- local raw = [ $(property-set).raw ] ;
- if <link>shared in $(raw)
- {
- local paths ;
- local pwd = [ path.pwd ] ;
- for local t in $(created-targets)
- {
- if [ type.is-derived [ $(t).type ] SHARED_LIB ]
- {
- paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
- }
- }
- extra += $(paths:G=<xdll-path>) ;
- }
-
- # We need to pass <xdll-path> features that we've got from sources,
- # because if a shared library is built, an exe using it needs to know the
- # paths to the other shared libraries this one depends on in order to be
- # able to find them all at runtime.
-
- # Just pass all features in property-set. It is theoretically possible
- # that we will propagate <xdll-path> features explicitly specified by
- # the user, but then the user is to blame for using an internal feature.
- local values = [ $(property-set).get <xdll-path> ] ;
- extra += $(values:G=<xdll-path>) ;
-
- if $(extra)
- {
- result = [ property-set.create $(extra) ] ;
- }
- return $(result) ;
- }
-
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local sources2 ; # Sources to pass to inherited rule.
- local properties2 ; # Properties to pass to inherited rule.
- local libraries ; # Library sources.
-
- # Searched libraries are not passed as arguments to the linker but via
- # some option. So, we pass them to the action using a property.
- properties2 = [ $(property-set).raw ] ;
- local fsa ;
- local fst ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
- {
- local name = [ $(s).name ] ;
- if [ $(s).shared ]
- {
- fsa += $(name) ;
- }
- else
- {
- fst += $(name) ;
- }
- }
- else
- {
- sources2 += $(s) ;
- }
- }
- properties2 += <find-shared-library>$(fsa:J=&&)
- <find-static-library>$(fst:J=&&) ;
-
- return [ generator.generated-targets $(sources2)
- : [ property-set.create $(properties2) ] : $(project) $(name) ] ;
- }
-}
-
-
-rule register-linker ( id composing ? : source-types + : target-types +
- : requirements * )
-{
- generators.register [ new linking-generator $(id) $(composing)
- : $(source-types) : $(target-types) : $(requirements) ] ;
-}
-
-
-# The generator class for handling STATIC_LIB creation.
-#
-class archive-generator : generator
-{
- import property-set ;
-
- rule __init__ ( id composing ? : source-types + : target-types +
- : requirements * )
- {
- composing ?= true ;
- generator.__init__ $(id) $(composing) : $(source-types)
- : $(target-types) : $(requirements) ;
- }
-
- rule run ( project name ? : property-set : sources + )
- {
- sources += [ $(property-set).get <library> ] ;
-
- local result = [ generator.run $(project) $(name) : $(property-set)
- : $(sources) ] ;
-
- # For static linking, if we get a library in sources, we cannot directly
- # link to it, so we need to cause our dependants to link to that
- # library. There are two approaches:
- # - adding the library to the list of returned targets.
- # - using the <library> usage requirements.
- # The problem with the first is:
- #
- # lib a1 : : <file>liba1.a ;
- # lib a2 : a2.cpp a1 : <link>static ;
- # install dist : a2 ;
- #
- # here we will try to install 'a1', even though it is not necessary in
- # the general case. With the second approach, even indirect dependants
- # will link to the library, but it should not cause any harm. So, return
- # all LIB sources together with created targets, so that dependants link
- # to them.
- local usage-requirements ;
- if [ $(property-set).get <link> ] = static
- {
- for local t in $(sources)
- {
- if [ type.is-derived [ $(t).type ] LIB ]
- {
- usage-requirements += <library>$(t) ;
- }
- }
- }
-
- usage-requirements = [ property-set.create $(usage-requirements) ] ;
-
- return $(usage-requirements) $(result) ;
- }
-}
-
-
-rule register-archiver ( id composing ? : source-types + : target-types +
- : requirements * )
-{
- generators.register [ new archive-generator $(id) $(composing)
- : $(source-types) : $(target-types) : $(requirements) ] ;
-}
-
-
-# Generator that accepts everything and produces nothing. Useful as a general
-# fallback for toolset-specific actions like PCH generation.
-#
-class dummy-generator : generator
-{
- import property-set ;
-
- rule run ( project name ? : property-set : sources + )
- {
- return [ property-set.empty ] ;
- }
-}
-
-IMPORT $(__name__) : register-linker register-archiver
- : : generators.register-linker generators.register-archiver ;
diff --git a/tools/build/v2/tools/builtin.py b/tools/build/v2/tools/builtin.py
deleted file mode 100644
index 5b28a0aa78..0000000000
--- a/tools/build/v2/tools/builtin.py
+++ /dev/null
@@ -1,725 +0,0 @@
-# Status: minor updates by Steven Watanabe to make gcc work
-#
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-""" Defines standard features and rules.
-"""
-
-import b2.build.targets as targets
-
-import sys
-from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
-from b2.util.utility import *
-from b2.util import path, regex, bjam_signature
-import b2.tools.types
-from b2.manager import get_manager
-
-
-# Records explicit properties for a variant.
-# The key is the variant name.
-__variant_explicit_properties = {}
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- """
- global __variant_explicit_properties
-
- __variant_explicit_properties = {}
-
-@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"]))
-def variant (name, parents_or_properties, explicit_properties = []):
- """ Declares a new variant.
- First, determines the explicit properties for this variant by
- refining the parents' explicit properties with the passed explicit
- properties. The result is remembered and will be used if
- this variant is used as a parent.
-
- Second, determines the full property set for this variant by
- adding to the explicit properties default values for all properties
- which are neither present nor symmetric.
-
- Lastly, makes the appropriate value of the 'variant' property expand
- to the full property set.
- name: Name of the variant
- parents_or_properties: Specifies parent variants, if
- 'explicit_properties' are given,
- and explicit_properties otherwise.
- explicit_properties: Explicit properties.
- """
- parents = []
- if not explicit_properties:
- explicit_properties = parents_or_properties
- else:
- parents = parents_or_properties
-
- inherited = property_set.empty()
- if parents:
-
- # If we allow multiple parents, we'd have to to check for conflicts
- # between base variants, and there was no demand for so to bother.
- if len (parents) > 1:
- raise BaseException ("Multiple base variants are not yet supported")
-
- p = parents[0]
- # TODO: the check may be stricter
- if not feature.is_implicit_value (p):
- raise BaseException ("Invalid base varaint '%s'" % p)
-
- inherited = __variant_explicit_properties[p]
-
- explicit_properties = property_set.create_with_validation(explicit_properties)
- explicit_properties = inherited.refine(explicit_properties)
-
- # Record the explicitly specified properties for this variant.
- # We do this after inheriting the parents' properties, so that
- # they affect other variants derived from this one.
- __variant_explicit_properties[name] = explicit_properties
-
- feature.extend('variant', [name])
- feature.compose ("<variant>" + name, explicit_properties.all())
-
-__os_names = """
- amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
- openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
- vms windows
-""".split()
-
-# Translates from the bjam current OS to the os tags used in host-os and
-# target-os, i.e. returns the running host-os.
-#
-def default_host_os():
- host_os = os_name()
- if host_os not in (x.upper() for x in __os_names):
- if host_os == 'NT': host_os = 'windows'
- elif host_os == 'AS400': host_os = 'unix'
- elif host_os == 'MINGW': host_os = 'windows'
- elif host_os == 'BSDI': host_os = 'bsd'
- elif host_os == 'COHERENT': host_os = 'unix'
- elif host_os == 'DRAGONFLYBSD': host_os = 'bsd'
- elif host_os == 'IRIX': host_os = 'sgi'
- elif host_os == 'MACOSX': host_os = 'darwin'
- elif host_os == 'KFREEBSD': host_os = 'freebsd'
- elif host_os == 'LINUX': host_os = 'linux'
- else: host_os = 'unix'
- return host_os.lower()
-
-def register_globals ():
- """ Registers all features and variants declared by this module.
- """
-
- # This feature is used to determine which OS we're on.
- # In future, this may become <target-os> and <host-os>
- # TODO: check this. Compatibility with bjam names? Subfeature for version?
- os = sys.platform
- feature.feature ('os', [os], ['propagated', 'link-incompatible'])
-
-
- # The two OS features define a known set of abstract OS names. The host-os is
- # the OS under which bjam is running. Even though this should really be a fixed
- # property we need to list all the values to prevent unknown value errors. Both
- # set the default value to the current OS to account for the default use case of
- # building on the target OS.
- feature.feature('host-os', __os_names)
- feature.set_default('host-os', default_host_os())
-
- feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
- feature.set_default('target-os', default_host_os())
-
- feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
-
- feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
-
- feature.feature ('link', ['shared', 'static'], ['propagated'])
- feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
- feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
-
-
- feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
- feature.feature ('profiling', ['off', 'on'], ['propagated'])
- feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])
-
- feature.feature ('threading', ['single', 'multi'], ['propagated'])
- feature.feature ('rtti', ['on', 'off'], ['propagated'])
- feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
-
- # Whether there is support for asynchronous EH (e.g. catching SEGVs).
- feature.feature ('asynch-exceptions', ['on', 'off'], ['propagated'])
-
- # Whether all extern "C" functions are considered nothrow by default.
- feature.feature ('extern-c-nothrow', ['off', 'on'], ['propagated'])
-
- feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
- feature.feature ('define', [], ['free'])
- feature.feature ('undef', [], ['free'])
- feature.feature ('include', [], ['free', 'path']) #order-sensitive
- feature.feature ('cflags', [], ['free'])
- feature.feature ('cxxflags', [], ['free'])
- feature.feature ('asmflags', [], ['free'])
- feature.feature ('linkflags', [], ['free'])
- feature.feature ('archiveflags', [], ['free'])
- feature.feature ('version', [], ['free'])
-
- feature.feature ('location-prefix', [], ['free'])
-
- feature.feature ('action', [], ['free'])
-
-
- # The following features are incidental, since
- # in themselves they have no effect on build products.
- # Not making them incidental will result in problems in corner
- # cases, for example:
- #
- # unit-test a : a.cpp : <use>b ;
- # lib b : a.cpp b ;
- #
- # Here, if <use> is not incidental, we'll decide we have two
- # targets for a.obj with different properties, and will complain.
- #
- # Note that making a feature incidental does not mean it is ignored. It may
- # be ignored when creating the virtual target, but the rest of the build
- # process will use it.
- feature.feature ('use', [], ['free', 'dependency', 'incidental'])
- feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
- feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])
-
- feature.feature('warnings', [
- 'on', # Enable default/"reasonable" warning level for the tool.
- 'all', # Enable all possible warnings issued by the tool.
- 'off'], # Disable all warnings issued by the tool.
- ['incidental', 'propagated'])
-
- feature.feature('warnings-as-errors', [
- 'off', # Do not fail the compilation if there are warnings.
- 'on'], # Fail the compilation if there are warnings.
- ['incidental', 'propagated'])
-
- feature.feature ('source', [], ['free', 'dependency', 'incidental'])
- feature.feature ('library', [], ['free', 'dependency', 'incidental'])
- feature.feature ('file', [], ['free', 'dependency', 'incidental'])
- feature.feature ('find-shared-library', [], ['free']) #order-sensitive ;
- feature.feature ('find-static-library', [], ['free']) #order-sensitive ;
- feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
- # Internal feature.
- feature.feature ('library-file', [], ['free', 'dependency'])
-
- feature.feature ('name', [], ['free'])
- feature.feature ('tag', [], ['free'])
- feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
- feature.feature ('location', [], ['free', 'path'])
-
- feature.feature ('dll-path', [], ['free', 'path'])
- feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])
-
-
- # This is an internal feature which holds the paths of all dependency
- # dynamic libraries. On Windows, it is needed so that we can add all
- # those paths to PATH when running applications.
- # On Linux, it is needed to add proper -rpath-link command line options.
- feature.feature ('xdll-path', [], ['free', 'path'])
-
- # Provides a means to specify a def-file for Windows DLLs.
- feature.feature ('def-file', [], ['free', 'dependency'])
-
- # This feature is used to allow specific generators to run.
- # For example, QT tools can only be invoked when the QT library
- # is used. In that case, <allow>qt will be in the usage requirements
- # of the library.
- feature.feature ('allow', [], ['free'])
-
- # The addressing model to generate code for. Currently a limited set only
- # specifying the bit size of pointers.
- feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
-
- # Type of CPU architecture to compile for.
- feature.feature('architecture', [
- # x86 and x86-64
- 'x86',
-
- # ia64
- 'ia64',
-
- # Sparc
- 'sparc',
-
- # RS/6000 & PowerPC
- 'power',
-
- # MIPS/SGI
- 'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',
-
- # HP/PA-RISC
- 'parisc',
-
- # Advanced RISC Machines
- 'arm',
-
- # Combined architectures for platforms/toolsets that support building for
- # multiple architectures at once. "combined" would be the default multi-arch
- # for the toolset.
- 'combined',
- 'combined-x86-power'],
-
- ['propagated', 'optional'])
-
- # The specific instruction set in an architecture to compile.
- feature.feature('instruction-set', [
- # x86 and x86-64
- 'i386', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
- 'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'conroe', 'conroe-xe',
- 'conroe-l', 'allendale', 'mermon', 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
- 'yorksfield', 'nehalem', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
- 'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'winchip-c6', 'winchip2', 'c3', 'c3-2',
-
- # ia64
- 'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',
-
- # Sparc
- 'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
- 'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',
-
- # RS/6000 & PowerPC
- '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
- '603', '603e', '604', '604e', '620', '630', '740', '7400',
- '7450', '750', '801', '821', '823', '860', '970', '8540',
- 'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
- 'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
- 'rios1', 'rsc', 'rios2', 'rs64a',
-
- # MIPS
- '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
- 'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
- 'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
- 'vr4111', 'vr4120', 'vr4130', 'vr4300',
- 'vr5000', 'vr5400', 'vr5500',
-
- # HP/PA-RISC
- '700', '7100', '7100lc', '7200', '7300', '8000',
-
- # Advanced RISC Machines
- 'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
- 'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],
-
- ['propagated', 'optional'])
-
- feature.feature('conditional', [], ['incidental', 'free'])
-
- # The value of 'no' prevents building of a target.
- feature.feature('build', ['yes', 'no'], ['optional'])
-
- # Windows-specific features
- feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
- feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
-
-
- variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
- variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
- '<runtime-debugging>off', '<define>NDEBUG'])
- variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
-
-
-reset ()
-register_globals ()
-
-class SearchedLibTarget (virtual_target.AbstractFileTarget):
- def __init__ (self, name, project, shared, real_name, search, action):
- virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
-
- self.shared_ = shared
- self.real_name_ = real_name
- if not self.real_name_:
- self.real_name_ = name
- self.search_ = search
-
- def shared (self):
- return self.shared_
-
- def real_name (self):
- return self.real_name_
-
- def search (self):
- return self.search_
-
- def actualize_location (self, target):
- bjam.call("NOTFILE", target)
-
- def path (self):
- #FIXME: several functions rely on this not being None
- return ""
-
-
-class CScanner (scanner.Scanner):
- def __init__ (self, includes):
- scanner.Scanner.__init__ (self)
-
- self.includes_ = []
-
- for i in includes:
- self.includes_.extend(i.split("&&"))
-
- def pattern (self):
- return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
-
- def process (self, target, matches, binding):
-
- angle = regex.transform (matches, "<(.*)>")
- quoted = regex.transform (matches, '"(.*)"')
-
- g = str(id(self))
- b = os.path.normpath(os.path.dirname(binding[0]))
-
- # Attach the binding of the including file to the included targets.
- # When a target is directly created from a virtual target
- # this extra information is unnecessary. But in other
- # cases, it allows us to distinguish between two headers of the
- # same name included from different places.
- # We don't need this extra information for angle includes,
- # since they should not depend on the including file (we can't
- # get a literal "." in the include path).
- g2 = g + "#" + b
-
- g = "<" + g + ">"
- g2 = "<" + g2 + ">"
- angle = [g + x for x in angle]
- quoted = [g2 + x for x in quoted]
-
- all = angle + quoted
- bjam.call("mark-included", target, all)
-
- engine = get_manager().engine()
- engine.set_target_variable(angle, "SEARCH", get_value(self.includes_))
- engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_))
-
- # Just propagate the current scanner to the includes, in the hope
- # that the includes do not change scanners.
- get_manager().scanners().propagate(self, angle + quoted)
-
-scanner.register (CScanner, 'include')
-type.set_scanner ('CPP', CScanner)
-type.set_scanner ('C', CScanner)
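-
-# A rough sketch of how the scanner above treats the two include forms (the
-# header names are hypothetical and only for illustration):
-#
-#     #include <boost/config.hpp>   # searched only along the <include> paths
-#     #include "local.h"            # additionally searched in the directory
-#                                   # of the including file (the 'b' binding)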
-
-# Ported to trunk@47077
-class LibGenerator (generators.Generator):
- """ The generator class for libraries (target type LIB). Depending on properties it will
- request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or
- SHARED_LIB.
- """
-
- def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
- generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
-
- def run(self, project, name, prop_set, sources):
-
- # The lib generator is composing, and can only be invoked with an
- # explicit name. This check is present in generator.run (and so in
- # builtin.LinkingGenerator), but we duplicate it here to avoid doing
- # extra work.
- if name:
- properties = prop_set.raw()
- # Determine the needed target type
- actual_type = None
- properties_grist = get_grist(properties)
- if '<source>' not in properties_grist and \
- ('<search>' in properties_grist or '<name>' in properties_grist):
- actual_type = 'SEARCHED_LIB'
- elif '<file>' in properties_grist:
- # A library given by an explicit <file> is handled as a prebuilt LIB.
- actual_type = 'LIB'
- elif '<link>shared' in properties:
- actual_type = 'SHARED_LIB'
- else:
- actual_type = 'STATIC_LIB'
-
- prop_set = prop_set.add_raw(['<main-target-type>LIB'])
-
- # Construct the target.
- return generators.construct(project, name, actual_type, prop_set, sources)
-
- def viable_source_types(self):
- return ['*']
-
-generators.register(LibGenerator("builtin.lib-generator"))
-
-generators.override("builtin.prebuilt", "builtin.lib-generator")
-
-def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
- """The implementation of the 'lib' rule. Beyond standard syntax that rule allows
- simplified: 'lib a b c ;'."""
-
- if len(names) > 1:
- if any(r.startswith('<name>') for r in requirements):
- get_manager().errors()("When several names are given to the 'lib' rule\n" +
- "it is not allowed to specify the <name> feature.")
-
- if sources:
- get_manager().errors()("When several names are given to the 'lib' rule\n" +
- "it is not allowed to specify sources.")
-
- project = get_manager().projects().current()
- result = []
-
- for name in names:
- r = requirements[:]
-
- # Support " lib a ; " and " lib a b c ; " syntax.
- if not sources and not any(r.startswith("<name>") for r in requirements) \
- and not any(r.startswith("<file") for r in requirements):
- r.append("<name>" + name)
-
- result.append(targets.create_typed_metatarget(name, "LIB", sources,
- r,
- default_build,
- usage_requirements))
- return result
-
-get_manager().projects().add_rule("lib", lib)
-
-
-# Updated to trunk@47077
-class SearchedLibGenerator (generators.Generator):
- def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
- # TODO: the comment below looks strange. There are no requirements!
- # The requirements cause the generators to be tried *only* when we're building
- # a lib target and there's a 'search' feature. This seems ugly --- all we want
- # is to make sure SearchedLibGenerator is not invoked deep in the
- # transformation search.
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run(self, project, name, prop_set, sources):
-
- if not name:
- return None
-
- # If the name is empty, it means we are not being called from the
- # top level. In this case, we just fail immediately, because
- # SearchedLibGenerator cannot be used to produce intermediate targets.
-
- properties = prop_set.raw ()
- shared = '<link>shared' in properties
-
- a = virtual_target.NullAction (project.manager(), prop_set)
-
- real_name = feature.get_values ('<name>', properties)
- if real_name:
- real_name = real_name[0]
- else:
- real_name = name
- search = feature.get_values('<search>', properties)
- usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
- t = SearchedLibTarget(name, project, shared, real_name, search, a)
-
- # We return sources for a simple reason. If there's
- # lib png : z : <name>png ;
- # the 'z' target should be returned, so that apps linking to
- # 'png' will link to 'z', too.
- return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
-
-generators.register (SearchedLibGenerator ())
-
-class PrebuiltLibGenerator(generators.Generator):
-
- def __init__(self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run(self, project, name, properties, sources):
- f = properties.get("file")
- return f + sources
-
-generators.register(PrebuiltLibGenerator("builtin.prebuilt", False, [],
- ["LIB"], ["<file>"]))
-
-generators.override("builtin.prebuilt", "builtin.lib-generator")
-
-
-class CompileAction (virtual_target.Action):
- def __init__ (self, manager, sources, action_name, prop_set):
- virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
-
- def adjust_properties (self, prop_set):
- """ For all virtual targets for the same dependency graph as self,
- i.e. which belong to the same main target, add their directories
- to include path.
- """
- s = self.targets () [0].creating_subvariant ()
-
- return prop_set.add_raw (s.implicit_includes ('include', 'H'))
-
-class CCompilingGenerator (generators.Generator):
- """ Declare a special compiler generator.
- The only thing it does is changing the type used to represent
- 'action' in the constructed dependency graph to 'CompileAction'.
- That class in turn adds additional include paths to handle a case
- when a source file includes headers which are generated themselfs.
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def action_class (self):
- return CompileAction
-
-def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []):
- g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties)
- return generators.register (g)
-
-
-class LinkingGenerator (generators.Generator):
- """ The generator class for handling EXE and SHARED_LIB creation.
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run (self, project, name, prop_set, sources):
-
- sources.extend(prop_set.get('<library>'))
-
- # Add <library-path> properties for all searched libraries
- extra = []
- for s in sources:
- if s.type () == 'SEARCHED_LIB':
- search = s.search()
- extra.extend(property.Property('<library-path>', sp) for sp in search)
-
- # It's possible that we have libraries in sources which did not come
- # from a 'lib' target, for example libraries which are specified
- # just as filenames in sources. We don't have xdll-path properties
- # for such targets, but still need to add proper dll-path properties.
- extra_xdll_path = []
- for s in sources:
- if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
- # Unfortunately, we don't have a good way to find the path
- # to a file, so use this nasty approach.
- p = s.project()
- location = path.root(s.name(), p.get('source-location')[0])
- extra_xdll_path.append(os.path.dirname(location))
-
- # Hardcode DLL paths only when linking executables.
- # Pros: do not need to relink libraries when installing.
- # Cons: "standalone" libraries (plugins, python extensions) can not
- # hardcode paths to dependent libraries.
- if prop_set.get('<hardcode-dll-paths>') == ['true'] \
- and type.is_derived(self.target_types_ [0], 'EXE'):
- xdll_path = prop_set.get('<xdll-path>')
- extra.extend(property.Property('<dll-path>', sp) \
- for sp in extra_xdll_path)
- extra.extend(property.Property('<dll-path>', sp) \
- for sp in xdll_path)
-
- if extra:
- prop_set = prop_set.add_raw (extra)
- result = generators.Generator.run(self, project, name, prop_set, sources)
-
- if result:
- ur = self.extra_usage_requirements(result, prop_set)
- ur = ur.add(property_set.create(['<xdll-path>' + p for p in extra_xdll_path]))
- else:
- return None
- return (ur, result)
-
- def extra_usage_requirements (self, created_targets, prop_set):
-
- result = property_set.empty ()
- extra = []
-
- # Add appropriate <xdll-path> usage requirements.
- raw = prop_set.raw ()
- if '<link>shared' in raw:
- paths = []
-
- # TODO: is it safe to use the current directory? I think we should use
- # another mechanism to allow this to be run from anywhere.
- pwd = os.getcwd()
-
- for t in created_targets:
- if type.is_derived(t.type(), 'SHARED_LIB'):
- paths.append(path.root(path.make(t.path()), pwd))
-
- extra += replace_grist(paths, '<xdll-path>')
-
- # We need to pass <xdll-path> features that we've got from sources,
- # because if a shared library is built, an exe which uses it must know the
- # paths to the other shared libraries this one depends on, to be able to
- # find them all at runtime.
-
- # Just pass all features in property_set. It's theoretically possible
- # that we'll propagate <xdll-path> features explicitly specified by
- # the user, but then the user is to blame for using an internal feature.
- values = prop_set.get('<xdll-path>')
- extra += replace_grist(values, '<xdll-path>')
-
- if extra:
- result = property_set.create(extra)
-
- return result
-
- def generated_targets (self, sources, prop_set, project, name):
-
- # sources to pass to inherited rule
- sources2 = []
- # sources which are libraries
- libraries = []
-
- # Searched libraries are not passed as arguments to the linker
- # but via some option. So, we pass them to the action
- # via a property.
- fsa = []
- fst = []
- for s in sources:
- if type.is_derived(s.type(), 'SEARCHED_LIB'):
- n = s.real_name()
- if s.shared():
- fsa.append(n)
-
- else:
- fst.append(n)
-
- else:
- sources2.append(s)
-
- add = []
- if fsa:
- add.append("<find-shared-library>" + '&&'.join(fsa))
- if fst:
- add.append("<find-static-library>" + '&&'.join(fst))
-
- spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
- return spawn
-
-
-def register_linker(id, source_types, target_types, requirements):
- g = LinkingGenerator(id, True, source_types, target_types, requirements)
- generators.register(g)
-
-class ArchiveGenerator (generators.Generator):
- """ The generator class for handling STATIC_LIB creation.
- """
- def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
- generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
-
- def run (self, project, name, prop_set, sources):
- sources += prop_set.get ('<library>')
-
- result = generators.Generator.run (self, project, name, prop_set, sources)
-
- return result
-
-
-def register_archiver(id, source_types, target_types, requirements):
- g = ArchiveGenerator(id, True, source_types, target_types, requirements)
- generators.register(g)
-
-class DummyGenerator(generators.Generator):
- """Generator that accepts everything and produces nothing. Useful as a general
- fallback for toolset-specific actions like PCH generation.
- """
- def run (self, project, name, prop_set, sources):
- return (property_set.empty(), [])
-
-
-get_manager().projects().add_rule("variant", variant)
-
-import stage
-import symlink
-import message
diff --git a/tools/build/v2/tools/cast.jam b/tools/build/v2/tools/cast.jam
deleted file mode 100644
index 211ce63296..0000000000
--- a/tools/build/v2/tools/cast.jam
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright 2005 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines the main target 'cast', used to change the type of a target. For example,
-# in the Qt library one wants two kinds of CPP files -- those that are just compiled
-# and those that are passed via the MOC tool.
-#
-# This is done with:
-#
-# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
-#
-# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
-# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
-# support will run the MOC tool as part of the build process.
-#
-# At the moment, the 'cast' rule only works for non-derived (source) targets.
-#
-# TODO: The following comment is unclear or incorrect. Clean it up.
-# > Another solution would be to add a separate main target 'moc-them' that
-# > would moc all the passed sources, no matter what their type is, but I prefer
-# > cast, as defining a new target type + generator for that type is somewhat
-# > simpler than defining a main target rule.
-
-import "class" : new ;
-import errors ;
-import project ;
-import property-set ;
-import targets ;
-import type ;
-
-
-class cast-target-class : typed-target
-{
- import type ;
-
- rule __init__ ( name : project : type : sources * : requirements * :
- default-build * : usage-requirements * )
- {
- typed-target.__init__ $(name) : $(project) : $(type) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local result ;
- for local s in $(source-targets)
- {
- if ! [ class.is-a $(s) : file-target ]
- {
- import errors ;
- errors.user-error Source to the 'cast' rule is not a file! ;
- }
- if [ $(s).action ]
- {
- import errors ;
- errors.user-error Only non-derived targets are allowed for
- 'cast'. : when building [ full-name ] ;
- }
- local r = [ $(s).clone-with-different-type $(self.type) ] ;
- result += [ virtual-target.register $(r) ] ;
- }
- return [ property-set.empty ] $(result) ;
- }
-}
-
-
-rule cast ( name type : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
-
- local real-type = [ type.type-from-rule-name $(type) ] ;
- if ! $(real-type)
- {
- errors.user-error No type corresponds to the main target rule name
- '$(type)' : "Hint: try a lowercase name" ;
- }
-
- targets.main-target-alternative [ new cast-target-class $(name) : $(project)
- : $(real-type)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) :
- $(project) ] ] ;
-}
-
-
-IMPORT $(__name__) : cast : : cast ;
diff --git a/tools/build/v2/tools/clang-darwin.jam b/tools/build/v2/tools/clang-darwin.jam
deleted file mode 100644
index a8abc7d6a0..0000000000
--- a/tools/build/v2/tools/clang-darwin.jam
+++ /dev/null
@@ -1,170 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import clang ;
-import feature : feature ;
-import os ;
-import toolset ;
-import toolset : flags ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-
-feature.extend-subfeature toolset clang : platform : darwin ;
-
-toolset.inherit-generators clang-darwin
- <toolset>clang <toolset-clang:platform>darwin
- : gcc
- # Don't inherit PCH generators. They were not tested, and probably
- # don't work for this compiler.
- : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
- ;
-
-generators.override clang-darwin.prebuilt : builtin.lib-generator ;
-generators.override clang-darwin.prebuilt : builtin.prebuilt ;
-generators.override clang-darwin.searched-lib-generator : searched-lib-generator ;
-
-toolset.inherit-rules clang-darwin : gcc ;
-toolset.inherit-flags clang-darwin : gcc
- : <inlining>off <inlining>on <inlining>full <optimization>space
- <warnings>off <warnings>all <warnings>on
- <architecture>x86/<address-model>32
- <architecture>x86/<address-model>64
- ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# vectorization diagnostics
-feature vectorize : off on full ;
-
-# Initializes the clang-darwin toolset.
-# version is optional
-# name (default clang++) is used to invoke the specified clang compiler
-# compile and link options allow you to specify additional command line options for each version
-rule init ( version ? : command * : options * )
-{
- command = [ common.get-invocation-command clang-darwin : clang++
- : $(command) ] ;
-
- # Determine the version
- local command-string = $(command:J=" ") ;
- if $(command)
- {
- version ?= [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- }
-
- local condition = [ common.check-init-parameters clang-darwin
- : version $(version) ] ;
-
- common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags clang-darwin darwin $(condition) ;
-
-}
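-
-# Typical configuration sketch (the version and extra option are hypothetical),
-# e.g. in user-config.jam:
-#
-#   using clang-darwin : 3.5 : clang++ : <cxxflags>-stdlib=libc++ ;
-#
-# which calls the init rule above with the given version, command and options.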
-
-SPACE = " " ;
-
-flags clang-darwin.compile OPTIONS <cflags> ;
-flags clang-darwin.compile OPTIONS <cxxflags> ;
-# flags clang-darwin.compile INCLUDES <include> ;
-
-# Declare flags and action for compilation.
-toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
-toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
-
-toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
-toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
-toolset.flags clang-darwin.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
-
-toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
-toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
-toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
-toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
-toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
-
-actions compile.c
-{
- "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-flags clang-darwin ARFLAGS <archiveflags> ;
-
-# Default value. Mostly for the sake of clang-linux
-# that inherits from gcc but does not have the same
-# logic to set the .AR variable. We can put the same
-# logic in clang-linux, but that's hardly worth the trouble
-# as on Linux, 'ar' is always available.
-.AR = ar ;
-
-rule archive ( targets * : sources * : properties * )
-{
- # Always remove archive and start again. Here's rationale from
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a.
- # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
- # My program was crashing with absurd errors.
- # After some debugging I traced it back to the fact that a1.o was *still*
- # in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM
- # onto the archive action. That makes archives fail to build on NT
- # when they have many files because it will no longer execute the
- # action directly and blow the line length limit. Instead we
- # remove the file in a different action, just before the building
- # of the archive.
- #
- local clean.a = $(targets[1])(clean) ;
- TEMPORARY $(clean.a) ;
- NOCARE $(clean.a) ;
- LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
- DEPENDS $(clean.a) : $(sources) ;
- DEPENDS $(targets) : $(clean.a) ;
- common.RmTemps $(clean.a) : $(targets) ;
-}
-
-actions piecemeal archive
-{
- "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
- "ranlib" -cs "$(<)"
-}
-
-flags clang-darwin.link USER_OPTIONS <linkflags> ;
-
-# Declare actions for linking
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since
- # running N links in parallel is just slower.
- JAM_SEMAPHORE on $(targets) = <s>clang-darwin-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
diff --git a/tools/build/v2/tools/clang-linux.jam b/tools/build/v2/tools/clang-linux.jam
deleted file mode 100644
index 036d749e60..0000000000
--- a/tools/build/v2/tools/clang-linux.jam
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright (c) 2003 Michael Stevens
-# Copyright (c) 2010-2011 Bryce Lelbach (blelbach@cct.lsu.edu, maintainer)
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import toolset ;
-import feature ;
-import toolset : flags ;
-
-import clang ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-import type ;
-import numbers ;
-
-feature.extend-subfeature toolset clang : platform : linux ;
-
-toolset.inherit-generators clang-linux
- <toolset>clang <toolset-clang:platform>linux : gcc
- : gcc.mingw.link gcc.mingw.link.dll gcc.cygwin.link gcc.cygwin.link.dll ;
-generators.override clang-linux.prebuilt : builtin.lib-generator ;
-generators.override clang-linux.prebuilt : builtin.prebuilt ;
-generators.override clang-linux.searched-lib-generator : searched-lib-generator ;
-
-# Override default do-nothing generators.
-generators.override clang-linux.compile.c.pch : pch.default-c-pch-generator ;
-generators.override clang-linux.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-type.set-generated-target-suffix PCH
- : <toolset>clang <toolset-clang:platform>linux : pth ;
-
-toolset.inherit-rules clang-linux : gcc ;
-toolset.inherit-flags clang-linux : gcc
- : <inlining>off <inlining>on <inlining>full
- <optimization>space <optimization>speed
- <warnings>off <warnings>all <warnings>on ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
- .debug-configuration = true ;
-}
-
-rule init ( version ? : command * : options * ) {
- command = [ common.get-invocation-command clang-linux : clang++
- : $(command) ] ;
-
- # Determine the version
- local command-string = $(command:J=" ") ;
-
- if $(command) {
- version ?= [ MATCH "version ([0-9.]+)"
- : [ SHELL "$(command-string) --version" ] ] ;
- }
-
- local condition = [ common.check-init-parameters clang-linux
- : version $(version) ] ;
-
- common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags clang-linux gnu $(condition) ;
-}
-
-###############################################################################
-# Flags
-
-toolset.flags clang-linux.compile OPTIONS <cflags> ;
-toolset.flags clang-linux.compile OPTIONS <cxxflags> ;
-
-toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
-toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
-
-# note: clang silently ignores some of these inlining options
-toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
-toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
-toolset.flags clang-linux.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
-
-toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
-toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
-toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
-toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
-toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
-
-###############################################################################
-# C and C++ compilation
-
-rule compile.c++ ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-
- local pth-file = [ on $(<) return $(PCH_FILE) ] ;
-
- if $(pth-file) {
- DEPENDS $(<) : $(pth-file) ;
- compile.c++.with-pch $(targets) : $(sources) ;
- }
- else {
- compile.c++.without-pth $(targets) : $(sources) ;
- }
-}
-
-actions compile.c++.without-pth {
- "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
-}
-
-actions compile.c++.with-pch bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" -c -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -o "$(<)" "$(>)"
-}
-
-rule compile.c ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-
- local pth-file = [ on $(<) return $(PCH_FILE) ] ;
-
- if $(pth-file) {
- DEPENDS $(<) : $(pth-file) ;
- compile.c.with-pch $(targets) : $(sources) ;
- }
- else {
- compile.c.without-pth $(targets) : $(sources) ;
- }
-}
-
-actions compile.c.without-pth
-{
- "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c.with-pch bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" -c -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -include-pth -Xclang "$(PCH_FILE)" -c -o "$(<)" "$(>)"
-}
-
-###############################################################################
-# PCH emission
-
-rule compile.c++.pch ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c++.pch {
- rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
-}
-
-rule compile.c.pch ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c.pch
-{
- rm -f "$(<)" && "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -Xclang -emit-pth -o "$(<)" "$(>)"
-}
-
-###############################################################################
-# Linking
-
-SPACE = " " ;
-
-rule link ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
-}
-
-actions link bind LIBRARIES {
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
-}
-
-rule link.dll ( targets * : sources * : properties * ) {
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
- gcc.setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>clang-linux-link-semaphore ;
-}
-
-# Differ from 'link' above only by -shared.
-actions link.dll bind LIBRARIES {
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS) $(USER_OPTIONS)
-}
-
diff --git a/tools/build/v2/tools/common.jam b/tools/build/v2/tools/common.jam
deleted file mode 100644
index 53e91b428f..0000000000
--- a/tools/build/v2/tools/common.jam
+++ /dev/null
@@ -1,983 +0,0 @@
-# Copyright 2003, 2005 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2005 Toon Knapen
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Provides actions common to all toolsets, such as creating directories and
-# removing files.
-
-import os ;
-import modules ;
-import utility ;
-import print ;
-import type ;
-import feature ;
-import errors ;
-import path ;
-import sequence ;
-import toolset ;
-import virtual-target ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-if [ MATCH (--show-configuration) : [ modules.peek : ARGV ] ]
-{
- .show-configuration = true ;
-}
-
-# Configurations
-#
-# The following class helps to manage toolset configurations. Each configuration
-# has a unique ID and one or more parameters. A typical example of a unique ID
-# is a condition generated by 'common.check-init-parameters' rule. Other kinds
-# of IDs can be used. Parameters may include any details about the configuration
-# like 'command', 'path', etc.
-#
-# A toolset configuration may be in one of the following states:
-#
-# - registered
-# Configuration has been registered (e.g. explicitly or by auto-detection
-# code) but has not yet been marked as used, i.e. 'toolset.using' rule has
-# not yet been called for it.
-# - used
-# Once called 'toolset.using' rule marks the configuration as 'used'.
-#
-# The main difference between the states above is that while a configuration is
-# 'registered' its options can be freely changed. This is useful in particular
-# for autodetection code - all detected configurations may be safely overwritten
-# by user code.
-
-class configurations
-{
- import errors ;
-
- rule __init__ ( )
- {
- }
-
- # Registers a configuration.
- #
- # Returns 'true' if the configuration has been added and an empty value if
- # it already exists. Reports an error if the configuration is 'used'.
- #
- rule register ( id )
- {
- if $(id) in $(self.used)
- {
- errors.error "common: the configuration '$(id)' is in use" ;
- }
-
- local retval ;
-
- if ! $(id) in $(self.all)
- {
- self.all += $(id) ;
-
- # Indicate that a new configuration has been added.
- retval = true ;
- }
-
- return $(retval) ;
- }
-
- # Mark a configuration as 'used'.
- #
- # Returns 'true' if the state of the configuration has been changed to
- # 'used' and an empty value if the state has not been changed. Reports an
- # error if the configuration is not known.
- #
- rule use ( id )
- {
- if ! $(id) in $(self.all)
- {
- errors.error "common: the configuration '$(id)' is not known" ;
- }
-
- local retval ;
-
- if ! $(id) in $(self.used)
- {
- self.used += $(id) ;
-
- # Indicate that the configuration has been marked as 'used'.
- retval = true ;
- }
-
- return $(retval) ;
- }
-
- # Return all registered configurations.
- #
- rule all ( )
- {
- return $(self.all) ;
- }
-
- # Return all used configurations.
- #
- rule used ( )
- {
- return $(self.used) ;
- }
-
- # Returns the value of a configuration parameter.
- #
- rule get ( id : param )
- {
- return $(self.$(param).$(id)) ;
- }
-
- # Sets the value of a configuration parameter.
- #
- rule set ( id : param : value * )
- {
- self.$(param).$(id) = $(value) ;
- }
-}
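-
-# Minimal usage sketch for the class above (the toolset id and command are
-# hypothetical, and 'new' is assumed to be imported from the 'class' module):
-#
-#   .cfgs = [ new configurations ] ;
-#   if [ $(.cfgs).register <toolset>sometool-1.0 ]
-#   {
-#       $(.cfgs).set <toolset>sometool-1.0 : command : /usr/bin/sometool ;
-#   }
-#   $(.cfgs).use <toolset>sometool-1.0 ;
-#   ECHO [ $(.cfgs).get <toolset>sometool-1.0 : command ] ;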
-
-
-# The rule for checking toolset parameters. Trailing parameters should all be
-# parameter name/value pairs. The rule will check that each parameter either has
-# a value in each invocation or has no value in each invocation. Also, the rule
-# will check that the combination of all parameter values is unique in all
-# invocations.
-#
-# Each parameter name corresponds to a subfeature. This rule will declare a
-# subfeature the first time a non-empty parameter value is passed and will
-# extend it with all the values.
-#
-# The return value from this rule is a condition to be used for flags settings.
-#
-rule check-init-parameters ( toolset requirement * : * )
-{
- local sig = $(toolset) ;
- local condition = <toolset>$(toolset) ;
- local subcondition ;
- for local index in 2 3 4 5 6 7 8 9
- {
- local name = $($(index)[1]) ;
- local value = $($(index)[2]) ;
-
- if $(value)-is-not-empty
- {
- condition = $(condition)-$(value) ;
- if $(.had-unspecified-value.$(toolset).$(name))
- {
- errors.user-error
- "$(toolset) initialization: parameter '$(name)'"
- "inconsistent" : "no value was specified in earlier"
- "initialization" : "an explicit value is specified now" ;
- }
- # The logic below is for the intel compiler. It calls this rule with
- # 'intel-linux' and 'intel-win' as the toolset, so we need to get the
- # base part of the toolset name. We cannot pass 'intel' as the toolset
- # because in that case it would be impossible to register versionless
- # intel-linux and intel-win toolsets of a specific version.
- local t = $(toolset) ;
- local m = [ MATCH ([^-]*)- : $(toolset) ] ;
- if $(m)
- {
- t = $(m[1]) ;
- }
- if ! $(.had-value.$(toolset).$(name))
- {
- if ! $(.declared-subfeature.$(t).$(name))
- {
- feature.subfeature toolset $(t) : $(name) : : propagated ;
- .declared-subfeature.$(t).$(name) = true ;
- }
- .had-value.$(toolset).$(name) = true ;
- }
- feature.extend-subfeature toolset $(t) : $(name) : $(value) ;
- subcondition += <toolset-$(t):$(name)>$(value) ;
- }
- else
- {
- if $(.had-value.$(toolset).$(name))
- {
- errors.user-error
- "$(toolset) initialization: parameter '$(name)'"
- "inconsistent" : "an explicit value was specified in an"
- "earlier initialization" : "no value is specified now" ;
- }
- .had-unspecified-value.$(toolset).$(name) = true ;
- }
- sig = $(sig)$(value:E="")- ;
- }
- if $(sig) in $(.all-signatures)
- {
- local message =
- "duplicate initialization of $(toolset) with the following parameters: " ;
- for local index in 2 3 4 5 6 7 8 9
- {
- local p = $($(index)) ;
- if $(p)
- {
- message += "$(p[1]) = $(p[2]:E=<unspecified>)" ;
- }
- }
- message += "previous initialization at $(.init-loc.$(sig))" ;
- errors.user-error
- $(message[1]) : $(message[2]) : $(message[3]) : $(message[4]) :
- $(message[5]) : $(message[6]) : $(message[7]) : $(message[8]) ;
- }
- .all-signatures += $(sig) ;
- .init-loc.$(sig) = [ errors.nearest-user-location ] ;
-
- # If we have a requirement, this version should only be applied under that
- # condition. To accomplish this we add a toolset requirement that imposes
- # the toolset subcondition, which encodes the version.
- if $(requirement)
- {
- local r = <toolset>$(toolset) $(requirement) ;
- r = $(r:J=,) ;
- toolset.add-requirements $(r):$(subcondition) ;
- }
-
- # We add the requirements, if any, to the condition to scope the toolset
- # variables and options to this specific version.
- condition += $(requirement) ;
-
- if $(.show-configuration)
- {
- ECHO notice: $(condition) ;
- }
- return $(condition:J=/) ;
-}
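-
-# Condition sketch (the toolset name and version are hypothetical): a toolset
-# init rule calling
-#
-#   local condition = [ common.check-init-parameters sometool : version 1.0 ] ;
-#
-# declares a 'version' subfeature of <toolset>sometool on first use and gets
-# back the condition <toolset>sometool-1.0, suitable for toolset.flags calls.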
-
-
-# A helper rule to get the command to invoke some tool. If
-# 'user-provided-command' is not given, tries to find binary named 'tool' in
-# PATH and in the passed 'additional-path'. Otherwise, verifies that the first
-# element of 'user-provided-command' is an existing program.
-#
-# This rule returns the command to be used when invoking the tool. If we cannot
-# find the tool, a warning is issued. If 'path-last' is specified, PATH is
-# checked after 'additional-paths' when searching for 'tool'.
-#
-rule get-invocation-command-nodefault ( toolset : tool :
- user-provided-command * : additional-paths * : path-last ? )
-{
- local command ;
- if ! $(user-provided-command)
- {
- command = [ find-tool $(tool) : $(additional-paths) : $(path-last) ] ;
- if ! $(command) && $(.debug-configuration)
- {
- ECHO "warning: toolset $(toolset) initialization: can not find tool $(tool)" ;
- ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
- }
- }
- else
- {
- command = [ check-tool $(user-provided-command) ] ;
- if ! $(command) && $(.debug-configuration)
- {
- ECHO "warning: toolset $(toolset) initialization: " ;
- ECHO "warning: can not find user-provided command " '$(user-provided-command)' ;
- ECHO "warning: initialized from" [ errors.nearest-user-location ] ;
- }
- }
-
- return $(command) ;
-}
-
-
-# Same as get-invocation-command-nodefault, except that if no tool is found,
-# returns either the user-provided-command, if present, or the 'tool' parameter.
-#
-rule get-invocation-command ( toolset : tool : user-provided-command * :
- additional-paths * : path-last ? )
-{
- local result = [ get-invocation-command-nodefault $(toolset) : $(tool) :
- $(user-provided-command) : $(additional-paths) : $(path-last) ] ;
-
- if ! $(result)
- {
- if $(user-provided-command)
- {
- result = $(user-provided-command) ;
- }
- else
- {
- result = $(tool) ;
- }
- }
- return $(result) ;
-}
-
-
-# Given an invocation command return the absolute path to the command. This
-# works even if command has no path element and was found on the PATH.
-#
-rule get-absolute-tool-path ( command )
-{
- if $(command:D)
- {
- return $(command:D) ;
- }
- else
- {
- local m = [ GLOB [ modules.peek : PATH Path path ] : $(command) $(command).exe ] ;
- return $(m[1]:D) ;
- }
-}
-
-
-# Attempts to find tool (binary) named 'name' in PATH and in 'additional-paths'.
-# If found in PATH, returns 'name' and if found in additional paths, returns
-# absolute name. If the tool is found in several directories, returns the
-# first path found. Otherwise, returns an empty string. If 'path-last' is
-# specified, PATH is searched after 'additional-paths'.
-#
-rule find-tool ( name : additional-paths * : path-last ? )
-{
- local path = [ path.programs-path ] ;
- local match = [ path.glob $(path) : $(name) $(name).exe ] ;
- local additional-match = [ path.glob $(additional-paths) : $(name) $(name).exe ] ;
-
- local result ;
- if $(path-last)
- {
- result = $(additional-match) ;
- if ! $(result) && $(match)
- {
- result = $(name) ;
- }
- }
- else
- {
- if $(match)
- {
- result = $(name) ;
- }
- else
- {
- result = $(additional-match) ;
- }
- }
- if $(result)
- {
- return [ path.native $(result[1]) ] ;
- }
-}
-
-
-# Checks if 'command' can be found either in path or is a full name to an
-# existing file.
-#
-local rule check-tool-aux ( command )
-{
- if $(command:D)
- {
- if [ path.exists $(command) ]
- # Both NT and Cygwin will run .exe files by their unqualified names.
- || ( [ os.on-windows ] && [ path.exists $(command).exe ] )
- # Only NT will run .bat & .cmd files by their unqualified names.
- || ( ( [ os.name ] = NT ) && ( [ path.exists $(command).bat ] ||
- [ path.exists $(command).cmd ] ) )
- {
- return $(command) ;
- }
- }
- else
- {
- if [ GLOB [ modules.peek : PATH Path path ] : $(command) ]
- {
- return $(command) ;
- }
- }
-}
-
-
-# Checks that a tool can be invoked by 'command'. If command is not an absolute
-# path, checks if it can be found in 'path'. If comand is an absolute path,
-# check that it exists. Returns 'command' if ok or empty string otherwise.
-#
-local rule check-tool ( xcommand + )
-{
- if [ check-tool-aux $(xcommand[1]) ] ||
- [ check-tool-aux $(xcommand[-1]) ]
- {
- return $(xcommand) ;
- }
-}
-
-
-# Handle common options for toolset, specifically sets the following flag
-# variables:
-# - CONFIG_COMMAND to $(command)
-# - OPTIONS for compile to the value of <compileflags> in $(options)
-# - OPTIONS for compile.c to the value of <cflags> in $(options)
-# - OPTIONS for compile.c++ to the value of <cxxflags> in $(options)
-# - OPTIONS for compile.fortran to the value of <fflags> in $(options)
-# - OPTIONS for link to the value of <linkflags> in $(options)
-#
-rule handle-options ( toolset : condition * : command * : options * )
-{
- if $(.debug-configuration)
- {
- ECHO "notice: will use '$(command)' for $(toolset), condition $(condition:E=(empty))" ;
- }
-
- # The last parameter ('unchecked') says it is OK to set flags for another
- # module.
- toolset.flags $(toolset) CONFIG_COMMAND $(condition) : $(command)
- : unchecked ;
-
- toolset.flags $(toolset).compile OPTIONS $(condition) :
- [ feature.get-values <compileflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).compile.c OPTIONS $(condition) :
- [ feature.get-values <cflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).compile.c++ OPTIONS $(condition) :
- [ feature.get-values <cxxflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).compile.fortran OPTIONS $(condition) :
- [ feature.get-values <fflags> : $(options) ] : unchecked ;
-
- toolset.flags $(toolset).link OPTIONS $(condition) :
- [ feature.get-values <linkflags> : $(options) ] : unchecked ;
-}
-
-
-# Returns the location of the "program files" directory on a Windows platform.
-#
-rule get-program-files-dir ( )
-{
- local ProgramFiles = [ modules.peek : ProgramFiles ] ;
- if $(ProgramFiles)
- {
- ProgramFiles = "$(ProgramFiles:J= )" ;
- }
- else
- {
- ProgramFiles = "c:\\Program Files" ;
- }
- return $(ProgramFiles) ;
-}
-
-
-if [ os.name ] = NT
-{
- RM = del /f /q ;
- CP = copy /b ;
- IGNORE = "2>nul >nul & setlocal" ;
- LN ?= $(CP) ;
- # Ugly hack to convince copy to set the timestamp of the
- # destination to the current time by concatenating the
- # source with a nonexistent file. Note that this requires
- # /b (binary) as the default when concatenating files is /a (ascii).
- WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ;
-}
-else
-{
- RM = rm -f ;
- CP = cp ;
- LN = ln ;
-}
-
-
-rule rm-command ( )
-{
- return $(RM) ;
-}
-
-
-rule copy-command ( )
-{
- return $(CP) ;
-}
-
-
-if "\n" = "n"
-{
- # Escape characters are not supported. Use ugly hacks that won't work,
- # see below.
- nl = "
-" ;
- q = "" ;
-}
-else
-{
- nl = "\n" ;
- q = "\"" ;
-}
-
-# Returns the command needed to set an environment variable on the current
-# platform. The variable setting persists through all following commands and is
-# visible in the environment seen by subsequently executed commands. In other
-# words, on Unix systems, the variable is exported, which is consistent with the
-# only possible behavior on Windows systems.
-#
-rule variable-setting-command ( variable : value )
-{
- if [ os.name ] = NT
- {
- return "set $(variable)=$(value)$(nl)" ;
- }
- else
- {
- # If we don't have escape characters support in bjam, the below blows
- # up on CYGWIN, since the $(nl) variable holds a Windows new-line \r\n
- # sequence that messes up the executed export command which then reports
- # that the passed variable name is incorrect.
- # But we have a check for cygwin in kernel/bootstrap.jam already.
- return "$(variable)=$(q)$(value)$(q)$(nl)export $(variable)$(nl)" ;
- }
-}
-
-
-# Returns a command that sets a named shell path variable to the given NATIVE
-# paths on the current platform.
-#
-rule path-variable-setting-command ( variable : paths * )
-{
- local sep = [ os.path-separator ] ;
- return [ variable-setting-command $(variable) : $(paths:J=$(sep)) ] ;
-}
-
-
-# Returns a command that prepends the given paths to the named path variable on
-# the current platform.
-#
-rule prepend-path-variable-command ( variable : paths * )
-{
- return [ path-variable-setting-command $(variable)
- : $(paths) [ os.expand-variable $(variable) ] ] ;
-}
-
-
-# Return a command which can create a file. If 'r' is result of invocation, then
-# 'r foobar' will create foobar with unspecified content. What happens if file
-# already exists is unspecified.
-#
-rule file-creation-command ( )
-{
- if [ os.name ] = NT
- {
- # A few alternative implementations on Windows:
- #
- # 'type NUL >> '
- # That would construct an empty file instead of a file containing
- # a space and an end-of-line marker but it would also not change
- # the target's timestamp in case the file already exists.
- #
- # 'type NUL > '
- # That would construct an empty file instead of a file containing
- # a space and an end-of-line marker but it would also destroy an
- # already existing file by overwriting it with an empty one.
- #
- # I guess the best solution would be to allow Boost Jam to define
- # built-in functions such as 'create a file', 'touch a file' or 'copy a
- # file' which could be used from inside action code. That would allow
- # completely portable operations without this kind of kludge.
- # (22.02.2009.) (Jurko)
- return "echo. > " ;
- }
- else
- {
- return "touch " ;
- }
-}
-
-
-# Returns a command that may be used for 'touching' files. It is not a real
-# 'touch' command on NT because it adds an empty line at the end of file but it
-# works with source files.
-#
-rule file-touch-command ( )
-{
- if [ os.name ] = NT
- {
- return "echo. >> " ;
- }
- else
- {
- return "touch " ;
- }
-}
-
-
-rule MkDir
-{
- # If dir exists, do not update it. Do this even for $(DOT).
- NOUPDATE $(<) ;
-
- if $(<) != $(DOT) && ! $($(<)-mkdir)
- {
- # Cheesy gate to prevent multiple invocations on same dir.
- $(<)-mkdir = true ;
-
- # Schedule the mkdir build action.
- common.mkdir $(<) ;
-
- # Prepare a Jam 'dirs' target that can be used to make the build only
- # construct all the target directories.
- DEPENDS dirs : $(<) ;
-
- # Recursively create parent directories. $(<:P) = $(<)'s parent & we
- # recurse until root.
-
- local s = $(<:P) ;
- if [ os.name ] = NT
- {
- switch $(s)
- {
- case *: : s = ;
- case *:\\ : s = ;
- }
- }
-
- if $(s)
- {
- if $(s) != $(<)
- {
- DEPENDS $(<) : $(s) ;
- MkDir $(s) ;
- }
- else
- {
- NOTFILE $(s) ;
- }
- }
- }
-}
-
-
-#actions MkDir1
-#{
-# mkdir "$(<)"
-#}
-
-# The following quick-fix actions should be replaced using the original MkDir1
-# action once Boost Jam gets updated to correctly detect different paths leading
-# up to the same filesystem target and triggers their build action only once.
-# (todo) (04.07.2008.) (Jurko)
-
-if [ os.name ] = NT
-{
- actions mkdir
- {
- if not exist "$(<)\\" mkdir "$(<)"
- }
-}
-else
-{
- actions mkdir
- {
- mkdir -p "$(<)"
- }
-}
-
-actions piecemeal together existing Clean
-{
- $(RM) "$(>)"
-}
-
-
-rule copy
-{
-}
-
-
-actions copy
-{
- $(CP) "$(>)" $(WINDOWS-CP-HACK) "$(<)"
-}
-
-
-rule RmTemps
-{
-}
-
-
-actions quietly updated piecemeal together RmTemps
-{
- $(RM) "$(>)" $(IGNORE)
-}
-
-
-actions hard-link
-{
- $(RM) "$(<)" 2$(NULL_OUT) $(NULL_OUT)
- $(LN) "$(>)" "$(<)" $(NULL_OUT)
-}
-
-
-# Given a target, as given to a custom tag rule, returns a string formatted
-# according to the passed format. Format is a list of properties that is
-# represented in the result. For each element of format the corresponding target
-# information is obtained and added to the result string. For all but the
-# literal, the format value is taken as the string to prepend to the output
-# to join the item to the rest of the result. If not given, "-" is used as
-# the joiner.
-#
-# The format options can be:
-#
-# <base>[joiner]
-# :: The basename of the target name.
-# <toolset>[joiner]
-# :: The abbreviated toolset tag being used to build the target.
-# <threading>[joiner]
-# :: Indication of a multi-threaded build.
-# <runtime>[joiner]
-# :: Collective tag of the build runtime.
-# <version:/version-feature | X.Y[.Z]/>[joiner]
-# :: Short version tag taken from the given "version-feature" in the
-# build properties. Or if not present, the literal value as the
-# version number.
-# <property:/property-name/>[joiner]
-# :: Direct lookup of the given property-name value in the build
-# properties. /property-name/ is a regular expression. E.g.
-# <property:toolset-.*:flavor> will match every toolset.
-# /otherwise/
-# :: The literal value of the format argument.
-#
-# For example this format:
-#
-# boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
-#
-# Might return:
-#
-# boost_thread-vc80-mt-gd-1_33.dll, or
-# boost_regex-vc80-gd-1_33.dll
-#
-# The returned name also has the target type specific prefix and suffix which
-# puts it in a ready form to use as the value from a custom tag rule.
-#
-rule format-name ( format * : name : type ? : property-set )
-{
- local result = "" ;
- for local f in $(format)
- {
- switch $(f:G)
- {
- case <base> :
- result += $(name:B) ;
-
- case <toolset> :
- result += [ join-tag $(f:G=) : [ toolset-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <threading> :
- result += [ join-tag $(f:G=) : [ threading-tag $(name) : $(type)
- : $(property-set) ] ] ;
-
- case <runtime> :
- result += [ join-tag $(f:G=) : [ runtime-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <qt> :
- result += [ join-tag $(f:G=) : [ qt-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <address-model> :
- result += [ join-tag $(f:G=) : [ address-model-tag $(name) : $(type) :
- $(property-set) ] ] ;
-
- case <version:*> :
- local key = [ MATCH <version:(.*)> : $(f:G) ] ;
- local version = [ $(property-set).get <$(key)> ] ;
- version ?= $(key) ;
- version = [ MATCH "^([^.]+)[.]([^.]+)[.]?([^.]*)" : $(version) ] ;
- result += [ join-tag $(f:G=) : $(version[1])_$(version[2]) ] ;
-
- case <property:*> :
- local key = [ MATCH <property:(.*)> : $(f:G) ] ;
- local p0 = [ MATCH <($(key))> : [ $(property-set).raw ] ] ;
- if $(p0)
- {
- local p = [ $(property-set).get <$(p0)> ] ;
- if $(p)
- {
- result += [ join-tag $(f:G=) : $(p) ] ;
- }
- }
-
- case * :
- result += $(f:G=) ;
- }
- }
- result = [ virtual-target.add-prefix-and-suffix $(result:J=) : $(type) :
- $(property-set) ] ;
- return $(result) ;
-}
-
-
-local rule join-tag ( joiner ? : tag ? )
-{
- if ! $(joiner) { joiner = - ; }
- return $(joiner)$(tag) ;
-}
-
-
-local rule toolset-tag ( name : type ? : property-set )
-{
- local tag = ;
-
- local properties = [ $(property-set).raw ] ;
- switch [ $(property-set).get <toolset> ]
- {
- case borland* : tag += bcb ;
- case clang* :
- {
- switch [ $(property-set).get <toolset-clang:platform> ]
- {
- case darwin : tag += clang-darwin ;
- case linux : tag += clang ;
- }
- }
- case como* : tag += como ;
- case cw : tag += cw ;
- case darwin* : tag += xgcc ;
- case edg* : tag += edg ;
- case gcc* :
- {
- switch [ $(property-set).get <toolset-gcc:flavor> ]
- {
- case *mingw* : tag += mgw ;
- case * : tag += gcc ;
- }
- }
- case intel :
- if [ $(property-set).get <toolset-intel:platform> ] = win
- {
- tag += iw ;
- }
- else
- {
- tag += il ;
- }
- case kcc* : tag += kcc ;
- case kylix* : tag += bck ;
- #case metrowerks* : tag += cw ;
- #case mingw* : tag += mgw ;
- case mipspro* : tag += mp ;
- case msvc* : tag += vc ;
- case qcc* : tag += qcc ;
- case sun* : tag += sw ;
- case tru64cxx* : tag += tru ;
- case vacpp* : tag += xlc ;
- }
- local version = [ MATCH "<toolset.*version>([0123456789]+)[.]([0123456789]*)"
- : $(properties) ] ;
- # For historical reasons, vc6.0 and vc7.0 use different naming.
- if $(tag) = vc
- {
- if $(version[1]) = 6
- {
- # Cancel minor version.
- version = 6 ;
- }
- else if $(version[1]) = 7 && $(version[2]) = 0
- {
- version = 7 ;
- }
- }
- # On intel, version is not added, because it does not matter and it is the
- # version of vc used as backend that matters. Ideally, we should encode the
- # backend version but that would break compatibility with V1.
- if $(tag) = iw
- {
- version = ;
- }
-
- # On borland, version is not added for compatibility with V1.
- if $(tag) = bcb
- {
- version = ;
- }
-
- tag += $(version) ;
-
- return $(tag:J=) ;
-}
-
-
-local rule threading-tag ( name : type ? : property-set )
-{
- local tag = ;
- local properties = [ $(property-set).raw ] ;
- if <threading>multi in $(properties) { tag = mt ; }
-
- return $(tag:J=) ;
-}
-
-
-local rule runtime-tag ( name : type ? : property-set )
-{
- local tag = ;
-
- local properties = [ $(property-set).raw ] ;
- if <runtime-link>static in $(properties) { tag += s ; }
-
- # This is an ugly thing. In V1, there is code to automatically detect which
- # properties affect a target. So, if <runtime-debugging> does not affect gcc
- # toolset, the tag rules will not even see <runtime-debugging>. Similar
- # functionality in V2 is not implemented yet, so we just check for toolsets
- # known to care about runtime debugging.
- if ( <toolset>msvc in $(properties) ) ||
- ( <stdlib>stlport in $(properties) ) ||
- ( <toolset-intel:platform>win in $(properties) )
- {
- if <runtime-debugging>on in $(properties) { tag += g ; }
- }
-
- if <python-debugging>on in $(properties) { tag += y ; }
- if <variant>debug in $(properties) { tag += d ; }
- if <stdlib>stlport in $(properties) { tag += p ; }
- if <stdlib-stlport:iostream>hostios in $(properties) { tag += n ; }
-
- return $(tag:J=) ;
-}
-
-# Create a tag for the Qt library version
-# "<qt>4.6.0" will result in tag "qt460"
-local rule qt-tag ( name : type ? : property-set )
-{
- local properties = [ $(property-set).get <qt> ] ;
- local version = [ MATCH "([0123456789]+)[.]?([0123456789]*)[.]?([0123456789]*)"
- : $(properties) ] ;
- local tag = "qt"$(version:J=) ;
- return $(tag) ;
-}
-
-# Create a tag for the address-model
-# <address-model>64 will simply generate "64"
-local rule address-model-tag ( name : type ? : property-set )
-{
- local tag = ;
- local version = [ $(property-set).get <address-model> ] ;
- return $(version) ;
-}
-
-rule __test__ ( )
-{
- import assert ;
-
- local save-os = [ modules.peek os : .name ] ;
-
- modules.poke os : .name : LINUX ;
-
- assert.result "PATH=\"foo:bar:baz\"\nexport PATH\n"
- : path-variable-setting-command PATH : foo bar baz ;
-
- assert.result "PATH=\"foo:bar:$PATH\"\nexport PATH\n"
- : prepend-path-variable-command PATH : foo bar ;
-
- modules.poke os : .name : NT ;
-
- assert.result "set PATH=foo;bar;baz\n"
- : path-variable-setting-command PATH : foo bar baz ;
-
- assert.result "set PATH=foo;bar;%PATH%\n"
- : prepend-path-variable-command PATH : foo bar ;
-
- modules.poke os : .name : $(save-os) ;
-}
diff --git a/tools/build/v2/tools/common.py b/tools/build/v2/tools/common.py
deleted file mode 100644
index 3eb0f7d3f1..0000000000
--- a/tools/build/v2/tools/common.py
+++ /dev/null
@@ -1,844 +0,0 @@
-# Status: being ported by Steven Watanabe
-# Base revision: 47174
-#
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-""" Provides actions common to all toolsets, such as creating directories and
- removing files.
-"""
-
-import re
-import bjam
-import os
-import os.path
-import sys
-
-from b2.build import feature
-from b2.util.utility import *
-from b2.util import path
-
-__re__before_first_dash = re.compile ('([^-]*)-')
-
-def reset ():
- """ Clear the module state. This is mainly for testing purposes.
- Note that this must be called _after_ resetting the module 'feature'.
- """
- global __had_unspecified_value, __had_value, __declared_subfeature
- global __init_loc
- global __all_signatures, __debug_configuration, __show_configuration
-
- # Stores toolsets without specified initialization values.
- __had_unspecified_value = {}
-
- # Stores toolsets with specified initialization values.
- __had_value = {}
-
- # Stores toolsets with declared subfeatures.
- __declared_subfeature = {}
-
- # Stores all signatures of the toolsets.
- __all_signatures = {}
-
- # Stores the initialization locations of each toolset
- __init_loc = {}
-
- __debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
- __show_configuration = '--show-configuration' in bjam.variable('ARGV')
-
- global __executable_path_variable
- OS = bjam.call("peek", [], "OS")[0]
- if OS == "NT":
- # On Windows the case and capitalization of PATH is not always predictable, so
- # let's find out what variable name was really set.
- for n in os.environ:
- if n.lower() == "path":
- __executable_path_variable = n
- break
- else:
- __executable_path_variable = "PATH"
-
- m = {"NT": __executable_path_variable,
- "CYGWIN": "PATH",
- "MACOSX": "DYLD_LIBRARY_PATH",
- "AIX": "LIBPATH"}
- global __shared_library_path_variable
- __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH")
-
-reset()
-
-def shared_library_path_variable():
- return __shared_library_path_variable
-
-# ported from trunk@47174
-class Configurations(object):
- """
- This class helps to manage toolset configurations. Each configuration
- has a unique ID and one or more parameters. A typical example of a unique ID
- is a condition generated by 'common.check-init-parameters' rule. Other kinds
- of IDs can be used. Parameters may include any details about the configuration
- like 'command', 'path', etc.
-
- A toolset configuration may be in one of the following states:
-
- - registered
- Configuration has been registered (e.g. by autodetection code) but has
- not yet been marked as used, i.e. 'toolset.using' rule has not yet been
- called for it.
- - used
-          Once 'toolset.using' has been called for it, the configuration is
-          marked as 'used'.
-
- The main difference between the states above is that while a configuration is
- 'registered' its options can be freely changed. This is useful in particular
- for autodetection code - all detected configurations may be safely overwritten
- by user code.
- """
-
- def __init__(self):
- self.used_ = set()
- self.all_ = set()
- self.params_ = {}
-
- def register(self, id):
- """
- Registers a configuration.
-
- Returns True if the configuration has been added and False if
- it already exists. Reports an error if the configuration is 'used'.
- """
- if id in self.used_:
- #FIXME
- errors.error("common: the configuration '$(id)' is in use")
-
- if id not in self.all_:
- self.all_.add(id)
-
- # Indicate that a new configuration has been added.
- return True
- else:
- return False
-
- def use(self, id):
- """
- Mark a configuration as 'used'.
-
- Returns True if the state of the configuration has been changed to
-        'used' and False if the state wasn't changed. Reports an error
- if the configuration isn't known.
- """
- if id not in self.all_:
- #FIXME:
- errors.error("common: the configuration '$(id)' is not known")
-
- if id not in self.used_:
- self.used_.add(id)
-
- # indicate that the configuration has been marked as 'used'
- return True
- else:
- return False
-
- def all(self):
- """ Return all registered configurations. """
- return self.all_
-
- def used(self):
- """ Return all used configurations. """
- return self.used_
-
- def get(self, id, param):
- """ Returns the value of a configuration parameter. """
- return self.params_.get(param, {}).get(id)
-
- def set (self, id, param, value):
- """ Sets the value of a configuration parameter. """
- self.params_.setdefault(param, {})[id] = value
-
-# Ported from trunk@47174
-def check_init_parameters(toolset, requirement, *args):
- """ The rule for checking toolset parameters. Trailing parameters should all be
- parameter name/value pairs. The rule will check that each parameter either has
- a value in each invocation or has no value in each invocation. Also, the rule
- will check that the combination of all parameter values is unique in all
- invocations.
-
- Each parameter name corresponds to a subfeature. This rule will declare a
- subfeature the first time a non-empty parameter value is passed and will
- extend it with all the values.
-
- The return value from this rule is a condition to be used for flags settings.
- """
- # The type checking here is my best guess about
- # what the types should be.
- assert(isinstance(toolset, str))
- assert(isinstance(requirement, str) or requirement is None)
- sig = toolset
- condition = replace_grist(toolset, '<toolset>')
- subcondition = []
-
- for arg in args:
- assert(isinstance(arg, tuple))
- assert(len(arg) == 2)
- name = arg[0]
- value = arg[1]
- assert(isinstance(name, str))
- assert(isinstance(value, str) or value is None)
-
- str_toolset_name = str((toolset, name))
-
- # FIXME: is this the correct translation?
- ### if $(value)-is-not-empty
- if value is not None:
- condition = condition + '-' + value
- if __had_unspecified_value.has_key(str_toolset_name):
- raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \
- "no value was specified in earlier initialization\n" \
- "an explicit value is specified now" % (toolset, name))
-
-            # The logic below is for the Intel compiler. It calls this rule
-            # with 'intel-linux' and 'intel-win' as the toolset, so we need
-            # to get the base part of the toolset name. We cannot pass
-            # 'intel' as the toolset because in that case it would be
-            # impossible to register versionless intel-linux and intel-win
-            # toolsets of a specific version.
- t = toolset
- m = __re__before_first_dash.match(toolset)
- if m:
- t = m.group(1)
-
- if not __had_value.has_key(str_toolset_name):
- if not __declared_subfeature.has_key(str((t, name))):
- feature.subfeature('toolset', t, name, [], ['propagated'])
- __declared_subfeature[str((t, name))] = True
-
- __had_value[str_toolset_name] = True
-
- feature.extend_subfeature('toolset', t, name, [value])
- subcondition += ['<toolset-' + t + ':' + name + '>' + value ]
-
- else:
- if __had_value.has_key(str_toolset_name):
- raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \
- "an explicit value was specified in an earlier initialization\n" \
- "no value is specified now" % (toolset, name))
-
- __had_unspecified_value[str_toolset_name] = True
-
- if value == None: value = ''
-
- sig = sig + value + '-'
-
- if __all_signatures.has_key(sig):
- message = "duplicate initialization of '%s' with the following parameters: " % toolset
-
- for arg in args:
- name = arg[0]
- value = arg[1]
- if value == None: value = '<unspecified>'
-
- message += "'%s' = '%s'\n" % (name, value)
-
- raise BaseException(message)
-
- __all_signatures[sig] = True
- # FIXME
- __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ;
-
- # If we have a requirement, this version should only be applied under that
- # condition. To accomplish this we add a toolset requirement that imposes
- # the toolset subcondition, which encodes the version.
- if requirement:
-        r = ','.join(['<toolset>' + toolset, requirement])
-        # The 'toolset' parameter (a str) shadows the b2.build.toolset module,
-        # so import the module under another name before calling its
-        # add_requirements (the counterpart of the Jam toolset.add-requirements).
-        from b2.build import toolset as toolset_module
-        toolset_module.add_requirements([r + ':' + c for c in subcondition])
-
- # We add the requirements, if any, to the condition to scope the toolset
- # variables and options to this specific version.
- condition = [condition]
- if requirement:
- condition += [requirement]
-
- if __show_configuration:
- print "notice:", condition
- return ['/'.join(condition)]
-
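As an illustrative sketch only (the 'gcc' name and '4.9' version below are hypothetical, not taken from this module), a toolset's init code would pass its name/value pairs through check_init_parameters and use the returned condition for flag settings:

    # Builds '<toolset>gcc-4.9' from the (name, value) pairs; a pair whose
    # value is None only extends the signature, not the condition.
    condition = check_init_parameters('gcc', None,
                                      ('version', '4.9'), ('command', None))
    # condition == ['<toolset>gcc-4.9'], suitable as the condition argument
    # for toolset.flags(...).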
-# Ported from trunk@47077
-def get_invocation_command_nodefault(
- toolset, tool, user_provided_command=[], additional_paths=[], path_last=False):
- """
- A helper rule to get the command to invoke some tool. If
- 'user-provided-command' is not given, tries to find binary named 'tool' in
- PATH and in the passed 'additional-path'. Otherwise, verifies that the first
- element of 'user-provided-command' is an existing program.
-
- This rule returns the command to be used when invoking the tool. If we can't
- find the tool, a warning is issued. If 'path-last' is specified, PATH is
- checked after 'additional-paths' when searching for 'tool'.
- """
- assert(isinstance(toolset, str))
- assert(isinstance(tool, str))
- assert(isinstance(user_provided_command, list))
- if additional_paths is not None:
- assert(isinstance(additional_paths, list))
-        assert(all(isinstance(p, str) for p in additional_paths))
- assert(isinstance(path_last, bool))
-
- if not user_provided_command:
- command = find_tool(tool, additional_paths, path_last)
- if not command and __debug_configuration:
-            print "warning: toolset", toolset, "initialization: cannot find tool", tool
- #FIXME
- #print "warning: initialized from" [ errors.nearest-user-location ] ;
- else:
-        command = check_tool(user_provided_command)
-        if command:
-            # check_tool() returns the command as a list; flatten it to a string.
-            assert(isinstance(command, list))
-            command = ' '.join(command)
- if not command and __debug_configuration:
- print "warning: toolset", toolset, "initialization:"
- print "warning: can't find user-provided command", user_provided_command
- #FIXME
- #ECHO "warning: initialized from" [ errors.nearest-user-location ]
-
-    # 'command' is None when the tool could not be found at all.
-    assert(command is None or isinstance(command, str))
-
- return command
-
-# ported from trunk@47174
-def get_invocation_command(toolset, tool, user_provided_command = [],
- additional_paths = [], path_last = False):
- """ Same as get_invocation_command_nodefault, except that if no tool is found,
- returns either the user-provided-command, if present, or the 'tool' parameter.
- """
-
- assert(isinstance(toolset, str))
- assert(isinstance(tool, str))
- assert(isinstance(user_provided_command, list))
- if additional_paths is not None:
- assert(isinstance(additional_paths, list))
- assert(all([isinstance(path, str) for path in additional_paths]))
- assert(isinstance(path_last, bool))
-
- result = get_invocation_command_nodefault(toolset, tool,
- user_provided_command,
- additional_paths,
- path_last)
-
- if not result:
- if user_provided_command:
- result = user_provided_command[0]
- else:
- result = tool
-
- assert(isinstance(result, str))
-
- return result
-
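A small usage sketch (the tool and path are hypothetical): the rule degrades gracefully, so callers always get a printable command even when nothing was found.

    # Search PATH first, then /opt/gcc/bin. If neither yields a 'g++' binary
    # and no user command was given, the bare name 'g++' is returned so the
    # generated actions still show a sensible command line.
    cmd = get_invocation_command('gcc', 'g++', [], ['/opt/gcc/bin'], False)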
-# ported from trunk@47281
-def get_absolute_tool_path(command):
- """
-    Given an invocation command, return the absolute path to the command.
-    This works even if the command has no path element and was found on
-    the PATH.
- """
- if os.path.dirname(command):
- return os.path.dirname(command)
- else:
- programs = path.programs_path()
- m = path.glob(programs, [command, command + '.exe' ])
- if not len(m):
- if __debug_configuration:
- print "Could not find:", command, "in", programs
- return None
- return os.path.dirname(m[0])
-
-# ported from trunk@47174
-def find_tool(name, additional_paths = [], path_last = False):
- """ Attempts to find tool (binary) named 'name' in PATH and in
- 'additional-paths'. If found in path, returns 'name'. If
- found in additional paths, returns full name. If the tool
- is found in several directories, returns the first path found.
- Otherwise, returns the empty string. If 'path_last' is specified,
- path is checked after 'additional_paths'.
- """
- assert(isinstance(name, str))
- assert(isinstance(additional_paths, list))
- assert(isinstance(path_last, bool))
-
- programs = path.programs_path()
- match = path.glob(programs, [name, name + '.exe'])
- additional_match = path.glob(additional_paths, [name, name + '.exe'])
-
- result = []
- if path_last:
- result = additional_match
- if not result and match:
- result = match
-
- else:
- if match:
- result = match
-
- elif additional_match:
- result = additional_match
-
- if result:
- return path.native(result[0])
- else:
- return ''
-
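For illustration (the tool name and directory are made up), path_last only decides which set of directories wins when the tool exists in both:

    find_tool('doxygen')                              # PATH only
    find_tool('doxygen', ['/opt/doxygen/bin'])        # PATH wins over the extra dir
    find_tool('doxygen', ['/opt/doxygen/bin'], True)  # extra dir wins, PATH checked last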
-#ported from trunk@47281
-def check_tool_aux(command):
- """ Checks if 'command' can be found either in path
- or is a full name to an existing file.
- """
- assert(isinstance(command, str))
- dirname = os.path.dirname(command)
- if dirname:
- if os.path.exists(command):
- return command
- # Both NT and Cygwin will run .exe files by their unqualified names.
- elif on_windows() and os.path.exists(command + '.exe'):
- return command
- # Only NT will run .bat files by their unqualified names.
- elif os_name() == 'NT' and os.path.exists(command + '.bat'):
- return command
- else:
- paths = path.programs_path()
- if path.glob(paths, [command]):
- return command
-
-# ported from trunk@47281
-def check_tool(command):
- """ Checks that a tool can be invoked by 'command'.
- If command is not an absolute path, checks if it can be found in 'path'.
-        If command is an absolute path, checks that it exists. Returns
-        'command' if OK, and None otherwise.
- """
- assert(isinstance(command, list))
- assert(all(isinstance(c, str) for c in command))
- #FIXME: why do we check the first and last elements????
- if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
- return command
-
-# ported from trunk@47281
-def handle_options(tool, condition, command, options):
- """ Handle common options for toolset, specifically sets the following
- flag variables:
- - CONFIG_COMMAND to 'command'
-        - OPTIONS for compile to the value of <compileflags> in options
- - OPTIONS for compile.c to the value of <cflags> in options
- - OPTIONS for compile.c++ to the value of <cxxflags> in options
- - OPTIONS for compile.fortran to the value of <fflags> in options
-        - OPTIONS for link to the value of <linkflags> in options
- """
- from b2.build import toolset
-
- assert(isinstance(tool, str))
- assert(isinstance(condition, list))
- assert(isinstance(command, str))
- assert(isinstance(options, list))
- assert(command)
- toolset.flags(tool, 'CONFIG_COMMAND', condition, [command])
- toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('<compileflags>', options))
- toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('<cflags>', options))
- toolset.flags(tool + '.compile.c++', 'OPTIONS', condition, feature.get_values('<cxxflags>', options))
- toolset.flags(tool + '.compile.fortran', 'OPTIONS', condition, feature.get_values('<fflags>', options))
- toolset.flags(tool + '.link', 'OPTIONS', condition, feature.get_values('<linkflags>', options))
-
-# ported from trunk@47281
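Sketch of a typical call site, mirroring what the Jam toolsets below (e.g. cray.jam) do from their init rules; the toolset name, condition and flags here are hypothetical:

    handle_options('sometool', condition, command,
                   ['<cxxflags>-O2', '<linkflags>-s'])
    # Sets sometool CONFIG_COMMAND to 'command', sometool.compile.c++ OPTIONS
    # to -O2 and sometool.link OPTIONS to -s, all scoped to 'condition'.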
-def get_program_files_dir():
- """ returns the location of the "program files" directory on a windows
- platform
- """
- ProgramFiles = bjam.variable("ProgramFiles")
- if ProgramFiles:
- ProgramFiles = ' '.join(ProgramFiles)
- else:
- ProgramFiles = "c:\\Program Files"
- return ProgramFiles
-
-# ported from trunk@47281
-def rm_command():
- return __RM
-
-# ported from trunk@47281
-def copy_command():
- return __CP
-
-# ported from trunk@47281
-def variable_setting_command(variable, value):
- """
- Returns the command needed to set an environment variable on the current
- platform. The variable setting persists through all following commands and is
- visible in the environment seen by subsequently executed commands. In other
- words, on Unix systems, the variable is exported, which is consistent with the
- only possible behavior on Windows systems.
- """
- assert(isinstance(variable, str))
- assert(isinstance(value, str))
-
- if os_name() == 'NT':
- return "set " + variable + "=" + value + os.linesep
- else:
- # (todo)
- # The following does not work on CYGWIN and needs to be fixed. On
- # CYGWIN the $(nl) variable holds a Windows new-line \r\n sequence that
- # messes up the executed export command which then reports that the
- # passed variable name is incorrect. This is most likely due to the
- # extra \r character getting interpreted as a part of the variable name.
- #
- # Several ideas pop to mind on how to fix this:
- # * One way would be to separate the commands using the ; shell
- # command separator. This seems like the quickest possible
- # solution but I do not know whether this would break code on any
-        #         platforms I have no access to.
- # * Another would be to not use the terminating $(nl) but that would
- # require updating all the using code so it does not simply
- # prepend this variable to its own commands.
- # * I guess the cleanest solution would be to update Boost Jam to
- # allow explicitly specifying \n & \r characters in its scripts
- # instead of always relying only on the 'current OS native newline
- # sequence'.
- #
- # Some code found to depend on this behaviour:
- # * This Boost Build module.
- # * __test__ rule.
- # * path-variable-setting-command rule.
- # * python.jam toolset.
- # * xsltproc.jam toolset.
- # * fop.jam toolset.
- # (todo) (07.07.2008.) (Jurko)
- #
- # I think that this works correctly in python -- Steven Watanabe
- return variable + "=" + value + os.linesep + "export " + variable + os.linesep
-
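A sketch of the strings this helper produces (os.linesep written out literally; variable and value are made up):

    variable_setting_command('CC', '/usr/bin/gcc')
    # POSIX: 'CC=/usr/bin/gcc\nexport CC\n'
    # NT:    'set CC=/usr/bin/gcc\r\n'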
-def path_variable_setting_command(variable, paths):
- """
-    Returns a command that sets a named shell path variable to the given NATIVE
- paths on the current platform.
- """
- assert(isinstance(variable, str))
- assert(isinstance(paths, list))
- sep = os.path.pathsep
- return variable_setting_command(variable, sep.join(paths))
-
-def prepend_path_variable_command(variable, paths):
- """
- Returns a command that prepends the given paths to the named path variable on
- the current platform.
- """
- return path_variable_setting_command(variable,
- paths + os.environ.get(variable, "").split(os.pathsep))
-
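Sketch (the directory is hypothetical): unlike the Jam rule above, which emits a literal $PATH / %PATH% reference, this port expands the variable's current value at command-generation time:

    prepend_path_variable_command('PATH', ['/opt/tool/bin'])
    # With PATH=/usr/bin:/bin in the environment, on POSIX this yields
    # 'PATH=/opt/tool/bin:/usr/bin:/bin\nexport PATH\n'.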
-def file_creation_command():
- """
- Return a command which can create a file. If 'r' is result of invocation, then
- 'r foobar' will create foobar with unspecified content. What happens if file
- already exists is unspecified.
- """
- if os_name() == 'NT':
- return "echo. > "
- else:
- return "touch "
-
-#FIXME: global variable
-__mkdir_set = set()
-# Matches a bare Windows drive specification such as 'c:' or 'c:\'.
-__re_windows_drive = re.compile(r'^.*:\\?$')
-
-def mkdir(engine, target):
- # If dir exists, do not update it. Do this even for $(DOT).
- bjam.call('NOUPDATE', target)
-
- global __mkdir_set
-
- # FIXME: Where is DOT defined?
- #if $(<) != $(DOT) && ! $($(<)-mkdir):
- if target != '.' and target not in __mkdir_set:
- # Cheesy gate to prevent multiple invocations on same dir.
- __mkdir_set.add(target)
-
- # Schedule the mkdir build action.
- if os_name() == 'NT':
- engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, [])
- else:
- engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, [])
-
- # Prepare a Jam 'dirs' target that can be used to make the build only
- # construct all the target directories.
- engine.add_dependency('dirs', target)
-
- # Recursively create parent directories. $(<:P) = $(<)'s parent & we
- # recurse until root.
-
- s = os.path.dirname(target)
- if os_name() == 'NT':
- if(__re_windows_drive.match(s)):
- s = ''
-
- if s:
- if s != target:
- engine.add_dependency(target, s)
- mkdir(engine, s)
- else:
- bjam.call('NOTFILE', s)
-
-__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)')
-
-def format_name(format, name, target_type, prop_set):
- """ Given a target, as given to a custom tag rule, returns a string formatted
- according to the passed format. Format is a list of properties that is
- represented in the result. For each element of format the corresponding target
-    information is obtained and added to the result string. For all but the
-    literal, the format value is taken as the string to prepend to the output
-    to join the item to the rest of the result. If not given, "-" is used as
-    the joiner.
-
- The format options can be:
-
- <base>[joiner]
- :: The basename of the target name.
- <toolset>[joiner]
- :: The abbreviated toolset tag being used to build the target.
- <threading>[joiner]
- :: Indication of a multi-threaded build.
- <runtime>[joiner]
- :: Collective tag of the build runtime.
- <version:/version-feature | X.Y[.Z]/>[joiner]
- :: Short version tag taken from the given "version-feature"
- in the build properties. Or if not present, the literal
- value as the version number.
- <property:/property-name/>[joiner]
- :: Direct lookup of the given property-name value in the
- build properties. /property-name/ is a regular expression.
- e.g. <property:toolset-.*:flavor> will match every toolset.
- /otherwise/
- :: The literal value of the format argument.
-
- For example this format:
-
- boost_ <base> <toolset> <threading> <runtime> <version:boost-version>
-
- Might return:
-
- boost_thread-vc80-mt-gd-1_33.dll, or
- boost_regex-vc80-gd-1_33.dll
-
- The returned name also has the target type specific prefix and suffix which
- puts it in a ready form to use as the value from a custom tag rule.
- """
- assert(isinstance(format, list))
- assert(isinstance(name, str))
-    assert(isinstance(target_type, str) or not target_type)
- # assert(isinstance(prop_set, property_set.PropertySet))
-    # 'type' and 'virtual_target' are not imported at module level; assume the
-    # usual b2.build locations and import them here.
-    from b2.build import type, virtual_target
-    # Guard reinterpreted: only LIB-derived targets get a decorated name; the
-    # original left 'result' undefined for every other target type.
-    if not type.is_derived(target_type, 'LIB'):
-        return None
-    result = ""
- for f in format:
- grist = get_grist(f)
- if grist == '<base>':
- result += os.path.basename(name)
- elif grist == '<toolset>':
- result += join_tag(ungrist(f),
- toolset_tag(name, target_type, prop_set))
- elif grist == '<threading>':
- result += join_tag(ungrist(f),
- threading_tag(name, target_type, prop_set))
- elif grist == '<runtime>':
- result += join_tag(ungrist(f),
- runtime_tag(name, target_type, prop_set))
- elif grist.startswith('<version:'):
- key = grist[len('<version:'):-1]
-                # prop_set.get() returns a list of values; fall back to the
-                # literal key when the feature is not set.
-                version = prop_set.get('<' + key + '>')
-                version = version[0] if version else key
-                match = __re_version.match(version)
-                result += join_tag(ungrist(f),
-                                   match.group(1) + '_' + match.group(2))
- elif grist.startswith('<property:'):
- key = grist[len('<property:'):-1]
- property_re = re.compile('<(' + key + ')>')
- p0 = None
- for prop in prop_set.raw():
- match = property_re.match(prop)
- if match:
-                        p0 = match.group(1)
- break
- if p0:
- p = prop_set.get('<' + p0 + '>')
- if p:
- assert(len(p) == 1)
-                        result += join_tag(ungrist(f), p[0])
- else:
- result += ungrist(f)
-
- result = virtual_target.add_prefix_and_suffix(
- ''.join(result), target_type, prop_set)
- return result
-
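A hedged sketch of a custom tag callback built on format_name; the feature names come from the docstring above, while the project wiring that would install such a callback is assumed rather than shown here:

    def tag(name, target_type, prop_set):
        # Decorate library names Boost-style: e.g. the 'thread' library built
        # with msvc-8.0, multi-threading and a debug runtime comes out roughly
        # as 'boost_thread-vc80-mt-gd-1_33.dll'.
        return format_name(['boost_', '<base>', '<toolset>', '<threading>',
                            '<runtime>', '<version:boost-version>'],
                           name, target_type, prop_set)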
-def join_tag(joiner, tag):
- if not joiner: joiner = '-'
- return joiner + tag
-
-__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)")
-
-def toolset_tag(name, target_type, prop_set):
- tag = ''
-
- properties = prop_set.raw()
- tools = prop_set.get('<toolset>')
-    assert(len(tools) == 1)
- tools = tools[0]
- if tools.startswith('borland'): tag += 'bcb'
- elif tools.startswith('como'): tag += 'como'
- elif tools.startswith('cw'): tag += 'cw'
- elif tools.startswith('darwin'): tag += 'xgcc'
-    elif tools.startswith('edg'): tag += 'edg'
-    elif tools.startswith('gcc'):
-        # prop_set.get() returns a list; an empty list means no flavor was set.
-        flavor = prop_set.get('<toolset-gcc:flavor>')
-        if flavor and 'mingw' in flavor[0]:
-            tag += 'mgw'
-        else:
-            tag += 'gcc'
- elif tools == 'intel':
- if prop_set.get('<toolset-intel:platform>') == ['win']:
- tag += 'iw'
- else:
- tag += 'il'
- elif tools.startswith('kcc'): tag += 'kcc'
- elif tools.startswith('kylix'): tag += 'bck'
- #case metrowerks* : tag += cw ;
- #case mingw* : tag += mgw ;
- elif tools.startswith('mipspro'): tag += 'mp'
- elif tools.startswith('msvc'): tag += 'vc'
- elif tools.startswith('sun'): tag += 'sw'
- elif tools.startswith('tru64cxx'): tag += 'tru'
- elif tools.startswith('vacpp'): tag += 'xlc'
-
-    version = None
-    for prop in properties:
-        match = __re_toolset_version.match(prop)
-        if match:
-            version = match
-            break
- version_string = None
- # For historical reasons, vc6.0 and vc7.0 use different naming.
-    if tag == 'vc' and version is not None:
- if version.group(1) == '6':
- # Cancel minor version.
- version_string = '6'
- elif version.group(1) == '7' and version.group(2) == '0':
- version_string = '7'
-
- # On intel, version is not added, because it does not matter and it's the
- # version of vc used as backend that matters. Ideally, we'd encode the
- # backend version but that would break compatibility with V1.
- elif tag == 'iw':
- version_string = ''
-
- # On borland, version is not added for compatibility with V1.
- elif tag == 'bcb':
- version_string = ''
-
-    if version_string is None and version is not None:
-        version_string = version.group(1) + version.group(2)
-
-    if version_string:
-        tag += version_string
-
- return tag
-
-
-def threading_tag(name, target_type, prop_set):
- tag = ''
- properties = prop_set.raw()
- if '<threading>multi' in properties: tag = 'mt'
-
- return tag
-
-
-def runtime_tag(name, target_type, prop_set ):
- tag = ''
-
- properties = prop_set.raw()
- if '<runtime-link>static' in properties: tag += 's'
-
-    # This is an ugly thing. In V1, there is code to automatically detect which
- # properties affect a target. So, if <runtime-debugging> does not affect gcc
- # toolset, the tag rules won't even see <runtime-debugging>. Similar
- # functionality in V2 is not implemented yet, so we just check for toolsets
- # which are known to care about runtime debug.
- if '<toolset>msvc' in properties \
- or '<stdlib>stlport' in properties \
- or '<toolset-intel:platform>win' in properties:
- if '<runtime-debugging>on' in properties: tag += 'g'
-
- if '<python-debugging>on' in properties: tag += 'y'
- if '<variant>debug' in properties: tag += 'd'
- if '<stdlib>stlport' in properties: tag += 'p'
- if '<stdlib-stlport:iostream>hostios' in properties: tag += 'n'
-
- return tag
-
-
-## TODO:
-##rule __test__ ( )
-##{
-## import assert ;
-##
-## local nl = "
-##" ;
-##
-## local save-os = [ modules.peek os : .name ] ;
-##
-## modules.poke os : .name : LINUX ;
-##
-## assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)"
-## : path-variable-setting-command PATH : foo bar baz ;
-##
-## assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)"
-## : prepend-path-variable-command PATH : foo bar ;
-##
-## modules.poke os : .name : NT ;
-##
-## assert.result "set PATH=foo;bar;baz$(nl)"
-## : path-variable-setting-command PATH : foo bar baz ;
-##
-## assert.result "set PATH=foo;bar;%PATH%$(nl)"
-## : prepend-path-variable-command PATH : foo bar ;
-##
-## modules.poke os : .name : $(save-os) ;
-##}
-
-def init(manager):
- engine = manager.engine()
-
- engine.register_action("common.MkDir1-quick-fix-for-unix", 'mkdir -p "$(<)"')
- engine.register_action("common.MkDir1-quick-fix-for-windows", 'if not exist "$(<)\\" mkdir "$(<)"')
-
- import b2.tools.make
- import b2.build.alias
-
- global __RM, __CP, __IGNORE, __LN
- # ported from trunk@47281
- if os_name() == 'NT':
- __RM = 'del /f /q'
- __CP = 'copy'
- __IGNORE = '2>nul >nul & setlocal'
- __LN = __CP
- #if not __LN:
- # __LN = CP
- else:
- __RM = 'rm -f'
- __CP = 'cp'
- __IGNORE = ''
- __LN = 'ln'
-
- engine.register_action("common.Clean", __RM + ' "$(>)"',
- flags=['piecemeal', 'together', 'existing'])
- engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"')
- engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE,
- flags=['quietly', 'updated', 'piecemeal', 'together'])
-
- engine.register_action("common.hard-link",
- __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep +
- __LN + ' "$(>)" "$(<)" $(NULL_OUT)')
diff --git a/tools/build/v2/tools/cray.jam b/tools/build/v2/tools/cray.jam
deleted file mode 100644
index 1d5271e972..0000000000
--- a/tools/build/v2/tools/cray.jam
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2001 David Abrahams.
-# Copyright 2004, 2005 Markus Schoepflin.
-# Copyright 2011, John Maddock
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# Cray C++ Compiler
-# See http://docs.cray.com/books/S-2179-50/html-S-2179-50/S-2179-50-toc.html
-#
-
-import feature generators common ;
-import toolset : flags ;
-
-feature.extend toolset : cray ;
-
-# Inherit from Unix toolset to get library ordering magic.
-toolset.inherit cray : unix ;
-
-generators.override cray.prebuilt : builtin.lib-generator ;
-generators.override cray.prebuilt : builtin.prebuilt ;
-generators.override cray.searched-lib-generator : searched-lib-generator ;
-
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters cray : version $(version) ] ;
-
- local command = [ common.get-invocation-command cray : CC : $(command) ] ;
-
- if $(command)
- {
- local root = [ common.get-absolute-tool-path $(command[-1]) ] ;
-
- if $(root)
- {
- flags cray .root $(condition) : "\"$(root)\"/" ;
- }
- }
- # If we can't find 'CC' anyway, at least show 'CC' in the commands
- command ?= CC ;
-
- common.handle-options cray : $(condition) : $(command) : $(options) ;
-}
-
-generators.register-c-compiler cray.compile.c++ : CPP : OBJ : <toolset>cray ;
-generators.register-c-compiler cray.compile.c : C : OBJ : <toolset>cray ;
-
-
-
-# No static linking as far as I can tell.
-# flags cxx LINKFLAGS <runtime-link>static : -bstatic ;
-flags cray.compile OPTIONS <debug-symbols>on : -Gn ;
-flags cray.link OPTIONS <debug-symbols>on : -Gn ;
-
-flags cray.compile OPTIONS <optimization>off : -O0 ;
-flags cray.compile OPTIONS <optimization>speed : -O3 ;
-flags cray.compile OPTIONS <optimization>space : -O1 ;
-
-flags cray.compile OPTIONS <cflags> ;
-flags cray.compile.c++ OPTIONS <cxxflags> ;
-flags cray.compile DEFINES <define> ;
-flags cray.compile INCLUDES <include> ;
-flags cray.link OPTIONS <linkflags> ;
-
-flags cray.link LIBPATH <library-path> ;
-flags cray.link LIBRARIES <library-file> ;
-flags cray.link FINDLIBS-ST <find-static-library> ;
-flags cray.link FINDLIBS-SA <find-shared-library> ;
-
-actions link bind LIBRARIES
-{
- $(CONFIG_COMMAND) $(OPTIONS) -o "$(<)" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
-}
-
-# When creating dynamic libraries, we don't want to be warned about unresolved
-# symbols, therefore all unresolved symbols are marked as expected by
-# '-expect_unresolved *'. This also mirrors the behaviour of the GNU tool
-# chain.
-
-actions link.dll bind LIBRARIES
-{
- $(CONFIG_COMMAND) -shared $(OPTIONS) -o "$(<[1])" -L$(LIBPATH) "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA)
-}
-
-
-# Note: Relaxed ANSI mode (-std) is used for compilation because in strict ANSI
-# C89 mode (-std1) the compiler doesn't accept C++ comments in C files. As -std
-# is the default, no special flag is needed.
-actions compile.c
-{
- $(.root:E=)cc -c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
-}
-
-# Note: The compiler is forced to compile the files as C++ (-x cxx) because
-# otherwise it will silently ignore files with no file extension.
-#
-# Note: We deliberately don't suppress any warnings on the compiler command
-# line, the user can always do this in a customized toolset later on.
-
-actions compile.c++
-{
- $(CONFIG_COMMAND) -c -h gnu $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -o "$(<)" "$(>)"
-}
-
-# Always create the archive from scratch. See the gcc toolset for the rationale.
-RM = [ common.rm-command ] ;
-actions together piecemeal archive
-{
- $(RM) "$(<)"
- ar rc $(<) $(>)
-}
diff --git a/tools/build/v2/tools/darwin.jam b/tools/build/v2/tools/darwin.jam
deleted file mode 100644
index dd6eacb114..0000000000
--- a/tools/build/v2/tools/darwin.jam
+++ /dev/null
@@ -1,583 +0,0 @@
-# Copyright 2003 Christopher Currie
-# Copyright 2006 Dave Abrahams
-# Copyright 2003, 2004, 2005, 2006 Vladimir Prus
-# Copyright 2005-2007 Mat Marcus
-# Copyright 2005-2007 Adobe Systems Incorporated
-# Copyright 2007-2010 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
-# for an explanation of why it is a separate toolset.
-
-import feature : feature ;
-import toolset : flags ;
-import type ;
-import common ;
-import generators ;
-import path : basename ;
-import version ;
-import property-set ;
-import regex ;
-import errors ;
-
-## Use a framework.
-feature framework : : free ;
-
-## The MacOSX version to compile for, which maps to the SDK to use (sysroot).
-feature macosx-version : : propagated link-incompatible symmetric optional ;
-
-## The minimal MacOSX version to target.
-feature macosx-version-min : : propagated optional ;
-
-## A dependency that is forced to be included in the link.
-feature force-load : : free dependency incidental ;
-
-#############################################################################
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-feature.extend toolset : darwin ;
-import gcc ;
-toolset.inherit-generators darwin : gcc : gcc.mingw.link gcc.mingw.link.dll ;
-
-generators.override darwin.prebuilt : builtin.prebuilt ;
-generators.override darwin.searched-lib-generator : searched-lib-generator ;
-
-# Override default do-nothing generators.
-generators.override darwin.compile.c.pch : pch.default-c-pch-generator ;
-generators.override darwin.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-type.set-generated-target-suffix PCH : <toolset>darwin : gch ;
-
-toolset.inherit-rules darwin : gcc : localize ;
-toolset.inherit-flags darwin : gcc
- : <runtime-link>static
- <architecture>arm/<address-model>32
- <architecture>arm/<address-model>64
- <architecture>arm/<instruction-set>
- <architecture>x86/<address-model>32
- <architecture>x86/<address-model>64
- <architecture>x86/<instruction-set>
- <architecture>power/<address-model>32
- <architecture>power/<address-model>64
- <architecture>power/<instruction-set> ;
-
-# Options:
-#
-# <root>PATH
-# Platform root path. The common autodetection will set this to
-# "/Developer". And when a command is given it will be set to
-# the corresponding "*.platform/Developer" directory.
-#
-rule init ( version ? : command * : options * : requirement * )
-{
-    # First time around, figure out the host OSX version.
- if ! $(.host-osx-version)
- {
- .host-osx-version = [ MATCH "^([0-9.]+)"
- : [ SHELL "/usr/bin/sw_vers -productVersion" ] ] ;
- if $(.debug-configuration)
- {
- ECHO notice: OSX version on this machine is $(.host-osx-version) ;
- }
- }
-
- # - The root directory of the tool install.
- local root = [ feature.get-values <root> : $(options) ] ;
-
- # - The bin directory where to find the commands to execute.
- local bin ;
-
- # - The configured compile driver command.
- local command = [ common.get-invocation-command darwin : g++ : $(command) ] ;
-
- # The version as reported by the compiler
- local real-version ;
-
- # - Autodetect the root and bin dir if not given.
- if $(command)
- {
- bin ?= [ common.get-absolute-tool-path $(command[1]) ] ;
- if $(bin) = "/usr/bin"
- {
- root ?= /Developer ;
- }
- else
- {
- local r = $(bin:D) ;
- r = $(r:D) ;
- root ?= $(r) ;
- }
- }
-
- # - Autodetect the version if not given.
- if $(command)
- {
- # - The 'command' variable can have multiple elements. When calling
- # the SHELL builtin we need a single string.
- local command-string = $(command:J=" ") ;
- real-version = [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- version ?= $(real-version) ;
- }
-
- .real-version.$(version) = $(real-version) ;
-
- # - Define the condition for this toolset instance.
- local condition =
- [ common.check-init-parameters darwin $(requirement) : version $(version) ] ;
-
- # - Set the toolset generic common options.
- common.handle-options darwin : $(condition) : $(command) : $(options) ;
-
- # - GCC 4.0 and higher in Darwin does not have -fcoalesce-templates.
- if $(real-version) < "4.0.0"
- {
- flags darwin.compile.c++ OPTIONS $(condition) : -fcoalesce-templates ;
- }
- # - GCC 4.2 and higher in Darwin does not have -Wno-long-double.
- if $(real-version) < "4.2.0"
- {
- flags darwin.compile OPTIONS $(condition) : -Wno-long-double ;
- }
-
- # - Set the link flags common with the GCC toolset.
- gcc.init-link-flags darwin darwin $(condition) ;
-
- # - The symbol strip program.
- local strip ;
- if <striper> in $(options)
- {
- # We can turn off strip by specifying it as empty. In which
- # case we switch to using the linker to do the strip.
- flags darwin.link.dll OPTIONS
- $(condition)/<main-target-type>LIB/<link>shared/<address-model>32/<strip>on : -Wl,-x ;
- flags darwin.link.dll OPTIONS
- $(condition)/<main-target-type>LIB/<link>shared/<address-model>/<strip>on : -Wl,-x ;
- flags darwin.link OPTIONS
- $(condition)/<main-target-type>EXE/<address-model>32/<strip>on : -s ;
- flags darwin.link OPTIONS
- $(condition)/<main-target-type>EXE/<address-model>/<strip>on : -s ;
- }
- else
- {
- # Otherwise we need to find a strip program to use. And hence
- # also tell the link action that we need to use a strip
- # post-process.
- flags darwin.link NEED_STRIP $(condition)/<strip>on : "" ;
- strip =
- [ common.get-invocation-command darwin
- : strip : [ feature.get-values <striper> : $(options) ] : $(bin) : search-path ] ;
- flags darwin.link .STRIP $(condition) : $(strip[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using strip for $(condition) at $(strip[1]) ;
- }
- }
-
-    # - The archive builder (libtool is the default, as creating
-    #   archives on Darwin is complicated).
- local archiver =
- [ common.get-invocation-command darwin
- : libtool : [ feature.get-values <archiver> : $(options) ] : $(bin) : search-path ] ;
- flags darwin.archive .LIBTOOL $(condition) : $(archiver[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using archiver for $(condition) at $(archiver[1]) ;
- }
-
- # - Initialize the SDKs available in the root for this tool.
- local sdks = [ init-available-sdk-versions $(condition) : $(root) ] ;
-
- #~ ECHO --- ;
- #~ ECHO --- bin :: $(bin) ;
- #~ ECHO --- root :: $(root) ;
- #~ ECHO --- version :: $(version) ;
- #~ ECHO --- condition :: $(condition) ;
- #~ ECHO --- strip :: $(strip) ;
- #~ ECHO --- archiver :: $(archiver) ;
- #~ ECHO --- sdks :: $(sdks) ;
- #~ ECHO --- ;
- #~ EXIT ;
-}
-
-# Add and set options for a discovered SDK version.
-local rule init-sdk ( condition * : root ? : version + : version-feature ? )
-{
- local rule version-to-feature ( version + )
- {
- switch $(version[1])
- {
- case iphone* :
- {
- return $(version[1])-$(version[2-]:J=.) ;
- }
- case mac* :
- {
- return $(version[2-]:J=.) ;
- }
- case * :
- {
- return $(version:J=.) ;
- }
- }
- }
-
- if $(version-feature)
- {
- if $(.debug-configuration)
- {
- ECHO notice: available sdk for $(condition)/<macosx-version>$(version-feature) at $(root) ;
- }
-
- # Add the version to the features for specifying them.
- if ! $(version-feature) in [ feature.values macosx-version ]
- {
- feature.extend macosx-version : $(version-feature) ;
- }
- if ! $(version-feature) in [ feature.values macosx-version-min ]
- {
- feature.extend macosx-version-min : $(version-feature) ;
- }
-
- # Set the flags the version needs to compile with, first
- # generic options.
- flags darwin.compile OPTIONS $(condition)/<macosx-version>$(version-feature)
- : -isysroot $(root) ;
- flags darwin.link OPTIONS $(condition)/<macosx-version>$(version-feature)
- : -isysroot $(root) ;
-
- # Then device variation options.
- switch $(version[1])
- {
- case iphonesim* :
- {
- local N = $(version[2]) ;
- if ! $(version[3]) { N += 00 ; }
- else if [ regex.match (..) : $(version[3]) ] { N += $(version[3]) ; }
- else { N += 0$(version[3]) ; }
- if ! $(version[4]) { N += 00 ; }
- else if [ regex.match (..) : $(version[4]) ] { N += $(version[4]) ; }
- else { N += 0$(version[4]) ; }
- N = $(N:J=) ;
- flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
- : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
- flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
- : -D__IPHONE_OS_VERSION_MIN_REQUIRED=$(N) ;
- }
-
- case iphone* :
- {
- flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
- : -miphoneos-version-min=$(version[2-]:J=.) ;
- flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
- : -miphoneos-version-min=$(version[2-]:J=.) ;
- }
-
- case mac* :
- {
- flags darwin.compile OPTIONS <macosx-version-min>$(version-feature)
- : -mmacosx-version-min=$(version[2-]:J=.) ;
- flags darwin.link OPTIONS <macosx-version-min>$(version-feature)
- : -mmacosx-version-min=$(version[2-]:J=.) ;
- }
- }
-
- if $(version[3]) > 0
- {
- # We have a minor version of an SDK. We want to set up
- # previous minor versions, plus the current minor version.
- # So we recurse to set up the previous minor versions, up to
- # the current version.
- local minor-minus-1 = [ CALC $(version[3]) - 1 ] ;
- return
- [ init-sdk $(condition) : $(root)
- : $(version[1-2]) $(minor-minus-1) : [ version-to-feature $(version[1-2]) $(minor-minus-1) ] ]
- $(version-feature) ;
- }
- else
- {
- return $(version-feature) ;
- }
- }
- else if $(version[4])
- {
-        # We have a patch version of an SDK. We want to set up both the
-        # specific patch version and the containing minor version, so we
-        # recurse to set up each of them.
- return
- [ init-sdk $(condition) : $(root)
- : $(version[1-3]) : [ version-to-feature $(version[1-3]) ] ]
- [ init-sdk $(condition) : $(root)
- : $(version) : [ version-to-feature $(version) ] ] ;
- }
- else
- {
- # Yes, this is intentionally recursive.
- return
- [ init-sdk $(condition) : $(root)
- : $(version) : [ version-to-feature $(version) ] ] ;
- }
-}
-
-# Determine the MacOSX SDK versions installed and their locations.
-local rule init-available-sdk-versions ( condition * : root ? )
-{
- root ?= /Developer ;
- local sdks-root = $(root)/SDKs ;
- local sdks = [ GLOB $(sdks-root) : MacOSX*.sdk iPhoneOS*.sdk iPhoneSimulator*.sdk ] ;
- local result ;
- for local sdk in $(sdks)
- {
- local sdk-match = [ MATCH ([^0-9]+)([0-9]+)[.]([0-9x]+)[.]?([0-9x]+)? : $(sdk:D=) ] ;
- local sdk-platform = $(sdk-match[1]:L) ;
- local sdk-version = $(sdk-match[2-]) ;
- if $(sdk-version)
- {
- switch $(sdk-platform)
- {
- case macosx :
- {
- sdk-version = mac $(sdk-version) ;
- }
- case iphoneos :
- {
- sdk-version = iphone $(sdk-version) ;
- }
- case iphonesimulator :
- {
- sdk-version = iphonesim $(sdk-version) ;
- }
- case * :
- {
- sdk-version = $(sdk-version:J=-) ;
- }
- }
- result += [ init-sdk $(condition) : $(sdk) : $(sdk-version) ] ;
- }
- }
- return $(result) ;
-}
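-
-# For illustration (the SDK names below are examples): an installed SDK such as
-# /Developer/SDKs/MacOSX10.5.sdk is registered as <macosx-version>10.5, and
-# iPhoneOS4.3.sdk as <macosx-version>iphone-4.3, so a Jamfile can then request,
-# for instance:
-#~ exe app : app.cpp : <macosx-version-min>10.5 ;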
-
-# Generic options.
-flags darwin.compile OPTIONS <flags> ;
-
-# The following adds objective-c support to darwin.
-# Thanks to http://thread.gmane.org/gmane.comp.lib.boost.build/13759
-
-generators.register-c-compiler darwin.compile.m : OBJECTIVE_C : OBJ : <toolset>darwin ;
-generators.register-c-compiler darwin.compile.mm : OBJECTIVE_CPP : OBJ : <toolset>darwin ;
-
-rule setup-address-model ( targets * : sources * : properties * )
-{
- local ps = [ property-set.create $(properties) ] ;
- local arch = [ $(ps).get <architecture> ] ;
- local address-model = [ $(ps).get <address-model> ] ;
- local osx-version = [ $(ps).get <macosx-version> ] ;
- local gcc-version = [ $(ps).get <toolset-darwin:version> ] ;
- gcc-version = $(.real-version.$(gcc-version)) ;
- local options ;
-
- local support-ppc64 = 1 ;
-
- osx-version ?= $(.host-osx-version) ;
-
- switch $(osx-version)
- {
- case iphone* :
- {
- support-ppc64 = ;
- }
-
- case * :
- if $(osx-version) && ! [ version.version-less [ regex.split $(osx-version) \\. ] : 10 6 ]
- {
- # When targeting 10.6:
-            # - gcc 4.2 will give a compiler error if ppc64 compilation is requested
- # - gcc 4.0 will compile fine, somehow, but then fail at link time
- support-ppc64 = ;
- }
- }
- switch $(arch)
- {
- case combined :
- {
- if $(address-model) = 32_64 {
- if $(support-ppc64) {
- options = -arch i386 -arch ppc -arch x86_64 -arch ppc64 ;
- } else {
- # Build 3-way binary
- options = -arch i386 -arch ppc -arch x86_64 ;
- }
- } else if $(address-model) = 64 {
- if $(support-ppc64) {
- options = -arch x86_64 -arch ppc64 ;
- } else {
- errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
- }
- } else {
- options = -arch i386 -arch ppc ;
- }
- }
-
- case x86 :
- {
- if $(address-model) = 32_64 {
- options = -arch i386 -arch x86_64 ;
- } else if $(address-model) = 64 {
- options = -arch x86_64 ;
- } else {
- options = -arch i386 ;
- }
- }
-
- case power :
- {
- if ! $(support-ppc64)
- && ( $(address-model) = 32_64 || $(address-model) = 64 )
- {
- errors.user-error "64-bit PPC compilation is not supported when targeting OSX 10.6 or later" ;
- }
-
- if $(address-model) = 32_64 {
- options = -arch ppc -arch ppc64 ;
- } else if $(address-model) = 64 {
- options = -arch ppc64 ;
- } else {
- options = -arch ppc ;
- }
- }
-
- case arm :
- {
- options = -arch armv6 ;
- }
- }
-
- if $(options)
- {
- OPTIONS on $(targets) += $(options) ;
- }
-}
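-
-# For example (illustrative target names), a fat-binary request such as
-#~ lib foo : foo.cpp : <architecture>combined <address-model>32_64 ;
-# adds "-arch i386 -arch ppc -arch x86_64" (plus "-arch ppc64" when the
-# targeted OS X version still supports 64-bit PPC) through the OPTIONS
-# variable set above.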
-
-rule setup-threading ( targets * : sources * : properties * )
-{
- gcc.setup-threading $(targets) : $(sources) : $(properties) ;
-}
-
-rule setup-fpic ( targets * : sources * : properties * )
-{
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
-}
-
-rule compile.m ( targets * : sources * : properties * )
-{
- LANG on $(<) = "-x objective-c" ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.m
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.mm ( targets * : sources * : properties * )
-{
- LANG on $(<) = "-x objective-c++" ;
- gcc.setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.mm
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# Set the max header padding to allow renaming of libs for installation.
-flags darwin.link.dll OPTIONS : -headerpad_max_install_names ;
-
-# To link the static runtime we need to link to all the core runtime libraries.
-flags darwin.link OPTIONS <runtime-link>static
- : -nodefaultlibs -shared-libgcc -lstdc++-static -lgcc_eh -lgcc -lSystem ;
-
-# Strip as much as possible when optimizing.
-flags darwin.link OPTIONS <optimization>speed : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
-flags darwin.link OPTIONS <optimization>space : -Wl,-dead_strip -no_dead_strip_inits_and_terms ;
-
-# Dynamic/shared linking.
-flags darwin.compile OPTIONS <link>shared : -dynamic ;
-
-# Misc options.
-flags darwin.compile OPTIONS : -gdwarf-2 -fexceptions ;
-#~ flags darwin.link OPTIONS : -fexceptions ;
-
-# Add the framework names to use.
-flags darwin.link FRAMEWORK <framework> ;
-
-# Add the libraries to force-load (fully include) in the target.
-flags darwin.link FORCE_LOAD <force-load> ;
-
-# This flag is useful for debugging the link step;
-# uncomment it to see what libtool is doing under the hood.
-#~ flags darwin.link.dll OPTIONS : -Wl,-v ;
-
-_ = " " ;
-
-# Set up the -F option to include the paths to any frameworks used.
-local rule prepare-framework-path ( target + )
-{
-    # The -framework option only takes the basename of the framework.
-    # The -F option specifies the directories where a framework
-    # is searched for. So, if we find a <framework> feature
-    # with a path, we need to generate the proper -F option.
- local framework-paths = [ on $(target) return $(FRAMEWORK:D) ] ;
-
- # Be sure to generate no -F if there's no path.
- for local framework-path in $(framework-paths)
- {
- if $(framework-path) != ""
- {
- FRAMEWORK_PATH on $(target) += -F$(framework-path) ;
- }
- }
-}
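-
-# Usage sketch (the framework names and paths are placeholders):
-#~ exe viewer : viewer.mm : <framework>Cocoa <framework>/Library/Frameworks/SDL.framework ;
-# The basenames are passed as "-framework Cocoa" and "-framework SDL", while the
-# directory part of the second value additionally yields "-F/Library/Frameworks".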
-
-rule link ( targets * : sources * : properties * )
-{
- DEPENDS $(targets) : [ on $(targets) return $(FORCE_LOAD) ] ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- prepare-framework-path $(<) ;
-}
-
-# Note that using strip without any options was reported to result in broken
-# binaries, at least on OS X 10.5.5, see:
-# http://svn.boost.org/trac/boost/ticket/2347
-# So we pass -S -x.
-actions link bind LIBRARIES FORCE_LOAD
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -o "$(<)" "$(>)" -Wl,-force_load$(_)"$(FORCE_LOAD)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
- $(NEED_STRIP)"$(.STRIP)" $(NEED_STRIP)-S $(NEED_STRIP)-x $(NEED_STRIP)"$(<)"
-}
-
-rule link.dll ( targets * : sources * : properties * )
-{
- setup-address-model $(targets) : $(sources) : $(properties) ;
- prepare-framework-path $(<) ;
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -dynamiclib -Wl,-single_module -install_name "$(<:B)$(<:S)" -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(FRAMEWORK_PATH) -framework$(_)$(FRAMEWORK:D=:S=) $(OPTIONS) $(USER_OPTIONS)
-}
-
-# We use libtool instead of ar to support universal binary linking
-# TODO: Find a way to use the underlying tools, i.e. lipo, to do this.
-actions piecemeal archive
-{
- "$(.LIBTOOL)" -static -o "$(<:T)" $(ARFLAGS) "$(>:T)"
-}
diff --git a/tools/build/v2/tools/docutils.jam b/tools/build/v2/tools/docutils.jam
deleted file mode 100644
index fc775b6fc3..0000000000
--- a/tools/build/v2/tools/docutils.jam
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for docutils ReStructuredText processing.
-
-import type ;
-import scanner ;
-import generators ;
-import os ;
-import common ;
-import toolset ;
-import path ;
-import feature : feature ;
-import property ;
-
-.initialized = ;
-
-type.register ReST : rst ;
-
-class rst-scanner : common-scanner
-{
- rule __init__ ( paths * )
- {
- common-scanner.__init__ . $(paths) ;
- }
-
- rule pattern ( )
- {
- return "^[ ]*\\.\\.[ ]+include::[ ]+([^
-]+)"
- "^[ ]*\\.\\.[ ]+image::[ ]+([^
-]+)"
- "^[ ]*\\.\\.[ ]+figure::[ ]+([^
-]+)"
- ;
- }
-}
-
-scanner.register rst-scanner : include ;
-type.set-scanner ReST : rst-scanner ;
-
-generators.register-standard docutils.html : ReST : HTML ;
-
-rule init ( docutils-dir ? : tools-dir ? )
-{
- docutils-dir ?= [ modules.peek : DOCUTILS_DIR ] ;
- tools-dir ?= $(docutils-dir)/tools ;
-
- if ! $(.initialized)
- {
- .initialized = true ;
- .docutils-dir = $(docutils-dir) ;
- .tools-dir = $(tools-dir:R="") ;
-
- .setup = [
- common.prepend-path-variable-command PYTHONPATH
- : $(.docutils-dir) $(.docutils-dir)/extras ] ;
- RST2XXX = [ common.find-tool rst2html ] ;
- }
-}
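-
-# A minimal configuration sketch (the paths are placeholders): point the module
-# at a docutils installation so PYTHONPATH and rst2html can be located.
-#~ using docutils : /usr/share/docutils : /usr/share/docutils/tools ;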
-
-rule html ( target : source : properties * )
-{
- if ! [ on $(target) return $(RST2XXX) ]
- {
- local python-cmd = [ property.select <python.interpreter> : $(properties) ] ;
- RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ;
- }
-}
-
-
-feature docutils : : free ;
-feature docutils-html : : free ;
-feature docutils-cmd : : free ;
-toolset.flags docutils COMMON-FLAGS : <docutils> ;
-toolset.flags docutils HTML-FLAGS : <docutils-html> ;
-toolset.flags docutils RST2XXX : <docutils-cmd> ;
-
-actions html
-{
- $(.setup)
- "$(RST2XXX)" $(COMMON-FLAGS) $(HTML-FLAGS) $(>) $(<)
-}
-
diff --git a/tools/build/v2/tools/doxygen.jam b/tools/build/v2/tools/doxygen.jam
deleted file mode 100644
index 8394848dd5..0000000000
--- a/tools/build/v2/tools/doxygen.jam
+++ /dev/null
@@ -1,776 +0,0 @@
-# Copyright 2003, 2004 Douglas Gregor
-# Copyright 2003, 2004, 2005 Vladimir Prus
-# Copyright 2006 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines rules to handle generation of various outputs from source
-# files documented with doxygen comments. The supported transformations are:
-#
-# * Source -> Doxygen XML -> BoostBook XML
-# * Source -> Doxygen HTML
-#
-# The type of transformation is selected based on the target requested. For
-# BoostBook XML, the default, specifying a target with an ".xml" suffix, or an
-# empty suffix, will produce a <target>.xml and <target>.boostbook. For Doxygen
-# HTML specifying a target with an ".html" suffix will produce a directory
-# <target> with the Doxygen html files, and a <target>.html file redirecting to
-# that directory.
-
-import "class" : new ;
-import targets ;
-import feature ;
-import property ;
-import generators ;
-import boostbook ;
-import type ;
-import path ;
-import print ;
-import regex ;
-import stage ;
-import project ;
-import xsltproc ;
-import make ;
-import os ;
-import toolset : flags ;
-import alias ;
-import common ;
-import modules ;
-import project ;
-import utility ;
-import errors ;
-
-
-# Used to specify extra configuration parameters. These get translated
-# into a doxyfile which configures the building of the docs.
-feature.feature doxygen:param : : free ;
-
-# Specify the "<xsl:param>boost.doxygen.header.prefix" XSLT option.
-feature.feature prefix : : free ;
-
-# Specify the "<xsl:param>boost.doxygen.reftitle" XSLT option.
-feature.feature reftitle : : free ;
-
-# Which processor to use for various translations from Doxygen.
-feature.feature doxygen.processor : xsltproc doxproc : propagated implicit ;
-
-# To generate, or not, index sections.
-feature.feature doxygen.doxproc.index : no yes : propagated incidental ;
-
-# The ID for the resulting BoostBook reference section.
-feature.feature doxygen.doxproc.id : : free ;
-
-# The title for the resulting BoostBook reference section.
-feature.feature doxygen.doxproc.title : : free ;
-
-# Location for images when generating XML
-feature.feature doxygen:xml-imagedir : : free ;
-
-# Indicates whether the entire directory should be deleted
-feature.feature doxygen.rmdir : off on : optional incidental ;
-
-# Doxygen configuration input file.
-type.register DOXYFILE : doxyfile ;
-
-# Doxygen XML multi-file output.
-type.register DOXYGEN_XML_MULTIFILE : xml-dir : XML ;
-
-# Doxygen XML coalesced output.
-type.register DOXYGEN_XML : doxygen : XML ;
-
-# Doxygen HTML multifile directory.
-type.register DOXYGEN_HTML_MULTIFILE : html-dir : HTML ;
-
-# Redirection HTML file to HTML multifile directory.
-type.register DOXYGEN_HTML : : HTML ;
-
-type.register DOXYGEN_XML_IMAGES : doxygen-xml-images ;
-
-# Initialize the Doxygen module. Parameters are:
-# name: the name of the 'doxygen' executable. If not specified, the name
-# 'doxygen' will be used
-#
-rule init ( name ? )
-{
- if ! $(.initialized)
- {
- .initialized = true ;
-
- .doxproc = [ modules.binding $(__name__) ] ;
- .doxproc = $(.doxproc:D)/doxproc.py ;
-
- generators.register-composing doxygen.headers-to-doxyfile
- : H HPP CPP : DOXYFILE ;
- generators.register-standard doxygen.run
- : DOXYFILE : DOXYGEN_XML_MULTIFILE ;
- generators.register-standard doxygen.xml-dir-to-boostbook
- : DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
- generators.register-standard doxygen.xml-to-boostbook
- : DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
- generators.register-standard doxygen.collect
- : DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
- generators.register-standard doxygen.run
- : DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
- generators.register-standard doxygen.html-redirect
- : DOXYGEN_HTML_MULTIFILE : DOXYGEN_HTML ;
- generators.register-standard doxygen.copy-latex-pngs
- : DOXYGEN_HTML : DOXYGEN_XML_IMAGES ;
-
- IMPORT $(__name__) : doxygen : : doxygen ;
- }
-
- if $(name)
- {
- modify-config ;
- .doxygen = $(name) ;
- check-doxygen ;
- }
-
- if ! $(.doxygen)
- {
- check-doxygen ;
- }
-}
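-
-# A typical configuration sketch (the path is a placeholder):
-#~ using doxygen : /usr/local/bin/doxygen ;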
-
-rule freeze-config ( )
-{
- if ! $(.initialized)
- {
- errors.user-error "doxygen must be initialized before it can be used." ;
- }
- if ! $(.config-frozen)
- {
- .config-frozen = true ;
-
- if [ .is-cygwin ]
- {
- .is-cygwin = true ;
- }
- }
-}
-
-rule modify-config ( )
-{
- if $(.config-frozen)
- {
- errors.user-error "Cannot change doxygen after it has been used." ;
- }
-}
-
-rule check-doxygen ( )
-{
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using doxygen ":" $(.doxygen) ;
- }
- local extra-paths ;
- if [ os.name ] = NT
- {
- local ProgramFiles = [ modules.peek : ProgramFiles ] ;
- if $(ProgramFiles)
- {
- extra-paths = "$(ProgramFiles:J= )" ;
- }
- else
- {
- extra-paths = "C:\\Program Files" ;
- }
- }
- .doxygen = [ common.get-invocation-command doxygen :
- doxygen : $(.doxygen) : $(extra-paths) ] ;
-}
-
-rule name ( )
-{
- freeze-config ;
- return $(.doxygen) ;
-}
-
-rule .is-cygwin ( )
-{
- if [ os.on-windows ]
- {
- local file = [ path.make [ modules.binding $(__name__) ] ] ;
- local dir = [ path.native
- [ path.join [ path.parent $(file) ] doxygen ] ] ;
- local command =
- "cd \"$(dir)\" && \"$(.doxygen)\" windows-paths-check.doxyfile 2>&1" ;
- result = [ SHELL $(command) ] ;
- if [ MATCH "(Parsing file /)" : $(result) ]
- {
- return true ;
- }
- }
-}
-
-# Runs Doxygen on the given Doxygen configuration file (the source) to generate
-# the Doxygen files. The output is dumped according to the settings in the
-# Doxygen configuration file, not according to the target! Because of this, we
-# essentially "touch" the target file, in effect making it look like we have
-# really written something useful to it. Anyone who uses this action must deal
-# with this behavior.
-#
-actions doxygen-action
-{
- $(RM) "$(*.XML)" & "$(NAME:E=doxygen)" "$(>)" && echo "Stamped" > "$(<)"
-}
-
-
-# Runs the Python doxproc XML processor.
-#
-actions doxproc
-{
- python "$(DOXPROC)" "--xmldir=$(>)" "--output=$(<)" "$(OPTIONS)" "--id=$(ID)" "--title=$(TITLE)"
-}
-
-
-rule translate-path ( path )
-{
- freeze-config ;
- if [ os.on-windows ]
- {
- if [ os.name ] = CYGWIN
- {
- if $(.is-cygwin)
- {
- return $(path) ;
- }
- else
- {
- return $(path:W) ;
- }
- }
- else
- {
- if $(.is-cygwin)
- {
- match = [ MATCH ^(.):(.*) : $(path) ] ;
- if $(match)
- {
- return /cygdrive/$(match[1])$(match[2]:T) ;
- }
- else
- {
- return $(path:T) ;
- }
- }
- else
- {
- return $(path) ;
- }
- }
- }
- else
- {
- return $(path) ;
- }
-}
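-
-# For example (an illustrative path): with a native Windows Boost.Build driving
-# a Cygwin-built doxygen, "c:/boost/doc" is rewritten to "/cygdrive/c/boost/doc"
-# so the generated doxyfile refers to a path doxygen can actually open.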
-
-
-# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
-# and a property list that may contain <doxygen:param> features.
-#
-rule headers-to-doxyfile ( target : sources * : properties * )
-{
- local text "# Generated by Boost.Build version 2" ;
-
- local output-dir ;
-
- # Translate <doxygen:param> into command line flags.
- for local param in [ feature.get-values <doxygen:param> : $(properties) ]
- {
- local namevalue = [ regex.match ([^=]*)=(.*) : $(param) ] ;
- if $(namevalue[1]) = OUTPUT_DIRECTORY
- {
- output-dir = [ translate-path
- [ utility.unquote $(namevalue[2]) ] ] ;
- text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
- }
- else
- {
- text += "$(namevalue[1]) = $(namevalue[2])" ;
- }
- }
-
- if ! $(output-dir)
- {
- output-dir = [ translate-path [ on $(target) return $(LOCATE) ] ] ;
- text += "OUTPUT_DIRECTORY = \"$(output-dir)\"" ;
- }
-
- local headers = ;
- for local header in $(sources:G=)
- {
- header = [ translate-path $(header) ] ;
- headers += \"$(header)\" ;
- }
-
-    # Doxygen generates LaTeX by default. So disable it unconditionally, or at
-    # least until someone needs, and hence writes support for, LaTeX output.
- text += "GENERATE_LATEX = NO" ;
- text += "INPUT = $(headers:J= )" ;
- print.output $(target) plain ;
- print.text $(text) : true ;
-}
-
-
-# Run Doxygen. See doxygen-action for a description of the strange properties of
-# this rule.
-#
-rule run ( target : source : properties * )
-{
- freeze-config ;
- if <doxygen.rmdir>on in $(properties)
- {
- local output-dir =
- [ path.make
- [ MATCH <doxygen:param>OUTPUT_DIRECTORY=\"?([^\"]*) :
- $(properties) ] ] ;
- local html-dir =
- [ path.make
- [ MATCH <doxygen:param>HTML_OUTPUT=(.*) :
- $(properties) ] ] ;
- if $(output-dir) && $(html-dir) &&
- [ path.glob $(output-dir) : $(html-dir) ]
- {
- HTMLDIR on $(target) =
- [ path.native [ path.join $(output-dir) $(html-dir) ] ] ;
- rm-htmldir $(target) ;
- }
- }
- doxygen-action $(target) : $(source) ;
- NAME on $(target) = $(.doxygen) ;
- RM on $(target) = [ modules.peek common : RM ] ;
- *.XML on $(target) =
- [ path.native
- [ path.join
- [ path.make [ on $(target) return $(LOCATE) ] ]
- $(target:B:S=)
- *.xml ] ] ;
-}
-
-if [ os.name ] = NT
-{
- RMDIR = rmdir /s /q ;
-}
-else
-{
- RMDIR = rm -rf ;
-}
-
-actions quietly rm-htmldir
-{
- $(RMDIR) $(HTMLDIR)
-}
-
-# The rules below require Boost.Book stylesheets, so we need some code to check
-# that the boostbook module has actually been initialized.
-#
-rule check-boostbook ( )
-{
- if ! [ modules.peek boostbook : .initialized ]
- {
- ECHO "error: the boostbook module is not initialized" ;
- ECHO "error: you've attempted to use the 'doxygen' toolset, " ;
- ECHO "error: which requires Boost.Book," ;
- ECHO "error: but never initialized Boost.Book." ;
- EXIT "error: Hint: add 'using boostbook ;' to your user-config.jam" ;
- }
-}
-
-
-# Collect the set of Doxygen XML files into a single XML source file that can be
-# handled by an XSLT processor. The source is completely ignored (see
-# doxygen-action), because this action picks up the Doxygen XML index file
-# xml/index.xml. This is because we can not teach Doxygen to act like a NORMAL
-# program and take a "-o output.xml" argument (grrrr). The target of the
-# collection will be a single Doxygen XML file.
-#
-rule collect ( target : source : properties * )
-{
- check-boostbook ;
- local collect-xsl-dir
- = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen collect ] ] ;
- local source-path
- = [ path.make [ on $(source) return $(LOCATE) ] ] ;
- local collect-path
- = [ path.root [ path.join $(source-path) $(source:B) ] [ path.pwd ] ] ;
- local native-path
- = [ path.native $(collect-path) ] ;
- local real-source
- = [ path.native [ path.join $(collect-path) index.xml ] ] ;
- xsltproc.xslt $(target) : $(real-source) $(collect-xsl-dir:S=.xsl)
- : <xsl:param>doxygen.xml.path=$(native-path) ;
-}
-
-
-# Translate Doxygen XML into BoostBook.
-#
-rule xml-to-boostbook ( target : source : properties * )
-{
- check-boostbook ;
- local xsl-dir = [ boostbook.xsl-dir ] ;
- local d2b-xsl = [ path.native [ path.join [ boostbook.xsl-dir ] doxygen
- doxygen2boostbook.xsl ] ] ;
-
- local xslt-properties = $(properties) ;
- for local prefix in [ feature.get-values <prefix> : $(properties) ]
- {
- xslt-properties += "<xsl:param>boost.doxygen.header.prefix=$(prefix)" ;
- }
- for local title in [ feature.get-values <reftitle> : $(properties) ]
- {
- xslt-properties += "<xsl:param>boost.doxygen.reftitle=$(title)" ;
- }
-
- xsltproc.xslt $(target) : $(source) $(d2b-xsl) : $(xslt-properties) ;
-}
-
-
-flags doxygen.xml-dir-to-boostbook OPTIONS <doxygen.doxproc.index>yes : --enable-index ;
-flags doxygen.xml-dir-to-boostbook ID <doxygen.doxproc.id> ;
-flags doxygen.xml-dir-to-boostbook TITLE <doxygen.doxproc.title> ;
-
-
-rule xml-dir-to-boostbook ( target : source : properties * )
-{
- DOXPROC on $(target) = $(.doxproc) ;
-
- LOCATE on $(source:S=) = [ on $(source) return $(LOCATE) ] ;
-
- doxygen.doxproc $(target) : $(source:S=) ;
-}
-
-
-# Generate the HTML redirect to HTML dir index.html file.
-#
-rule html-redirect ( target : source : properties * )
-{
- local uri = "$(target:B)/index.html" ;
- print.output $(target) plain ;
- print.text
-"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\"
- \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
-<html xmlns=\"http://www.w3.org/1999/xhtml\">
-<head>
- <meta http-equiv=\"refresh\" content=\"0; URL=$(uri)\" />
-
- <title></title>
-</head>
-
-<body>
- Automatic redirection failed, please go to <a href=
- \"$(uri)\">$(uri)</a>.
-</body>
-</html>
-"
- : true ;
-}
-
-rule copy-latex-pngs ( target : source : requirements * )
-{
- local directory = [ path.native
- [ feature.get-values <doxygen:xml-imagedir> :
- $(requirements) ] ] ;
-
- local location = [ on $(target) return $(LOCATE) ] ;
-
- local pdf-location =
- [ path.native
- [ path.join
- [ path.make $(location) ]
- [ path.make $(directory) ] ] ] ;
- local html-location =
- [ path.native
- [ path.join
- .
- html
- [ path.make $(directory) ] ] ] ;
-
- common.MkDir $(pdf-location) ;
- common.MkDir $(html-location) ;
-
- DEPENDS $(target) : $(pdf-location) $(html-location) ;
-
- if [ os.name ] = NT
- {
- CP on $(target) = copy /y ;
- FROM on $(target) = \\*.png ;
- TOHTML on $(target) = .\\html\\$(directory) ;
- TOPDF on $(target) = \\$(directory) ;
- }
- else
- {
- CP on $(target) = cp ;
- FROM on $(target) = /*.png ;
- TOHTML on $(target) = ./html/$(directory) ;
- TOPDF on $(target) = $(target:D)/$(directory) ;
- }
-}
-
-actions copy-latex-pngs
-{
- $(CP) $(>:S=)$(FROM) $(TOHTML)
- $(CP) $(>:S=)$(FROM) $(<:D)$(TOPDF)
- echo "Stamped" > "$(<)"
-}
-
-# Building LaTeX images for Doxygen XML depends
-# on latex, dvips, and gs being in your PATH.
-# This is true for most Unix installs, but
-# not on Win32, where you will need to install
-# MiKTeX and Ghostscript and add these tools
-# to your path.
-
-actions check-latex
-{
- latex -version >$(<)
-}
-
-actions check-dvips
-{
- dvips -version >$(<)
-}
-
-if [ os.name ] = "NT"
-{
- actions check-gs
- {
- gswin32c -version >$(<)
- }
-}
-else
-{
- actions check-gs
- {
- gs -version >$(<)
- }
-}
-
-rule check-tools ( )
-{
- if ! $(.check-tools-targets)
- {
- # Find the root project.
- local root-project = [ project.current ] ;
- root-project = [ $(root-project).project-module ] ;
- while
- [ project.attribute $(root-project) parent-module ] &&
- [ project.attribute $(root-project) parent-module ] != user-config
- {
- root-project =
- [ project.attribute $(root-project) parent-module ] ;
- }
-
- .latex.check = [ new file-target latex.check
- :
- : [ project.target $(root-project) ]
- : [ new action : doxygen.check-latex ]
- :
- ] ;
- .dvips.check = [ new file-target dvips.check
- :
- : [ project.target $(root-project) ]
- : [ new action : doxygen.check-dvips ]
- :
- ] ;
- .gs.check = [ new file-target gs.check
- :
- : [ project.target $(root-project) ]
- : [ new action : doxygen.check-gs ]
- :
- ] ;
- .check-tools-targets = $(.latex.check) $(.dvips.check) $(.gs.check) ;
- }
- return $(.check-tools-targets) ;
-}
-
-project.initialize $(__name__) ;
-project doxygen ;
-
-class doxygen-check-tools-target-class : basic-target
-{
- import doxygen ;
- rule construct ( name : sources * : property-set )
- {
- return [ property-set.empty ] [ doxygen.check-tools ] ;
- }
-}
-
-local project = [ project.current ] ;
-
-targets.main-target-alternative
- [ new doxygen-check-tools-target-class check-tools : $(project)
- : [ targets.main-target-sources : check-tools : no-renaming ]
- : [ targets.main-target-requirements : $(project) ]
- : [ targets.main-target-default-build : $(project) ]
- : [ targets.main-target-usage-requirements : $(project) ]
- ] ;
-
-# User-level rule to generate BoostBook XML from a set of headers via Doxygen.
-#
-rule doxygen ( target : sources * : requirements * : default-build * : usage-requirements * )
-{
- freeze-config ;
- local project = [ project.current ] ;
-
- if $(target:S) = .html
- {
- # Build an HTML directory from the sources.
- local html-location = [ feature.get-values <location> : $(requirements) ] ;
- local output-dir ;
- if [ $(project).get build-dir ]
- {
- # Explicitly specified build dir. Add html at the end.
- output-dir = [ path.join [ $(project).build-dir ] $(html-location:E=html) ] ;
- }
- else
- {
-            # Trim 'bin' from the implicit build dir, for no other reason than
-            # backward compatibility.
- output-dir = [ path.join [ path.parent [ $(project).build-dir ] ]
- $(html-location:E=html) ] ;
- }
- output-dir = [ path.root $(output-dir) [ path.pwd ] ] ;
- local output-dir-native = [ path.native $(output-dir) ] ;
- requirements = [ property.change $(requirements) : <location> ] ;
-
- ## The doxygen configuration file.
- targets.main-target-alternative
- [ new typed-target $(target:S=.tag) : $(project) : DOXYFILE
- : [ targets.main-target-sources $(sources) : $(target:S=.tag) ]
- : [ targets.main-target-requirements $(requirements)
- <doxygen:param>GENERATE_HTML=YES
- <doxygen:param>GENERATE_XML=NO
- <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
- <doxygen:param>HTML_OUTPUT=$(target:B)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target:S=.tag) ;
-
- ## The html directory to generate by running doxygen.
- targets.main-target-alternative
- [ new typed-target $(target:S=.dir) : $(project) : DOXYGEN_HTML_MULTIFILE
- : $(target:S=.tag)
- : [ targets.main-target-requirements $(requirements)
- <doxygen:param>"OUTPUT_DIRECTORY=\"$(output-dir-native)\""
- <doxygen:param>HTML_OUTPUT=$(target:B)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target:S=.dir) ;
-
- ## The redirect html file into the generated html.
- targets.main-target-alternative
- [ new typed-target $(target) : $(project) : DOXYGEN_HTML
- : $(target:S=.dir)
- : [ targets.main-target-requirements $(requirements)
- <location>$(output-dir)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- }
- else
- {
- # Build a BoostBook XML file from the sources.
- local location-xml = [ feature.get-values <location> : $(requirements) ] ;
- requirements = [ property.change $(requirements) : <location> ] ;
- local target-xml = $(target:B=$(target:B)-xml) ;
-
- # Check whether we need to build images
- local images-location =
- [ feature.get-values <doxygen:xml-imagedir> : $(requirements) ] ;
- if $(images-location)
- {
- doxygen $(target).doxygen-xml-images.html : $(sources)
- : $(requirements)
- <doxygen.rmdir>on
- <doxygen:param>QUIET=YES
- <doxygen:param>WARNINGS=NO
- <doxygen:param>WARN_IF_UNDOCUMENTED=NO
- <dependency>/doxygen//check-tools ;
- $(project).mark-target-as-explicit
- $(target).doxygen-xml-images.html ;
-
- targets.main-target-alternative
- [ new typed-target $(target).doxygen-xml-images
- : $(project) : DOXYGEN_XML_IMAGES
- : $(target).doxygen-xml-images.html
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build)
- : $(project) ]
- ] ;
-
- $(project).mark-target-as-explicit
- $(target).doxygen-xml-images ;
-
- if ! [ regex.match "^(.*/)$" : $(images-location) ]
- {
- images-location = $(images-location)/ ;
- }
-
- requirements +=
- <dependency>$(target).doxygen-xml-images
- <xsl:param>boost.doxygen.formuladir=$(images-location) ;
- }
-
- ## The doxygen configuration file.
- targets.main-target-alternative
- [ new typed-target $(target-xml:S=.tag) : $(project) : DOXYFILE
- : [ targets.main-target-sources $(sources) : $(target-xml:S=.tag) ]
- : [ targets.main-target-requirements $(requirements)
- <doxygen:param>GENERATE_HTML=NO
- <doxygen:param>GENERATE_XML=YES
- <doxygen:param>XML_OUTPUT=$(target-xml)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target-xml:S=.tag) ;
-
- ## The Doxygen XML directory of the processed source files.
- targets.main-target-alternative
- [ new typed-target $(target-xml:S=.dir) : $(project) : DOXYGEN_XML_MULTIFILE
- : $(target-xml:S=.tag)
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target-xml:S=.dir) ;
-
-        ## The resulting BoostBook file is generated by the processor tool. The
-        ## tool can be either xsltproc plus the accompanying XSL scripts, or the
-        ## Python doxproc.py script.
- targets.main-target-alternative
- [ new typed-target $(target-xml) : $(project) : BOOSTBOOK
- : $(target-xml:S=.dir)
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target-xml) ;
-
- targets.main-target-alternative
- [ new install-target-class $(target:S=.xml) : $(project)
- : $(target-xml)
- : [ targets.main-target-requirements $(requirements)
- <location>$(location-xml:E=.)
- <name>$(target:S=.xml)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
- $(project).mark-target-as-explicit $(target:S=.xml) ;
-
- targets.main-target-alternative
- [ new alias-target-class $(target) : $(project)
- :
- : [ targets.main-target-requirements $(requirements)
- : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements)
- <dependency>$(target:S=.xml)
- : $(project) ]
- ] ;
- }
-}
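-
-# Usage sketch (target, header and title names are placeholders): generate
-# BoostBook XML for a set of headers, passing extra doxyfile parameters and a
-# reference section title through the features declared above.
-#~ doxygen mylib-reference : ../include/mylib.hpp
-#~     : <doxygen:param>EXTRACT_ALL=YES
-#~       <reftitle>"MyLib Reference" ;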
diff --git a/tools/build/v2/tools/gcc.jam b/tools/build/v2/tools/gcc.jam
deleted file mode 100644
index ee3aae128a..0000000000
--- a/tools/build/v2/tools/gcc.jam
+++ /dev/null
@@ -1,1190 +0,0 @@
-# Copyright 2001 David Abrahams.
-# Copyright 2002-2006 Rene Rivera.
-# Copyright 2002-2003 Vladimir Prus.
-# Copyright (c) 2005 Reece H. Dunn.
-# Copyright 2006 Ilya Sokolov.
-# Copyright 2007 Roland Schwarz
-# Copyright 2007 Boris Gubenko.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import os ;
-import pch ;
-import property ;
-import property-set ;
-import toolset ;
-import type ;
-import rc ;
-import regex ;
-import set ;
-import unix ;
-import fortran ;
-
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-
-feature.extend toolset : gcc ;
-# feature.subfeature toolset gcc : flavor : : optional ;
-
-toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
-toolset.inherit-flags gcc : unix ;
-toolset.inherit-rules gcc : unix ;
-
-generators.override gcc.prebuilt : builtin.prebuilt ;
-generators.override gcc.searched-lib-generator : searched-lib-generator ;
-
-# Make gcc toolset object files use the "o" suffix on all platforms.
-type.set-generated-target-suffix OBJ : <toolset>gcc : o ;
-type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>windows : o ;
-type.set-generated-target-suffix OBJ : <toolset>gcc <target-os>cygwin : o ;
-
-# Initializes the gcc toolset for the given version. If necessary, command may
-# be used to specify where the compiler is located. The parameter 'options' is a
-# space-delimited list of options, each one specified as
-# <option-name>option-value. Valid option names are: cxxflags, linkflags and
-# linker-type. Accepted linker-type values are aix, darwin, gnu, hpux, osf or
-# sun and the default value will be selected based on the current OS.
-# Example:
-# using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
-#
-# The compiler command to use is detected in a three step manner:
-# 1) If an explicit command is specified by the user, it will be used and must be available.
-# 2) If only a certain version is specified, it is enforced:
-# - either a command 'g++-VERSION' must be available
-# - or the default command 'g++' must be available and match the exact version.
-# 3) Without user-provided restrictions use default 'g++'
-rule init ( version ? : command * : options * )
-{
- #1): use user-provided command
- local tool-command = ;
- if $(command)
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : g++ : $(command) ] ;
- if ! $(tool-command)
- {
- errors.error "toolset gcc initialization:" :
- "provided command '$(command)' not found" :
- "initialized from" [ errors.nearest-user-location ] ;
- }
- }
- #2): enforce user-provided version
- else if $(version)
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : "g++-$(version[1])" ] ;
-
- #2.1) fallback: check whether "g++" reports the requested version
- if ! $(tool-command)
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
- if $(tool-command)
- {
- local tool-command-string = $(tool-command:J=" ") ;
- local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ;
- if $(tool-version) != $(version)
- {
-                    # Permit a match between the two-digit version specified by the user
-                    # (e.g. 4.4) and the 3-digit version reported by gcc.
-                    # Since only two digits are present in the binary name anyway,
-                    # insisting that the user specify a 3-digit version when
-                    # configuring Boost.Build, while it is not required on the
-                    # command line, would be strange.
- local stripped = [ MATCH "^([0-9]+\.[0-9]+).*" : $(tool-version) ] ;
- if $(stripped) != $(version)
- {
- errors.error "toolset gcc initialization:" :
- "version '$(version)' requested but 'g++-$(version)' not found and version '$(tool-version)' of default '$(tool-command)' does not match" :
- "initialized from" [ errors.nearest-user-location ] ;
- tool-command = ;
- }
- # Use full 3-digit version to be compatible with the 'using gcc ;' case
- version = $(tool-version) ;
- }
- }
- else
- {
- errors.error "toolset gcc initialization:" :
- "version '$(version)' requested but neither 'g++-$(version)' nor default 'g++' found" :
- "initialized from" [ errors.nearest-user-location ] ;
- }
- }
- }
- #3) default: no command and no version specified, try using default command "g++"
- else
- {
- tool-command = [ common.get-invocation-command-nodefault gcc : g++ ] ;
- if ! $(tool-command)
- {
- errors.error "toolset gcc initialization:" :
- "no command provided, default command 'g++' not found" :
- "initialized from" [ errors.nearest-user-location ] ;
- }
- }
-
-
- # Information about the gcc command...
- # The command.
- local command = $(tool-command) ;
- # The root directory of the tool install.
- local root = [ feature.get-values <root> : $(options) ] ;
- # The bin directory where to find the command to execute.
- local bin ;
- # The flavor of compiler.
- local flavor = [ feature.get-values <flavor> : $(options) ] ;
- # Autodetect the root and bin dir if not given.
- if $(command)
- {
- bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
- root ?= $(bin:D) ;
- }
- # The 'command' variable can have multiple elements. When calling
- # the SHELL builtin we need a single string.
- local command-string = $(command:J=" ") ;
- # Autodetect the version and flavor if not given.
- if $(command)
- {
- local machine = [ MATCH "^([^ ]+)"
- : [ SHELL "$(command-string) -dumpmachine" ] ] ;
- version ?= [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- switch $(machine:L)
- {
- case *mingw* : flavor ?= mingw ;
- }
- }
-
- local condition ;
- if $(flavor)
- {
- condition = [ common.check-init-parameters gcc
- : version $(version)
- : flavor $(flavor)
- ] ;
- }
- else
- {
- condition = [ common.check-init-parameters gcc
- : version $(version)
- ] ;
- condition = $(condition) ; #/<toolset-gcc:flavor> ;
- }
-
- common.handle-options gcc : $(condition) : $(command) : $(options) ;
-
- local linker = [ feature.get-values <linker-type> : $(options) ] ;
- # The logic below should actually be keyed on <target-os>
- if ! $(linker)
- {
- if [ os.name ] = OSF
- {
- linker = osf ;
- }
- else if [ os.name ] = HPUX
- {
- linker = hpux ;
- }
- else if [ os.name ] = AIX
- {
- linker = aix ;
- }
- else if [ os.name ] = SOLARIS
- {
- linker = sun ;
- }
- else
- {
- linker = gnu ;
- }
- }
- init-link-flags gcc $(linker) $(condition) ;
-
-
- # If gcc is installed in non-standard location, we'd need to add
- # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
- # rules).
- if $(command)
- {
- # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
- # and all must be added to LD_LIBRARY_PATH. The linker will pick the
-        # right ones. Note that we don't provide a clean way to build a 32-bit
-        # binary with a 64-bit compiler, but the user can always pass -m32 manually.
- local lib_path = $(root)/bin $(root)/lib $(root)/lib32 $(root)/lib64 ;
- if $(.debug-configuration)
- {
- ECHO notice: using gcc libraries :: $(condition) :: $(lib_path) ;
- }
- toolset.flags gcc.link RUN_PATH $(condition) : $(lib_path) ;
- }
-
-    # If it's not a system gcc install we should adjust the various programs as
-    # needed to prefer using the install-specific versions. This is essential
-    # for correct use of MinGW and for cross-compiling.
-
- local nl = "
-" ;
-
- # - The archive builder.
- local archiver = [ common.get-invocation-command gcc
- : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ar" ] ] ]
- : [ feature.get-values <archiver> : $(options) ]
- : $(bin)
- : search-path ] ;
- toolset.flags gcc.archive .AR $(condition) : $(archiver[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using gcc archiver :: $(condition) :: $(archiver[1]) ;
- }
-
- # - Ranlib
- local ranlib = [ common.get-invocation-command gcc
- : [ NORMALIZE_PATH [ MATCH "(.*)[$(nl)]+" : [ SHELL "$(command-string) -print-prog-name=ranlib" ] ] ]
- : [ feature.get-values <ranlib> : $(options) ]
- : $(bin)
- : search-path ] ;
- toolset.flags gcc.archive .RANLIB $(condition) : $(ranlib[1]) ;
- if $(.debug-configuration)
- {
- ECHO notice: using gcc ranlib :: $(condition) :: $(ranlib[1]) ;
- }
-
-
- # - The resource compiler.
- local rc =
- [ common.get-invocation-command-nodefault gcc
- : windres : [ feature.get-values <rc> : $(options) ] : $(bin) : search-path ] ;
- local rc-type =
- [ feature.get-values <rc-type> : $(options) ] ;
- rc-type ?= windres ;
- if ! $(rc)
- {
-        # If we can't find an RC compiler we fall back to a null RC compiler that
- # creates empty object files. This allows the same Jamfiles to work
- # across the board. The null RC uses the assembler to create the empty
- # objects, so configure that.
- rc = [ common.get-invocation-command gcc : as : : $(bin) : search-path ] ;
- rc-type = null ;
- }
- rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
-}
-
-if [ os.name ] = NT
-{
- # This causes single-line command invocation to not go through .bat files,
- # thus avoiding command-line length limitations.
- JAMSHELL = % ;
-}
-
-generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
-generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
-generators.register-fortran-compiler gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ;
-
-# pch support
-
-# The compiler looks for a precompiled header in each directory just before it
-# looks for the include file in that directory. The name searched for is the
-# name specified in the #include directive with ".gch" suffix appended. The
-# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to
-# full name of the header.
-
-type.set-generated-target-suffix PCH : <toolset>gcc : gch ;
-
-# GCC-specific pch generator.
-class gcc-pch-generator : pch-generator
-{
- import project ;
- import property-set ;
- import type ;
-
- rule run-pch ( project name ? : property-set : sources + )
- {
- # Find the header in sources. Ignore any CPP sources.
- local header ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] H ]
- {
- header = $(s) ;
- }
- }
-
- # Error handling: Base header file name should be the same as the base
- # precompiled header name.
- local header-name = [ $(header).name ] ;
- local header-basename = $(header-name:B) ;
- if $(header-basename) != $(name)
- {
- local location = [ $(project).project-module ] ;
- errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
- }
-
- local pch-file = [ generator.run $(project) $(name) : $(property-set)
- : $(header) ] ;
-
- # return result of base class and pch-file property as usage-requirements
- return
- [ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ]
- $(pch-file)
- ;
- }
-
- # Calls the base version specifying source's name as the name of the created
- # target. As result, the PCH will be named whatever.hpp.gch, and not
- # whatever.gch.
- rule generated-targets ( sources + : property-set : project name ? )
- {
- name = [ $(sources[1]).name ] ;
- return [ generator.generated-targets $(sources)
- : $(property-set) : $(project) $(name) ] ;
- }
-}
-
-# Note: the 'H' source type will catch both '.h' and '.hpp' headers. The
-# latter has the HPP type, but HPP is derived from H. The type of compilation
-# is determined entirely by the destination type.
-generators.register [ new gcc-pch-generator gcc.compile.c.pch : H : C_PCH : <pch>on <toolset>gcc ] ;
-generators.register [ new gcc-pch-generator gcc.compile.c++.pch : H : CPP_PCH : <pch>on <toolset>gcc ] ;
-
-# Override default do-nothing generators.
-generators.override gcc.compile.c.pch : pch.default-c-pch-generator ;
-generators.override gcc.compile.c++.pch : pch.default-cpp-pch-generator ;
-
-toolset.flags gcc.compile PCH_FILE <pch>on : <pch-file> ;
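-
-# Usage sketch (target names are placeholders), assuming the cpp-pch main
-# target rule from pch.jam: the header is compiled to whatever.hpp.gch as
-# described above, and dependent sources pick up -Winvalid-pch via the usage
-# requirements.
-#~ cpp-pch pch : pch.hpp ;
-#~ exe app : app.cpp pch ;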
-
-# Declare flags and action for compilation.
-toolset.flags gcc.compile OPTIONS <optimization>off : -O0 ;
-toolset.flags gcc.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags gcc.compile OPTIONS <optimization>space : -Os ;
-
-toolset.flags gcc.compile OPTIONS <inlining>off : -fno-inline ;
-toolset.flags gcc.compile OPTIONS <inlining>on : -Wno-inline ;
-toolset.flags gcc.compile OPTIONS <inlining>full : -finline-functions -Wno-inline ;
-
-toolset.flags gcc.compile OPTIONS <warnings>off : -w ;
-toolset.flags gcc.compile OPTIONS <warnings>on : -Wall ;
-toolset.flags gcc.compile OPTIONS <warnings>all : -Wall -pedantic ;
-toolset.flags gcc.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-toolset.flags gcc.compile OPTIONS <debug-symbols>on : -g ;
-toolset.flags gcc.compile OPTIONS <profiling>on : -pg ;
-toolset.flags gcc.compile OPTIONS <rtti>off : -fno-rtti ;
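-
-# For illustration (target names are placeholders), a requirement set such as
-#~ exe bench : bench.cpp : <optimization>speed <inlining>full <warnings>all ;
-# maps to "-O3 -finline-functions -Wno-inline -Wall -pedantic" through the
-# flags declared above.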
-
-rule setup-fpic ( targets * : sources * : properties * )
-{
- local link = [ feature.get-values link : $(properties) ] ;
- if $(link) = shared
- {
- local target = [ feature.get-values target-os : $(properties) ] ;
-
- # This logic will add -fPIC for all compilations:
- #
- # lib a : a.cpp b ;
- # obj b : b.cpp ;
- # exe c : c.cpp a d ;
- # obj d : d.cpp ;
- #
-        # This all is fine, except that 'd' will be compiled with -fPIC even though
-        # it is not needed, as 'd' is used only in an exe. However, it is hard to
-        # detect where a target is going to be used. Alternatively, we can set -fPIC
-        # only when the main target type is LIB, but then 'b' would be compiled
-        # without -fPIC, which would lead to link errors on x86-64. So, compile
-        # everything with -fPIC.
- #
- # Yet another alternative would be to create a propagated <sharedable>
- # feature and set it when building shared libraries, but that would be hard
- # to implement and would increase the target path length even more.
-
-        # On Windows, -fPIC is the default; specifying it explicitly leads to
-        # a warning.
- if $(target) != cygwin && $(target) != windows
- {
- OPTIONS on $(targets) += -fPIC ;
- }
- }
-}
-
-rule setup-address-model ( targets * : sources * : properties * )
-{
- local model = [ feature.get-values address-model : $(properties) ] ;
- if $(model)
- {
- local option ;
- local os = [ feature.get-values target-os : $(properties) ] ;
- if $(os) = aix
- {
- if $(model) = 32
- {
- option = -maix32 ;
- }
- else
- {
- option = -maix64 ;
- }
- }
- else if $(os) = hpux
- {
- if $(model) = 32
- {
- option = -milp32 ;
- }
- else
- {
- option = -mlp64 ;
- }
- }
- else
- {
- local arch = [ feature.get-values architecture : $(properties) ] ;
- if $(arch) != arm
- {
- if $(model) = 32
- {
- option = -m32 ;
- }
- else if $(model) = 64
- {
- option = -m64 ;
- }
- }
- # For darwin, the model can be 32_64. darwin.jam will handle that
- # on its own.
- }
- OPTIONS on $(targets) += $(option) ;
- }
-}
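-
-# For example (an illustrative target), a request such as
-#~ exe app : app.cpp : <address-model>64 ;
-# adds -m64 on most targets, -maix64 on AIX and -mlp64 on HP-UX, as selected
-# above.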
-
-
-# FIXME: this should not use os.name.
-if [ os.name ] != NT && [ os.name ] != OSF && [ os.name ] != HPUX && [ os.name ] != AIX
-{
- # OSF does have an option called -soname but it does not seem to work as
- # expected, therefore it has been disabled.
- HAVE_SONAME = "" ;
- SONAME_OPTION = -h ;
-}
-
-# HPUX, for some reason, seems to use '+h', not '-h'.
-if [ os.name ] = HPUX
-{
- HAVE_SONAME = "" ;
- SONAME_OPTION = +h ;
-}
-
-toolset.flags gcc.compile USER_OPTIONS <cflags> ;
-toolset.flags gcc.compile.c++ USER_OPTIONS <cxxflags> ;
-toolset.flags gcc.compile DEFINES <define> ;
-toolset.flags gcc.compile INCLUDES <include> ;
-toolset.flags gcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
-toolset.flags gcc.compile.fortran USER_OPTIONS <fflags> ;
-
-rule compile.c++.pch ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c++.pch
-{
- "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c.pch ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c.pch
-{
- "$(CONFIG_COMMAND)" -x c-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.c++.preprocess ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
- # Some extensions are compiled as C++ by default. For others, we need to
- # pass -x c++. We could always pass -x c++ but distcc does not work with it.
- if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
- {
- LANG on $(<) = "-x c++" ;
- }
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-rule compile.c.preprocess ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
-    # If we use the name g++ then the default file suffix -> language mapping
-    # does not work, so we have to pass the -x option. Maybe we can work around
-    # this by allowing the user to specify both C and C++ compiler names.
- #if $(>:S) != .c
- #{
- LANG on $(<) = "-x c" ;
- #}
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-rule compile.c++ ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
- # Some extensions are compiled as C++ by default. For others, we need to
- # pass -x c++. We could always pass -x c++ but distcc does not work with it.
- if ! $(>:S) in .cc .cp .cxx .cpp .c++ .C
- {
- LANG on $(<) = "-x c++" ;
- }
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-
- # Here we want to raise the template-depth parameter value to something
- # higher than the default value of 17. Note that we could do this using the
- # feature.set-default rule but we do not want to set the default value for
- # all toolsets as well.
- #
- # TODO: This 'modified default' has been inherited from some 'older Boost
- # Build implementation' and has most likely been added to make some Boost
- # library parts compile correctly. We should see what exactly prompted this
- # and whether we can get around the problem more locally.
- local template-depth = [ on $(<) return $(TEMPLATE_DEPTH) ] ;
- if ! $(template-depth)
- {
- TEMPLATE_DEPTH on $(<) = 128 ;
- }
-}
-
-rule compile.c ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-
-    # If we use the name g++ then the default file suffix -> language mapping
-    # does not work, so we have to pass the -x option. Maybe we can work around
-    # this by allowing the user to specify both C and C++ compiler names.
- #if $(>:S) != .c
- #{
- LANG on $(<) = "-x c" ;
- #}
- DEPENDS $(<) : [ on $(<) return $(PCH_FILE) ] ;
-}
-
-rule compile.fortran ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
-}
-
-actions compile.c++ bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<:W)" "$(>:W)"
-}
-
-actions compile.c bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++.preprocess bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>:W)" -E >"$(<:W)"
-}
-
-actions compile.c.preprocess bind PCH_FILE
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" "$(>)" -E >$(<)
-}
-
-actions compile.fortran
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.asm ( targets * : sources * : properties * )
-{
- setup-fpic $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- LANG on $(<) = "-x assembler-with-cpp" ;
-}
-
-actions compile.asm
-{
- "$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# The class which checks that we don't try to use the <runtime-link>static
-# property while creating or using a shared library, since it's not supported by
-# gcc/libc.
-class gcc-linking-generator : unix-linking-generator
-{
- rule run ( project name ? : property-set : sources + )
- {
- # TODO: Replace this with the use of a target-os property.
- local no-static-link = ;
- if [ modules.peek : UNIX ]
- {
- switch [ modules.peek : JAMUNAME ]
- {
- case * : no-static-link = true ;
- }
- }
-
- local properties = [ $(property-set).raw ] ;
- local reason ;
- if $(no-static-link) && <runtime-link>static in $(properties)
- {
- if <link>shared in $(properties)
- {
- reason =
-                    "On gcc, DLL can't be built with '<runtime-link>static'." ;
- }
- else if [ type.is-derived $(self.target-types[1]) EXE ]
- {
- for local s in $(sources)
- {
- local type = [ $(s).type ] ;
- if $(type) && [ type.is-derived $(type) SHARED_LIB ]
- {
- reason =
-                            "On gcc, using DLLs together with the"
-                            "<runtime-link>static option is not possible." ;
- }
- }
- }
- }
- if $(reason)
- {
- ECHO warning:
- $(reason) ;
- ECHO warning:
- "It is suggested to use '<runtime-link>static' together"
- "with '<link>static'." ;
- return ;
- }
- else
- {
- local generated-targets = [ unix-linking-generator.run $(project)
- $(name) : $(property-set) : $(sources) ] ;
- return $(generated-targets) ;
- }
- }
-}
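-
-# For illustration (target names are placeholders), a request like
-#~ lib foo : foo.cpp : <link>shared <runtime-link>static ;
-# makes the generator above issue the warning shown and refuse to produce a
-# target, since a shared library cannot be linked against the static runtime.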
-
-# The set of permissible input types is different on mingw.
-# So, define two sets of generators, with mingw generators
-# selected when target-os=windows.
-
-local g ;
-g = [ new gcc-linking-generator gcc.mingw.link
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : EXE
- : <toolset>gcc <target-os>windows ] ;
-$(g).set-rule-name gcc.link ;
-generators.register $(g) ;
-
-g = [ new gcc-linking-generator gcc.mingw.link.dll
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : IMPORT_LIB SHARED_LIB
- : <toolset>gcc <target-os>windows ] ;
-$(g).set-rule-name gcc.link.dll ;
-generators.register $(g) ;
-
-generators.register
- [ new gcc-linking-generator gcc.link
- : LIB OBJ
- : EXE
- : <toolset>gcc ] ;
-generators.register
- [ new gcc-linking-generator gcc.link.dll
- : LIB OBJ
- : SHARED_LIB
- : <toolset>gcc ] ;
-
-generators.override gcc.mingw.link : gcc.link ;
-generators.override gcc.mingw.link.dll : gcc.link.dll ;
-
-# Cygwin is similar to msvc and mingw in that it uses import libraries.
-# While in simple cases it can link directly to a shared library, doing so
-# is believed to be slower and not always possible. Define cygwin-specific
-# generators here.
-
-g = [ new gcc-linking-generator gcc.cygwin.link
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : EXE
- : <toolset>gcc <target-os>cygwin ] ;
-$(g).set-rule-name gcc.link ;
-generators.register $(g) ;
-
-g = [ new gcc-linking-generator gcc.cygwin.link.dll
- : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB
- : IMPORT_LIB SHARED_LIB
- : <toolset>gcc <target-os>cygwin ] ;
-$(g).set-rule-name gcc.link.dll ;
-generators.register $(g) ;
-
-generators.override gcc.cygwin.link : gcc.link ;
-generators.override gcc.cygwin.link.dll : gcc.link.dll ;
-
-# Declare flags for linking.
-# First, the common flags.
-toolset.flags gcc.link OPTIONS <debug-symbols>on : -g ;
-toolset.flags gcc.link OPTIONS <profiling>on : -pg ;
-toolset.flags gcc.link USER_OPTIONS <linkflags> ;
-toolset.flags gcc.link LINKPATH <library-path> ;
-toolset.flags gcc.link FINDLIBS-ST <find-static-library> ;
-toolset.flags gcc.link FINDLIBS-SA <find-shared-library> ;
-toolset.flags gcc.link LIBRARIES <library-file> ;
-
-toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
-toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
-
-# For <runtime-link>static we made sure there are no dynamic libraries in the
-# link. On HP-UX not all system libraries exist as archived libraries (for
-# example, there is no libunwind.a), so, on this platform, the -static option
-# cannot be specified.
-if [ os.name ] != HPUX
-{
- toolset.flags gcc.link OPTIONS <runtime-link>static : -static ;
-}
-
-# Now, the vendor specific flags.
-# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun.
-rule init-link-flags ( toolset linker condition )
-{
- switch $(linker)
- {
- case aix :
- {
- #
- # On AIX we *have* to use the native linker.
- #
- # Using -brtl, the AIX linker will look for libraries with both the .a
- # and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
- # AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
- # file that may contain shared objects and is different from static libs
- # as on Linux.
- #
-            # The -bnoipath strips the prepended (relative) path of libraries from
- # the loader section in the target library or executable. Hence, during
- # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
- # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
-            # this option, the prepended (relative) path + library name is
- # hard-coded in the loader section, causing *only* this path to be
- # searched during load-time. Note that the AIX linker does not have an
- # -soname equivalent, this is as close as it gets.
- #
-            # The above options are definitely for AIX 5.x, and most likely also for
- # AIX 4.x and AIX 6.x. For details about the AIX linker see:
- # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
- #
-
- toolset.flags $(toolset).link OPTIONS : -Wl,-brtl -Wl,-bnoipath
- : unchecked ;
- }
-
- case darwin :
- {
- # On Darwin, the -s option to ld does not work unless we pass -static,
-            # and passing -static unconditionally is a bad idea. So, do not pass -s
-            # at all; darwin.jam will use a separate 'strip' invocation.
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
- }
-
- case gnu :
- {
- # Strip the binary when no debugging is needed. We use --strip-all flag
- # as opposed to -s since icc (intel's compiler) is generally
- # option-compatible with and inherits from the gcc toolset, but does not
- # support -s.
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ;
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path> : unchecked ;
- toolset.flags $(toolset).link RPATH_LINK $(condition) : <xdll-path> : unchecked ;
- toolset.flags $(toolset).link START-GROUP $(condition) : -Wl,--start-group : unchecked ;
- toolset.flags $(toolset).link END-GROUP $(condition) : -Wl,--end-group : unchecked ;
-
- # gnu ld has the ability to change the search behaviour for libraries
- # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
- # and change search for -l switches that follow them. The following list
- # shows the tried variants.
- # The search stops at the first variant that has a match.
- # *nix: -Bstatic -lxxx
- # libxxx.a
- #
- # *nix: -Bdynamic -lxxx
- # libxxx.so
- # libxxx.a
- #
- # windows (mingw,cygwin) -Bstatic -lxxx
- # libxxx.a
- # xxx.lib
- #
- # windows (mingw,cygwin) -Bdynamic -lxxx
- # libxxx.dll.a
- # xxx.dll.a
- # libxxx.a
- # xxx.lib
- # cygxxx.dll (*)
- # libxxx.dll
- # xxx.dll
- # libxxx.a
- #
- # (*) This is for cygwin
- # Please note that -Bstatic and -Bdynamic are not a guarantee that a
- # static or dynamic lib indeed gets linked in. The switches only change
- # search patterns!
-
- # On *nix mixing shared libs with static runtime is not a good idea.
- toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>shared
- : -Wl,-Bstatic : unchecked ;
- toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>shared
- : -Wl,-Bdynamic : unchecked ;
-
- # On windows allow mixing of static and dynamic libs with static
- # runtime.
- toolset.flags $(toolset).link FINDLIBS-ST-PFX $(condition)/<runtime-link>static/<target-os>windows
- : -Wl,-Bstatic : unchecked ;
- toolset.flags $(toolset).link FINDLIBS-SA-PFX $(condition)/<runtime-link>static/<target-os>windows
- : -Wl,-Bdynamic : unchecked ;
- toolset.flags $(toolset).link OPTIONS $(condition)/<runtime-link>static/<target-os>windows
- : -Wl,-Bstatic : unchecked ;
- }
-
- case hpux :
- {
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
- : -Wl,-s : unchecked ;
- toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
- : -fPIC : unchecked ;
- }
-
- case osf :
- {
- # No --strip-all, just -s.
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
- : -Wl,-s : unchecked ;
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
- : unchecked ;
-            # This linker does not support -R.
- toolset.flags $(toolset).link RPATH_OPTION $(condition) : -rpath
- : unchecked ;
- # -rpath-link is not supported at all.
- }
-
- case sun :
- {
- toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on
- : -Wl,-s : unchecked ;
- toolset.flags $(toolset).link RPATH $(condition) : <dll-path>
- : unchecked ;
-            # The Solaris linker does not have a separate -rpath-link, but allows
-            # -L to be used for the same purpose.
- toolset.flags $(toolset).link LINKPATH $(condition) : <xdll-path>
- : unchecked ;
-
- # This permits shared libraries with non-PIC code on Solaris.
-            # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
- # following is not needed. Whether -fPIC should be hardcoded, is a
- # separate question.
- # AH, 2004/10/16: it is still necessary because some tests link against
- # static libraries that were compiled without PIC.
- toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared
- : -mimpure-text : unchecked ;
- }
-
- case * :
- {
- errors.user-error
- "$(toolset) initialization: invalid linker '$(linker)'" :
- "The value '$(linker)' specified for <linker> is not recognized." :
- "Possible values are 'aix', 'darwin', 'gnu', 'hpux', 'osf' or 'sun'" ;
- }
- }
-}
-
-# Enclose the RPATH variable on 'targets' in (double) quotes, unless it is
-# already enclosed in single quotes. This special casing is done because it is
-# common to pass '$ORIGIN' to the linker -- and it has to be in single quotes
-# to prevent expansion by the shell -- and if we then add double quotes, the
-# protective effect of the single quotes disappears.
-rule quote-rpath ( targets * )
-{
- local r = [ on $(targets[1]) return $(RPATH) ] ;
- if ! [ MATCH "('.*')" : $(r) ]
- {
- r = "\"$(r)\"" ;
- }
- RPATH on $(targets) = $(r) ;
-}
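For illustration, the quoting rule above can be sketched in Python, the language used by the port later in this patch. quote_rpath below is a hypothetical free-standing helper, not part of Boost.Build; it only shows the intent: leave a single-quoted '$ORIGIN' untouched and double-quote everything else.

import re

def quote_rpath(rpath):
    # Values that already carry single quotes are left alone, so a literal
    # '$ORIGIN' reaches the linker unexpanded; anything else is wrapped in
    # double quotes for the shell.
    if re.search(r"'.*'", rpath):
        return rpath
    return '"%s"' % rpath

# quote_rpath("'$ORIGIN/../lib'") -> "'$ORIGIN/../lib'"
# quote_rpath("/usr/local/lib")   -> '"/usr/local/lib"'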
-
-# Declare actions for linking.
-rule link ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since running N links in
-    # parallel is just slower. For now, serialize only gcc links; it might be a
- # good idea to serialize all links.
- JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
- quote-rpath $(targets) ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
-
-}
-
-# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
-# does not have the same logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
-# always available.
-.AR = ar ;
-.RANLIB = ranlib ;
-
-toolset.flags gcc.archive AROPTIONS <archiveflags> ;
-
-rule archive ( targets * : sources * : properties * )
-{
-    # Always remove the archive and start again. Here is the rationale from
- #
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
- # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
- # errors. After some debugging I traced it back to the fact that a1.o was
- # *still* in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM onto the
- # archive action. That makes archives fail to build on NT when they have
- # many files because it will no longer execute the action directly and blow
- # the line length limit. Instead we remove the file in a different action,
- # just before building the archive.
- #
- local clean.a = $(targets[1])(clean) ;
- TEMPORARY $(clean.a) ;
- NOCARE $(clean.a) ;
- LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
- DEPENDS $(clean.a) : $(sources) ;
- DEPENDS $(targets) : $(clean.a) ;
- common.RmTemps $(clean.a) : $(targets) ;
-}
-
-# Declare action for creating static libraries.
-# The letter 'r' means to add files to the archive with replacement. Since we
-# remove the archive, we don't care about replacement, but there's no option
-# "add without replacement".
-# The letter 'c' suppresses the warning in case the archive does not exist yet.
-# That warning is produced only on some platforms, for whatever reason.
-actions piecemeal archive
-{
- "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
- "$(.RANLIB)" "$(<)"
-}
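As an illustration of the remove-then-recreate policy described above, here is a minimal Python sketch of the equivalent ar/ranlib sequence. rebuild_archive is a hypothetical helper, not part of Boost.Build.

import os
import subprocess

def rebuild_archive(archive, objects):
    # Delete any existing archive first so that members whose sources were
    # renamed or removed cannot linger, then add everything with 'r'
    # (replace) and 'c' (create without warning).
    if os.path.exists(archive):
        os.remove(archive)
    subprocess.check_call(['ar', 'rc', archive] + objects)
    subprocess.check_call(['ranlib', archive])

# rebuild_archive('liba.a', ['a2.o', 'b.o'])  # a stale a1.o cannot survive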
-
-rule link.dll ( targets * : sources * : properties * )
-{
- setup-threading $(targets) : $(sources) : $(properties) ;
- setup-address-model $(targets) : $(sources) : $(properties) ;
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>gcc-link-semaphore ;
- quote-rpath $(targets) ;
-}
-
-# Differs from 'link' above only by -shared.
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,$(RPATH) "$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) -shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) $(USER_OPTIONS)
-}
-
-rule setup-threading ( targets * : sources * : properties * )
-{
- local threading = [ feature.get-values threading : $(properties) ] ;
- if $(threading) = multi
- {
- local target = [ feature.get-values target-os : $(properties) ] ;
- local option ;
- local libs ;
-
- switch $(target)
- {
- case windows :
- {
- option = -mthreads ;
- }
- case cygwin :
- {
- option = -mthreads ;
- }
- case solaris :
- {
- option = -pthreads ;
- libs = rt ;
- }
- case beos :
- {
- # BeOS has no threading options, so do not set anything here.
- }
- case *bsd :
- {
- option = -pthread ;
- # There is no -lrt on BSD.
- }
- case sgi :
- {
- # gcc on IRIX does not support multi-threading so do not set anything
- # here.
- }
- case darwin :
- {
- # Darwin has no threading options so do not set anything here.
- }
- case * :
- {
- option = -pthread ;
- libs = rt ;
- }
- }
-
- if $(option)
- {
- OPTIONS on $(targets) += $(option) ;
- }
- if $(libs)
- {
- FINDLIBS-SA on $(targets) += $(libs) ;
- }
- }
-}
-
-local rule cpu-flags ( toolset variable : architecture : instruction-set + : values + : default ? )
-{
- if $(default)
- {
- toolset.flags $(toolset) $(variable)
- <architecture>$(architecture)/<instruction-set>
- : $(values) ;
- }
- toolset.flags $(toolset) $(variable)
- <architecture>/<instruction-set>$(instruction-set)
- <architecture>$(architecture)/<instruction-set>$(instruction-set)
- : $(values) ;
-}
-
-# Set architecture/instruction-set options.
-#
-# x86 and compatible
-# The 'native' option appeared in gcc 4.2 so we cannot safely use it
-# as default. Use conservative i386 instead for 32-bit.
-toolset.flags gcc OPTIONS <architecture>x86/<address-model>32/<instruction-set> : -march=i386 ;
-cpu-flags gcc OPTIONS : x86 : native : -march=native ;
-cpu-flags gcc OPTIONS : x86 : i386 : -march=i386 ;
-cpu-flags gcc OPTIONS : x86 : i486 : -march=i486 ;
-cpu-flags gcc OPTIONS : x86 : i586 : -march=i586 ;
-cpu-flags gcc OPTIONS : x86 : i686 : -march=i686 ;
-cpu-flags gcc OPTIONS : x86 : pentium : -march=pentium ;
-cpu-flags gcc OPTIONS : x86 : pentium-mmx : -march=pentium-mmx ;
-cpu-flags gcc OPTIONS : x86 : pentiumpro : -march=pentiumpro ;
-cpu-flags gcc OPTIONS : x86 : pentium2 : -march=pentium2 ;
-cpu-flags gcc OPTIONS : x86 : pentium3 : -march=pentium3 ;
-cpu-flags gcc OPTIONS : x86 : pentium3m : -march=pentium3m ;
-cpu-flags gcc OPTIONS : x86 : pentium-m : -march=pentium-m ;
-cpu-flags gcc OPTIONS : x86 : pentium4 : -march=pentium4 ;
-cpu-flags gcc OPTIONS : x86 : pentium4m : -march=pentium4m ;
-cpu-flags gcc OPTIONS : x86 : prescott : -march=prescott ;
-cpu-flags gcc OPTIONS : x86 : nocona : -march=nocona ;
-cpu-flags gcc OPTIONS : x86 : core2 : -march=core2 ;
-cpu-flags gcc OPTIONS : x86 : k6 : -march=k6 ;
-cpu-flags gcc OPTIONS : x86 : k6-2 : -march=k6-2 ;
-cpu-flags gcc OPTIONS : x86 : k6-3 : -march=k6-3 ;
-cpu-flags gcc OPTIONS : x86 : athlon : -march=athlon ;
-cpu-flags gcc OPTIONS : x86 : athlon-tbird : -march=athlon-tbird ;
-cpu-flags gcc OPTIONS : x86 : athlon-4 : -march=athlon-4 ;
-cpu-flags gcc OPTIONS : x86 : athlon-xp : -march=athlon-xp ;
-cpu-flags gcc OPTIONS : x86 : athlon-mp : -march=athlon-mp ;
-##
-cpu-flags gcc OPTIONS : x86 : k8 : -march=k8 ;
-cpu-flags gcc OPTIONS : x86 : opteron : -march=opteron ;
-cpu-flags gcc OPTIONS : x86 : athlon64 : -march=athlon64 ;
-cpu-flags gcc OPTIONS : x86 : athlon-fx : -march=athlon-fx ;
-cpu-flags gcc OPTIONS : x86 : winchip-c6 : -march=winchip-c6 ;
-cpu-flags gcc OPTIONS : x86 : winchip2 : -march=winchip2 ;
-cpu-flags gcc OPTIONS : x86 : c3 : -march=c3 ;
-cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ;
-# Sparc
-cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ;
-cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ;
-cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ;
-cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ;
-cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ;
-cpu-flags gcc OPTIONS : sparc : sparclite : -mcpu=sparclite ;
-cpu-flags gcc OPTIONS : sparc : hypersparc : -mcpu=hypersparc ;
-cpu-flags gcc OPTIONS : sparc : sparclite86x : -mcpu=sparclite86x ;
-cpu-flags gcc OPTIONS : sparc : f930 : -mcpu=f930 ;
-cpu-flags gcc OPTIONS : sparc : f934 : -mcpu=f934 ;
-cpu-flags gcc OPTIONS : sparc : sparclet : -mcpu=sparclet ;
-cpu-flags gcc OPTIONS : sparc : tsc701 : -mcpu=tsc701 ;
-cpu-flags gcc OPTIONS : sparc : v9 : -mcpu=v9 ;
-cpu-flags gcc OPTIONS : sparc : ultrasparc : -mcpu=ultrasparc ;
-cpu-flags gcc OPTIONS : sparc : ultrasparc3 : -mcpu=ultrasparc3 ;
-# RS/6000 & PowerPC
-cpu-flags gcc OPTIONS : power : 403 : -mcpu=403 ;
-cpu-flags gcc OPTIONS : power : 505 : -mcpu=505 ;
-cpu-flags gcc OPTIONS : power : 601 : -mcpu=601 ;
-cpu-flags gcc OPTIONS : power : 602 : -mcpu=602 ;
-cpu-flags gcc OPTIONS : power : 603 : -mcpu=603 ;
-cpu-flags gcc OPTIONS : power : 603e : -mcpu=603e ;
-cpu-flags gcc OPTIONS : power : 604 : -mcpu=604 ;
-cpu-flags gcc OPTIONS : power : 604e : -mcpu=604e ;
-cpu-flags gcc OPTIONS : power : 620 : -mcpu=620 ;
-cpu-flags gcc OPTIONS : power : 630 : -mcpu=630 ;
-cpu-flags gcc OPTIONS : power : 740 : -mcpu=740 ;
-cpu-flags gcc OPTIONS : power : 7400 : -mcpu=7400 ;
-cpu-flags gcc OPTIONS : power : 7450 : -mcpu=7450 ;
-cpu-flags gcc OPTIONS : power : 750 : -mcpu=750 ;
-cpu-flags gcc OPTIONS : power : 801 : -mcpu=801 ;
-cpu-flags gcc OPTIONS : power : 821 : -mcpu=821 ;
-cpu-flags gcc OPTIONS : power : 823 : -mcpu=823 ;
-cpu-flags gcc OPTIONS : power : 860 : -mcpu=860 ;
-cpu-flags gcc OPTIONS : power : 970 : -mcpu=970 ;
-cpu-flags gcc OPTIONS : power : 8540 : -mcpu=8540 ;
-cpu-flags gcc OPTIONS : power : power : -mcpu=power ;
-cpu-flags gcc OPTIONS : power : power2 : -mcpu=power2 ;
-cpu-flags gcc OPTIONS : power : power3 : -mcpu=power3 ;
-cpu-flags gcc OPTIONS : power : power4 : -mcpu=power4 ;
-cpu-flags gcc OPTIONS : power : power5 : -mcpu=power5 ;
-cpu-flags gcc OPTIONS : power : powerpc : -mcpu=powerpc ;
-cpu-flags gcc OPTIONS : power : powerpc64 : -mcpu=powerpc64 ;
-cpu-flags gcc OPTIONS : power : rios : -mcpu=rios ;
-cpu-flags gcc OPTIONS : power : rios1 : -mcpu=rios1 ;
-cpu-flags gcc OPTIONS : power : rios2 : -mcpu=rios2 ;
-cpu-flags gcc OPTIONS : power : rsc : -mcpu=rsc ;
-cpu-flags gcc OPTIONS : power : rs64a : -mcpu=rs64 ;
-# AIX variant of RS/6000 & PowerPC
-toolset.flags gcc AROPTIONS <address-model>64/<target-os>aix : "-X 64" ;
diff --git a/tools/build/v2/tools/gcc.py b/tools/build/v2/tools/gcc.py
deleted file mode 100644
index b0aba1d293..0000000000
--- a/tools/build/v2/tools/gcc.py
+++ /dev/null
@@ -1,800 +0,0 @@
-# Status: being ported by Steven Watanabe
-# Base revision: 47077
-# TODO: common.jam needs to be ported
-# TODO: generators.jam needs to have register_c_compiler.
-#
-# Copyright 2001 David Abrahams.
-# Copyright 2002-2006 Rene Rivera.
-# Copyright 2002-2003 Vladimir Prus.
-# Copyright (c) 2005 Reece H. Dunn.
-# Copyright 2006 Ilya Sokolov.
-# Copyright 2007 Roland Schwarz
-# Copyright 2007 Boris Gubenko.
-# Copyright 2008 Steven Watanabe
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import os
-import subprocess
-import re
-
-import bjam
-
-from b2.tools import unix, common, rc, pch, builtin
-from b2.build import feature, type, toolset, generators, property_set
-from b2.build.property import Property
-from b2.util.utility import os_name, on_windows
-from b2.manager import get_manager
-from b2.build.generators import Generator
-from b2.build.toolset import flags
-from b2.util.utility import to_seq
-
-
-
-__debug = None
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
- return __debug
-
-feature.extend('toolset', ['gcc'])
-
-
-toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll'])
-toolset.inherit_flags('gcc', 'unix')
-toolset.inherit_rules('gcc', 'unix')
-
-generators.override('gcc.prebuilt', 'builtin.prebuilt')
-generators.override('gcc.searched-lib-generator', 'searched-lib-generator')
-
-# Target naming is determined by types/lib.jam and the settings below this
-# comment.
-#
-# On *nix:
-# libxxx.a static library
-# libxxx.so shared library
-#
-# On windows (mingw):
-# libxxx.lib static library
-# xxx.dll DLL
-# xxx.lib import library
-#
-# On windows (cygwin) i.e. <target-os>cygwin
-# libxxx.a static library
-# xxx.dll DLL
-# libxxx.dll.a import library
-#
-# Note: the user can always override by using the <tag>@rule.
-# These settings have been chosen so that mingw
-# is in line with msvc naming conventions. For
-# cygwin the cygwin naming convention has been chosen.
-
-# Make the "o" suffix used for gcc toolset on all
-# platforms
-type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o')
-type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a')
-
-type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a')
-type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib')
-
-__machine_match = re.compile('^([^ ]+)')
-__version_match = re.compile('^([0-9.]+)')
-
-def init(version = None, command = None, options = None):
- """
- Initializes the gcc toolset for the given version. If necessary, command may
- be used to specify where the compiler is located. The parameter 'options' is a
- space-delimited list of options, each one specified as
- <option-name>option-value. Valid option names are: cxxflags, linkflags and
- linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
- and the default value will be selected based on the current OS.
- Example:
- using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
- """
-
- options = to_seq(options)
- command = to_seq(command)
-
- # Information about the gcc command...
- # The command.
- command = to_seq(common.get_invocation_command('gcc', 'g++', command))
- # The root directory of the tool install.
-    root = feature.get_values('<root>', options)
- # The bin directory where to find the command to execute.
- bin = None
- # The flavor of compiler.
- flavor = feature.get_values('<flavor>', options)
- # Autodetect the root and bin dir if not given.
- if command:
- if not bin:
- bin = common.get_absolute_tool_path(command[-1])
- if not root:
- root = os.path.dirname(bin)
- # Autodetect the version and flavor if not given.
- if command:
- machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0]
- machine = __machine_match.search(machine_info).group(1)
-
- version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0]
- version = __version_match.search(version_info).group(1)
- if not flavor and machine.find('mingw') != -1:
- flavor = 'mingw'
-
- condition = None
- if flavor:
- condition = common.check_init_parameters('gcc', None,
- ('version', version),
- ('flavor', flavor))
- else:
- condition = common.check_init_parameters('gcc', None,
- ('version', version))
-
- if command:
- command = command[0]
-
- common.handle_options('gcc', condition, command, options)
-
- linker = feature.get_values('<linker-type>', options)
- if not linker:
- if os_name() == 'OSF':
- linker = 'osf'
- elif os_name() == 'HPUX':
-            linker = 'hpux'
- else:
- linker = 'gnu'
-
- init_link_flags('gcc', linker, condition)
-
-    # If gcc is installed in a non-standard location, we'd need to add
- # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
- # rules).
- if command:
- # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
- # and all must be added to LD_LIBRARY_PATH. The linker will pick the
-        # right ones. Note that we don't provide a clean way to build a 32-bit
-        # binary with a 64-bit compiler, but the user can always pass -m32 manually.
- lib_path = [os.path.join(root, 'bin'),
- os.path.join(root, 'lib'),
- os.path.join(root, 'lib32'),
- os.path.join(root, 'lib64')]
- if debug():
- print 'notice: using gcc libraries ::', condition, '::', lib_path
- toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path)
-
- # If it's not a system gcc install we should adjust the various programs as
- # needed to prefer using the install specific versions. This is essential
- # for correct use of MinGW and for cross-compiling.
-
- # - The archive builder.
- archiver = common.get_invocation_command('gcc',
- 'ar', feature.get_values('<archiver>', options), [bin], path_last=True)
- toolset.flags('gcc.archive', '.AR', condition, [archiver])
- if debug():
- print 'notice: using gcc archiver ::', condition, '::', archiver
-
- # - The resource compiler.
- rc_command = common.get_invocation_command_nodefault('gcc',
- 'windres', feature.get_values('<rc>', options), [bin], path_last=True)
- rc_type = feature.get_values('<rc-type>', options)
-
- if not rc_type:
- rc_type = 'windres'
-
- if not rc_command:
-        # If we can't find an RC compiler we fall back to a null RC compiler that
- # creates empty object files. This allows the same Jamfiles to work
- # across the board. The null RC uses the assembler to create the empty
- # objects, so configure that.
- rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True)
- rc_type = 'null'
- rc.configure(rc_command, condition, '<rc-type>' + rc_type)
-
-###if [ os.name ] = NT
-###{
-### # This causes single-line command invocation to not go through .bat files,
-### # thus avoiding command-line length limitations.
-### JAMSHELL = % ;
-###}
-
-#FIXME: when register_c_compiler is moved to
-# generators, these should be updated
-builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
-builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
-builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc'])
-
-# pch support
-
-# The compiler looks for a precompiled header in each directory just before it
-# looks for the include file in that directory. The name searched for is the
-# name specified in the #include directive with ".gch" suffix appended. The
-# logic in gcc-pch-generator will make sure that the BASE_PCH suffix is appended
-# to the full name of the header.
-
-type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch')
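As a rough illustration of the lookup described in the comment above, gcc's implicit search can be pictured like this. pch_candidate is a hypothetical helper for illustration only, not part of this module.

import os

def pch_candidate(include_dir, header_name):
    # For '#include "config.hpp"' the compiler first probes
    # <include_dir>/config.hpp.gch and only then falls back to the plain
    # header in the same directory.
    gch = os.path.join(include_dir, header_name + '.gch')
    if os.path.exists(gch):
        return gch
    return os.path.join(include_dir, header_name)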
-
-# GCC-specific pch generator.
-class GccPchGenerator(pch.PchGenerator):
-
- # Inherit the __init__ method
-
- def run_pch(self, project, name, prop_set, sources):
- # Find the header in sources. Ignore any CPP sources.
- header = None
- for s in sources:
- if type.is_derived(s.type(), 'H'):
- header = s
-
- # Error handling: Base header file name should be the same as the base
- # precompiled header name.
- header_name = header.name()
- header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
- if header_basename != name:
- location = project.project_module
- ###FIXME:
- raise Exception()
- ### errors.user-error "in" $(location)": pch target name `"$(name)"' should be the same as the base name of header file `"$(header-name)"'" ;
-
- pch_file = Generator.run(self, project, name, prop_set, [header])
-
- # return result of base class and pch-file property as usage-requirements
- # FIXME: what about multiple results from generator.run?
- return (property_set.create([Property('pch-file', pch_file[0]),
- Property('cflags', '-Winvalid-pch')]),
- pch_file)
-
-    # Calls the base version specifying the source's name as the name of the
-    # created target. As a result, the PCH will be named whatever.hpp.gch, and not
- # whatever.gch.
- def generated_targets(self, sources, prop_set, project, name = None):
- name = sources[0].name()
- return Generator.generated_targets(self, sources,
- prop_set, project, name)
-
-# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
-# latter has HPP type, but HPP type is derived from H. The type of compilation
-# is determined entirely by the destination type.
-generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ]))
-generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ]))
-
-# Override default do-nothing generators.
-generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator')
-generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator')
-
-flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>'])
-
-# Declare flags and action for compilation
-flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
-flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
-flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])
-
-flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
-flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
-flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])
-
-flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w'])
-flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall'])
-flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic'])
-flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror'])
-
-flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
-flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
-flags('gcc.compile', 'OPTIONS', ['<rtti>off'], ['-fno-rtti'])
-
-# On cygwin and mingw, gcc generates position independent code by default, and
-# warns if -fPIC is specified. This might not be the right way of checking if
-# we're using cygwin. For example, it's possible to run cygwin gcc from an NT
-# shell, or to be cross-compiling. But we'll solve that problem when it's time.
-# In that case we'll just add another parameter to 'init' and move this logic
-# inside 'init'.
-if not os_name () in ['CYGWIN', 'NT']:
- # This logic will add -fPIC for all compilations:
- #
- # lib a : a.cpp b ;
- # obj b : b.cpp ;
- # exe c : c.cpp a d ;
- # obj d : d.cpp ;
- #
- # This all is fine, except that 'd' will be compiled with -fPIC even though
- # it's not needed, as 'd' is used only in exe. However, it's hard to detect
-    # where a target is going to be used. Alternatively, we can set -fPIC only
-    # when the main target type is LIB, but then 'b' will be compiled without -fPIC.
- # In x86-64 that will lead to link errors. So, compile everything with
- # -fPIC.
- #
- # Yet another alternative would be to create propagated <sharedable>
- # feature, and set it when building shared libraries, but that's hard to
- # implement and will increase target path length even more.
- flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC'])
-
-if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX':
- # OSF does have an option called -soname but it doesn't seem to work as
- # expected, therefore it has been disabled.
- HAVE_SONAME = ''
- SONAME_OPTION = '-h'
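For illustration only, and assuming HAVE_SONAME/SONAME_OPTION are set as above, the soname fragment that the gcc.link.dll action further down splices into the command line can be sketched like this; soname_fragment is a hypothetical helper, not part of this module.

def soname_fragment(have_soname, soname_option, dll_basename):
    # Models $(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,<basename>: empty
    # when sonames are disabled, otherwise e.g. '-Wl,-h -Wl,libfoo.so'.
    if have_soname is None:
        return ''
    return '%s-Wl,%s -Wl,%s' % (have_soname, soname_option, dll_basename)

# soname_fragment('', '-h', 'libfoo.so') -> '-Wl,-h -Wl,libfoo.so'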
-
-
-flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>'])
-flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>'])
-flags('gcc.compile', 'DEFINES', [], ['<define>'])
-flags('gcc.compile', 'INCLUDES', [], ['<include>'])
-
-engine = get_manager().engine()
-
-engine.register_action('gcc.compile.c++.pch',
- '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
-
-engine.register_action('gcc.compile.c.pch',
- '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
-
-
-def gcc_compile_cpp(targets, sources, properties):
- # Some extensions are compiled as C++ by default. For others, we need to
- # pass -x c++. We could always pass -x c++ but distcc does not work with it.
- extension = os.path.splitext (sources [0]) [1]
- lang = ''
- if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
- lang = '-x c++'
- get_manager().engine().set_target_variable (targets, 'LANG', lang)
- engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
-
-def gcc_compile_c(targets, sources, properties):
- engine = get_manager().engine()
- # If we use the name g++ then default file suffix -> language mapping does
- # not work. So have to pass -x option. Maybe, we can work around this by
- # allowing the user to specify both C and C++ compiler names.
- #if $(>:S) != .c
- #{
- engine.set_target_variable (targets, 'LANG', '-x c')
- #}
- engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
-
-engine.register_action(
- 'gcc.compile.c++',
- '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
- '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' +
- '-c -o "$(<:W)" "$(>:W)"',
- function=gcc_compile_cpp,
- bound_list=['PCH_FILE'])
-
-engine.register_action(
- 'gcc.compile.c',
- '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' +
- '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
- function=gcc_compile_c,
- bound_list=['PCH_FILE'])
-
-def gcc_compile_asm(targets, sources, properties):
- get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp')
-
-engine.register_action(
- 'gcc.compile.asm',
- '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
- function=gcc_compile_asm)
-
-
-class GccLinkingGenerator(unix.UnixLinkingGenerator):
- """
-    The class which checks that we don't try to use the <runtime-link>static
-    property while creating or using a shared library, since it's not supported by
- gcc/libc.
- """
- def run(self, project, name, ps, sources):
- # TODO: Replace this with the use of a target-os property.
-
- no_static_link = False
- if bjam.variable('UNIX'):
-            no_static_link = True
- ##FIXME: what does this mean?
-## {
-## switch [ modules.peek : JAMUNAME ]
-## {
-## case * : no-static-link = true ;
-## }
-## }
-
- reason = None
- if no_static_link and ps.get('runtime-link') == 'static':
- if ps.get('link') == 'shared':
-                reason = "On gcc, a DLL cannot be built with '<runtime-link>static'."
- elif type.is_derived(self.target_types[0], 'EXE'):
- for s in sources:
- source_type = s.type()
- if source_type and type.is_derived(source_type, 'SHARED_LIB'):
-                    reason = "On gcc, using DLLs together with the " +\
-                        "'<runtime-link>static' option is not possible."
- if reason:
- print 'warning:', reason
- print 'warning:',\
- "It is suggested to use '<runtime-link>static' together",\
-                "with '<link>static'."
- return
- else:
- generated_targets = unix.UnixLinkingGenerator.run(self, project,
- name, ps, sources)
- return generated_targets
-
-if on_windows():
- flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,'])
- generators.register(
- GccLinkingGenerator('gcc.link', True,
- ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
- [ 'EXE' ],
- [ '<toolset>gcc' ]))
- generators.register(
- GccLinkingGenerator('gcc.link.dll', True,
- ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
- ['IMPORT_LIB', 'SHARED_LIB'],
- ['<toolset>gcc']))
-else:
- generators.register(
- GccLinkingGenerator('gcc.link', True,
- ['LIB', 'OBJ'],
- ['EXE'],
- ['<toolset>gcc']))
- generators.register(
- GccLinkingGenerator('gcc.link.dll', True,
- ['LIB', 'OBJ'],
- ['SHARED_LIB'],
- ['<toolset>gcc']))
-
-# Declare flags for linking.
-# First, the common flags.
-flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
-flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
-flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>'])
-flags('gcc.link', 'LINKPATH', [], ['<library-path>'])
-flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
-flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
-flags('gcc.link', 'LIBRARIES', [], ['<library-file>'])
-
-# For <runtime-link>static we made sure there are no dynamic libraries in the
-# link. On HP-UX not all system libraries exist as archived libraries (for
-# example, there is no libunwind.a), so, on this platform, the -static option
-# cannot be specified.
-if os_name() != 'HPUX':
- flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static'])
-
-# Now, the vendor specific flags.
-# The parameter linker can be either gnu, darwin, osf, hpux or sun.
-def init_link_flags(toolset, linker, condition):
- """
- Now, the vendor specific flags.
- The parameter linker can be either gnu, darwin, osf, hpux or sun.
- """
- toolset_link = toolset + '.link'
- if linker == 'gnu':
- # Strip the binary when no debugging is needed. We use --strip-all flag
- # as opposed to -s since icc (intel's compiler) is generally
- # option-compatible with and inherits from the gcc toolset, but does not
- # support -s.
-
- # FIXME: what does unchecked translate to?
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all']) # : unchecked ;
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
- flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
- flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ;
- flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ;
-
- # gnu ld has the ability to change the search behaviour for libraries
- # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
- # and change search for -l switches that follow them. The following list
- # shows the tried variants.
- # The search stops at the first variant that has a match.
- # *nix: -Bstatic -lxxx
- # libxxx.a
- #
- # *nix: -Bdynamic -lxxx
- # libxxx.so
- # libxxx.a
- #
- # windows (mingw,cygwin) -Bstatic -lxxx
- # libxxx.a
- # xxx.lib
- #
- # windows (mingw,cygwin) -Bdynamic -lxxx
- # libxxx.dll.a
- # xxx.dll.a
- # libxxx.a
- # xxx.lib
- # cygxxx.dll (*)
- # libxxx.dll
- # xxx.dll
- # libxxx.a
- #
- # (*) This is for cygwin
- # Please note that -Bstatic and -Bdynamic are not a guarantee that a
- # static or dynamic lib indeed gets linked in. The switches only change
- # search patterns!
-
- # On *nix mixing shared libs with static runtime is not a good idea.
- flags(toolset_link, 'FINDLIBS-ST-PFX',
- map(lambda x: x + '/<runtime-link>shared', condition),
- ['-Wl,-Bstatic']) # : unchecked ;
- flags(toolset_link, 'FINDLIBS-SA-PFX',
- map(lambda x: x + '/<runtime-link>shared', condition),
- ['-Wl,-Bdynamic']) # : unchecked ;
-
- # On windows allow mixing of static and dynamic libs with static
- # runtime.
- flags(toolset_link, 'FINDLIBS-ST-PFX',
- map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
- ['-Wl,-Bstatic']) # : unchecked ;
- flags(toolset_link, 'FINDLIBS-SA-PFX',
- map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
- ['-Wl,-Bdynamic']) # : unchecked ;
- flags(toolset_link, 'OPTIONS',
- map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
- ['-Wl,-Bstatic']) # : unchecked ;
-
- elif linker == 'darwin':
- # On Darwin, the -s option to ld does not work unless we pass -static,
-        # and passing -static unconditionally is a bad idea. So, don't pass -s
-        # at all; darwin.jam will use a separate 'strip' invocation.
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
- flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
-
- elif linker == 'osf':
- # No --strip-all, just -s.
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
- # : unchecked ;
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
-        # This linker does not support -R.
- flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ;
- # -rpath-link is not supported at all.
-
- elif linker == 'sun':
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
- # : unchecked ;
- flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
-        # The Solaris linker does not have a separate -rpath-link, but allows
-        # -L to be used for the same purpose.
- flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ;
-
- # This permits shared libraries with non-PIC code on Solaris.
-        # VP, 2004/09/07: Now that we have -fPIC hardcoded in link.dll, the
- # following is not needed. Whether -fPIC should be hardcoded, is a
- # separate question.
- # AH, 2004/10/16: it is still necessary because some tests link against
- # static libraries that were compiled without PIC.
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text'])
- # : unchecked ;
-
- elif linker == 'hpux':
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition),
- ['-Wl,-s']) # : unchecked ;
- flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition),
- ['-fPIC']) # : unchecked ;
-
- else:
- # FIXME:
- errors.user_error(
- "$(toolset) initialization: invalid linker '$(linker)' " +
- "The value '$(linker)' specified for <linker> is not recognized. " +
- "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'")
-
-# Declare actions for linking.
-def gcc_link(targets, sources, properties):
- engine = get_manager().engine()
- engine.set_target_variable(targets, 'SPACE', ' ')
- # Serialize execution of the 'link' action, since running N links in
-    # parallel is just slower. For now, serialize only gcc links; it might be a
- # good idea to serialize all links.
- engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
-
-engine.register_action(
- 'gcc.link',
- '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
- '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
- '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' +
- '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
- '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
- '$(OPTIONS) $(USER_OPTIONS)',
- function=gcc_link,
- bound_list=['LIBRARIES'])
-
-# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
-# does not have the same logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
-# always available.
-__AR = 'ar'
-
-flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>'])
-
-def gcc_archive(targets, sources, properties):
-    # Always remove the archive and start again. Here's the rationale from
- #
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a. I moved a1.c to
- # a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
- # errors. After some debugging I traced it back to the fact that a1.o was
- # *still* in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM onto the
- # archive action. That makes archives fail to build on NT when they have
- # many files because it will no longer execute the action directly and blow
- # the line length limit. Instead we remove the file in a different action,
- # just before building the archive.
- clean = targets[0] + '(clean)'
- bjam.call('TEMPORARY', clean)
- bjam.call('NOCARE', clean)
- engine = get_manager().engine()
- engine.set_target_variable('LOCATE', clean, bjam.call('get-target-variable', targets, 'LOCATE'))
- engine.add_dependency(clean, sources)
- engine.add_dependency(targets, clean)
- engine.set_update_action('common.RmTemps', clean, targets)
-
-# Declare action for creating static libraries.
-# The letter 'r' means to add files to the archive with replacement. Since we
-# remove the archive, we don't care about replacement, but there's no option
-# "add without replacement".
-# The letter 'c' suppresses the warning in case the archive does not exist yet.
-# That warning is produced only on some platforms, for whatever reason.
-engine.register_action('gcc.archive',
- '"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"',
- function=gcc_archive,
- flags=['piecemeal'])
-
-def gcc_link_dll(targets, sources, properties):
- engine = get_manager().engine()
- engine.set_target_variable(targets, 'SPACE', ' ')
- engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')
- engine.set_target_variable(targets, "HAVE_SONAME", HAVE_SONAME)
- engine.set_target_variable(targets, "SONAME_OPTION", SONAME_OPTION)
-
-engine.register_action(
- 'gcc.link.dll',
- # Differ from 'link' above only by -shared.
- '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
- '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
- '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' +
- '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' +
- '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
- '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
- '$(OPTIONS) $(USER_OPTIONS)',
- function = gcc_link_dll,
- bound_list=['LIBRARIES'])
-
-# Set up threading support. It's somewhat contrived, so perform it at the end,
-# to avoid cluttering other code.
-
-if on_windows():
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
-elif bjam.variable('UNIX'):
- jamuname = bjam.variable('JAMUNAME')
- host_os_name = jamuname[0]
- if host_os_name.startswith('SunOS'):
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
- flags('gcc', 'FINDLIBS-SA', [], ['rt'])
- elif host_os_name == 'BeOS':
- # BeOS has no threading options, don't set anything here.
- pass
- elif host_os_name.endswith('BSD'):
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
- # there is no -lrt on BSD
- elif host_os_name == 'DragonFly':
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
- # there is no -lrt on BSD - DragonFly is a FreeBSD variant,
-        # which annoyingly doesn't say it's a *BSD.
- elif host_os_name == 'IRIX':
- # gcc on IRIX does not support multi-threading, don't set anything here.
- pass
- elif host_os_name == 'Darwin':
- # Darwin has no threading options, don't set anything here.
- pass
- else:
- flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
- flags('gcc', 'FINDLIBS-SA', [], ['rt'])
-
-def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
- #FIXME: for some reason this fails. Probably out of date feature code
-## if default:
-## flags(toolset, variable,
-## ['<architecture>' + architecture + '/<instruction-set>'],
-## values)
- flags(toolset, variable,
- #FIXME: same as above
- [##'<architecture>/<instruction-set>' + instruction_set,
- '<architecture>' + architecture + '/<instruction-set>' + instruction_set],
- values)
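For reference (an illustrative expansion, not an extra registration to be added), with the default handling commented out as above, a call such as cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686']) below reduces to one flags() registration using the flags function imported at the top of this module:

flags('gcc', 'OPTIONS',
      ['<architecture>x86/<instruction-set>i686'],
      ['-march=i686'])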
-
-# Set architecture/instruction-set options.
-#
-# x86 and compatible
-flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
-flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i386', ['-march=i386'], default=True)
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp'])
-##
-cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3'])
-cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2'])
-# Sparc
-flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32'])
-flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True)
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc'])
-cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3'])
-# RS/6000 & PowerPC
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32'])
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64'])
-cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403'])
-cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505'])
-cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601'])
-cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602'])
-cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603'])
-cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e'])
-cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604'])
-cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e'])
-cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620'])
-cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630'])
-cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740'])
-cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400'])
-cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450'])
-cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750'])
-cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801'])
-cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821'])
-cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823'])
-cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860'])
-cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970'])
-cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc'])
-cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64'])
-# AIX variant of RS/6000 & PowerPC
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32'])
-flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64'])
-flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X 64'])
diff --git a/tools/build/v2/tools/intel-darwin.jam b/tools/build/v2/tools/intel-darwin.jam
deleted file mode 100644
index aa0fd8fb6d..0000000000
--- a/tools/build/v2/tools/intel-darwin.jam
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-import intel ;
-import feature : feature ;
-import os ;
-import toolset ;
-import toolset : flags ;
-import gcc ;
-import common ;
-import errors ;
-import generators ;
-
-feature.extend-subfeature toolset intel : platform : darwin ;
-
-toolset.inherit-generators intel-darwin
- <toolset>intel <toolset-intel:platform>darwin
- : gcc
- # Don't inherit PCH generators. They were not tested, and probably
- # don't work for this compiler.
- : gcc.mingw.link gcc.mingw.link.dll gcc.compile.c.pch gcc.compile.c++.pch
- ;
-
-generators.override intel-darwin.prebuilt : builtin.lib-generator ;
-generators.override intel-darwin.prebuilt : builtin.prebuilt ;
-generators.override intel-darwin.searched-lib-generator : searched-lib-generator ;
-
-toolset.inherit-rules intel-darwin : gcc ;
-toolset.inherit-flags intel-darwin : gcc
- : <inlining>off <inlining>on <inlining>full <optimization>space
- <warnings>off <warnings>all <warnings>on
- <architecture>x86/<address-model>32
- <architecture>x86/<address-model>64
- ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# vectorization diagnostics
-feature vectorize : off on full ;
-
-# Initializes the intel-darwin toolset
-# version is mandatory
-# name (default icc) is used to invoke the specified intel compiler
-# compile and link options allow you to specify additional command line options for each version
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters intel-darwin
- : version $(version) ] ;
-
- command = [ common.get-invocation-command intel-darwin : icc
- : $(command) : /opt/intel_cc_80/bin ] ;
-
- common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
-
- gcc.init-link-flags intel-darwin darwin $(condition) ;
-
- # handle <library-path>
- # local library-path = [ feature.get-values <library-path> : $(options) ] ;
- # flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
-
- local root = [ feature.get-values <root> : $(options) ] ;
- local bin ;
- if $(command) || $(root)
- {
- bin ?= [ common.get-absolute-tool-path $(command[-1]) ] ;
- root ?= $(bin:D) ;
-
- if $(root)
- {
- # Libraries required to run the executable may be in either
- # $(root)/lib (10.1 and earlier)
- # or
-            # $(root)/lib/architecture-name (11.0 and later)
- local lib_path = $(root)/lib $(root:P)/lib/$(bin:B) ;
- if $(.debug-configuration)
- {
- ECHO notice: using intel libraries :: $(condition) :: $(lib_path) ;
- }
- flags intel-darwin.link RUN_PATH $(condition) : $(lib_path) ;
- }
- }
-
- local m = [ MATCH (..).* : $(version) ] ;
- local n = [ MATCH (.)\\. : $(m) ] ;
- if $(n) {
- m = $(n) ;
- }
-
- local major = $(m) ;
-
- if $(major) = "9" {
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -Ob0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -Ob1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -Ob2 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-libcxa -lstdc++ -lpthread ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-libcxa -lstdc++ -lpthread ;
- }
- else {
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>off : -inline-level=0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>on : -inline-level=1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<inlining>full : -inline-level=2 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>off : -vec-report0 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>on : -vec-report1 ;
- flags intel-darwin.compile OPTIONS $(condition)/<vectorize>full : -vec-report5 ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>static : -static -static-intel -lstdc++ -lpthread ;
- flags intel-darwin.link OPTIONS $(condition)/<runtime-link>shared : -shared-intel -lstdc++ -lpthread ;
- }
-
- local minor = [ MATCH ".*\\.(.).*" : $(version) ] ;
-
- # wchar_t char_traits workaround for compilers older than 10.2
- if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
- flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
- }
-}
-
-SPACE = " " ;
-
-flags intel-darwin.compile OPTIONS <cflags> ;
-flags intel-darwin.compile OPTIONS <cxxflags> ;
-# flags intel-darwin.compile INCLUDES <include> ;
-
-flags intel-darwin.compile OPTIONS <optimization>space : -O1 ; # no specific space optimization flag in icc
-
-#
-cpu-type-em64t = prescott nocona ;
-flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>32 : -m32 ; # -mcmodel=small ;
-flags intel-darwin.compile OPTIONS <instruction-set>$(cpu-type-em64t)/<address-model>64 : -m64 ; # -mcmodel=large ;
-
-flags intel-darwin.compile.c OPTIONS <warnings>off : -w0 ;
-flags intel-darwin.compile.c OPTIONS <warnings>on : -w1 ;
-flags intel-darwin.compile.c OPTIONS <warnings>all : -w2 ;
-
-flags intel-darwin.compile.c++ OPTIONS <warnings>off : -w0 ;
-flags intel-darwin.compile.c++ OPTIONS <warnings>on : -w1 ;
-flags intel-darwin.compile.c++ OPTIONS <warnings>all : -w2 ;
-
-actions compile.c
-{
- "$(CONFIG_COMMAND)" -xc $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -xc++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-flags intel-darwin ARFLAGS <archiveflags> ;
-
-# Default value. Mostly for the sake of intel-linux
-# that inherits from gcc, but does not have the same
-# logic to set the .AR variable. We can put the same
-# logic in intel-linux, but that's hardly worth the trouble
-# as on Linux, 'ar' is always available.
-.AR = ar ;
-
-rule archive ( targets * : sources * : properties * )
-{
- # Always remove archive and start again. Here's rationale from
- # Andre Hentz:
- #
- # I had a file, say a1.c, that was included into liba.a.
- # I moved a1.c to a2.c, updated my Jamfiles and rebuilt.
- # My program was crashing with absurd errors.
- # After some debugging I traced it back to the fact that a1.o was *still*
- # in liba.a
- #
- # Rene Rivera:
- #
- # Originally removing the archive was done by splicing an RM
- # onto the archive action. That makes archives fail to build on NT
- # when they have many files because it will no longer execute the
- # action directly and blow the line length limit. Instead we
- # remove the file in a different action, just before the building
- # of the archive.
- #
- local clean.a = $(targets[1])(clean) ;
- TEMPORARY $(clean.a) ;
- NOCARE $(clean.a) ;
- LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ;
- DEPENDS $(clean.a) : $(sources) ;
- DEPENDS $(targets) : $(clean.a) ;
- common.RmTemps $(clean.a) : $(targets) ;
-}
-
-actions piecemeal archive
-{
- "$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
- "ranlib" -cs "$(<)"
-}
-
-flags intel-darwin.link USER_OPTIONS <linkflags> ;
-
-# Declare actions for linking
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since
- # running N links in parallel is just slower.
- JAM_SEMAPHORE on $(targets) = <s>intel-darwin-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) $(OPTIONS)
-}
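
For reference, the intel-darwin module above is driven from a user-config.jam (or site-config.jam) through the standard 'using' statement handled by its init rule; when no command is given, the module also searches /opt/intel_cc_80/bin, as seen above. A minimal sketch with an illustrative version number and install path:

    # user-config.jam -- hypothetical version and path
    using intel-darwin : 10.1 : /opt/intel/cc/10.1/bin/icc ;
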
diff --git a/tools/build/v2/tools/intel-win.jam b/tools/build/v2/tools/intel-win.jam
deleted file mode 100644
index c9adac0d96..0000000000
--- a/tools/build/v2/tools/intel-win.jam
+++ /dev/null
@@ -1,184 +0,0 @@
-# Copyright Vladimir Prus 2004.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt
-# or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# Importing common is needed because the rules we inherit here depend on it.
-# That is nasty.
-import common ;
-import errors ;
-import feature ;
-import intel ;
-import msvc ;
-import os ;
-import toolset ;
-import generators ;
-import type ;
-
-feature.extend-subfeature toolset intel : platform : win ;
-
-toolset.inherit-generators intel-win <toolset>intel <toolset-intel:platform>win : msvc ;
-toolset.inherit-flags intel-win : msvc : : YLOPTION ;
-toolset.inherit-rules intel-win : msvc ;
-
-# Override default do-nothing generators.
-generators.override intel-win.compile.c.pch : pch.default-c-pch-generator ;
-generators.override intel-win.compile.c++.pch : pch.default-cpp-pch-generator ;
-generators.override intel-win.compile.rc : rc.compile.resource ;
-generators.override intel-win.compile.mc : mc.compile ;
-
-toolset.flags intel-win.compile PCH_SOURCE <pch>on : <pch-source> ;
-
-toolset.add-requirements <toolset>intel-win,<runtime-link>shared:<threading>multi ;
-
-# Initializes the intel toolset for Windows.
-rule init ( version ? : # the compiler version
- command * : # the command to invoke the compiler itself
- options * # Additional option: <compatibility>
- # either 'vc6', 'vc7', 'vc7.1'
-                            # or 'native' (default).
- )
-{
- local compatibility =
- [ feature.get-values <compatibility> : $(options) ] ;
- local condition = [ common.check-init-parameters intel-win
- : version $(version) : compatibility $(compatibility) ] ;
-
- command = [ common.get-invocation-command intel-win : icl.exe :
- $(command) ] ;
-
- common.handle-options intel-win : $(condition) : $(command) : $(options) ;
-
- local root ;
- if $(command)
- {
- root = [ common.get-absolute-tool-path $(command[-1]) ] ;
- root = $(root)/ ;
- }
-
- local setup ;
- setup = [ GLOB $(root) : iclvars_*.bat ] ;
- if ! $(setup)
- {
- setup = $(root)/iclvars.bat ;
- }
- setup = "call \""$(setup)"\" > nul " ;
-
- if [ os.name ] = NT
- {
- setup = $(setup)"
-" ;
- }
- else
- {
- setup = "cmd /S /C "$(setup)" \"&&\" " ;
- }
-
- toolset.flags intel-win.compile .CC $(condition) : $(setup)icl ;
- toolset.flags intel-win.link .LD $(condition) : $(setup)xilink ;
- toolset.flags intel-win.archive .LD $(condition) : $(setup)xilink /lib ;
- toolset.flags intel-win.link .MT $(condition) : $(setup)mt -nologo ;
- toolset.flags intel-win.compile .MC $(condition) : $(setup)mc ;
- toolset.flags intel-win.compile .RC $(condition) : $(setup)rc ;
-
- local m = [ MATCH (.).* : $(version) ] ;
- local major = $(m[1]) ;
-
- local C++FLAGS ;
-
- C++FLAGS += /nologo ;
-
- # Reduce the number of spurious error messages
- C++FLAGS += /Qwn5 /Qwd985 ;
-
- # Enable ADL
- C++FLAGS += -Qoption,c,--arg_dep_lookup ; #"c" works for C++, too
-
- # Disable Microsoft "secure" overloads in Dinkumware libraries since they
- # cause compile errors with Intel versions 9 and 10.
- C++FLAGS += -D_SECURE_SCL=0 ;
-
- if $(major) > 5
- {
- C++FLAGS += /Zc:forScope ; # Add support for correct for loop scoping.
- }
-
- # Add options recognized only by intel7 and above.
- if $(major) >= 7
- {
- C++FLAGS += /Qansi_alias ;
- }
-
- if $(compatibility) = vc6
- {
- C++FLAGS +=
- # Emulate VC6
- /Qvc6
-
- # No wchar_t support in vc6 dinkum library. Furthermore, in vc6
- # compatibility-mode, wchar_t is not a distinct type from unsigned
- # short.
- -DBOOST_NO_INTRINSIC_WCHAR_T
- ;
- }
- else
- {
- if $(major) > 5
- {
- # Add support for wchar_t
- C++FLAGS += /Zc:wchar_t
- # Tell the dinkumware library about it.
- -D_NATIVE_WCHAR_T_DEFINED
- ;
- }
- }
-
- if $(compatibility) && $(compatibility) != native
- {
- C++FLAGS += /Q$(base-vc) ;
- }
- else
- {
- C++FLAGS +=
- -Qoption,cpp,--arg_dep_lookup
- # The following options were intended to disable the Intel compiler's
- # 'bug-emulation' mode, but were later reported to be causing ICE with
- # Intel-Win 9.0. It is not yet clear which options can be safely used.
- # -Qoption,cpp,--const_string_literals
- # -Qoption,cpp,--new_for_init
- # -Qoption,cpp,--no_implicit_typename
- # -Qoption,cpp,--no_friend_injection
- # -Qoption,cpp,--no_microsoft_bugs
- ;
- }
-
- toolset.flags intel-win CFLAGS $(condition) : $(C++FLAGS) ;
- # By default, when creating PCH, intel adds 'i' to the explicitly
- # specified name of the PCH file. Of course, Boost.Build is not
-    # happy when the compiler does not produce the file it was asked for.
- # The option below stops this behaviour.
- toolset.flags intel-win CFLAGS : -Qpchi- ;
-
- if ! $(compatibility)
- {
- # If there's no backend version, assume 10.
- compatibility = vc10 ;
- }
-
- local extract-version = [ MATCH ^vc(.*) : $(compatibility) ] ;
- if ! $(extract-version)
- {
- errors.user-error "Invalid value for compatibility option:"
- $(compatibility) ;
- }
-
-    # Depending on the settings, running tests may require some runtime DLLs.
- toolset.flags intel-win RUN_PATH $(condition) : $(root) ;
-
- msvc.configure-version-specific intel-win : $(extract-version[1]) : $(condition) ;
-}
-
-toolset.flags intel-win.link LIBRARY_OPTION <toolset>intel : "" ;
-
-toolset.flags intel-win YLOPTION ;
-
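
The intel-win module above follows the same pattern; its init rule additionally accepts a <compatibility> option ('vc6', 'vc7', 'vc7.1' or 'native'). A sketch of a user-config.jam entry, with an illustrative version and assuming icl.exe is found on the PATH:

    # user-config.jam -- hypothetical version
    using intel-win : 9.1 : icl.exe : <compatibility>vc7.1 ;
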
diff --git a/tools/build/v2/tools/make.jam b/tools/build/v2/tools/make.jam
deleted file mode 100644
index 085672857b..0000000000
--- a/tools/build/v2/tools/make.jam
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Douglas Gregor
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'make' main target rule.
-
-import "class" : new ;
-import errors : error ;
-import project ;
-import property ;
-import property-set ;
-import regex ;
-import targets ;
-
-
-class make-target-class : basic-target
-{
- import type regex virtual-target ;
- import "class" : new ;
-
- rule __init__ ( name : project : sources * : requirements *
- : default-build * : usage-requirements * )
- {
- basic-target.__init__ $(name) : $(project) : $(sources) :
- $(requirements) : $(default-build) : $(usage-requirements) ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- local action-name = [ $(property-set).get <action> ] ;
- # 'm' will always be set -- we add '@' ourselves in the 'make' rule
- # below.
- local m = [ MATCH ^@(.*) : $(action-name) ] ;
-
- local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
- local t = [ new file-target $(self.name) exact : [ type.type
- $(self.name) ] : $(self.project) : $(a) ] ;
- return [ property-set.empty ] [ virtual-target.register $(t) ] ;
- }
-}
-
-
-# Declares the 'make' main target.
-#
-rule make ( target-name : sources * : generating-rule + : requirements * :
- usage-requirements * )
-{
- local project = [ project.current ] ;
-
-    # The '@' sign causes the feature.jam module to qualify the rule name with
-    # the module name of the current project, if needed.
- local m = [ MATCH ^(@).* : $(generating-rule) ] ;
- if ! $(m)
- {
- generating-rule = @$(generating-rule) ;
- }
- requirements += <action>$(generating-rule) ;
-
- targets.main-target-alternative
- [ new make-target-class $(target-name) : $(project)
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build : $(project) ]
- : [ targets.main-target-usage-requirements $(usage-requirements) :
- $(project) ] ] ;
-}
-
-
-IMPORT $(__name__) : make : : make ;
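
A sketch of how the 'make' main target rule above is typically used from a Jamfile; the file names and the copy-config action are hypothetical, and the action body assumes a Unix-like shell:

    # Jamfile -- generate config.h from config.h.in with a custom action
    make config.h : config.h.in : @copy-config ;
    actions copy-config
    {
        cp "$(>)" "$(<)"
    }
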
diff --git a/tools/build/v2/tools/message.jam b/tools/build/v2/tools/message.jam
deleted file mode 100644
index 212d8542cd..0000000000
--- a/tools/build/v2/tools/message.jam
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2008 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Defines main target type 'message', that prints a message when built for the
-# first time.
-
-import project ;
-import "class" : new ;
-import targets ;
-import property-set ;
-
-class message-target-class : basic-target
-{
- rule __init__ ( name-and-dir : project : * )
- {
- basic-target.__init__ $(name-and-dir) : $(project) ;
- self.3 = $(3) ;
- self.4 = $(4) ;
- self.5 = $(5) ;
- self.6 = $(6) ;
- self.7 = $(7) ;
- self.8 = $(8) ;
- self.9 = $(9) ;
- self.built = ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- if ! $(self.built)
- {
- for i in 3 4 5 6 7 8 9
- {
- if $(self.$(i))
- {
- ECHO $(self.$(i)) ;
- }
- }
- self.built = 1 ;
- }
-
- return [ property-set.empty ] ;
- }
-}
-
-
-rule message ( name : * )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new message-target-class $(name) : $(project)
- : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) ] ;
-}
-IMPORT $(__name__) : message : : message ;
\ No newline at end of file
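
A sketch of how the 'message' rule above is used from a Jamfile; the target name and message text are hypothetical:

    # Jamfile -- print a one-time notice when the target is built
    message mpi-notice : "warning: MPI is not configured; Boost.MPI targets will be skipped." ;
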
diff --git a/tools/build/v2/tools/midl.py b/tools/build/v2/tools/midl.py
deleted file mode 100644
index 45811d16bc..0000000000
--- a/tools/build/v2/tools/midl.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright (c) 2005 Alexey Pakhunov.
-# Copyright (c) 2011 Juraj Ivancic
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Microsoft Interface Definition Language (MIDL) related routines
-from b2.build import scanner, type
-from b2.build.toolset import flags
-from b2.build.feature import feature
-from b2.manager import get_manager
-from b2.tools import builtin, common
-from b2.util import regex
-
-def init():
- pass
-
-type.register('IDL', ['idl'])
-
-# A type library (.tlb) is generated by the MIDL compiler and can be included
-# in the resources of an application (.rc). In order to be found by the resource
-# compiler, its target type should be derived from 'H' - otherwise
-# the property '<implicit-dependency>' will be ignored.
-type.register('MSTYPELIB', 'tlb', 'H')
-
-# Register scanner for MIDL files
-class MidlScanner(scanner.Scanner):
- def __init__ (self, includes=[]):
- scanner.Scanner.__init__(self)
- self.includes = includes
-
- # List of quoted strings
- re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ;
-
- # 'import' and 'importlib' directives
- self.re_import = "import" + re_strings + "[ \t]*;" ;
- self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;" ;
-
- # C preprocessor 'include' directive
- self.re_include_angle = "#[ \t]*include[ \t]*<(.*)>" ;
- self.re_include_quoted = "#[ \t]*include[ \t]*\"(.*)\"" ;
-
- def pattern():
- # Match '#include', 'import' and 'importlib' directives
- return "((#[ \t]*include|import(lib)?).+(<(.*)>|\"(.*)\").+)"
-
- def process(self, target, matches, binding):
- included_angle = regex.transform(matches, self.re_include_angle)
- included_quoted = regex.transform(matches, self.re_include_quoted)
- imported = regex.transform(matches, self.re_import, [1, 3])
- imported_tlbs = regex.transform(matches, self.re_importlib, [1, 3])
-
-        # CONSIDER: the new scoping rule seems to defeat "on target" variables.
- g = bjam.call('get-target-variable', target, 'HDRGRIST')
- b = os.path.normalize_path(os.path.dirname(binding))
-
- # Attach binding of including file to included targets.
- # When target is directly created from virtual target
- # this extra information is unnecessary. But in other
-        # cases, it allows distinguishing between two headers of the
- # same name included from different places.
- g2 = g + "#" + b
-
- g = "<" + g + ">"
- g2 = "<" + g2 + ">"
-
- included_angle = [ g + x for x in included_angle ]
- included_quoted = [ g + x for x in included_quoted ]
- imported = [ g + x for x in imported ]
- imported_tlbs = [ g + x for x in imported_tlbs ]
-
- all = included_angle + included_quoted + imported
-
- bjam.call('INCLUDES', [target], all)
- bjam.call('DEPENDS', [target], imported_tlbs)
- bjam.call('NOCARE', all + imported_tlbs)
- engine.set_target_variable(included_angle , 'SEARCH', ungrist(self.includes))
- engine.set_target_variable(included_quoted, 'SEARCH', b + ungrist(self.includes))
- engine.set_target_variable(imported , 'SEARCH', b + ungrist(self.includes))
- engine.set_target_variable(imported_tlbs , 'SEARCH', b + ungrist(self.includes))
-
- get_manager().scanners().propagate(type.get_scanner('CPP', PropertySet(self.includes)), included_angle + included_quoted)
- get_manager().scanners().propagate(self, imported)
-
-scanner.register(MidlScanner, 'include')
-type.set_scanner('IDL', MidlScanner)
-
-
-# Command line options
-feature('midl-stubless-proxy', ['yes', 'no'], ['propagated'] )
-feature('midl-robust', ['yes', 'no'], ['propagated'] )
-
-flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>yes'], ['/Oicf' ])
-flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-stubless-proxy>no' ], ['/Oic' ])
-flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>yes' ], ['/robust' ])
-flags('midl.compile.idl', 'MIDLFLAGS', ['<midl-robust>no' ], ['/no_robust'])
-
-# Architecture-specific options
-architecture_x86 = ['<architecture>' , '<architecture>x86']
-address_model_32 = ['<address-model>', '<address-model>32']
-address_model_64 = ['<address-model>', '<address-model>64']
-
-flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/' + m for ar in architecture_x86 for m in address_model_32 ], ['/win32'])
-flags('midl.compile.idl', 'MIDLFLAGS', [ar + '/<address-model>64' for ar in architecture_x86], ['/x64'])
-flags('midl.compile.idl', 'MIDLFLAGS', ['<architecture>ia64/' + m for m in address_model_64], ['/ia64'])
-
-flags('midl.compile.idl', 'DEFINES', [], ['<define>'])
-flags('midl.compile.idl', 'UNDEFS', [], ['<undef>'])
-flags('midl.compile.idl', 'INCLUDES', [], ['<include>'])
-
-
-builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], [])
-
-
-# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior
-# depends on the contents of the source IDL file. Calling TOUCH_FILE below ensures
-# that both files will be created so bjam will not try to recreate them
-# constantly.
-get_manager().engine().register_action(
- 'midl.compile.idl',
- '''midl /nologo @"@($(<[1]:W).rsp:E=
-"$(>:W)"
--D$(DEFINES)
-"-I$(INCLUDES)"
--U$(UNDEFS)
-$(MIDLFLAGS)
-/tlb "$(<[1]:W)"
-/h "$(<[2]:W)"
-/iid "$(<[3]:W)"
-/proxy "$(<[4]:W)"
-/dlldata "$(<[5]:W)")"
-{touch} "$(<[4]:W)"
-{touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
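
This Python module mirrors the midl.jam toolset: IDL sources listed on a target are fed through the registered midl.compile.idl generator, which produces the type library, header and stub sources. A hedged sketch of a Jamfile using it, with hypothetical target and file names (the two midl-* features are the ones declared above):

    # Jamfile -- an IDL file compiled alongside ordinary C++ sources
    lib my_com_server
        : server.cpp my_interfaces.idl
        : <midl-stubless-proxy>yes <midl-robust>no
        ;
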
diff --git a/tools/build/v2/tools/mpi.jam b/tools/build/v2/tools/mpi.jam
deleted file mode 100644
index 0fe490becd..0000000000
--- a/tools/build/v2/tools/mpi.jam
+++ /dev/null
@@ -1,583 +0,0 @@
-# Support for the Message Passing Interface (MPI)
-#
-# (C) Copyright 2005, 2006 Trustees of Indiana University
-# (C) Copyright 2005 Douglas Gregor
-#
-# Distributed under the Boost Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
-#
-# Authors: Douglas Gregor
-# Andrew Lumsdaine
-#
-# ==== MPI Configuration ====
-#
-# For many users, MPI support can be enabled simply by adding the following
-# line to your user-config.jam file:
-#
-# using mpi ;
-#
-# This should auto-detect MPI settings based on the MPI wrapper compiler in
-# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
-# has a different name, you can pass the name of the wrapper compiler as the
-# first argument to the mpi module:
-#
-# using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
-#
-# If your MPI implementation does not have a wrapper compiler, or the MPI
-# auto-detection code does not work with your MPI's wrapper compiler,
-# you can pass MPI-related options explicitly via the second parameter to the
-# mpi module:
-#
-# using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
-# <find-shared-library>mpi <find-shared-library>lam
-# <find-shared-library>dl ;
-#
-# To see the results of MPI auto-detection, pass "--debug-configuration" on
-# the bjam command line.
-#
-# The (optional) third argument configures Boost.MPI for running
-# regression tests. These parameters specify the executable used to
-# launch jobs (default: "mpirun") followed by any necessary arguments
-# to this to run tests and tell the program to expect the number of
-# processors to follow (default: "-np"). With the default parameters,
-# for instance, the test harness will execute, e.g.,
-#
-# mpirun -np 4 all_gather_test
-#
-# ==== Linking Against the MPI Libraries ====
-#
-# To link against the MPI libraries, import the "mpi" module and add the
-# following requirement to your target:
-#
-# <library>/mpi//mpi
-#
-# Since MPI support is not always available, you should check
-# "mpi.configured" before trying to link against the MPI libraries.
-
-import "class" : new ;
-import common ;
-import feature : feature ;
-import generators ;
-import os ;
-import project ;
-import property ;
-import testing ;
-import toolset ;
-import type ;
-import path ;
-
-# Make this module a project
-project.initialize $(__name__) ;
-project mpi ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# Assuming the first part of the command line is the given prefix
-# followed by some non-empty value, remove the first argument. Returns
-# either nothing (if there was no prefix or no value) or a pair
-#
-# <name>value rest-of-cmdline
-#
-# This is a subroutine of cmdline_to_features
-rule add_feature ( prefix name cmdline )
-{
- local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
-
- # If there was no value associated with the prefix, abort
- if ! $(match) {
- return ;
- }
-
- local value = $(match[1]) ;
-
- if [ MATCH " +" : $(value) ] {
- value = "\"$(value)\"" ;
- }
-
- return "<$(name)>$(value)" $(match[2]) ;
-}
-
-# Strip any end-of-line characters off the given string and return the
-# result.
-rule strip-eol ( string )
-{
- local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;
-
- if $(match)
- {
- return $(match[1]) ;
- }
- else
- {
- return $(string) ;
- }
-}
-
-# Split a command-line into a set of features. Certain kinds of
-# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
-# with their Boost.Build equivalents (e.g., <include>, <define>,
-# <library-path>, <find-library>). All other arguments are introduced
-# using the features in the unknown-features parameter, because we
-# don't know how to deal with them. The incoming command line should be a
-# string starting with an executable (e.g., "g++ -I/include/path") and may
-# contain any
-# number of command-line arguments thereafter. The result is a list of
-# features corresponding to the given command line, ignoring the
-# executable.
-rule cmdline_to_features ( cmdline : unknown-features ? )
-{
- local executable ;
- local features ;
- local otherflags ;
- local result ;
-
- unknown-features ?= <cxxflags> <linkflags> ;
-
- # Pull the executable out of the command line. At this point, the
- # executable is just thrown away.
- local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
- executable = $(match[1]) ;
- cmdline = $(match[2]) ;
-
- # List the prefix/feature pairs that we will be able to transform.
- # Every kind of parameter not mentioned here will be placed in both
- # cxxflags and linkflags, because we don't know where they should go.
- local feature_kinds-D = "define" ;
- local feature_kinds-I = "include" ;
- local feature_kinds-L = "library-path" ;
- local feature_kinds-l = "find-shared-library" ;
-
- while $(cmdline) {
-
- # Check for one of the feature prefixes we know about. If we
- # find one (and the associated value is nonempty), convert it
- # into a feature.
- local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
- local matched ;
- if $(match) && $(match[2]) {
- local prefix = $(match[1]) ;
- if $(feature_kinds$(prefix)) {
- local name = $(feature_kinds$(prefix)) ;
- local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;
-
- if $(add) {
-
- if $(add[1]) = <find-shared-library>pthread
- {
- # Uhm. It's not really nice that this MPI implementation
- # uses -lpthread as opposed to -pthread. We do want to
- # set <threading>multi, instead of -lpthread.
- result += "<threading>multi" ;
- MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
- }
- else
- {
- result += $(add[1]) ;
- }
-
- cmdline = $(add[2]) ;
- matched = yes ;
- }
- }
- }
-
- # If we haven't matched a feature prefix, just grab the command-line
- # argument itself. If we can map this argument to a feature
- # (e.g., -pthread -> <threading>multi), then do so; otherwise,
-        # add it to the list of "other" flags that we don't
- # understand.
- if ! $(matched) {
- match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
- local value = $(match[1]) ;
- cmdline = $(match[2]) ;
-
- # Check for multithreading support
- if $(value) = "-pthread" || $(value) = "-pthreads"
- {
- result += "<threading>multi" ;
-
- # DPG: This is a hack intended to work around a BBv2 bug where
- # requirements propagated from libraries are not checked for
- # conflicts when BBv2 determines which "common" properties to
- # apply to a target. In our case, the <threading>single property
- # gets propagated from the common properties to Boost.MPI
- # targets, even though <threading>multi is in the usage
- # requirements of <library>/mpi//mpi.
- MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
- }
- else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
- otherflags += $(value) ;
- }
- }
- }
-
- # If there are other flags that we don't understand, add them to the
- # result as both <cxxflags> and <linkflags>
- if $(otherflags) {
- for unknown in $(unknown-features)
- {
- result += "$(unknown)$(otherflags:J= )" ;
- }
- }
-
- return $(result) ;
-}
-
-# Determine if it is safe to execute the given shell command by trying
-# to execute it and determining whether the exit code is zero or
-# not. Returns true for an exit code of zero, false otherwise.
-local rule safe-shell-command ( cmdline )
-{
- local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
- return [ MATCH ".*(SSCOK).*" : $(result) ] ;
-}
-
-# Initialize the MPI module.
-rule init ( mpicxx ? : options * : mpirun-with-options * )
-{
- if ! $(options) && $(.debug-configuration)
- {
- ECHO "===============MPI Auto-configuration===============" ;
- }
-
- if ! $(mpicxx) && [ os.on-windows ]
- {
- # Try to auto-configure to the Microsoft Compute Cluster Pack
- local cluster_pack_path_native = "C:\\Program Files\\Microsoft Compute Cluster Pack" ;
- local cluster_pack_path = [ path.make $(cluster_pack_path_native) ] ;
- if [ GLOB $(cluster_pack_path_native)\\Include : mpi.h ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found Microsoft Compute Cluster Pack: $(cluster_pack_path_native)" ;
- }
-
- # Pick up either the 32-bit or 64-bit library, depending on which address
- # model the user has selected. Default to 32-bit.
- options = <include>$(cluster_pack_path)/Include
- <address-model>64:<library-path>$(cluster_pack_path)/Lib/amd64
- <library-path>$(cluster_pack_path)/Lib/i386
- <find-static-library>msmpi
- <toolset>msvc:<define>_SECURE_SCL=0
- ;
-
- # Setup the "mpirun" equivalent (mpiexec)
- .mpirun = "\"$(cluster_pack_path_native)\\Bin\\mpiexec.exe"\" ;
- .mpirun_flags = -n ;
- }
- else if $(.debug-configuration)
- {
- ECHO "Did not find Microsoft Compute Cluster Pack in $(cluster_pack_path_native)." ;
- }
- }
-
- if ! $(options)
- {
- # Try to auto-detect options based on the wrapper compiler
- local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;
-
- if ! $(mpicxx) && ! $(command)
- {
- # Try "mpiCC", which is used by MPICH
- command = [ common.get-invocation-command mpi : mpiCC ] ;
- }
-
- if ! $(mpicxx) && ! $(command)
- {
- # Try "mpicxx", which is used by OpenMPI and MPICH2
- command = [ common.get-invocation-command mpi : mpicxx ] ;
- }
-
- local result ;
- local compile_flags ;
- local link_flags ;
-
- if ! $(command)
- {
- # Do nothing: we'll complain later
- }
- # OpenMPI and newer versions of LAM-MPI have -showme:compile and
- # -showme:link.
- else if [ safe-shell-command "$(command) -showme:compile" ] &&
- [ safe-shell-command "$(command) -showme:link" ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
- }
-
- compile_flags = [ SHELL "$(command) -showme:compile" ] ;
- link_flags = [ SHELL "$(command) -showme:link" ] ;
-
- # Prepend COMPILER as the executable name, to match the format of
- # other compilation commands.
- compile_flags = "COMPILER $(compile_flags)" ;
- link_flags = "COMPILER $(link_flags)" ;
- }
- # Look for LAM-MPI's -showme
- else if [ safe-shell-command "$(command) -showme" ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
- }
-
- result = [ SHELL "$(command) -showme" ] ;
- }
- # Look for MPICH
- else if [ safe-shell-command "$(command) -show" ]
- {
- if $(.debug-configuration)
- {
- ECHO "Found MPICH wrapper compiler: $(command)" ;
- }
- compile_flags = [ SHELL "$(command) -compile_info" ] ;
- link_flags = [ SHELL "$(command) -link_info" ] ;
- }
-    # Sun HPC and IBM POE
- else if [ SHELL "$(command) -v 2>/dev/null" ]
- {
- compile_flags = [ SHELL "$(command) -c -v -xtarget=native64 2>/dev/null" ] ;
-
- local back = [ MATCH "--------------------(.*)" : $(compile_flags) ] ;
- if $(back)
- {
- # Sun HPC
- if $(.debug-configuration)
- {
- ECHO "Found Sun MPI wrapper compiler: $(command)" ;
- }
-
- compile_flags = [ MATCH "(.*)--------------------" : $(back) ] ;
- compile_flags = [ MATCH "(.*)-v" : $(compile_flags) ] ;
- link_flags = [ SHELL "$(command) -v -xtarget=native64 2>/dev/null" ] ;
- link_flags = [ MATCH "--------------------(.*)" : $(link_flags) ] ;
- link_flags = [ MATCH "(.*)--------------------" : $(link_flags) ] ;
-
- # strip out -v from compile options
- local front = [ MATCH "(.*)-v" : $(link_flags) ] ;
- local back = [ MATCH "-v(.*)" : $(link_flags) ] ;
- link_flags = "$(front) $(back)" ;
- front = [ MATCH "(.*)-xtarget=native64" : $(link_flags) ] ;
- back = [ MATCH "-xtarget=native64(.*)" : $(link_flags) ] ;
- link_flags = "$(front) $(back)" ;
- }
- else
- {
-            # IBM POE
- if $(.debug-configuration)
- {
- ECHO "Found IBM MPI wrapper compiler: $(command)" ;
- }
-
- #
- compile_flags = [ SHELL "$(command) -c -v 2>/dev/null" ] ;
- compile_flags = [ MATCH "(.*)exec: export.*" : $(compile_flags) ] ;
- local front = [ MATCH "(.*)-v" : $(compile_flags) ] ;
- local back = [ MATCH "-v(.*)" : $(compile_flags) ] ;
- compile_flags = "$(front) $(back)" ;
- front = [ MATCH "(.*)-c" : $(compile_flags) ] ;
- back = [ MATCH "-c(.*)" : $(compile_flags) ] ;
- compile_flags = "$(front) $(back)" ;
- link_flags = $(compile_flags) ;
-
- # get location of mpif.h from mpxlf
- local f_flags = [ SHELL "mpxlf -v 2>/dev/null" ] ;
- f_flags = [ MATCH "(.*)exec: export.*" : $(f_flags) ] ;
- front = [ MATCH "(.*)-v" : $(f_flags) ] ;
- back = [ MATCH "-v(.*)" : $(f_flags) ] ;
- f_flags = "$(front) $(back)" ;
- f_flags = [ MATCH "xlf_r(.*)" : $(f_flags) ] ;
- f_flags = [ MATCH "-F:mpxlf_r(.*)" : $(f_flags) ] ;
- compile_flags = [ strip-eol $(compile_flags) ] ;
- compile_flags = "$(compile_flags) $(f_flags)" ;
- }
- }
-
- if $(result) || $(compile_flags) && $(link_flags)
- {
- if $(result)
- {
- result = [ strip-eol $(result) ] ;
- options = [ cmdline_to_features $(result) ] ;
- }
- else
- {
- compile_flags = [ strip-eol $(compile_flags) ] ;
- link_flags = [ strip-eol $(link_flags) ] ;
-
- # Separately process compilation and link features, then combine
- # them at the end.
- local compile_features = [ cmdline_to_features $(compile_flags)
- : "<cxxflags>" ] ;
- local link_features = [ cmdline_to_features $(link_flags)
- : "<linkflags>" ] ;
- options = $(compile_features) $(link_features) ;
- }
-
- # If requested, display MPI configuration information.
- if $(.debug-configuration)
- {
- if $(result)
- {
- ECHO " Wrapper compiler command line: $(result)" ;
- }
- else
- {
- local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
- : $(compile_flags) ] ;
- ECHO "MPI compilation flags: $(match[2])" ;
- local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
- : $(link_flags) ] ;
- ECHO "MPI link flags: $(match[2])" ;
- }
- }
- }
- else
- {
- if $(command)
- {
- ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
- ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
- }
- else if $(mpicxx)
- {
- ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
- }
- else
- {
- ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
- }
- ECHO "You will need to manually configure MPI support." ;
- }
-
- }
-
- # Find mpirun (or its equivalent) and its flags
- if ! $(.mpirun)
- {
- .mpirun =
- [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
- .mpirun_flags = $(mpirun-with-options[2-]) ;
- .mpirun_flags ?= -np ;
- }
-
- if $(.debug-configuration)
- {
- if $(options)
- {
- echo "MPI build features: " ;
- ECHO $(options) ;
- }
-
- if $(.mpirun)
- {
- echo "MPI launcher: $(.mpirun) $(.mpirun_flags)" ;
- }
-
- ECHO "====================================================" ;
- }
-
- if $(options)
- {
- .configured = true ;
-
- # Set up the "mpi" alias
- alias mpi : : : : $(options) ;
- }
-}
-
-# States whether MPI has been configured
-rule configured ( )
-{
- return $(.configured) ;
-}
-
-# Returns the "extra" requirements needed to build MPI. These requirements are
-# part of the /mpi//mpi library target, but they need to be added to anything
-# that uses MPI directly to work around bugs in BBv2's propagation of
-# requirements.
-rule extra-requirements ( )
-{
- return $(MPI_EXTRA_REQUIREMENTS) ;
-}
-
-# Support for testing; borrowed from Python
-type.register RUN_MPI_OUTPUT ;
-type.register RUN_MPI : : TEST ;
-
-class mpi-test-generator : generator
-{
- import property-set ;
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- self.composing = true ;
- }
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- # Generate an executable from the sources. This is the executable we will run.
- local executable =
- [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;
-
- result =
- [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
- }
-}
-
-# Use mpi-test-generator to generate MPI tests from sources
-generators.register
- [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;
-
-generators.register-standard testing.expect-success
- : RUN_MPI_OUTPUT : RUN_MPI ;
-
-# The number of processes to spawn when executing an MPI test.
-feature mpi:processes : : free incidental ;
-
-# The flag settings on testing.capture-output do not
-# apply to mpi.capture-output at the moment.
-# Redo this explicitly.
-toolset.flags mpi.capture-output ARGS <testing.arg> ;
-rule capture-output ( target : sources * : properties * )
-{
- # Use the standard capture-output rule to run the tests
- testing.capture-output $(target) : $(sources[1]) : $(properties) ;
-
- # Determine the number of processes we should run on.
- local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
- num_processes = $(num_processes:G=) ;
-
- # serialize the MPI tests to avoid overloading systems
- JAM_SEMAPHORE on $(target) = <s>mpi-run-semaphore ;
-
- # We launch MPI processes using the "mpirun" equivalent specified by the user.
- LAUNCHER on $(target) =
- [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
-}
-
-# Creates a set of test cases to be run through the MPI launcher. The name, sources,
-# and requirements are the same as for any other test generator. However, schedule is
-# a list of numbers, which indicates how many processes each test run will use. For
-# example, passing 1 2 7 will run the test with 1 process, then 2 processes, then
-# 7 processes. The name provided is just the base name: the actual tests will be
-# the name followed by a hyphen, then the number of processes.
-rule mpi-test ( name : sources * : requirements * : schedule * )
-{
- sources ?= $(name).cpp ;
- schedule ?= 1 2 3 4 7 8 13 17 ;
-
- local result ;
- for processes in $(schedule)
- {
- result += [ testing.make-test
- run-mpi : $(sources) /boost/mpi//boost_mpi
- : $(requirements) <toolset>msvc:<link>static <mpi:processes>$(processes) : $(name)-$(processes) ] ;
- }
- return $(result) ;
-}
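
Following the guidance in the module's own header comment, a Jamfile that links against MPI imports the module and checks mpi.configured before declaring targets; the target and source names here are hypothetical:

    # Jamfile -- only build MPI targets when MPI was detected
    import mpi ;

    if [ mpi.configured ]
    {
        exe hello_mpi : hello_mpi.cpp : <library>/mpi//mpi ;
    }
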
diff --git a/tools/build/v2/tools/msvc.jam b/tools/build/v2/tools/msvc.jam
deleted file mode 100644
index 22548323ad..0000000000
--- a/tools/build/v2/tools/msvc.jam
+++ /dev/null
@@ -1,1398 +0,0 @@
-# Copyright (c) 2003 David Abrahams.
-# Copyright (c) 2005 Vladimir Prus.
-# Copyright (c) 2005 Alexey Pakhunov.
-# Copyright (c) 2006 Bojan Resnik.
-# Copyright (c) 2006 Ilya Sokolov.
-# Copyright (c) 2007 Rene Rivera
-# Copyright (c) 2008 Jurko Gospodnetic
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-################################################################################
-#
-# MSVC Boost Build toolset module.
-# --------------------------------
-#
-# All toolset versions need to have their location either auto-detected or
-# explicitly specified except for the special 'default' version that expects the
-# environment to find the needed tools or report an error.
-#
-################################################################################
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import mc ;
-import midl ;
-import os ;
-import path ;
-import pch ;
-import property ;
-import rc ;
-import toolset ;
-import type ;
-
-
-type.register MANIFEST : manifest ;
-feature.feature embed-manifest : on off : incidental propagated ;
-
-type.register PDB : pdb ;
-
-################################################################################
-#
-# Public rules.
-#
-################################################################################
-
-# Initialize a specific toolset version configuration. As a result, the path to
-# the compiler and, possibly, program names are set up and will be used when that
-# version of compiler is requested. For example, you might have:
-#
-# using msvc : 6.5 : cl.exe ;
-# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
-#
-# The version parameter may be omitted:
-#
-# using msvc : : Z:/foo/bar/cl.exe ;
-#
-# The following keywords have special meanings when specified as versions:
-# - all - all detected but not yet used versions will be marked as used
-# with their default options.
-# - default - this is an equivalent to an empty version.
-#
-# Depending on the supplied version, the detected configurations, and the
-# presence of 'cl.exe' in the path, different results may be achieved. The
-# following table describes
-# the possible scenarios:
-#
-# Nothing "x.y"
-# Passed Nothing "x.y" detected, detected,
-# version detected detected cl.exe in path cl.exe in path
-#
-# default Error Use "x.y" Create "default" Use "x.y"
-# all None Use all None Use all
-# x.y - Use "x.y" - Use "x.y"
-# a.b Error Error Create "a.b" Create "a.b"
-#
-# "x.y" - refers to a detected version;
-# "a.b" - refers to an undetected version.
-#
-# FIXME: Currently the command parameter and the <compiler> property parameter
-# seem to overlap in duties. Remove this duplication. This seems to be related
-# to why someone started preparing to replace init with configure rules.
-#
-rule init (
- # The msvc version being configured. When omitted the tools invoked when no
- # explicit version is given will be configured.
- version ?
-
- # The command used to invoke the compiler. If not specified:
- # - if version is given, default location for that version will be
- # searched
- #
- # - if version is not given, default locations for MSVC 9.0, 8.0, 7.1, 7.0
- # and 6.* will be searched
- #
- # - if compiler is not found in the default locations, PATH will be
- # searched.
- : command *
-
- # Options may include:
- #
- # All options shared by multiple toolset types as handled by the
- # common.handle-options() rule, e.g. <cflags>, <compileflags>, <cxxflags>,
- # <fflags> & <linkflags>.
- #
- # <assembler>
- # <compiler>
- # <idl-compiler>
- # <linker>
- # <mc-compiler>
- # <resource-compiler>
- # Exact tool names to be used by this msvc toolset configuration.
- #
- # <compiler-filter>
- # Command through which to pipe the output of running the compiler.
- # For example to pass the output to STLfilt.
- #
- # <setup>
- # Global setup command to invoke before running any of the msvc tools.
- # It will be passed additional option parameters depending on the actual
- # target platform.
- #
- # <setup-amd64>
- # <setup-i386>
- # <setup-ia64>
- # Platform specific setup command to invoke before running any of the
-    #       msvc tools used when building a target for a specific platform, e.g.
- # when building a 32 or 64 bit executable.
- : options *
-)
-{
- if $(command)
- {
- options += <command>$(command) ;
- }
- configure $(version) : $(options) ;
-}
-
-
-# 'configure' is a newer version of 'init'. The parameter 'command' is passed as
-# a part of the 'options' list. See the 'init' rule comment for more detailed
-# information.
-#
-rule configure ( version ? : options * )
-{
- switch $(version)
- {
- case "all" :
- if $(options)
- {
- errors.error "MSVC toolset configuration: options should be"
- "empty when '$(version)' is specified." ;
- }
-
- # Configure (i.e. mark as used) all registered versions.
- local all-versions = [ $(.versions).all ] ;
- if ! $(all-versions)
- {
- if $(.debug-configuration)
- {
- ECHO "notice: [msvc-cfg] Asked to configure all registered"
- "msvc toolset versions when there are none currently"
- "registered." ;
- }
- }
- else
- {
- for local v in $(all-versions)
- {
- # Note that there is no need to skip already configured
- # versions here as this will request configure-really rule
- # to configure the version using default options which will
- # in turn cause it to simply do nothing in case the version
- # has already been configured.
- configure-really $(v) ;
- }
- }
-
- case "default" :
- configure-really : $(options) ;
-
- case * :
- configure-really $(version) : $(options) ;
- }
-}
-
-
-# Sets up flag definitions dependent on the compiler version used.
-# - 'version' is the version of compiler in N.M format.
-# - 'conditions' is the property set to be used as flag conditions.
-# - 'toolset' is the toolset for which flag settings are to be defined.
-# This makes the rule reusable for other msvc-option-compatible compilers.
-#
-rule configure-version-specific ( toolset : version : conditions )
-{
- toolset.push-checking-for-flags-module unchecked ;
-    # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
-    # /Zc:wchar_t options that improve C++ standard conformance, but those
-    # options are off by default. If we are sure that the msvc version is at
-    # least 7.0, add those options explicitly. We can be sure either if the user
-    # specified version 7.* explicitly or if we auto-detected the version ourselves.
- if ! [ MATCH ^(6\\.) : $(version) ]
- {
- toolset.flags $(toolset).compile CFLAGS $(conditions) : /Zc:forScope /Zc:wchar_t ;
- toolset.flags $(toolset).compile.c++ C++FLAGS $(conditions) : /wd4675 ;
-
- # Explicitly disable the 'function is deprecated' warning. Some msvc
- # versions have a bug, causing them to emit the deprecation warning even
- # with /W0.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>off : /wd4996 ;
-
- if [ MATCH ^([78]\\.) : $(version) ]
- {
- # 64-bit compatibility warning deprecated since 9.0, see
- # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<warnings>all : /Wp64 ;
- }
- }
-
- #
- # Processor-specific optimization.
- #
-
- if [ MATCH ^([67]) : $(version) ]
- {
- # 8.0 deprecates some of the options.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed $(conditions)/<optimization>space : /Ogiy /Gs ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>speed : /Ot ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<optimization>space : /Os ;
-
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set> : /GB ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i386 : /G3 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>i486 : /G4 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g5) : /G5 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g6) : /G6 ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-i386)/<instruction-set>$(.cpu-type-g7) : /G7 ;
-
- # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
- # tests will fail.
- toolset.flags $(toolset).compile CFLAGS $(conditions) : /Op ;
-
- # 7.1 and below have single-threaded static RTL.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /ML ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MLd ;
- }
- else
- {
- # 8.0 and above adds some more options.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set> : /favor:blend ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-em64t) : /favor:EM64T ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/$(.cpu-arch-amd64)/<instruction-set>$(.cpu-type-amd64) : /favor:AMD64 ;
-
- # 8.0 and above only has multi-threaded static RTL.
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>off/<runtime-link>static/<threading>single : /MT ;
- toolset.flags $(toolset).compile CFLAGS $(conditions)/<runtime-debugging>on/<runtime-link>static/<threading>single : /MTd ;
-
- # Specify target machine type so the linker will not need to guess.
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-amd64) : /MACHINE:X64 ;
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-i386) : /MACHINE:X86 ;
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/$(.cpu-arch-ia64) : /MACHINE:IA64 ;
-
-        # Make sure that a manifest will be generated even if there are no
-        # dependencies to put in it.
- toolset.flags $(toolset).link LINKFLAGS $(conditions)/<embed-manifest>off : /MANIFEST ;
- }
- toolset.pop-checking-for-flags-module ;
-}
-
-
-# Registers this toolset including all of its flags, features & generators. Does
-# nothing on repeated calls.
-#
-rule register-toolset ( )
-{
- if ! msvc in [ feature.values toolset ]
- {
- register-toolset-really ;
- }
-}
-
-
-# Declare the action for creating static libraries. If the library exists, remove it
-# before adding files. See
-# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
-if [ os.name ] in NT
-{
- # The 'DEL' command would issue a message to stdout if the file does not
-    # exist, so we need a check.
- actions archive
- {
- if exist "$(<[1])" DEL "$(<[1])"
- $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-}
-else
-{
- actions archive
- {
- $(.RM) "$(<[1])"
- $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-}
-
-
-# For the assembler the following options are turned on by default:
-#
-# -Zp4 align structures to 4 bytes
-# -Cp preserve case of user identifiers
-# -Cx preserve case in publics, externs
-#
-actions compile.asm
-{
- $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
-}
-
-
-rule compile.c ( targets + : sources * : properties * )
-{
- C++FLAGS on $(targets[1]) = ;
- get-rspline $(targets) : -TC ;
- compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-
-rule compile.c.preprocess ( targets + : sources * : properties * )
-{
- C++FLAGS on $(targets[1]) = ;
- get-rspline $(targets) : -TC ;
- preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-
-rule compile.c.pch ( targets + : sources * : properties * )
-{
- C++FLAGS on $(targets[1]) = ;
- get-rspline $(targets[1]) : -TC ;
- get-rspline $(targets[2]) : -TC ;
- local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
- if $(pch-source)
- {
- DEPENDS $(<) : $(pch-source) ;
- compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
- }
- else
- {
- compile-c-c++-pch $(targets) : $(sources) ;
- }
-}
-
-toolset.flags msvc YLOPTION : "-Yl" ;
-
-# Action for running the C/C++ compiler without using precompiled headers.
-#
-# WARNING: Synchronize any changes in this action with intel-win
-#
-# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
-#
-# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
-#
-# 2. When compiling an executable's source files, PDB_NAME is set on a per-source-file basis by rule compile-c-c++.
-# The linker will pull these into the executable's PDB
-#
-# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
-# as in this case the compiler must be used to create a single PDB for our library.
-#
-actions compile-c-c++ bind PDB_NAME
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
-}
-
-actions preprocess-c-c++ bind PDB_NAME
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
-}
-
-rule compile-c-c++ ( targets + : sources * )
-{
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
- PDB_NAME on $(<) = $(<:S=.pdb) ;
-}
-
-rule preprocess-c-c++ ( targets + : sources * )
-{
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_HEADER) ] ;
- DEPENDS $(<[1]) : [ on $(<[1]) return $(PCH_FILE) ] ;
- PDB_NAME on $(<) = $(<:S=.pdb) ;
-}
-
-# Action for running the C/C++ compiler using precompiled headers. In addition
-# to whatever else it needs to compile, this action also adds a temporary source
-# .cpp file used to compile the precompiled headers themselves.
-#
-# The global .escaped-double-quote variable is used to avoid messing up Emacs
-# syntax highlighting in the messy N-quoted code below.
-actions compile-c-c++-pch
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
-}
-
-
-# Action for running the C/C++ compiler using precompiled headers. An already
-# built source file for compiling the precompiled headers is expected to be
-# given as one of the source parameters.
-actions compile-c-c++-pch-s
-{
- $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
-}
-
-
-rule compile.c++ ( targets + : sources * : properties * )
-{
- get-rspline $(targets) : -TP ;
- compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-rule compile.c++.preprocess ( targets + : sources * : properties * )
-{
- get-rspline $(targets) : -TP ;
- preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
-}
-
-
-rule compile.c++.pch ( targets + : sources * : properties * )
-{
- get-rspline $(targets[1]) : -TP ;
- get-rspline $(targets[2]) : -TP ;
- local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
- if $(pch-source)
- {
- DEPENDS $(<) : $(pch-source) ;
- compile-c-c++-pch-s $(targets) : $(sources) $(pch-source) ;
- }
- else
- {
- compile-c-c++-pch $(targets) : $(sources) ;
- }
-}
-
-
-# See midl.jam for details.
-#
-actions compile.idl
-{
- $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
- $(.TOUCH_FILE) "$(<[4]:W)"
- $(.TOUCH_FILE) "$(<[5]:W)"
-}
-
-
-actions compile.mc
-{
- $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
-}
-
-
-actions compile.rc
-{
- $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
-}
-
-
-rule link ( targets + : sources * : properties * )
-{
- if <embed-manifest>on in $(properties)
- {
- msvc.manifest $(targets) : $(sources) : $(properties) ;
- }
-}
-
-rule link.dll ( targets + : sources * : properties * )
-{
- DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
- if <embed-manifest>on in $(properties)
- {
- msvc.manifest.dll $(targets) : $(sources) : $(properties) ;
- }
-}
-
-# Incrementally linking a DLL causes no end of problems: if the actual exports do
-# not change, the import .lib file is never updated. Therefore, the .lib is
-# always out-of-date and gets rebuilt every time. I am not sure that incremental
-# linking is such a great idea in general, but in this case I am sure we do not
-# want it.
-
-# Windows manifest is a new way to specify dependencies on managed DotNet
-# assemblies and Windows native DLLs. The manifests are embedded as resources
-# and are useful in any PE target (both DLL and EXE).
-
-if [ os.name ] in NT
-{
- actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
- }
-
- actions manifest
- {
- if exist "$(<[1]).manifest" (
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
- )
- }
-
- actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
- }
-
- actions manifest.dll
- {
- if exist "$(<[1]).manifest" (
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
- )
- }
-}
-else
-{
- actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-
- actions manifest
- {
- if test -e "$(<[1]).manifest"; then
- $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);1"
- fi
- }
-
- actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
- {
- $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
- }
-
- actions manifest.dll
- {
- if test -e "$(<[1]).manifest"; then
- $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
- fi
- }
-}
-
-# This rule sets up the pdb file that will be used when generating static
-# libraries and the debug-store option is database, so that the compiler
-# puts all debug info into a single .pdb file named after the library
-#
-# Poking at source targets this way is probably not clean, but it's the
-# easiest approach.
-rule archive ( targets + : sources * : properties * )
-{
- PDB_NAME on $(>) = $(<:S=.pdb) ;
-}
-
-################################################################################
-#
-# Classes.
-#
-################################################################################
-
-class msvc-pch-generator : pch-generator
-{
- import property-set ;
-
- rule run-pch ( project name ? : property-set : sources * )
- {
- # Searching for the header and source file in the sources.
- local pch-header ;
- local pch-source ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] H ]
- {
- pch-header = $(s) ;
- }
- else if
- [ type.is-derived [ $(s).type ] CPP ] ||
- [ type.is-derived [ $(s).type ] C ]
- {
- pch-source = $(s) ;
- }
- }
-
- if ! $(pch-header)
- {
- errors.user-error "can not build pch without pch-header" ;
- }
-
- # If we do not have the PCH source - that is fine. We will just create a
- # temporary .cpp file in the action.
-
- local generated = [ generator.run $(project) $(name)
- : [ property-set.create
- # Passing of <pch-source> is a dirty trick, needed because
- # non-composing generators with multiple inputs are subtly
- # broken. For more detailed information see:
- # https://zigzag.cs.msu.su:7813/boost.build/ticket/111
- <pch-source>$(pch-source)
- [ $(property-set).raw ] ]
- : $(pch-header) ] ;
-
- local pch-file ;
- for local g in $(generated)
- {
- if [ type.is-derived [ $(g).type ] PCH ]
- {
- pch-file = $(g) ;
- }
- }
-
- return [ property-set.create <pch-header>$(pch-header)
- <pch-file>$(pch-file) ] $(generated) ;
- }
-}
-
-
-################################################################################
-#
-# Local rules.
-#
-################################################################################
-
-# Detects versions listed as '.known-versions' by checking registry information,
-# environment variables & default paths. Supports both native Windows and
-# Cygwin.
-#
-local rule auto-detect-toolset-versions ( )
-{
- if [ os.name ] in NT CYGWIN
- {
- # Get installation paths from the registry.
- for local i in $(.known-versions)
- {
- if $(.version-$(i)-reg)
- {
- local vc-path ;
- for local x in "" "Wow6432Node\\"
- {
- vc-path += [ W32_GETREG
- "HKEY_LOCAL_MACHINE\\SOFTWARE\\"$(x)"\\Microsoft\\"$(.version-$(i)-reg)
- : "ProductDir" ] ;
- }
-
- if $(vc-path)
- {
- vc-path = [ path.join [ path.make-NT $(vc-path[1]) ] "bin" ] ;
- register-configuration $(i) : [ path.native $(vc-path[1]) ] ;
- }
- }
- }
- }
-
- # Check environment and default installation paths.
- for local i in $(.known-versions)
- {
- if ! $(i) in [ $(.versions).all ]
- {
- register-configuration $(i) : [ default-path $(i) ] ;
- }
- }
-}
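-# For example (hypothetical install): for msvc-10.0 the rule above reads
-# ProductDir from HKLM\SOFTWARE\[Wow6432Node\]Microsoft\VisualStudio\10.0\Setup\VC,
-# appends "bin" to it and registers the result as that version's command path;
-# versions with no registry entry fall back to default-path.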
-
-
-# Worker rule for toolset version configuration. Takes an explicit version id or
-# nothing in case it should configure the default toolset version (the first
-# registered one or a new 'default' one in case no toolset versions have been
-# registered yet).
-#
-local rule configure-really ( version ? : options * )
-{
- local v = $(version) ;
-
- # Decide what the 'default' version is.
- if ! $(v)
- {
- # Take the first registered (i.e. auto-detected) version.
- version = [ $(.versions).all ] ;
- version = $(version[1]) ;
- v = $(version) ;
-
- # Note: 'version' can still be empty at this point if no versions have
- # been auto-detected.
- version ?= "default" ;
- }
-
- # Version alias -> real version number.
- if $(.version-alias-$(version))
- {
- version = $(.version-alias-$(version)) ;
- }
-
- # Check whether the selected configuration is already in use.
- if $(version) in [ $(.versions).used ]
- {
- # Allow multiple 'toolset.using' calls for the same configuration if the
- # identical sets of options are used.
- if $(options) && ( $(options) != [ $(.versions).get $(version) : options ] )
- {
- errors.error "MSVC toolset configuration: Toolset version"
- "'$(version)' already configured." ;
- }
- }
- else
- {
- # Register a new configuration.
- $(.versions).register $(version) ;
-
-        # Add user-supplied options to the auto-detected ones.
- options = [ $(.versions).get $(version) : options ] $(options) ;
-
- # Mark the configuration as 'used'.
- $(.versions).use $(version) ;
-
- # Generate conditions and save them.
- local conditions = [ common.check-init-parameters msvc : version $(v) ]
- ;
-
- $(.versions).set $(version) : conditions : $(conditions) ;
-
- local command = [ feature.get-values <command> : $(options) ] ;
-
- # If version is specified, we try to search first in default paths, and
- # only then in PATH.
- command = [ common.get-invocation-command msvc : cl.exe : $(command) :
- [ default-paths $(version) ] : $(version) ] ;
-
- common.handle-options msvc : $(conditions) : $(command) : $(options) ;
-
- if ! $(version)
- {
- # Even if version is not explicitly specified, try to detect the
- # version from the path.
- # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
- # 9.0express as 9.0 here.
- if [ MATCH "(Microsoft Visual Studio 11)" : $(command) ]
- {
- version = 11.0 ;
- }
- else if [ MATCH "(Microsoft Visual Studio 10)" : $(command) ]
- {
- version = 10.0 ;
- }
- else if [ MATCH "(Microsoft Visual Studio 9)" : $(command) ]
- {
- version = 9.0 ;
- }
- else if [ MATCH "(Microsoft Visual Studio 8)" : $(command) ]
- {
- version = 8.0 ;
- }
- else if [ MATCH "(NET 2003[\/\\]VC7)" : $(command) ]
- {
- version = 7.1 ;
- }
- else if [ MATCH "(Microsoft Visual C\\+\\+ Toolkit 2003)" :
- $(command) ]
- {
- version = 7.1toolkit ;
- }
- else if [ MATCH "(.NET[\/\\]VC7)" : $(command) ]
- {
- version = 7.0 ;
- }
- else
- {
- version = 6.0 ;
- }
- }
-
- # Generate and register setup command.
-
- local below-8.0 = [ MATCH ^([67]\\.) : $(version) ] ;
-
- local cpu = i386 amd64 ia64 ;
- if $(below-8.0)
- {
- cpu = i386 ;
- }
-
- local setup-amd64 ;
- local setup-i386 ;
- local setup-ia64 ;
-
- if $(command)
- {
-            # TODO: Note that if we specify a non-existent toolset version then
- # this rule may find and use a corresponding compiler executable
- # belonging to an incorrect toolset version. For example, if you
- # have only MSVC 7.1 installed, have its executable on the path and
- # specify you want Boost Build to use MSVC 9.0, then you want Boost
- # Build to report an error but this may cause it to silently use the
- # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
- # toolset version.
- command = [ common.get-absolute-tool-path $(command[-1]) ] ;
- }
-
- if $(command)
- {
- local parent = [ path.make $(command) ] ;
- parent = [ path.parent $(parent) ] ;
- parent = [ path.native $(parent) ] ;
-
- # Setup will be used if the command name has been specified. If
- # setup is not specified explicitly then a default setup script will
-            # be used instead. Setup scripts may be global or architecture/
-            # platform/cpu specific. Setup options are used only in case of
- # global setup scripts.
-
- # Default setup scripts provided with different VC distributions:
- #
- # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
- # builds. It was located in the bin folder for the regular version
- # and in the root folder for the free VC 7.1 tools.
- #
- # Later 8.0 & 9.0 versions introduce separate platform specific
- # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
- # located in or under the bin folder. Most also include a global
- # vcvarsall.bat helper script located in the root folder which runs
- # one of the aforementioned vcvars*.bat scripts based on the options
- # passed to it. So far only the version coming with some PlatformSDK
- # distributions does not include this top level script but to
- # support those we need to fall back to using the worker scripts
- # directly in case the top level script can not be found.
-
- local global-setup = [ feature.get-values <setup> : $(options) ] ;
- global-setup = $(global-setup[1]) ;
- if ! $(below-8.0)
- {
- global-setup ?= [ locate-default-setup $(command) : $(parent) :
- vcvarsall.bat ] ;
- }
-
- local default-setup-amd64 = vcvarsx86_amd64.bat ;
- local default-setup-i386 = vcvars32.bat ;
- local default-setup-ia64 = vcvarsx86_ia64.bat ;
-
- # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
- # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
- # mention an x86_IPF option, that seems to be a documentation bug
- # and x86_ia64 is the correct option.
- local default-global-setup-options-amd64 = x86_amd64 ;
- local default-global-setup-options-i386 = x86 ;
- local default-global-setup-options-ia64 = x86_ia64 ;
-
- # When using 64-bit Windows, and targeting 64-bit, it is possible to
- # use a native 64-bit compiler, selected by the "amd64" & "ia64"
- # parameters to vcvarsall.bat. There are two variables we can use --
- # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
- # 'x86' when running 32-bit Windows, no matter which processor is
- # used, and 'AMD64' on 64-bit windows on x86 (either AMD64 or EM64T)
- # Windows.
- #
- if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITECTURE ] ]
- {
- default-global-setup-options-amd64 = amd64 ;
- }
- # TODO: The same 'native compiler usage' should be implemented for
- # the Itanium platform by using the "ia64" parameter. For this
- # though we need someone with access to this platform who can find
- # out how to correctly detect this case.
- else if $(somehow-detect-the-itanium-platform)
- {
- default-global-setup-options-ia64 = ia64 ;
- }
-
- local setup-prefix = "call " ;
- local setup-suffix = " >nul"$(.nl) ;
- if ! [ os.name ] in NT
- {
- setup-prefix = "cmd.exe /S /C call " ;
- setup-suffix = " \">nul\" \"&&\" " ;
- }
-
- for local c in $(cpu)
- {
- local setup-options ;
-
- setup-$(c) = [ feature.get-values <setup-$(c)> : $(options) ] ;
-
- if ! $(setup-$(c))-is-not-empty
- {
- if $(global-setup)-is-not-empty
- {
- setup-$(c) = $(global-setup) ;
-
- # If needed we can easily add using configuration flags
- # here for overriding which options get passed to the
- # global setup command for which target platform:
- # setup-options = [ feature.get-values <setup-options-$(c)> : $(options) ] ;
-
- setup-options ?= $(default-global-setup-options-$(c)) ;
- }
- else
- {
- setup-$(c) = [ locate-default-setup $(command) : $(parent) : $(default-setup-$(c)) ] ;
- }
- }
-
- # Cygwin to Windows path translation.
- setup-$(c) = "\""$(setup-$(c):W)"\"" ;
-
- # Append setup options to the setup name and add the final setup
- # prefix & suffix.
- setup-options ?= "" ;
- setup-$(c) = $(setup-prefix)$(setup-$(c):J=" ")" "$(setup-options:J=" ")$(setup-suffix) ;
- }
- }
-
- # Get tool names (if any) and finish setup.
-
- compiler = [ feature.get-values <compiler> : $(options) ] ;
- compiler ?= cl ;
-
- linker = [ feature.get-values <linker> : $(options) ] ;
- linker ?= link ;
-
- resource-compiler = [ feature.get-values <resource-compiler> : $(options) ] ;
- resource-compiler ?= rc ;
-
- # Turn on some options for i386 assembler
- # -coff generate COFF format object file (compatible with cl.exe output)
- local default-assembler-amd64 = ml64 ;
- local default-assembler-i386 = "ml -coff" ;
- local default-assembler-ia64 = ias ;
-
- assembler = [ feature.get-values <assembler> : $(options) ] ;
-
- idl-compiler = [ feature.get-values <idl-compiler> : $(options) ] ;
- idl-compiler ?= midl ;
-
- mc-compiler = [ feature.get-values <mc-compiler> : $(options) ] ;
- mc-compiler ?= mc ;
-
- manifest-tool = [ feature.get-values <manifest-tool> : $(options) ] ;
- manifest-tool ?= mt ;
-
- local cc-filter = [ feature.get-values <compiler-filter> : $(options) ] ;
-
- for local c in $(cpu)
- {
- # Setup script is not required in some configurations.
- setup-$(c) ?= "" ;
-
- local cpu-conditions = $(conditions)/$(.cpu-arch-$(c)) ;
-
- if $(.debug-configuration)
- {
- for local cpu-condition in $(cpu-conditions)
- {
- ECHO "notice: [msvc-cfg] condition: '$(cpu-condition)', setup: '$(setup-$(c))'" ;
- }
- }
-
- local cpu-assembler = $(assembler) ;
- cpu-assembler ?= $(default-assembler-$(c)) ;
-
- toolset.flags msvc.compile .CC $(cpu-conditions) : $(setup-$(c))$(compiler) /Zm800 -nologo ;
- toolset.flags msvc.compile .RC $(cpu-conditions) : $(setup-$(c))$(resource-compiler) ;
- toolset.flags msvc.compile .ASM $(cpu-conditions) : $(setup-$(c))$(cpu-assembler) -nologo ;
- toolset.flags msvc.link .LD $(cpu-conditions) : $(setup-$(c))$(linker) /NOLOGO /INCREMENTAL:NO ;
- toolset.flags msvc.archive .LD $(cpu-conditions) : $(setup-$(c))$(linker) /lib /NOLOGO ;
- toolset.flags msvc.compile .IDL $(cpu-conditions) : $(setup-$(c))$(idl-compiler) ;
- toolset.flags msvc.compile .MC $(cpu-conditions) : $(setup-$(c))$(mc-compiler) ;
-
- toolset.flags msvc.link .MT $(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ;
-
- if $(cc-filter)
- {
- toolset.flags msvc .CC.FILTER $(cpu-conditions) : "|" $(cc-filter) ;
- }
- }
-
- # Set version-specific flags.
- configure-version-specific msvc : $(version) : $(conditions) ;
- }
-}
-
-
-# Returns the default installation path for the given version.
-#
-local rule default-path ( version )
-{
- # Use auto-detected path if possible.
- local path = [ feature.get-values <command> : [ $(.versions).get $(version)
- : options ] ] ;
-
- if $(path)
- {
- path = $(path:D) ;
- }
- else
- {
- # Check environment.
- if $(.version-$(version)-env)
- {
- local vc-path = [ os.environ $(.version-$(version)-env) ] ;
- if $(vc-path)
- {
- vc-path = [ path.make $(vc-path) ] ;
- vc-path = [ path.join $(vc-path) $(.version-$(version)-envpath) ] ;
- vc-path = [ path.native $(vc-path) ] ;
-
- path = $(vc-path) ;
- }
- }
-
- # Check default path.
- if ! $(path) && $(.version-$(version)-path)
- {
- path = [ path.native [ path.join $(.ProgramFiles) $(.version-$(version)-path) ] ] ;
- }
- }
-
- return $(path) ;
-}
-
-
-# Returns either the default installation path (if 'version' is not empty) or a
-# list of all known default paths (if no version is given).
-#
-local rule default-paths ( version ? )
-{
- local possible-paths ;
-
- if $(version)
- {
- possible-paths += [ default-path $(version) ] ;
- }
- else
- {
- for local i in $(.known-versions)
- {
- possible-paths += [ default-path $(i) ] ;
- }
- }
-
- return $(possible-paths) ;
-}
-
-
-rule get-rspline ( target : lang-opt )
-{
- CC_RSPLINE on $(target) = [ on $(target) return $(lang-opt) -U$(UNDEFS)
- $(CFLAGS) $(C++FLAGS) $(OPTIONS) -c $(.nl)-D$(DEFINES)
- $(.nl)\"-I$(INCLUDES:W)\" ] ;
-}
-
-class msvc-linking-generator : linking-generator
-{
-    # Calls the base version. If necessary, also creates targets for the
-    # manifest and .pdb files that accompany the main target, forcing exact
-    # names derived from the main target's name.
- rule generated-targets ( sources + : property-set : project name ? )
- {
- local result = [ linking-generator.generated-targets $(sources)
- : $(property-set) : $(project) $(name) ] ;
-
- if $(result)
- {
- local name-main = [ $(result[0]).name ] ;
- local action = [ $(result[0]).action ] ;
-
- if [ $(property-set).get <debug-symbols> ] = "on"
- {
-                # We force an exact name on the PDB. The reason is tagging -- the tag rule
-                # may reasonably special-case some target types, like SHARED_LIB. The tag
-                # rule will not catch PDB, and it cannot easily figure out whether the PDB
-                # is paired with a SHARED_LIB, an EXE or something else. Because the PDB
-                # always gets the same name as the main target, with .pdb as extension,
-                # just force it.
- local target = [ class.new file-target $(name-main:S=.pdb) exact : PDB : $(project) : $(action) ] ;
- local registered-target = [ virtual-target.register $(target) ] ;
- if $(target) != $(registered-target)
- {
- $(action).replace-targets $(target) : $(registered-target) ;
- }
- result += $(registered-target) ;
- }
-
- if [ $(property-set).get <embed-manifest> ] = "off"
- {
-                # The manifest is an awkward target: it has .manifest appended to the
-                # name of the main target, including the extension, e.g. a.exe.manifest.
-                # We use an 'exact' name to achieve this effect.
- local target = [ class.new file-target $(name-main).manifest exact : MANIFEST : $(project) : $(action) ] ;
- local registered-target = [ virtual-target.register $(target) ] ;
- if $(target) != $(registered-target)
- {
- $(action).replace-targets $(target) : $(registered-target) ;
- }
- result += $(registered-target) ;
- }
- }
- return $(result) ;
- }
-}
-
-
-
-# Unsafe worker rule for the register-toolset() rule. Must not be called
-# multiple times.
-#
-local rule register-toolset-really ( )
-{
- feature.extend toolset : msvc ;
-
- # Intel and msvc supposedly have link-compatible objects.
- feature.subfeature toolset msvc : vendor : intel : propagated optional ;
-
- # Inherit MIDL flags.
- toolset.inherit-flags msvc : midl ;
-
- # Inherit MC flags.
- toolset.inherit-flags msvc : mc ;
-
- # Dynamic runtime comes only in MT flavour.
- toolset.add-requirements
- <toolset>msvc,<runtime-link>shared:<threading>multi ;
-
- # Declare msvc toolset specific features.
- {
- feature.feature debug-store : object database : propagated ;
- feature.feature pch-source : : dependency free ;
- }
-
- # Declare generators.
- {
- # TODO: Is it possible to combine these? Make the generators
- # non-composing so that they do not convert each source into a separate
- # .rsp file.
- generators.register [ new msvc-linking-generator
- msvc.link : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : EXE : <toolset>msvc ] ;
- generators.register [ new msvc-linking-generator
- msvc.link.dll : OBJ SEARCHED_LIB STATIC_LIB IMPORT_LIB : SHARED_LIB IMPORT_LIB : <toolset>msvc ] ;
-
- generators.register-archiver msvc.archive : OBJ : STATIC_LIB : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c++ : CPP : OBJ : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c : C : OBJ : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>msvc ;
- generators.register-c-compiler msvc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>msvc ;
-
- # Using 'register-c-compiler' adds the build directory to INCLUDES.
- generators.register-c-compiler msvc.compile.rc : RC : OBJ(%_res) : <toolset>msvc ;
- generators.override msvc.compile.rc : rc.compile.resource ;
- generators.register-standard msvc.compile.asm : ASM : OBJ : <toolset>msvc ;
-
- generators.register-c-compiler msvc.compile.idl : IDL : MSTYPELIB H C(%_i) C(%_proxy) C(%_dlldata) : <toolset>msvc ;
- generators.override msvc.compile.idl : midl.compile.idl ;
-
- generators.register-standard msvc.compile.mc : MC : H RC : <toolset>msvc ;
- generators.override msvc.compile.mc : mc.compile ;
-
- # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
- # the latter have their HPP type derived from H. The type of compilation
- # is determined entirely by the destination type.
- generators.register [ new msvc-pch-generator msvc.compile.c.pch : H : C_PCH OBJ : <pch>on <toolset>msvc ] ;
- generators.register [ new msvc-pch-generator msvc.compile.c++.pch : H : CPP_PCH OBJ : <pch>on <toolset>msvc ] ;
-
- generators.override msvc.compile.c.pch : pch.default-c-pch-generator ;
- generators.override msvc.compile.c++.pch : pch.default-cpp-pch-generator ;
- }
-
- toolset.flags msvc.compile PCH_FILE <pch>on : <pch-file> ;
- toolset.flags msvc.compile PCH_SOURCE <pch>on : <pch-source> ;
- toolset.flags msvc.compile PCH_HEADER <pch>on : <pch-header> ;
-
- #
- # Declare flags for compilation.
- #
-
- toolset.flags msvc.compile CFLAGS <optimization>speed : /O2 ;
- toolset.flags msvc.compile CFLAGS <optimization>space : /O1 ;
-
- toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium) : /G1 ;
- toolset.flags msvc.compile CFLAGS $(.cpu-arch-ia64)/<instruction-set>$(.cpu-type-itanium2) : /G2 ;
-
- toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>object : /Z7 ;
- toolset.flags msvc.compile CFLAGS <debug-symbols>on/<debug-store>database : /Zi ;
- toolset.flags msvc.compile CFLAGS <optimization>off : /Od ;
- toolset.flags msvc.compile CFLAGS <inlining>off : /Ob0 ;
- toolset.flags msvc.compile CFLAGS <inlining>on : /Ob1 ;
- toolset.flags msvc.compile CFLAGS <inlining>full : /Ob2 ;
-
- toolset.flags msvc.compile CFLAGS <warnings>on : /W3 ;
- toolset.flags msvc.compile CFLAGS <warnings>off : /W0 ;
- toolset.flags msvc.compile CFLAGS <warnings>all : /W4 ;
- toolset.flags msvc.compile CFLAGS <warnings-as-errors>on : /WX ;
-
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off : /EHs ;
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on : /EHsc ;
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off : /EHa ;
- toolset.flags msvc.compile C++FLAGS <exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on : /EHac ;
-
- # By default 8.0 enables rtti support while prior versions disabled it. We
- # simply enable or disable it explicitly so we do not have to depend on this
- # default behaviour.
- toolset.flags msvc.compile CFLAGS <rtti>on : /GR ;
- toolset.flags msvc.compile CFLAGS <rtti>off : /GR- ;
- toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>shared : /MD ;
- toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>shared : /MDd ;
-
- toolset.flags msvc.compile CFLAGS <runtime-debugging>off/<runtime-link>static/<threading>multi : /MT ;
- toolset.flags msvc.compile CFLAGS <runtime-debugging>on/<runtime-link>static/<threading>multi : /MTd ;
-
- toolset.flags msvc.compile OPTIONS <cflags> : ;
- toolset.flags msvc.compile.c++ OPTIONS <cxxflags> : ;
-
- toolset.flags msvc.compile PDB_CFLAG <debug-symbols>on/<debug-store>database : /Fd ;
-
- toolset.flags msvc.compile DEFINES <define> ;
- toolset.flags msvc.compile UNDEFS <undef> ;
- toolset.flags msvc.compile INCLUDES <include> ;
-
- # Declare flags for the assembler.
- toolset.flags msvc.compile.asm USER_ASMFLAGS <asmflags> ;
-
- toolset.flags msvc.compile.asm ASMFLAGS <debug-symbols>on : "/Zi /Zd" ;
-
- toolset.flags msvc.compile.asm ASMFLAGS <warnings>on : /W3 ;
- toolset.flags msvc.compile.asm ASMFLAGS <warnings>off : /W0 ;
- toolset.flags msvc.compile.asm ASMFLAGS <warnings>all : /W4 ;
- toolset.flags msvc.compile.asm ASMFLAGS <warnings-as-errors>on : /WX ;
-
- toolset.flags msvc.compile.asm DEFINES <define> ;
-
- # Declare flags for linking.
- {
- toolset.flags msvc.link PDB_LINKFLAG <debug-symbols>on/<debug-store>database : /PDB: ; # not used yet
- toolset.flags msvc.link LINKFLAGS <debug-symbols>on : /DEBUG ;
- toolset.flags msvc.link DEF_FILE <def-file> ;
-
- # The linker disables the default optimizations when using /DEBUG so we
- # have to enable them manually for release builds with debug symbols.
- toolset.flags msvc LINKFLAGS <debug-symbols>on/<runtime-debugging>off : /OPT:REF,ICF ;
-
- toolset.flags msvc LINKFLAGS <user-interface>console : /subsystem:console ;
- toolset.flags msvc LINKFLAGS <user-interface>gui : /subsystem:windows ;
- toolset.flags msvc LINKFLAGS <user-interface>wince : /subsystem:windowsce ;
- toolset.flags msvc LINKFLAGS <user-interface>native : /subsystem:native ;
- toolset.flags msvc LINKFLAGS <user-interface>auto : /subsystem:posix ;
-
- toolset.flags msvc.link OPTIONS <linkflags> ;
- toolset.flags msvc.link LINKPATH <library-path> ;
-
- toolset.flags msvc.link FINDLIBS_ST <find-static-library> ;
- toolset.flags msvc.link FINDLIBS_SA <find-shared-library> ;
- toolset.flags msvc.link LIBRARY_OPTION <toolset>msvc : "" : unchecked ;
- toolset.flags msvc.link LIBRARIES_MENTIONED_BY_FILE : <library-file> ;
- }
-
- toolset.flags msvc.archive AROPTIONS <archiveflags> ;
-}
-
-
-# Locates the requested setup script under the given folder and returns its full
-# path, or nothing in case the script cannot be found. If multiple scripts are
-# found, only the first one is returned.
-#
-# TODO: There used to exist a code comment for the msvc.init rule stating that
-# we do not correctly detect the location of the vcvars32.bat setup script for
-# the free VC7.1 tools in case user explicitly provides a path. This should be
-# tested or simply remove this whole comment in case this toolset version is no
-# longer important.
-#
-local rule locate-default-setup ( command : parent : setup-name )
-{
- local result = [ GLOB $(command) $(parent) : $(setup-name) ] ;
- if $(result[1])
- {
- return $(result[1]) ;
- }
-}
-
-
-# Validates the given path, registers the found configuration and prints debug
-# information about it.
-#
-local rule register-configuration ( version : path ? )
-{
- if $(path)
- {
- local command = [ GLOB $(path) : cl.exe ] ;
-
- if $(command)
- {
- if $(.debug-configuration)
- {
- ECHO "notice: [msvc-cfg] msvc-$(version) detected, command: '$(command)'" ;
- }
-
- $(.versions).register $(version) ;
- $(.versions).set $(version) : options : <command>$(command) ;
- }
- }
-}
-
-
-################################################################################
-#
-# Startup code executed when loading this module.
-#
-################################################################################
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-# Miscellaneous constants.
-.RM = [ common.rm-command ] ;
-.nl = "
-" ;
-.ProgramFiles = [ path.make [ common.get-program-files-dir ] ] ;
-.escaped-double-quote = "\"" ;
-.TOUCH_FILE = [ common.file-touch-command ] ;
-
-# List of all registered configurations.
-.versions = [ new configurations ] ;
-
-# Supported CPU architectures.
-.cpu-arch-i386 =
- <architecture>/<address-model>
- <architecture>/<address-model>32
- <architecture>x86/<address-model>
- <architecture>x86/<address-model>32 ;
-
-.cpu-arch-amd64 =
- <architecture>/<address-model>64
- <architecture>x86/<address-model>64 ;
-
-.cpu-arch-ia64 =
- <architecture>ia64/<address-model>
- <architecture>ia64/<address-model>64 ;
-
-
-# Supported CPU types (only Itanium optimization options are supported from
-# VC++ 2005 on). See
-# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
-# detailed information.
-.cpu-type-g5 = i586 pentium pentium-mmx ;
-.cpu-type-g6 = i686 pentiumpro pentium2 pentium3 pentium3m pentium-m k6
- k6-2 k6-3 winchip-c6 winchip2 c3 c3-2 ;
-.cpu-type-em64t = prescott nocona conroe conroe-xe conroe-l allendale mermon
- mermon-xe kentsfield kentsfield-xe penryn wolfdale
- yorksfield nehalem ;
-.cpu-type-amd64 = k8 opteron athlon64 athlon-fx ;
-.cpu-type-g7 = pentium4 pentium4m athlon athlon-tbird athlon-4 athlon-xp
- athlon-mp $(.cpu-type-em64t) $(.cpu-type-amd64) ;
-.cpu-type-itanium = itanium itanium1 merced ;
-.cpu-type-itanium2 = itanium2 mckinley ;
-
-
-# Known toolset versions, in order of preference.
-.known-versions = 11.0 10.0 10.0express 9.0 9.0express 8.0 8.0express 7.1 7.1toolkit 7.0 6.0 ;
-
-# Version aliases.
-.version-alias-6 = 6.0 ;
-.version-alias-6.5 = 6.0 ;
-.version-alias-7 = 7.0 ;
-.version-alias-8 = 8.0 ;
-.version-alias-9 = 9.0 ;
-.version-alias-10 = 10.0 ;
-.version-alias-11 = 11.0 ;
-
-# Names of registry keys containing the Visual C++ installation path (relative
-# to "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft").
-.version-6.0-reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++" ;
-.version-7.0-reg = "VisualStudio\\7.0\\Setup\\VC" ;
-.version-7.1-reg = "VisualStudio\\7.1\\Setup\\VC" ;
-.version-8.0-reg = "VisualStudio\\8.0\\Setup\\VC" ;
-.version-8.0express-reg = "VCExpress\\8.0\\Setup\\VC" ;
-.version-9.0-reg = "VisualStudio\\9.0\\Setup\\VC" ;
-.version-9.0express-reg = "VCExpress\\9.0\\Setup\\VC" ;
-.version-10.0-reg = "VisualStudio\\10.0\\Setup\\VC" ;
-.version-10.0express-reg = "VCExpress\\10.0\\Setup\\VC" ;
-.version-11.0-reg = "VisualStudio\\11.0\\Setup\\VC" ;
-
-# Visual C++ Toolkit 2003 does not store its installation path in the registry.
-# The environment variable 'VCToolkitInstallDir' and the default installation
-# path will be checked instead.
-.version-7.1toolkit-path = "Microsoft Visual C++ Toolkit 2003" "bin" ;
-.version-7.1toolkit-env = VCToolkitInstallDir ;
-
-# Path to the folder containing "cl.exe" relative to the value of the
-# corresponding environment variable.
-.version-7.1toolkit-envpath = "bin" ;
-
-
-# Auto-detect all the available msvc installations on the system.
-auto-detect-toolset-versions ;
-
-
-# And finally trigger the actual Boost Build toolset registration.
-register-toolset ;
diff --git a/tools/build/v2/tools/msvc.py b/tools/build/v2/tools/msvc.py
deleted file mode 100644
index f4448daab4..0000000000
--- a/tools/build/v2/tools/msvc.py
+++ /dev/null
@@ -1,1198 +0,0 @@
-# Copyright (c) 2003 David Abrahams.
-# Copyright (c) 2005 Vladimir Prus.
-# Copyright (c) 2005 Alexey Pakhunov.
-# Copyright (c) 2006 Bojan Resnik.
-# Copyright (c) 2006 Ilya Sokolov.
-# Copyright (c) 2007 Rene Rivera
-# Copyright (c) 2008 Jurko Gospodnetic
-# Copyright (c) 2011 Juraj Ivancic
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-################################################################################
-#
-# MSVC Boost Build toolset module.
-# --------------------------------
-#
-# All toolset versions need to have their location either auto-detected or
-# explicitly specified except for the special 'default' version that expects the
-# environment to find the needed tools or report an error.
-#
-################################################################################
-
-from os import environ
-import os.path
-import re
-import _winreg
-
-import bjam
-
-from b2.tools import common, rc, pch, builtin, mc, midl
-from b2.build import feature, type, toolset, generators, property_set
-from b2.build.property import Property
-from b2.util import path
-from b2.manager import get_manager
-from b2.build.generators import Generator
-from b2.build.toolset import flags
-from b2.util.utility import to_seq, on_windows
-from b2.tools.common import Configurations
-
-__debug = None
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
- return __debug
-
-
-# It is not yet clear what to do with Cygwin on python port.
-def on_cygwin():
- return False
-
-
-type.register('MANIFEST', ['manifest'])
-feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated'])
-
-type.register('PDB',['pdb'])
-
-################################################################################
-#
-# Public rules.
-#
-################################################################################
-
-# Initializes a specific toolset version configuration. As a result, the path to
-# the compiler and, possibly, program names are set up and will be used when that
-# version of compiler is requested. For example, you might have:
-#
-# using msvc : 6.5 : cl.exe ;
-# using msvc : 7.0 : Y:/foo/bar/cl.exe ;
-#
-# The version parameter may be omitted:
-#
-# using msvc : : Z:/foo/bar/cl.exe ;
-#
-# The following keywords have special meanings when specified as versions:
-# - all - all detected but not yet used versions will be marked as used
-# with their default options.
-# - default - this is an equivalent to an empty version.
-#
-# Depending on the supplied version, the detected configurations and the presence
-# of 'cl.exe' on the path, different results may be achieved. The following table
-# describes the possible scenarios:
-#
-# Nothing "x.y"
-# Passed Nothing "x.y" detected, detected,
-# version detected detected cl.exe in path cl.exe in path
-#
-# default Error Use "x.y" Create "default" Use "x.y"
-# all None Use all None Use all
-# x.y - Use "x.y" - Use "x.y"
-# a.b Error Error Create "a.b" Create "a.b"
-#
-# "x.y" - refers to a detected version;
-# "a.b" - refers to an undetected version.
-#
-# FIXME: Currently the command parameter and the <compiler> property parameter
-# seem to overlap in duties. Remove this duplication. This seems to be related
-# to why someone started preparing to replace init with configure rules.
-
-def init(version = None, command = None, options = None):
- # When initialized from
- # using msvc : x.0 ;
- # we get version as a single element list i.e. ['x.0'],
- # but when specified from the command line we get a string i.e. 'x.0'.
- # We want to work with a string, so unpack the list if needed.
- is_single_element_list = (isinstance(version,list) and len(version) == 1)
- assert(version==None or isinstance(version,str) or is_single_element_list)
- if is_single_element_list:
- version = version[0]
-
- options = to_seq(options)
- command = to_seq(command)
-
- if command:
- options.append("<command>"+command)
- configure(version,options)
-
-def configure(version=None, options=None):
- if version == "all":
- if options:
- raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version))
-
- # Configure (i.e. mark as used) all registered versions.
- all_versions = __versions.all()
- if not all_versions:
- if debug():
-                print "notice: [msvc-cfg] Asked to configure all registered " \
-                      "msvc toolset versions when there are none currently " \
-                      "registered."
- else:
- for v in all_versions:
- # Note that there is no need to skip already configured
- # versions here as this will request configure-really rule
- # to configure the version using default options which will
- # in turn cause it to simply do nothing in case the version
- # has already been configured.
- configure_really(v)
- elif version == "default":
- configure_really(None,options)
- else:
- configure_really(version, options)
-
-def extend_conditions(conditions,exts):
- return [ cond + '/' + ext for cond in conditions for ext in exts ]
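-# A standalone illustration of extend_conditions() (values are made up):
-#   extend_conditions(['<toolset>msvc-9.0'], ['<warnings>off', '<warnings>all'])
-#   -> ['<toolset>msvc-9.0/<warnings>off', '<toolset>msvc-9.0/<warnings>all']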
-
-def configure_version_specific(toolset_arg, version, conditions):
-    # Starting with version 7.0, the msvc compiler has the /Zc:forScope and
-    # /Zc:wchar_t options that improve C++ standard conformance, but those
-    # options are off by default. If we are sure that the msvc version is at
-    # least 7.*, add those options explicitly. We can be sure either if the user
-    # specified version 7.* explicitly or if we auto-detected the version
-    # ourselves.
- if not re.match('^6\\.', version):
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',conditions, ['/Zc:forScope','/Zc:wchar_t'])
- toolset.flags('{}.compile.c++'.format(toolset_arg), 'C++FLAGS',conditions, ['/wd4675'])
-
- # Explicitly disable the 'function is deprecated' warning. Some msvc
- # versions have a bug, causing them to emit the deprecation warning even
- # with /W0.
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>off']), ['/wd4996'])
- if re.match('^[78]\\.', version):
- # 64-bit compatibility warning deprecated since 9.0, see
- # http://msdn.microsoft.com/en-us/library/yt4xw8fh.aspx
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS',extend_conditions(conditions,['<warnings>all']), ['/Wp64'])
-
- #
- # Processor-specific optimization.
- #
- if re.match('^[67]', version ):
- # 8.0 deprecates some of the options.
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed','<optimization>space']), ['/Ogiy', '/Gs'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>speed']), ['/Ot'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<optimization>space']), ['/Os'])
-
- cpu_arch_i386_cond = extend_conditions(conditions, __cpu_arch_i386)
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>']),['/GB'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i386']),['/G3'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>i486']),['/G4'])
-
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g5]), ['/G5'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g6]), ['/G6'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(cpu_arch_i386_cond,['<instruction-set>' + t for t in __cpu_type_g7]), ['/G7'])
-
- # Improve floating-point accuracy. Otherwise, some of C++ Boost's "math"
- # tests will fail.
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', conditions, ['/Op'])
-
- # 7.1 and below have single-threaded static RTL.
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/ML'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MLd'])
- else:
- # 8.0 and above adds some more options.
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' for a in __cpu_arch_amd64]), ['/favor:blend'])
-
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_em64t]), ['/favor:EM64T'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions, [a + '/<instruction-set>' + t for a in __cpu_arch_amd64 for t in __cpu_type_amd64]), ['/favor:AMD64'])
-
- # 8.0 and above only has multi-threaded static RTL.
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>off/<runtime-link>static/<threading>single']), ['/MT'])
- toolset.flags('{}.compile'.format(toolset_arg), 'CFLAGS', extend_conditions(conditions,['<runtime-debugging>on/<runtime-link>static/<threading>single']), ['/MTd'])
-
- # Specify target machine type so the linker will not need to guess.
- toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64'])
- toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86'])
- toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64'])
-
-        # Make sure that a manifest will be generated even if there are no
-        # dependencies to put in it.
- toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions,["<embed-manifest>off"]), ['/MANIFEST'])
-
-
-# Registers this toolset including all of its flags, features & generators. Does
-# nothing on repeated calls.
-
-def register_toolset():
- if not 'msvc' in feature.values('toolset'):
- register_toolset_really()
-
-
-engine = get_manager().engine()
-
-# This rule sets up the .pdb file used when generating static libraries with
-# the debug-store=database option, so that the compiler puts all debug info
-# into a single .pdb file named after the library.
-#
-# Poking at source targets this way is probably not clean, but it is the
-# easiest approach.
-def archive(targets, sources=None, properties=None):
- bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
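-# For illustration only (hypothetical target path): for a first target of
-# 'bin/msvc/debug/libfoo.lib' the call above sets PDB_NAME to
-# os.path.splitext('bin/msvc/debug/libfoo.lib')[0] + '.pdb', i.e.
-# 'bin/msvc/debug/libfoo.pdb', on every source object of the archive.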
-
-# Declare action for creating static libraries. If library exists, remove it
-# before adding files. See
-# http://article.gmane.org/gmane.comp.lib.boost.build/4241 for rationale.
-if not on_cygwin():
- engine.register_action(
- 'msvc.archive',
- '''if exist "$(<[1])" DEL "$(<[1])"
- $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
-"$(>)"
-$(LIBRARIES_MENTIONED_BY_FILE)
-"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
-"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
- function=archive)
-else:
- engine.register_action(
- 'msvc.archive',
- '''{rm} "$(<[1])"
- $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=
-"$(>)"
-$(LIBRARIES_MENTIONED_BY_FILE)
-"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
-"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()),
- function=archive)
-
-# For the assembler the following options are turned on by default:
-#
-# -Zp4 align structures to 4 bytes
-# -Cp preserve case of user identifiers
-# -Cx preserve case in publics, externs
-#
-engine.register_action(
- 'msvc.compile.asm',
- '$(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"' )
-
-
-# Equivalent to [ on $(target) return $(prefix)$(var)$(suffix) ]. Note that $(var) can be a list.
-def expand_target_variable(target,var,prefix=None,suffix=None):
- list = bjam.call( 'get-target-variable', target, var )
- return " ".join([ ("" if prefix is None else prefix) + elem + ("" if suffix is None else suffix) for elem in list ])
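-# A standalone illustration of expand_target_variable() (variable values are
-# made up): if the target's DEFINES variable holds ['FOO', 'BAR=1'], then
-# expanding it with prefix '-D' yields the single string '-DFOO -DBAR=1'.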
-
-
-compile_c_cpp_pch = '''$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)'''
-# Action for running the C/C++ compiler using precompiled headers. An already
-# built source file for compiling the precompiled headers is expected to be
-# given as one of the source parameters.
-compile_c_cpp_pch_s = '''$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)'''
-
-def get_rspline(targets, lang_opt):
- result = lang_opt + ' ' + \
- expand_target_variable(targets, 'UNDEFS', '-U' ) + ' ' + \
- expand_target_variable(targets, 'CFLAGS' ) + ' ' + \
- expand_target_variable(targets, 'C++FLAGS' ) + ' ' + \
- expand_target_variable(targets, 'OPTIONS' ) + ' -c ' + \
- expand_target_variable(targets, 'DEFINES', '\n-D' ) + ' ' + \
- expand_target_variable(targets, 'INCLUDES', '\n"-I', '"' )
- bjam.call('set-target-variable', targets, 'CC_RSPLINE', result)
-
-def compile_c(targets, sources = [], properties = None):
- get_manager().engine().set_target_variable( targets[1], 'C++FLAGS', '' )
- get_rspline(targets, '-TC')
- sources += bjam.call('get-target-variable',targets,'PCH_FILE')
- sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
- compile_c_cpp(targets,sources)
-
-def compile_c_preprocess(targets, sources = [], properties = None):
-    get_manager().engine().set_target_variable( targets[1], 'C++FLAGS', '' )
- get_rspline(targets, '-TC')
- sources += bjam.call('get-target-variable',targets,'PCH_FILE')
- sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
- preprocess_c_cpp(targets,sources)
-
-def compile_c_pch(targets, sources = [], properties = []):
-    get_manager().engine().set_target_variable( targets[1], 'C++FLAGS', '' )
- get_rspline([targets[1]], '-TC')
- get_rspline([targets[2]], '-TC')
- pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE')
- sources += pch_source
- if pch_source:
- get_manager().engine().set_update_action('compile-c-c++-pch-s', targets, sources, properties)
- get_manager().engine().add_dependency(targets,pch_source)
- compile_c_cpp_pch_s(targets,sources)
- else:
- get_manager().engine().set_update_action('compile-c-c++-pch', targets, sources, properties)
- compile_c_cpp_pch(targets,sources)
-
-toolset.flags( 'msvc', 'YLOPTION', [], ['-Yl'] )
-
-def compile_cpp(targets,sources=[],properties=None):
- get_rspline(targets,'-TP')
- sources += bjam.call('get-target-variable',targets,'PCH_FILE')
- sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
- compile_c_cpp(targets,sources)
-
-def compile_cpp_preprocess(targets,sources=[],properties=None):
- get_rspline(targets,'-TP')
- sources += bjam.call('get-target-variable',targets,'PCH_FILE')
- sources += bjam.call('get-target-variable',targets,'PCH_HEADER')
- preprocess_c_cpp(targets,sources)
-
-def compile_cpp_pch(targets,sources=[],properties=None):
- get_rspline([targets[1]], '-TP')
- get_rspline([targets[2]], '-TP')
- pch_source = bjam.call('get-target-variable', targets, 'PCH_SOURCE')
- sources += pch_source
- if pch_source:
- get_manager().engine().set_update_action('compile-c-c++-pch-s', targets, sources, properties)
- get_manager().engine().add_dependency(targets,pch_source)
- compile_c_cpp_pch_s(targets,sources)
- else:
- get_manager().engine().set_update_action('compile-c-c++-pch', targets, sources, properties)
- compile_c_cpp_pch(targets,sources)
-
-
-# Action for running the C/C++ compiler without using precompiled headers.
-#
-# WARNING: Synchronize any changes in this action with intel-win.
-#
-# Notes regarding PDB generation, for when we use <debug-symbols>on/<debug-store>database
-#
-# 1. PDB_CFLAG is only set for <debug-symbols>on/<debug-store>database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty
-#
-# 2. When compiling an executable's source files, PDB_NAME is set on a per-source-file basis by rule compile-c-c++.
-#    The linker will pull these into the executable's PDB.
-#
-# 3. When compiling a library's source files, PDB_NAME is updated to <libname>.pdb for each source file by rule archive,
-#    as in this case the compiler must be used to create a single PDB for our library.
-#
-
-compile_action = '$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)'
-engine.register_action(
- 'msvc.compile.c',
- compile_action,
- function=compile_c,
- bound_list=['PDB_NAME'])
-
-engine.register_action(
- 'msvc.compile.c++',
- compile_action,
- function=compile_cpp,
- bound_list=['PDB_NAME'])
-
-
-preprocess_action = '$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"'
-
-engine.register_action(
- 'msvc.preprocess.c',
- preprocess_action,
- function=compile_c_preprocess,
- bound_list=['PDB_NAME'])
-
-engine.register_action(
- 'msvc.preprocess.c++',
- preprocess_action,
- function=compile_cpp_preprocess,
- bound_list=['PDB_NAME'])
-
-def compile_c_cpp(targets,sources=None):
- pch_header = bjam.call('get-target-variable',targets[0],'PCH_HEADER')
- pch_file = bjam.call('get-target-variable',targets[0],'PCH_FILE')
- if pch_header: get_manager().engine().add_dependency(targets[0],pch_header)
- if pch_file: get_manager().engine().add_dependency(targets[0],pch_file)
- bjam.call('set-target-variable',targets,'PDB_NAME', os.path.splitext(targets[0])[0] + '.pdb')
-
-def preprocess_c_cpp(targets,sources=None):
- #same as above
- return compile_c_cpp(targets,sources)
-
-# Action for running the C/C++ compiler using precompiled headers. In addition
-# to whatever else it needs to compile, this action also adds a temporary source
-# .cpp file used to compile the precompiled headers themselves.
-
-engine.register_action(
- 'msvc.compile.c.pch',
- None, # action set by the function
- function=compile_c_pch)
-
-engine.register_action(
- 'msvc.compile.c++.pch',
- None, # action set by the function
- function=compile_cpp_pch)
-
-
-# See midl.py for details.
-#
-engine.register_action(
- 'msvc.compile.idl',
- '''$(.IDL) /nologo @"@($(<[1]:W).rsp:E=
-"$(>:W)"
--D$(DEFINES)
-"-I$(INCLUDES:W)"
--U$(UNDEFS)
-$(MIDLFLAGS)
-/tlb "$(<[1]:W)"
-/h "$(<[2]:W)"
-/iid "$(<[3]:W)"
-/proxy "$(<[4]:W)"
-/dlldata "$(<[5]:W)")"
- {touch} "$(<[4]:W)"
- {touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command()))
-
-engine.register_action(
- 'msvc.compile.mc',
- '$(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"')
-
-engine.register_action(
- 'msvc.compile.rc',
- '$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"')
-
-def link_dll(targets,sources=None,properties=None):
- get_manager().engine().add_dependency(targets,bjam.call('get-target-variable',targets,'DEF_FILE'))
- manifest(targets, sources, properties)
-
-def manifest(targets,sources=None,properties=None):
- if 'on' in properties.get('<embed-manifest>'):
- get_manager().engine().set_update_action('msvc.manifest', targets, sources, properties)
-
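-# For illustration: with a (hypothetical) property set containing
-# <embed-manifest>on, properties.get('<embed-manifest>') yields ['on'] above and
-# the 'msvc.manifest' action is scheduled for the freshly linked target, so
-# mt.exe embeds e.g. foo.exe.manifest into foo.exe as resource 1 (resource 2 for
-# DLLs) -- see the actions registered below.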
-
-# Incrementally linking a DLL causes no end of problems: if the actual exports do
-# not change, the import .lib file is never updated. Therefore, the .lib is
-# always out-of-date and gets rebuilt every time. I am not sure that incremental
-# linking is such a great idea in general, but in this case I am sure we do not
-# want it.
-
-# Windows manifest is a new way to specify dependencies on managed DotNet
-# assemblies and Windows native DLLs. The manifests are embedded as resources
-# and are useful in any PE target (both DLL and EXE).
-
-if not on_cygwin():
- engine.register_action(
- 'msvc.link',
- '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
-"$(>)"
-$(LIBRARIES_MENTIONED_BY_FILE)
-$(LIBRARIES)
-"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
-"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
-if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
- function=manifest,
- bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
-
- engine.register_action(
- 'msvc.manifest',
- '''if exist "$(<[1]).manifest" (
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
- )''')
-
- engine.register_action(
- 'msvc.link.dll',
- '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
-"$(>)"
-$(LIBRARIES_MENTIONED_BY_FILE)
-$(LIBRARIES)
-"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
-"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
-if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''',
- function=link_dll,
- bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
-
- engine.register_action(
- 'msvc.manifest.dll',
- '''if exist "$(<[1]).manifest" (
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
- )''')
-else:
- engine.register_action(
- 'msvc.link',
- '''$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
-"$(>)"
-$(LIBRARIES_MENTIONED_BY_FILE)
-$(LIBRARIES)
-"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
-"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
- function=manifest,
- bound_list=['PDB_NAME','DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
-
- engine.register_action(
- 'msvc.manifest',
- '''if test -e "$(<[1]).manifest"; then
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
- fi''')
-
- engine.register_action(
- 'msvc.link.dll',
- '''$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=
-"$(>)"
-$(LIBRARIES_MENTIONED_BY_FILE)
-$(LIBRARIES)
-"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib"
-"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''',
- function=link_dll,
- bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE'])
-
- engine.register_action(
- 'msvc.manifest.dll',
- '''if test -e "$(<[1]).manifest"; then
- $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
- fi''')
-
-
-################################################################################
-#
-# Classes.
-#
-################################################################################
-
-class MsvcPchGenerator(pch.PchGenerator):
-
- # Inherit the __init__ method
-
- def run_pch(self, project, name, prop_set, sources):
-        # Search the sources for the PCH header and the optional PCH source file.
- pch_header = None
- pch_source = None
- for s in sources:
- if type.is_derived(s.type(), 'H'):
- pch_header = s
- elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'):
- pch_source = s
-
-        if not pch_header:
- raise RuntimeError( "can not build pch without pch-header" )
-
- # If we do not have the PCH source - that is fine. We will just create a
- # temporary .cpp file in the action.
- temp_prop_set = property_set.create([Property('pch-source',pch_source)]+prop_set.all())
- generated = Generator.run(project,name,temp_prop_set,pch_header)
- pch_file = None
- for g in generated:
- if type.is_derived(g.type(), 'PCH'):
- pch_file = g
-        return [property_set.create([Property('pch-header',pch_header),Property('pch-file',pch_file)])] + generated
-
-
-################################################################################
-#
-# Local rules.
-#
-################################################################################
-
-# Detects versions listed as '_known_versions' by checking registry information,
-# environment variables & default paths. Supports both native Windows and
-# Cygwin.
-def auto_detect_toolset_versions():
- if on_windows() or on_cygwin():
- for version in _known_versions:
- versionVarName = '__version_{}_reg'.format(version.replace('.','_'))
- if versionVarName in globals():
- vc_path = None
- for x in [ '', 'Wow6432Node\\' ]:
- try:
-                        with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\{}Microsoft\\{}'.format(x, globals()[versionVarName])) as reg_key:
- vc_path = _winreg.QueryValueEx(reg_key, "ProductDir")[0]
- except:
- pass
- if vc_path:
- vc_path = os.path.join(vc_path,'bin')
- register_configuration(version,os.path.normpath(vc_path))
-
- for i in _known_versions:
- if not i in __versions.all():
- register_configuration(i,default_path(i))
-
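-# Standalone sketch of the registry probe performed above (the key below assumes
-# a hypothetical Visual Studio 10.0 install; only the Wow6432Node variant is
-# shown):
-#
-#   import _winreg
-#   key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,
-#       'SOFTWARE\\Wow6432Node\\Microsoft\\VisualStudio\\10.0\\Setup\\VC')
-#   print _winreg.QueryValueEx(key, 'ProductDir')[0]   # the VC ProductDir folder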
-
-# Worker rule for toolset version configuration. Takes an explicit version id or
-# nothing in case it should configure the default toolset version (the first
-# registered one or a new 'default' one in case no toolset versions have been
-# registered yet).
-#
-
-def configure_really(version=None, options=[]):
- v = version
- if not v:
- # Take the first registered (i.e. auto-detected) version.
- version = __versions.first()
- v = version
-
- # Note: 'version' can still be empty at this point if no versions have
- # been auto-detected.
- if not version:
- version = "default"
-
- # Version alias -> real version number.
- version = globals().get("__version_alias_{}".format(version), version)
-
- # Check whether the selected configuration is already in use.
- if version in __versions.used():
- # Allow multiple 'toolset.using' calls for the same configuration if the
- # identical sets of options are used.
- if options and options != __versions.get(version,'options'):
-            raise RuntimeError("MSVC toolset configuration: Toolset version '{}' already configured.".format(version))
- else:
- # Register a new configuration.
- __versions.register(version)
-
-        # Add user-supplied options to the auto-detected ones.
- version_opts = __versions.get(version, 'options')
- if (version_opts):
- options = version_opts + options
-
- # Mark the configuration as 'used'.
- __versions.use(version)
- # Generate conditions and save them.
- conditions = common.check_init_parameters('msvc', None, ('version', v))
- __versions.set(version, 'conditions', conditions)
- command = feature.get_values('<command>', options)
-
- # If version is specified, we try to search first in default paths, and
- # only then in PATH.
- command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version))
- common.handle_options('msvc', conditions, command, options)
-
- if not version:
- # Even if version is not explicitly specified, try to detect the
- # version from the path.
- # FIXME: We currently detect both Microsoft Visual Studio 9.0 and
- # 9.0express as 9.0 here.
- if re.search("Microsoft Visual Studio 11", command):
- version = '11.0'
-        elif re.search("Microsoft Visual Studio 10", command):
- version = '10.0'
- elif re.search("Microsoft Visual Studio 9", command):
- version = '9.0'
- elif re.search("Microsoft Visual Studio 8", command):
- version = '8.0'
- elif re.search("NET 2003[\/\\]VC7", command):
- version = '7.1'
- elif re.search("Microsoft Visual C\\+\\+ Toolkit 2003", command):
- version = '7.1toolkit'
- elif re.search(".NET[\/\\]VC7", command):
- version = '7.0'
- else:
- version = '6.0'
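-    # For example (hypothetical path): a command of
-    # 'C:\Program Files\Microsoft Visual Studio 9.0\VC\bin\cl.exe' matches the
-    # "Microsoft Visual Studio 9" pattern above, so the version is taken to be
-    # '9.0' even though it was never specified explicitly.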
-
- # Generate and register setup command.
-
- below_8_0 = re.search("^[67]\\.",version) != None
-
- if below_8_0:
- cpu = ['i386']
- else:
- cpu = ['i386', 'amd64', 'ia64']
-
- setup_scripts = {}
-
- if command:
-        # TODO: Note that if we specify a non-existent toolset version then
- # this rule may find and use a corresponding compiler executable
- # belonging to an incorrect toolset version. For example, if you
- # have only MSVC 7.1 installed, have its executable on the path and
- # specify you want Boost Build to use MSVC 9.0, then you want Boost
- # Build to report an error but this may cause it to silently use the
- # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0
- # toolset version.
- command = common.get_absolute_tool_path(command)
-
- if command:
- parent = os.path.dirname(os.path.normpath(command))
- # Setup will be used if the command name has been specified. If
- # setup is not specified explicitly then a default setup script will
-        # be used instead. Setup scripts may be global or architecture/
-        # platform/cpu specific. Setup options are used only in case of
- # global setup scripts.
-
- # Default setup scripts provided with different VC distributions:
- #
- # VC 7.1 had only the vcvars32.bat script specific to 32 bit i386
- # builds. It was located in the bin folder for the regular version
- # and in the root folder for the free VC 7.1 tools.
- #
- # Later 8.0 & 9.0 versions introduce separate platform specific
- # vcvars*.bat scripts (e.g. 32 bit, 64 bit AMD or 64 bit Itanium)
- # located in or under the bin folder. Most also include a global
- # vcvarsall.bat helper script located in the root folder which runs
- # one of the aforementioned vcvars*.bat scripts based on the options
- # passed to it. So far only the version coming with some PlatformSDK
- # distributions does not include this top level script but to
- # support those we need to fall back to using the worker scripts
- # directly in case the top level script can not be found.
-
- global_setup = feature.get_values('<setup>',options)
- if global_setup:
- global_setup = global_setup[0]
- else:
- global_setup = None
-
- if not below_8_0 and not global_setup:
- global_setup = locate_default_setup(command,parent,'vcvarsall.bat')
-
-
- default_setup = {
- 'amd64' : 'vcvarsx86_amd64.bat',
- 'i386' : 'vcvars32.bat',
- 'ia64' : 'vcvarsx86_ia64.bat' }
-
- # http://msdn2.microsoft.com/en-us/library/x4d2c09s(VS.80).aspx and
- # http://msdn2.microsoft.com/en-us/library/x4d2c09s(vs.90).aspx
- # mention an x86_IPF option, that seems to be a documentation bug
- # and x86_ia64 is the correct option.
- default_global_setup_options = {
- 'amd64' : 'x86_amd64',
- 'i386' : 'x86',
- 'ia64' : 'x86_ia64' }
-
- somehow_detect_the_itanium_platform = None
-        # When using 64-bit Windows and targeting 64-bit, it is possible to
-        # use a native 64-bit compiler, selected by the "amd64" & "ia64"
-        # parameters to vcvarsall.bat. There are two variables we can use --
-        # PROCESSOR_ARCHITECTURE and PROCESSOR_IDENTIFIER. The first is
-        # 'x86' when running 32-bit Windows, no matter which processor is
-        # used, and 'AMD64' when running 64-bit Windows on x86 hardware
-        # (either AMD64 or EM64T).
- #
- if re.search( 'AMD64', environ[ "PROCESSOR_ARCHITECTURE" ] ) != None:
- default_global_setup_options[ 'amd64' ] = 'amd64'
- # TODO: The same 'native compiler usage' should be implemented for
- # the Itanium platform by using the "ia64" parameter. For this
- # though we need someone with access to this platform who can find
- # out how to correctly detect this case.
- elif somehow_detect_the_itanium_platform:
- default_global_setup_options[ 'ia64' ] = 'ia64'
-
- setup_prefix = "call "
- setup_suffix = """ >nul\n"""
- if on_cygwin():
- setup_prefix = "cmd.exe /S /C call "
- setup_suffix = " \">nul\" \"&&\" "
-
- for c in cpu:
- setup_options = None
- setup_cpu = feature.get_values('<setup-{}>'.format(c),options)
-
- if not setup_cpu:
- if global_setup:
- setup_cpu = global_setup
- # If needed we can easily add using configuration flags
- # here for overriding which options get passed to the
- # global setup command for which target platform:
- # setup_options = feature.get_values('<setup-options-{}>'.format(c),options)
- if not setup_options:
- setup_options = default_global_setup_options[ c ]
- else:
- setup_cpu = locate_default_setup(command, parent, default_setup[ c ])
-
- # Cygwin to Windows path translation.
- # setup-$(c) = "\""$(setup-$(c):W)"\"" ;
-
- # Append setup options to the setup name and add the final setup
- # prefix & suffix.
-            setup_scripts[ c ] = '{}"{}" {}{}'.format(setup_prefix, setup_cpu, setup_options or '', setup_suffix)
-
- # Get tool names (if any) and finish setup.
- compiler = feature.get_values("<compiler>", options)
- if not compiler:
- compiler = "cl"
-
- linker = feature.get_values("<linker>", options)
- if not linker:
- linker = "link"
-
- resource_compiler = feature.get_values("<resource-compiler>", options)
- if not resource_compiler:
- resource_compiler = "rc"
-
- # Turn on some options for i386 assembler
- # -coff generate COFF format object file (compatible with cl.exe output)
- default_assembler_amd64 = 'ml64'
- default_assembler_i386 = 'ml -coff'
- default_assembler_ia64 = 'ias'
-
- assembler = feature.get_values('<assembler>',options)
-
- idl_compiler = feature.get_values('<idl-compiler>',options)
- if not idl_compiler:
- idl_compiler = 'midl'
-
- mc_compiler = feature.get_values('<mc-compiler>',options)
- if not mc_compiler:
- mc_compiler = 'mc'
-
- manifest_tool = feature.get_values('<manifest-tool>',options)
- if not manifest_tool:
- manifest_tool = 'mt'
-
- cc_filter = feature.get_values('<compiler-filter>',options)
-
- for c in cpu:
- cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ]
-
- setup_script = setup_scripts.get(c, '')
-
- if debug():
- for cpu_condition in cpu_conditions:
- print "notice: [msvc-cfg] condition: '{}', setup: '{}'".format(cpu_condition,setup_script)
-
- cpu_assembler = assembler
- if not cpu_assembler:
- cpu_assembler = locals()['default_assembler_{}'.format(c)]
-
- toolset.flags('msvc.compile', '.CC' , cpu_conditions, ['{}{} /Zm800 -nologo' .format(setup_script, compiler)])
- toolset.flags('msvc.compile', '.RC' , cpu_conditions, ['{}{} -nologo' .format(setup_script, resource_compiler)])
- toolset.flags('msvc.compile', '.ASM', cpu_conditions, ['{}{} ' .format(setup_script, cpu_assembler)])
- toolset.flags('msvc.link' , '.LD' , cpu_conditions, ['{}{} /NOLOGO /INCREMENTAL:NO'.format(setup_script, linker)])
- toolset.flags('msvc.archive', '.LD' , cpu_conditions, ['{}{} /lib /NOLOGO' .format(setup_script, linker)])
- toolset.flags('msvc.compile', '.IDL', cpu_conditions, ['{}{} ' .format(setup_script, idl_compiler)])
- toolset.flags('msvc.compile', '.MC' , cpu_conditions, ['{}{} ' .format(setup_script, mc_compiler)])
- toolset.flags('msvc.link' , '.MT' , cpu_conditions, ['{}{} -nologo' .format(setup_script, manifest_tool)])
-
- if cc_filter:
- toolset.flags('msvc', '.CC.FILTER', cpu_conditions, ['"|" {}'.format(cc_filter)])
-
- # Set version-specific flags.
- configure_version_specific('msvc', version, conditions)
-
-
-# Returns the default installation path for the given version.
-#
-def default_path(version):
- # Use auto-detected path if possible.
- options = __versions.get(version, 'options')
- tmp_path = None
- if options:
- tmp_path = feature.get_values('<command>', options)
-
- if tmp_path:
- tmp_path="".join(tmp_path)
- tmp_path=os.path.dirname(tmp_path)
- else:
- env_var_var_name = '__version_{}_env'.format(version.replace('.','_'))
- vc_path = None
- if env_var_var_name in globals():
- env_var_name = globals()[env_var_var_name]
- if env_var_name in os.environ:
- vc_path = environ[env_var_name]
- if vc_path:
- vc_path = os.path.join(vc_path,globals()['__version_{}_envpath'.format(version.replace('.','_'))])
- tmp_path = os.path.normpath(vc_path)
-
- var_name = '__version_{}_path'.format(version.replace('.','_'))
- if not tmp_path and var_name in globals():
- tmp_path = os.path.normpath(os.path.join(common.get_program_files_dir(), globals()[var_name]))
- return tmp_path
-
-
-# Returns either the default installation path (if 'version' is not empty) or
-# list of all known default paths (if no version is given)
-#
-def default_paths(version = None):
- possible_paths = []
- if version:
- path = default_path(version)
- if path:
- possible_paths.append(path)
- else:
- for i in _known_versions:
- path = default_path(i)
- if path:
- possible_paths.append(path)
- return possible_paths
-
-
-class MsvcLinkingGenerator(builtin.LinkingGenerator):
-    # Calls the base version. If necessary, also creates additional targets
-    # for the PDB and the manifest file accompanying the main target; the
-    # comments below explain how each of them is named.
- def generated_targets(self, sources, prop_set, project, name):
- result = builtin.LinkingGenerator.generated_targets(self, sources, prop_set, project, name)
- if result:
- name_main = result[0].name()
- action = result[0].action()
-
- if prop_set.get('<debug-symbols>') == 'on':
-                # We force an exact name on the PDB. The reason is tagging -- the
-                # tag rule may reasonably special-case some target types, like
-                # SHARED_LIB. The tag rule will not catch PDB, and it cannot even
-                # easily figure out whether a PDB is paired with a SHARED_LIB, an
-                # EXE or something else. Because the PDB always gets the same name
-                # as the main target, with .pdb as the extension, we just force it.
- target = FileTarget(name_main.split_ext()[0]+'.pdb','PDB',project,action,True)
- registered_target = virtual_target.register(target)
- if target != registered_target:
- action.replace_targets(target,registered_target)
- result.append(registered_target)
- if prop_set.get('<embed-manifest>') == 'off':
-                # The manifest target is awkward: it has .manifest appended to the
-                # full name of the main target, including its extension (e.g.
-                # a.exe.manifest). We use an 'exact' name to achieve this.
- target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True)
- registered_target = virtual_target.register(target)
- if target != registered_target:
- action.replace_targets(target,registered_target)
- result.append(registered_target)
- return result
-
-
-# Unsafe worker rule for the register-toolset() rule. Must not be called
-# multiple times.
-
-def register_toolset_really():
- feature.extend('toolset', ['msvc'])
-
- # Intel and msvc supposedly have link-compatible objects.
- feature.subfeature( 'toolset', 'msvc', 'vendor', 'intel', ['propagated', 'optional'])
-
- # Inherit MIDL flags.
- toolset.inherit_flags('msvc', 'midl')
-
- # Inherit MC flags.
- toolset.inherit_flags('msvc','mc')
-
- # Dynamic runtime comes only in MT flavour.
- toolset.add_requirements(['<toolset>msvc,<runtime-link>shared:<threading>multi'])
-
- # Declare msvc toolset specific features.
- feature.feature('debug-store', ['object', 'database'], ['propagated'])
- feature.feature('pch-source', [], ['dependency', 'free'])
-
- # Declare generators.
-
- # TODO: Is it possible to combine these? Make the generators
- # non-composing so that they do not convert each source into a separate
- # .rsp file.
- generators.register(MsvcLinkingGenerator('msvc.link', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['EXE'], ['<toolset>msvc']))
- generators.register(MsvcLinkingGenerator('msvc.link.dll', True, ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'], ['SHARED_LIB','IMPORT_LIB'], ['<toolset>msvc']))
-
- builtin.register_archiver('msvc.archive', ['OBJ'], ['STATIC_LIB'], ['<toolset>msvc'])
- builtin.register_c_compiler('msvc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>msvc'])
- builtin.register_c_compiler('msvc.compile.c', ['C'], ['OBJ'], ['<toolset>msvc'])
- builtin.register_c_compiler('msvc.compile.c++.preprocess', ['CPP'], ['PREPROCESSED_CPP'], ['<toolset>msvc'])
- builtin.register_c_compiler('msvc.compile.c.preprocess', ['C'], ['PREPROCESSED_C'], ['<toolset>msvc'])
-
- # Using 'register-c-compiler' adds the build directory to INCLUDES.
- builtin.register_c_compiler('msvc.compile.rc', ['RC'], ['OBJ(%_res)'], ['<toolset>msvc'])
- generators.override('msvc.compile.rc', 'rc.compile.resource')
- generators.register_standard('msvc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>msvc'])
-
- builtin.register_c_compiler('msvc.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], ['<toolset>msvc'])
- generators.override('msvc.compile.idl', 'midl.compile.idl')
-
- generators.register_standard('msvc.compile.mc', ['MC'], ['H','RC'], ['<toolset>msvc'])
- generators.override('msvc.compile.mc', 'mc.compile')
-
- # Note: the 'H' source type will catch both '.h' and '.hpp' headers as
- # the latter have their HPP type derived from H. The type of compilation
- # is determined entirely by the destination type.
- generators.register(MsvcPchGenerator('msvc.compile.c.pch', False, ['H'], ['C_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
- generators.register(MsvcPchGenerator('msvc.compile.c++.pch', False, ['H'], ['CPP_PCH','OBJ'], ['<pch>on', '<toolset>msvc']))
-
- generators.override('msvc.compile.c.pch', 'pch.default-c-pch-generator')
- generators.override('msvc.compile.c++.pch', 'pch.default-cpp-pch-generator')
-
- toolset.flags('msvc.compile', 'PCH_FILE' , ['<pch>on'], ['<pch-file>' ])
- toolset.flags('msvc.compile', 'PCH_SOURCE', ['<pch>on'], ['<pch-source>'])
- toolset.flags('msvc.compile', 'PCH_HEADER', ['<pch>on'], ['<pch-header>'])
-
- #
- # Declare flags for compilation.
- #
- toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>speed'], ['/O2'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>space'], ['/O1'])
-
- toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium ], ['/G1'])
- toolset.flags('msvc.compile', 'CFLAGS', [ a + '/<instruction-set>' + t for a in __cpu_arch_ia64 for t in __cpu_type_itanium2 ], ['/G2'])
-
- toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>object'], ['/Z7'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<debug-symbols>on/<debug-store>database'], ['/Zi'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<optimization>off'], ['/Od'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>off'], ['/Ob0'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>on'], ['/Ob1'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<inlining>full'], ['/Ob2'])
-
- toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>on'], ['/W3'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>off'], ['/W0'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<warnings>all'], ['/W4'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<warnings-as-errors>on'], ['/WX'])
-
- toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>off'], ['/EHs'])
- toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>off/<extern-c-nothrow>on'], ['/EHsc'])
- toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>off'], ['/EHa'])
- toolset.flags('msvc.compile', 'C++FLAGS', ['<exception-handling>on/<asynch-exceptions>on/<extern-c-nothrow>on'], ['/EHac'])
-
- # By default 8.0 enables rtti support while prior versions disabled it. We
- # simply enable or disable it explicitly so we do not have to depend on this
- # default behaviour.
- toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>on'], ['/GR'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<rtti>off'], ['/GR-'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>shared'], ['/MD'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>shared'], ['/MDd'])
-
- toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>off/<runtime-link>static/<threading>multi'], ['/MT'])
- toolset.flags('msvc.compile', 'CFLAGS', ['<runtime-debugging>on/<runtime-link>static/<threading>multi'], ['/MTd'])
-
- toolset.flags('msvc.compile', 'OPTIONS', [], ['<cflags>'])
- toolset.flags('msvc.compile.c++', 'OPTIONS', [], ['<cxxflags>'])
-
- toolset.flags('msvc.compile', 'PDB_CFLAG', ['<debug-symbols>on/<debug-store>database'],['/Fd'])
-
- toolset.flags('msvc.compile', 'DEFINES', [], ['<define>'])
- toolset.flags('msvc.compile', 'UNDEFS', [], ['<undef>'])
- toolset.flags('msvc.compile', 'INCLUDES', [], ['<include>'])
-
- # Declare flags for the assembler.
- toolset.flags('msvc.compile.asm', 'USER_ASMFLAGS', [], ['<asmflags>'])
-
- toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<debug-symbols>on'], ['/Zi', '/Zd'])
-
- toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>on'], ['/W3'])
- toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>off'], ['/W0'])
- toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings>all'], ['/W4'])
- toolset.flags('msvc.compile.asm', 'ASMFLAGS', ['<warnings-as-errors>on'], ['/WX'])
-
- toolset.flags('msvc.compile.asm', 'DEFINES', [], ['<define>'])
-
- # Declare flags for linking.
- toolset.flags('msvc.link', 'PDB_LINKFLAG', ['<debug-symbols>on/<debug-store>database'], ['/PDB']) # not used yet
- toolset.flags('msvc.link', 'LINKFLAGS', ['<debug-symbols>on'], ['/DEBUG'])
- toolset.flags('msvc.link', 'DEF_FILE', [], ['<def-file>'])
-
- # The linker disables the default optimizations when using /DEBUG so we
- # have to enable them manually for release builds with debug symbols.
- toolset.flags('msvc', 'LINKFLAGS', ['<debug-symbols>on/<runtime-debugging>off'], ['/OPT:REF,ICF'])
-
- toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>console'], ['/subsystem:console'])
- toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>gui'], ['/subsystem:windows'])
- toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>wince'], ['/subsystem:windowsce'])
- toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>native'], ['/subsystem:native'])
- toolset.flags('msvc', 'LINKFLAGS', ['<user-interface>auto'], ['/subsystem:posix'])
-
- toolset.flags('msvc.link', 'OPTIONS', [], ['<linkflags>'])
- toolset.flags('msvc.link', 'LINKPATH', [], ['<library-path>'])
-
- toolset.flags('msvc.link', 'FINDLIBS_ST', ['<find-static-library>'])
- toolset.flags('msvc.link', 'FINDLIBS_SA', ['<find-shared-library>'])
- toolset.flags('msvc.link', 'LIBRARY_OPTION', ['<toolset>msvc'])
- toolset.flags('msvc.link', 'LIBRARIES_MENTIONED_BY_FILE', ['<library-file>'])
-
- toolset.flags('msvc.archive', 'AROPTIONS', [], ['<archiveflags>'])
-
-
-# Locates the requested setup script under the given folder and returns its full
-# path or nothing in case the script can not be found. In case multiple scripts
-# are found only the first one is returned.
-#
-# TODO: There used to exist a code comment for the msvc.init rule stating that
-# we do not correctly detect the location of the vcvars32.bat setup script for
-# the free VC 7.1 tools in case the user explicitly provides a path. This
-# should either be tested or, if this toolset version is no longer important,
-# the whole comment simply removed.
-#
-def locate_default_setup(command, parent, setup_name):
- for setup in [os.path.join(dir,setup_name) for dir in [command,parent]]:
- if os.path.exists(setup):
- return setup
- return None
-
-
-# Validates given path, registers found configuration and prints debug
-# information about it.
-#
-def register_configuration(version, path=None):
- if path:
- command = os.path.join(path, 'cl.exe')
- if os.path.exists(command):
- if debug():
-                print "notice: [msvc-cfg] msvc-{} detected, command: '{}'".format(version, command)
- __versions.register(version)
- __versions.set(version,'options',['<command>{}'.format(command)])
-
-
-################################################################################
-#
-# Startup code executed when loading this module.
-#
-################################################################################
-
-# Similar to Configurations, but remembers the first registered configuration.
-class MSVCConfigurations(Configurations):
- def __init__(self):
- Configurations.__init__(self)
- self.first_ = None
-
- def register(self, id):
- Configurations.register(self,id)
- if not self.first_:
- self.first_ = id
-
- def first(self):
- return self.first_
-
-
-# List of all registered configurations.
-__versions = MSVCConfigurations()
-
-# Supported CPU architectures.
-__cpu_arch_i386 = [
- '<architecture>/<address-model>',
- '<architecture>/<address-model>32',
- '<architecture>x86/<address-model>',
- '<architecture>x86/<address-model>32']
-
-__cpu_arch_amd64 = [
- '<architecture>/<address-model>64',
- '<architecture>x86/<address-model>64']
-
-__cpu_arch_ia64 = [
- '<architecture>ia64/<address-model>',
- '<architecture>ia64/<address-model>64']
-
-
-# Supported CPU types (only Itanium optimization options are supported from
-# VC++ 2005 on). See
-# http://msdn2.microsoft.com/en-us/library/h66s5s0e(vs.90).aspx for more
-# detailed information.
-__cpu_type_g5 = ['i586', 'pentium', 'pentium-mmx' ]
-__cpu_type_g6 = ['i686', 'pentiumpro', 'pentium2', 'pentium3', 'pentium3m', 'pentium-m', 'k6',
- 'k6-2', 'k6-3', 'winchip-c6', 'winchip2', 'c3', 'c3-2' ]
-__cpu_type_em64t = ['prescott', 'nocona', 'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'mermon',
- 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
- 'yorksfield', 'nehalem' ]
-__cpu_type_amd64 = ['k8', 'opteron', 'athlon64', 'athlon-fx']
-__cpu_type_g7 = ['pentium4', 'pentium4m', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
-                 'athlon-mp'] + __cpu_type_em64t + __cpu_type_amd64
-__cpu_type_itanium = ['itanium', 'itanium1', 'merced']
-__cpu_type_itanium2 = ['itanium2', 'mckinley']
-
-
-# Known toolset versions, in order of preference.
-_known_versions = ['11.0', '10.0', '10.0express', '9.0', '9.0express', '8.0', '8.0express', '7.1', '7.1toolkit', '7.0', '6.0']
-
-# Version aliases.
-__version_alias_6 = '6.0'
-__version_alias_6_5 = '6.0'
-__version_alias_7 = '7.0'
-__version_alias_8 = '8.0'
-__version_alias_9 = '9.0'
-__version_alias_10 = '10.0'
-__version_alias_11 = '11.0'
-
-# Names of registry keys containing the Visual C++ installation path (relative
-# to "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft").
-__version_6_0_reg = "VisualStudio\\6.0\\Setup\\Microsoft Visual C++"
-__version_7_0_reg = "VisualStudio\\7.0\\Setup\\VC"
-__version_7_1_reg = "VisualStudio\\7.1\\Setup\\VC"
-__version_8_0_reg = "VisualStudio\\8.0\\Setup\\VC"
-__version_8_0express_reg = "VCExpress\\8.0\\Setup\\VC"
-__version_9_0_reg = "VisualStudio\\9.0\\Setup\\VC"
-__version_9_0express_reg = "VCExpress\\9.0\\Setup\\VC"
-__version_10_0_reg = "VisualStudio\\10.0\\Setup\\VC"
-__version_10_0express_reg = "VCExpress\\10.0\\Setup\\VC"
-__version_11_0_reg = "VisualStudio\\11.0\\Setup\\VC"
-
-# Visual C++ Toolkit 2003 does not store its installation path in the registry.
-# The environment variable 'VCToolkitInstallDir' and the default installation
-# path will be checked instead.
-__version_7_1toolkit_path = 'Microsoft Visual C++ Toolkit 2003\\bin'
-__version_7_1toolkit_env = 'VCToolkitInstallDir'
-
-# Path to the folder containing "cl.exe" relative to the value of the
-# corresponding environment variable.
-__version_7_1toolkit_envpath = 'bin'
-#
-#
-# Auto-detect all the available msvc installations on the system.
-auto_detect_toolset_versions()
-
-# And finally trigger the actual Boost Build toolset registration.
-register_toolset()
diff --git a/tools/build/v2/tools/notfile.jam b/tools/build/v2/tools/notfile.jam
deleted file mode 100644
index 97a5b0e876..0000000000
--- a/tools/build/v2/tools/notfile.jam
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright (c) 2005 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import generators ;
-import project ;
-import targets ;
-import toolset ;
-import type ;
-
-
-type.register NOTFILE_MAIN ;
-
-
-class notfile-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- local action ;
- local action-name = [ $(property-set).get <action> ] ;
-
- local m = [ MATCH ^@(.*) : $(action-name) ] ;
-
- if $(m)
- {
- action = [ new action $(sources) : $(m[1])
- : $(property-set) ] ;
- }
- else
- {
- action = [ new action $(sources) : notfile.run
- : $(property-set) ] ;
- }
- return [ virtual-target.register
- [ new notfile-target $(name) : $(project) : $(action) ] ] ;
- }
-}
-
-
-generators.register [ new notfile-generator notfile.main : : NOTFILE_MAIN ] ;
-
-
-toolset.flags notfile.run ACTION : <action> ;
-
-
-actions run
-{
- $(ACTION)
-}
-
-
-rule notfile ( target-name : action + : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- requirements += <action>$(action) ;
-
- targets.main-target-alternative
- [ new typed-target $(target-name) : $(project) : NOTFILE_MAIN
- : [ targets.main-target-sources $(sources) : $(target-name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-IMPORT $(__name__) : notfile : : notfile ;
diff --git a/tools/build/v2/tools/pathscale.jam b/tools/build/v2/tools/pathscale.jam
deleted file mode 100644
index 454e34547e..0000000000
--- a/tools/build/v2/tools/pathscale.jam
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright 2006 Noel Belcourt
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property ;
-import generators ;
-import toolset : flags ;
-import feature ;
-import type ;
-import common ;
-import fortran ;
-
-feature.extend toolset : pathscale ;
-toolset.inherit pathscale : unix ;
-generators.override pathscale.prebuilt : builtin.prebuilt ;
-generators.override pathscale.searched-lib-generator : searched-lib-generator ;
-
-# Documentation and toolchain description located at
-# http://www.pathscale.com/docs.html
-
-rule init ( version ? : command * : options * )
-{
- command = [ common.get-invocation-command pathscale : pathCC : $(command)
- : /opt/ekopath/bin ] ;
-
- # Determine the version
- local command-string = $(command:J=" ") ;
- if $(command)
- {
- version ?= [ MATCH "^([0-9.]+)"
- : [ SHELL "$(command-string) -dumpversion" ] ] ;
- }
-
- local condition = [ common.check-init-parameters pathscale
- : version $(version) ] ;
-
- common.handle-options pathscale : $(condition) : $(command) : $(options) ;
-
- toolset.flags pathscale.compile.fortran90 OPTIONS $(condition) :
- [ feature.get-values <fflags> : $(options) ] : unchecked ;
-
- command_c = $(command_c[1--2]) $(command[-1]:B=pathcc) ;
-
- toolset.flags pathscale CONFIG_C_COMMAND $(condition) : $(command_c) ;
-
- # fortran support
- local f-command = [ common.get-invocation-command pathscale : pathf90 : $(command) ] ;
- local command_f = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
- local command_f90 = $(command_f[1--2]) $(f-command[-1]:B=pathf90) ;
-
- toolset.flags pathscale CONFIG_F_COMMAND $(condition) : $(command_f) ;
- toolset.flags pathscale CONFIG_F90_COMMAND $(condition) : $(command_f90) ;
-
- # always link lib rt to resolve clock_gettime()
- flags pathscale.link FINDLIBS-SA : rt : unchecked ;
-}
-
-# Declare generators
-generators.register-c-compiler pathscale.compile.c : C : OBJ : <toolset>pathscale ;
-generators.register-c-compiler pathscale.compile.c++ : CPP : OBJ : <toolset>pathscale ;
-generators.register-fortran-compiler pathscale.compile.fortran : FORTRAN : OBJ : <toolset>pathscale ;
-generators.register-fortran90-compiler pathscale.compile.fortran90 : FORTRAN90 : OBJ : <toolset>pathscale ;
-
-# Declare flags and actions for compilation
-flags pathscale.compile OPTIONS <optimization>off : -O0 ;
-flags pathscale.compile OPTIONS <optimization>speed : -O3 ;
-flags pathscale.compile OPTIONS <optimization>space : -Os ;
-
-flags pathscale.compile OPTIONS <inlining>off : -noinline ;
-flags pathscale.compile OPTIONS <inlining>on : -inline ;
-flags pathscale.compile OPTIONS <inlining>full : -inline ;
-
-flags pathscale.compile OPTIONS <warnings>off : -woffall ;
-flags pathscale.compile OPTIONS <warnings>on : -Wall ;
-flags pathscale.compile OPTIONS <warnings>all : -Wall -pedantic ;
-flags pathscale.compile OPTIONS <warnings-as-errors>on : -Werror ;
-
-flags pathscale.compile OPTIONS <debug-symbols>on : -ggdb ;
-flags pathscale.compile OPTIONS <profiling>on : -pg ;
-flags pathscale.compile OPTIONS <link>shared : -fPIC ;
-flags pathscale.compile OPTIONS <address-model>32 : -m32 ;
-flags pathscale.compile OPTIONS <address-model>64 : -m64 ;
-
-flags pathscale.compile USER_OPTIONS <cflags> ;
-flags pathscale.compile.c++ USER_OPTIONS <cxxflags> ;
-flags pathscale.compile DEFINES <define> ;
-flags pathscale.compile INCLUDES <include> ;
-
-flags pathscale.compile.fortran USER_OPTIONS <fflags> ;
-flags pathscale.compile.fortran90 USER_OPTIONS <fflags> ;
-
-actions compile.c
-{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.fortran
-{
- "$(CONFIG_F_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-rule compile.fortran90 ( targets * : sources * : properties * )
-{
-    # The SPACE variable (a single space) is necessary to separate expanded list elements.
- SPACE on $(targets) = " " ;
- # Serialize execution of the compile.fortran90 action
- # F90 source must be compiled in a particular order so we
- # serialize the build as a parallel F90 compile might fail
- JAM_SEMAPHORE on $(targets) = <s>pathscale-f90-semaphore ;
-}
-
-actions compile.fortran90
-{
- "$(CONFIG_F90_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -module $(<[1]:D) -c -o "$(<)" "$(>)"
-}
-
-# Declare flags and actions for linking
-flags pathscale.link OPTIONS <debug-symbols>on : -ggdb -rdynamic ;
-# Strip the binary when no debugging is needed
-flags pathscale.link OPTIONS <debug-symbols>off : -g0 ;
-flags pathscale.link OPTIONS <profiling>on : -pg ;
-flags pathscale.link USER_OPTIONS <linkflags> ;
-flags pathscale.link LINKPATH <library-path> ;
-flags pathscale.link FINDLIBS-ST <find-static-library> ;
-flags pathscale.link FINDLIBS-SA <find-shared-library> ;
-flags pathscale.link FINDLIBS-SA <threading>multi : pthread ;
-flags pathscale.link LIBRARIES <library-file> ;
-flags pathscale.link LINK-RUNTIME <runtime-link>static : static ;
-flags pathscale.link LINK-RUNTIME <runtime-link>shared : dynamic ;
-flags pathscale.link RPATH <dll-path> ;
-# On gcc, there are separate options for dll path at runtime and
-# link time. On Solaris, there's only one: -R, so we have to use
-# it, even though it's a bad idea.
-flags pathscale.link RPATH <xdll-path> ;
-
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
-}
-
-# Slight mods for dlls
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) $(USER_OPTIONS) -L"$(LINKPATH)" -Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" -o "$(<)" -Wl,-soname$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST)
-}
-
-# Declare action for creating static libraries
-# "$(CONFIG_COMMAND)" -ar -o "$(<)" "$(>)"
-actions piecemeal archive
-{
- ar $(ARFLAGS) ru "$(<)" "$(>)"
-}
diff --git a/tools/build/v2/tools/pgi.jam b/tools/build/v2/tools/pgi.jam
deleted file mode 100644
index 3a35c64470..0000000000
--- a/tools/build/v2/tools/pgi.jam
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright Noel Belcourt 2007.
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import property ;
-import generators ;
-import os ;
-import toolset : flags ;
-import feature ;
-import fortran ;
-import type ;
-import common ;
-import gcc ;
-
-feature.extend toolset : pgi ;
-toolset.inherit pgi : unix ;
-generators.override pgi.prebuilt : builtin.lib-generator ;
-generators.override pgi.searched-lib-generator : searched-lib-generator ;
-
-# Documentation and toolchain description located at
-# http://www.pgroup.com/resources/docs.htm
-
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters pgi : version $(version) ] ;
-
- local l_command = [ common.get-invocation-command pgi : pgCC : $(command) ] ;
-
- common.handle-options pgi : $(condition) : $(l_command) : $(options) ;
-
- command_c = $(command_c[1--2]) $(l_command[-1]:B=cc) ;
-
- toolset.flags pgi CONFIG_C_COMMAND $(condition) : $(command_c) ;
-
- flags pgi.compile DEFINES $(condition) :
- [ feature.get-values <define> : $(options) ] : unchecked ;
-
- # IOV_MAX support
- flags pgi.compile DEFINES $(condition) : __need_IOV_MAX : unchecked ;
-
- # set link flags
- flags pgi.link FINDLIBS-ST : [
- feature.get-values <find-static-library> : $(options) ] : unchecked ;
-
- # always link lib rt to resolve clock_gettime()
- flags pgi.link FINDLIBS-SA : rt [
- feature.get-values <find-shared-library> : $(options) ] : unchecked ;
-
- gcc.init-link-flags pgi gnu $(condition) ;
-}
-
-# Declare generators
-generators.register-c-compiler pgi.compile.c : C : OBJ : <toolset>pgi ;
-generators.register-c-compiler pgi.compile.c++ : CPP : OBJ : <toolset>pgi ;
-generators.register-fortran-compiler pgi.compile.fortran : FORTRAN : OBJ : <toolset>pgi ;
-
-# Declare flags and actions for compilation
-flags pgi.compile OPTIONS : -Kieee ;
-flags pgi.compile OPTIONS <link>shared : -fpic -fPIC ;
-flags pgi.compile OPTIONS <debug-symbols>on : -gopt ;
-flags pgi.compile OPTIONS <profiling>on : -xprofile=tcov ;
-flags pgi.compile OPTIONS <optimization>speed : -fast -Mx,8,0x10000000 ;
-flags pgi.compile OPTIONS <optimization>space : -xO2 -xspace ;
-# flags pgi.compile OPTIONS <threading>multi : -mt ;
-
-flags pgi.compile OPTIONS <warnings>off : -Minform=severe ;
-flags pgi.compile OPTIONS <warnings>on : -Minform=warn ;
-
-flags pgi.compile.c++ OPTIONS <inlining>off : -INLINE:none ;
-
-flags pgi.compile OPTIONS <cflags> ;
-flags pgi.compile.c++ OPTIONS <cxxflags> ;
-flags pgi.compile DEFINES <define> ;
-flags pgi.compile INCLUDES <include> ;
-
-flags pgi.compile.fortran OPTIONS <fflags> ;
-
-actions compile.c
-{
- "$(CONFIG_C_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.fortran
-{
- "$(CONFIG_F_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-# Declare flags and actions for linking
-flags pgi.link OPTIONS <debug-symbols>on : -gopt ;
-# Strip the binary when no debugging is needed
-flags pgi.link OPTIONS <debug-symbols>off : -s ;
-flags pgi.link OPTIONS <profiling>on : -xprofile=tcov ;
-flags pgi.link OPTIONS <linkflags> ;
-flags pgi.link OPTIONS <link>shared : -fpic -fPIC ;
-flags pgi.link LINKPATH <library-path> ;
-flags pgi.link FINDLIBS-ST <find-static-library> ;
-flags pgi.link FINDLIBS-SA <find-shared-library> ;
-flags pgi.link FINDLIBS-SA <threading>multi : pthread rt ;
-flags pgi.link LIBRARIES <library-file> ;
-flags pgi.link LINK-RUNTIME <runtime-link>static : static ;
-flags pgi.link LINK-RUNTIME <runtime-link>shared : dynamic ;
-flags pgi.link RPATH <dll-path> ;
-
-# On gcc, there are separate options for dll path at runtime and
-# link time. On Solaris, there's only one: -R, so we have to use
-# it, even though it's a bad idea.
-flags pgi.link RPATH <xdll-path> ;
-
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-# reddish can only link statically and, somehow, the presence of -Bdynamic on the link line
-# marks the executable as a dynamically linked exec even though no dynamic libraries are supplied.
-# Yod on redstorm refuses to load an executable that is dynamically linked.
-# removing the dynamic link options should get us where we need to be on redstorm.
-# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -Bstatic -l$(FINDLIBS-ST) -Bdynamic -l$(FINDLIBS-SA) -B$(LINK-RUNTIME)
-}
-
-# Slight mods for dlls
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
-}
-
-# "$(CONFIG_COMMAND)" $(OPTIONS) -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" -h$(<[1]:D=) -G "$(>)" "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -shared -L"$(LINKPATH)" -R"$(RPATH)" -o "$(<)" "$(>)" -Wl,-h -Wl,$(<[1]:D=) "$(LIBRARIES)" -Bdynamic -l$(FINDLIBS-SA) -Bstatic -l$(FINDLIBS-ST) -B$(LINK-RUNTIME)
-}
-
-actions updated together piecemeal pgi.archive
-{
- ar -rc$(ARFLAGS:E=) "$(<)" "$(>)"
-}
-
diff --git a/tools/build/v2/tools/python.jam b/tools/build/v2/tools/python.jam
deleted file mode 100644
index 6c2073b788..0000000000
--- a/tools/build/v2/tools/python.jam
+++ /dev/null
@@ -1,1267 +0,0 @@
-# Copyright 2004 Vladimir Prus.
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Support for Python and the Boost.Python library.
-#
-# This module defines
-#
-# - a project 'python' with a target 'python' in it, that corresponds to the
-# python library
-#
-# - a main target rule 'python-extension' which can be used to build a python
-# extension.
-#
-# Extensions that use Boost.Python must explicitly link to it.
-
-import type ;
-import testing ;
-import generators ;
-import project ;
-import errors ;
-import targets ;
-import "class" : new ;
-import os ;
-import common ;
-import toolset ;
-import regex ;
-import numbers ;
-import string ;
-import property ;
-import sequence ;
-import path ;
-import feature ;
-import set ;
-import builtin ;
-import version ;
-
-
-# Make this module a project.
-project.initialize $(__name__) ;
-project python ;
-
-# Save the project so that if 'init' is called several times we define new
-# targets in the python project, not in whatever project we were called by.
-.project = [ project.current ] ;
-
-# Dynamic linker lib. Necessary to specify it explicitly on some platforms.
-lib dl ;
-# This contains the 'openpty' function needed by Python. Again, on some systems
-# it needs to be passed to the linker explicitly.
-lib util ;
-# Python uses pthread symbols.
-lib pthread ;
-# Extra library needed by pthread on some platforms.
-lib rt ;
-
-# The pythonpath feature specifies additional elements for the PYTHONPATH
-# environment variable, set by run-pyd. For example, pythonpath can be used to
-# access Python modules that are part of the product being built, but are not
-# installed in the development system's default paths.
-feature.feature pythonpath : : free optional path ;
-
-# Initializes the Python toolset. Note that all parameters are optional.
-#
-# - version -- the version of Python to use. Should be in Major.Minor format,
-# for example 2.3. Do not include the subminor version.
-#
-# - cmd-or-prefix: Preferably, a command that invokes a Python interpreter.
-# Alternatively, the installation prefix for Python libraries and includes. If
-# empty, will be guessed from the version, the platform's installation
-# patterns, and the python executables that can be found in PATH.
-#
-# - includes: the include path to Python headers. If empty, will be guessed.
-#
-# - libraries: the path to Python library binaries. If empty, will be guessed.
-# On MacOS/Darwin, you can also pass the path of the Python framework.
-#
-# - condition: if specified, should be a set of properties that are matched
-# against the build configuration when Boost.Build selects a Python
-# configuration to use.
-#
-# - extension-suffix: A string to append to the name of extension modules before
-# the true filename extension. Ordinarily we would just compute this based on
-# the value of the <python-debugging> feature. However ubuntu's python-dbg
-# package uses the windows convention of appending _d to debug-build extension
-# modules. We have no way of detecting ubuntu, or of probing python for the
-# "_d" requirement, and if you configure and build python using
-# --with-pydebug, you'll be using the standard *nix convention. Defaults to ""
-# (or "_d" when targeting windows and <python-debugging> is set).
-#
-# Example usage:
-#
-# using python : 2.3 ;
-# using python : 2.3 : /usr/local/bin/python ;
-#
-rule init ( version ? : cmd-or-prefix ? : includes * : libraries ?
- : condition * : extension-suffix ? )
-{
- project.push-current $(.project) ;
-
- debug-message Configuring python... ;
- for local v in version cmd-or-prefix includes libraries condition
- {
- if $($(v))
- {
- debug-message " user-specified "$(v): \"$($(v))\" ;
- }
- }
-
- configure $(version) : $(cmd-or-prefix) : $(includes) : $(libraries) : $(condition) : $(extension-suffix) ;
-
- project.pop-current ;
-}
-
-# A simpler version of SHELL that grabs stderr as well as stdout, but returns
-# nothing if there was an error.
-#
-local rule shell-cmd ( cmd )
-{
- debug-message running command '$(cmd)" 2>&1"' ;
- x = [ SHELL $(cmd)" 2>&1" : exit-status ] ;
- if $(x[2]) = 0
- {
- return $(x[1]) ;
- }
- else
- {
- return ;
- }
-}
-
-
-# Try to identify Cygwin symlinks. Invoking such a file directly as an NT
-# executable from a native Windows build of bjam would be fatal to the bjam
-# process. One /can/ invoke them through sh.exe or bash.exe, if you can prove
-# that those are not also symlinks. ;-)
-#
-# If a symlink is found returns non-empty; we try to extract the target of the
-# symlink from the file and return that.
-#
-# Notes: 1. this only works on NT; 2. 'path' is a native path.
-local rule is-cygwin-symlink ( path )
-{
- local is-symlink = ;
-
- # Look for a file with the given path having the S attribute set, as cygwin
- # symlinks do. /-C means "do not use thousands separators in file sizes."
- local dir-listing = [ shell-cmd "DIR /-C /A:S \""$(path)"\"" ] ;
-
- if $(dir-listing)
- {
- # Escape any special regex characters in the base part of the path.
- local base-pat = [ regex.escape $(path:D=) : ].[()*+?|\\$^ : \\ ] ;
-
- # Extract the file's size from the directory listing.
- local size-of-system-file = [ MATCH "([0-9]+) "$(base-pat) : $(dir-listing) : 1 ] ;
-
- # If the file has a reasonably small size, look for the special symlink
- # identification text.
- if $(size-of-system-file) && [ numbers.less $(size-of-system-file) 1000 ]
- {
- local link = [ SHELL "FIND /OFF \"!<symlink>\" \""$(path)"\" 2>&1" ] ;
- if $(link[2]) != 0
- {
- local nl = "
-
-" ;
- is-symlink = [ MATCH ".*!<symlink>([^"$(nl)"]*)" : $(link[1]) : 1 ] ;
- if $(is-symlink)
- {
- is-symlink = [ *nix-path-to-native $(is-symlink) ] ;
- is-symlink = $(is-symlink:R=$(path:D)) ;
- }
-
- }
- }
- }
- return $(is-symlink) ;
-}
-
-
-# Append ext to each member of names that does not contain '.'.
-#
-local rule default-extension ( names * : ext * )
-{
- local result ;
- for local n in $(names)
- {
- switch $(n)
- {
- case *.* : result += $(n) ;
- case * : result += $(n)$(ext) ;
- }
- }
- return $(result) ;
-}
-
-
-# Tries to determine whether invoking "cmd" would actually attempt to launch a
-# cygwin symlink.
-#
-# Note: only works on NT.
-#
-local rule invokes-cygwin-symlink ( cmd )
-{
- local dirs = $(cmd:D) ;
- if ! $(dirs)
- {
- dirs = . [ os.executable-path ] ;
- }
- local base = [ default-extension $(cmd:D=) : .exe .cmd .bat ] ;
- local paths = [ GLOB $(dirs) : $(base) ] ;
- if $(paths)
- {
- # Make sure we have not run into a Cygwin symlink. Invoking such a file
- # as an NT executable would be fatal for the bjam process.
- return [ is-cygwin-symlink $(paths[1]) ] ;
- }
-}
-
-
-local rule debug-message ( message * )
-{
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO notice: [python-cfg] $(message) ;
- }
-}
-
-
-# Like W32_GETREG, except prepend HKEY_CURRENT_USER\SOFTWARE and
-# HKEY_LOCAL_MACHINE\SOFTWARE to the first argument, returning the first result
-# found. Also accounts for the fact that on 64-bit machines, 32-bit software has
-# its own area, under SOFTWARE\Wow6432node.
-#
-local rule software-registry-value ( path : data ? )
-{
- local result ;
- for local root in HKEY_CURRENT_USER HKEY_LOCAL_MACHINE
- {
- for local x64elt in "" Wow6432node\\ # Account for 64-bit windows
- {
- if ! $(result)
- {
- result = [ W32_GETREG $(root)\\SOFTWARE\\$(x64elt)$(path) : $(data) ] ;
- }
- }
-
- }
- return $(result) ;
-}
-
-
-.windows-drive-letter-re = ^([A-Za-z]):[\\/](.*) ;
-.cygwin-drive-letter-re = ^/cygdrive/([a-z])/(.*) ;
-
-.working-directory = [ PWD ] ;
-.working-drive-letter = [ SUBST $(.working-directory) $(.windows-drive-letter-re) $1 ] ;
-.working-drive-letter ?= [ SUBST $(.working-directory) $(.cygwin-drive-letter-re) $1 ] ;
-
-
-local rule windows-to-cygwin-path ( path )
-{
- # If path is rooted with a drive letter, rewrite it using the /cygdrive
- # mountpoint.
- local p = [ SUBST $(path:T) $(.windows-drive-letter-re) /cygdrive/$1/$2 ] ;
-
- # Else if path is rooted without a drive letter, use the working directory.
- p ?= [ SUBST $(path:T) ^/(.*) /cygdrive/$(.working-drive-letter:L)/$2 ] ;
-
- # Else return the path unchanged.
- return $(p:E=$(path:T)) ;
-}
-
-
-# :W only works in Cygwin builds of bjam. This one works on NT builds as well.
-#
-local rule cygwin-to-windows-path ( path )
-{
- path = $(path:R="") ; # strip any trailing slash
-
- local drive-letter = [ SUBST $(path) $(.cygwin-drive-letter-re) $1:/$2 ] ;
- if $(drive-letter)
- {
- path = $(drive-letter) ;
- }
- else if $(path:R=/x) = $(path) # already rooted?
- {
- # Look for a cygwin mount that includes each head sequence in $(path).
- local head = $(path) ;
- local tail = "" ;
-
- while $(head)
- {
- local root = [ software-registry-value
- "Cygnus Solutions\\Cygwin\\mounts v2\\"$(head) : native ] ;
-
- if $(root)
- {
- path = $(tail:R=$(root)) ;
- head = ;
- }
- tail = $(tail:R=$(head:D=)) ;
-
- if $(head) = /
- {
- head = ;
- }
- else
- {
- head = $(head:D) ;
- }
- }
- }
- return [ regex.replace $(path:R="") / \\ ] ;
-}
-
-
-# Convert a *nix path to native.
-#
-local rule *nix-path-to-native ( path )
-{
- if [ os.name ] = NT
- {
- path = [ cygwin-to-windows-path $(path) ] ;
- }
- return $(path) ;
-}
-
-
-# Convert an NT path to native.
-#
-local rule windows-path-to-native ( path )
-{
- if [ os.name ] = NT
- {
- return $(path) ;
- }
- else
- {
- return [ windows-to-cygwin-path $(path) ] ;
- }
-}
-
-
-# Return nonempty if path looks like a windows path, i.e. it starts with a drive
-# letter or contains backslashes.
-#
-local rule guess-windows-path ( path )
-{
- return [ SUBST $(path) ($(.windows-drive-letter-re)|.*([\\]).*) $1 ] ;
-}
-
-
-local rule path-to-native ( paths * )
-{
- local result ;
-
- for local p in $(paths)
- {
- if [ guess-windows-path $(p) ]
- {
- result += [ windows-path-to-native $(p) ] ;
- }
- else
- {
- result += [ *nix-path-to-native $(p:T) ] ;
- }
- }
- return $(result) ;
-}
-
-
-# Validate the version string and extract the major/minor part we care about.
-#
-local rule split-version ( version )
-{
- local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
- if ! $(major-minor[2]) || $(major-minor[3])
- {
- ECHO "Warning: \"using python\" expects a two part (major, minor) version number; got" $(version) instead ;
-
- # Add a zero to account for the missing digit if necessary.
- major-minor += 0 ;
- }
-
- return $(major-minor[1]) $(major-minor[2]) ;
-}
-
-
-# Build a list of versions from 3.0 down to 1.5. Because bjam can not enumerate
-# registry sub-keys, we have no way of finding a version with a 2-digit minor
-# version, e.g. 2.10 -- let us hope that never happens.
-#
-.version-countdown = ;
-for local v in [ numbers.range 15 30 ]
-{
- .version-countdown = [ SUBST $(v) (.)(.*) $1.$2 ] $(.version-countdown) ;
-}
-
-
-local rule windows-installed-pythons ( version ? )
-{
- version ?= $(.version-countdown) ;
- local interpreters ;
-
- for local v in $(version)
- {
- local install-path = [
- software-registry-value "Python\\PythonCore\\"$(v)"\\InstallPath" ] ;
-
- if $(install-path)
- {
- install-path = [ windows-path-to-native $(install-path) ] ;
- debug-message Registry indicates Python $(v) installed at \"$(install-path)\" ;
- }
-
- interpreters += $(:E=python:R=$(install-path)) ;
- }
- return $(interpreters) ;
-}
-
-
-local rule darwin-installed-pythons ( version ? )
-{
- version ?= $(.version-countdown) ;
-
- local prefix
- = [ GLOB /System/Library/Frameworks /Library/Frameworks
- : Python.framework ] ;
-
- return $(prefix)/Versions/$(version)/bin/python ;
-}
-
-
-# Assume "python-cmd" invokes a python interpreter and invoke it to extract all
-# the information we care about from its "sys" module. Returns void if
-# unsuccessful.
-#
-local rule probe ( python-cmd )
-{
- # Avoid invoking a Cygwin symlink on NT.
- local skip-symlink ;
- if [ os.name ] = NT
- {
- skip-symlink = [ invokes-cygwin-symlink $(python-cmd) ] ;
- }
-
- if $(skip-symlink)
- {
- debug-message -------------------------------------------------------------------- ;
- debug-message \"$(python-cmd)\" would attempt to invoke a Cygwin symlink, ;
- debug-message causing a bjam built for Windows to hang. ;
- debug-message ;
- debug-message If you intend to target a Cygwin build of Python, please ;
- debug-message replace the path to the link with the path to a real executable ;
- debug-message (guessing: \"$(skip-symlink)\") "in" your 'using python' line ;
- debug-message "in" user-config.jam or site-config.jam. Do not forget to escape ;
- debug-message backslashes ;
- debug-message -------------------------------------------------------------------- ;
- }
- else
- {
- # Prepare a List of Python format strings and expressions that can be
- # used to print the constants we want from the sys module.
-
- # We do not really want sys.version since that is a complicated string,
- # so get the information from sys.version_info instead.
- local format = "version=%d.%d" ;
- local exprs = "version_info[0]" "version_info[1]" ;
-
- for local s in $(sys-elements[2-])
- {
- format += $(s)=%s ;
- exprs += $(s) ;
- }
-
- # Invoke Python and ask it for all those values.
- if [ version.check-jam-version 3 1 17 ] || ( [ os.name ] != NT )
- {
- # Prior to version 3.1.17 Boost Jam's SHELL command did not support
- # quoted commands correctly on Windows. This means that on that
- # platform we do not support using a Python command interpreter
- # executable whose path contains a space character.
- python-cmd = \"$(python-cmd)\" ;
- }
- local full-cmd =
- $(python-cmd)" -c \"from sys import *; print('"$(format:J=\\n)"' % ("$(exprs:J=,)"))\"" ;
-
- local output = [ shell-cmd $(full-cmd) ] ;
- if $(output)
- {
- # Parse the output to get all the results.
- local nl = "
-
-" ;
- for s in $(sys-elements)
- {
- # These variables are expected to be declared local in the
- # caller, so Jam's dynamic scoping will set their values there.
- sys.$(s) = [ SUBST $(output) \\<$(s)=([^$(nl)]+) $1 ] ;
- }
- }
- return $(output) ;
- }
-}
-
-
-# Make sure the "libraries" and "includes" variables (in an enclosing scope)
-# have a value based on the information given.
-#
-local rule compute-default-paths ( target-os : version ? : prefix ? :
- exec-prefix ? )
-{
- exec-prefix ?= $(prefix) ;
-
- if $(target-os) = windows
- {
- # The exec_prefix is where you're supposed to look for machine-specific
- # libraries.
- local default-library-path = $(exec-prefix)\\libs ;
- local default-include-path = $(:E=Include:R=$(prefix)) ;
-
- # If the interpreter was found in a directory called "PCBuild" or
- # "PCBuild8," assume we're looking at a Python built from the source
- # distro, and go up one additional level to the default root. Otherwise,
- # the default root is the directory where the interpreter was found.
-
- # We ask Python itself what the executable path is in case of
- # intermediate symlinks or shell scripts.
- local executable-dir = $(sys.executable:D) ;
-
- if [ MATCH ^(PCBuild) : $(executable-dir:D=) ]
- {
- debug-message "This Python appears to reside in a source distribution;" ;
- debug-message "prepending \""$(executable-dir)"\" to default library search path" ;
-
- default-library-path = $(executable-dir) $(default-library-path) ;
-
- default-include-path = $(:E=PC:R=$(executable-dir:D)) $(default-include-path) ;
-
- debug-message "and \""$(default-include-path[1])"\" to default #include path" ;
- }
-
- libraries ?= $(default-library-path) ;
- includes ?= $(default-include-path) ;
- }
- else
- {
- includes ?= $(prefix)/include/python$(version) ;
-
- local lib = $(exec-prefix)/lib ;
- libraries ?= $(lib)/python$(version)/config $(lib) ;
- }
-}
-
-# The version of the python interpreter to use.
-feature.feature python : : propagated ;
-feature.feature python.interpreter : : free ;
-
-toolset.flags python.capture-output PYTHON : <python.interpreter> ;
-
-#
-# Support for Python configured --with-pydebug
-#
-feature.feature python-debugging : off on : propagated ;
-builtin.variant debug-python : debug : <python-debugging>on ;
-
-
-# Return a list of candidate commands to try when looking for a Python
-# interpreter. prefix is expected to be a native path.
-#
-local rule candidate-interpreters ( version ? : prefix ? : target-os )
-{
- local bin-path = bin ;
- if $(target-os) = windows
- {
- # On Windows, look in the root directory itself and, to work with the
- # result of a build-from-source, the PCBuild directory.
- bin-path = PCBuild8 PCBuild "" ;
- }
-
- bin-path = $(bin-path:R=$(prefix)) ;
-
- if $(target-os) in windows darwin
- {
- return # Search:
- $(:E=python:R=$(bin-path)) # Relative to the prefix, if any
- python # In the PATH
- [ $(target-os)-installed-pythons $(version) ] # Standard install locations
- ;
- }
- else
- {
- # Search relative to the prefix, or if none supplied, in PATH.
- local unversioned = $(:E=python:R=$(bin-path:E=)) ;
-
- # If a version was specified, look for a python with that specific
- # version appended before looking for one called, simply, "python"
- return $(unversioned)$(version) $(unversioned) ;
- }
-}
-
-
-# Compute system library dependencies for targets linking with static Python
-# libraries.
-#
-# On many systems, Python uses libraries such as pthreads or libdl. Since static
-# libraries carry no library dependency information of their own that the linker
-# can extract, these extra dependencies have to be given explicitly on the link
-# line of the client. The information about these dependencies is packaged into
-# the "python" target below.
-#
-# Even where Python itself uses pthreads, it never allows extension modules to
-# be entered concurrently (unless they explicitly give up the interpreter lock).
-# Therefore, extension modules do not need the efficiency overhead of threadsafe
-# code as produced by <threading>multi, and we handle libpthread along with
-# other libraries here. Note: this optimization is based on an assumption that
-# the compiler generates link-compatible code in both the single- and
-# multi-threaded cases, and that system libraries do not change their ABIs
-# either.
-#
-# Returns a list of usage-requirements that link to the necessary system
-# libraries.
-#
-local rule system-library-dependencies ( target-os )
-{
- switch $(target-os)
- {
- case s[uo][nl]* : # solaris, sun, sunos
- # Add a librt dependency for the gcc toolset on SunOS (the sun
- # toolset adds -lrt unconditionally). While this appears to
- # duplicate the logic already in gcc.jam, it does not as long as
- # we are not forcing <threading>multi.
-
- # On solaris 10, distutils.sysconfig.get_config_var('LIBS') yields
- # '-lresolv -lsocket -lnsl -lrt -ldl'. However, that does not seem
- # to be the right list for extension modules. For example, on my
- # installation, adding -ldl causes at least one test to fail because
- # the library can not be found and removing it causes no failures.
-
- # Apparently, though, we need to add -lrt for gcc.
- return <toolset>gcc:<library>rt ;
-
- case osf : return <library>pthread <toolset>gcc:<library>rt ;
-
- case qnx* : return ;
- case darwin : return ;
- case windows : return ;
-
- case hpux : return <library>rt ;
- case *bsd : return <library>pthread <toolset>gcc:<library>util ;
-
- case aix : return <library>pthread <library>dl ;
-
- case * : return <library>pthread <library>dl
- <toolset>gcc:<library>util <toolset-intel:platform>linux:<library>util ;
- }
-}
-
-
-# Declare a target to represent Python's library.
-#
-local rule declare-libpython-target ( version ? : requirements * )
-{
- # Compute the representation of Python version in the name of Python's
- # library file.
- local lib-version = $(version) ;
- if <target-os>windows in $(requirements)
- {
- local major-minor = [ split-version $(version) ] ;
- lib-version = $(major-minor:J="") ;
- if <python-debugging>on in $(requirements)
- {
- lib-version = $(lib-version)_d ;
- }
- }
-
- if ! $(lib-version)
- {
- ECHO *** warning: could not determine Python version, which will ;
- ECHO *** warning: probably prevent us from linking with the python ;
- ECHO *** warning: library. Consider explicitly passing the version ;
- ECHO *** warning: to 'using python'. ;
- }
-
- # Declare it.
- lib python.lib : : <name>python$(lib-version) $(requirements) ;
-}
-
-
-# Implementation of init.
-local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? :
- condition * : extension-suffix ? )
-{
- local prefix ;
- local exec-prefix ;
- local cmds-to-try ;
- local interpreter-cmd ;
-
- local target-os = [ feature.get-values target-os : $(condition) ] ;
- target-os ?= [ feature.defaults target-os ] ;
- target-os = $(target-os:G=) ;
-
- if $(target-os) = windows && <python-debugging>on in $(condition)
- {
- extension-suffix ?= _d ;
- }
- extension-suffix ?= "" ;
-
- # Normalize and dissect any version number.
- local major-minor ;
- if $(version)
- {
- major-minor = [ split-version $(version) ] ;
- version = $(major-minor:J=.) ;
- }
-
- local cmds-to-try ;
-
- if ! $(cmd-or-prefix) || [ GLOB $(cmd-or-prefix) : * ]
- {
- # If the user did not pass a command, whatever we got was a prefix.
- prefix = $(cmd-or-prefix) ;
- cmds-to-try = [ candidate-interpreters $(version) : $(prefix) : $(target-os) ] ;
- }
- else
- {
- # Work with the command the user gave us.
- cmds-to-try = $(cmd-or-prefix) ;
-
- # On Windows, do not nail down the interpreter command just yet in case
- # the user specified something that turns out to be a cygwin symlink,
- # which could bring down bjam if we invoke it.
- if $(target-os) != windows
- {
- interpreter-cmd = $(cmd-or-prefix) ;
- }
- }
-
- # Values to use in case we can not really find anything in the system.
- local fallback-cmd = $(cmds-to-try[1]) ;
- local fallback-version ;
-
- # Anything left to find or check?
- if ! ( $(interpreter-cmd) && $(includes) && $(libraries) )
- {
- # Values to be extracted from python's sys module. These will be set by
- # the probe rule, above, using Jam's dynamic scoping.
- local sys-elements = version platform prefix exec_prefix executable ;
- local sys.$(sys-elements) ;
-
- # Compute the string Python's sys.platform needs to match. If not
- # targeting Windows or cygwin we will assume only native builds can
- # possibly run, so we will not require a match and we leave sys.platform
- # blank.
- local platform ;
- switch $(target-os)
- {
- case windows : platform = win32 ;
- case cygwin : platform = cygwin ;
- }
-
- while $(cmds-to-try)
- {
- # Pop top command.
- local cmd = $(cmds-to-try[1]) ;
- cmds-to-try = $(cmds-to-try[2-]) ;
-
- debug-message Checking interpreter command \"$(cmd)\"... ;
- if [ probe $(cmd) ]
- {
- fallback-version ?= $(sys.version) ;
-
- # Check for version/platform validity.
- for local x in version platform
- {
- if $($(x)) && $($(x)) != $(sys.$(x))
- {
- debug-message ...$(x) "mismatch (looking for"
- $($(x)) but found $(sys.$(x))")" ;
- cmd = ;
- }
- }
-
- if $(cmd)
- {
- debug-message ...requested configuration matched! ;
-
- exec-prefix = $(sys.exec_prefix) ;
-
- compute-default-paths $(target-os) : $(sys.version) :
- $(sys.prefix) : $(sys.exec_prefix) ;
-
- version = $(sys.version) ;
- interpreter-cmd ?= $(cmd) ;
- cmds-to-try = ; # All done.
- }
- }
- else
- {
- debug-message ...does not invoke a working interpreter ;
- }
- }
- }
-
- # Anything left to compute?
- if $(includes) && $(libraries)
- {
- .configured = true ;
- }
- else
- {
- version ?= $(fallback-version) ;
- version ?= 2.5 ;
- exec-prefix ?= $(prefix) ;
- compute-default-paths $(target-os) : $(version) : $(prefix:E=) ;
- }
-
- if ! $(interpreter-cmd)
- {
- fallback-cmd ?= python ;
- debug-message No working Python interpreter found. ;
- if [ os.name ] != NT || ! [ invokes-cygwin-symlink $(fallback-cmd) ]
- {
- interpreter-cmd = $(fallback-cmd) ;
- debug-message falling back to \"$(interpreter-cmd)\" ;
- }
- }
-
- includes = [ path-to-native $(includes) ] ;
- libraries = [ path-to-native $(libraries) ] ;
-
- debug-message "Details of this Python configuration:" ;
- debug-message " interpreter command:" \"$(interpreter-cmd:E=<empty>)\" ;
- debug-message " include path:" \"$(includes:E=<empty>)\" ;
- debug-message " library path:" \"$(libraries:E=<empty>)\" ;
- if $(target-os) = windows
- {
- debug-message " DLL search path:" \"$(exec-prefix:E=<empty>)\" ;
- }
-
- #
- # End autoconfiguration sequence.
- #
- local target-requirements = $(condition) ;
-
- # Add the version, if any, to the target requirements.
- if $(version)
- {
- if ! $(version) in [ feature.values python ]
- {
- feature.extend python : $(version) ;
- }
- target-requirements += <python>$(version:E=default) ;
- }
-
- target-requirements += <target-os>$(target-os) ;
-
- # See if we can find a framework directory on darwin.
- local framework-directory ;
- if $(target-os) = darwin
- {
- # Search upward for the framework directory.
- local framework-directory = $(libraries[-1]) ;
- while $(framework-directory:D=) && $(framework-directory:D=) != Python.framework
- {
- framework-directory = $(framework-directory:D) ;
- }
-
- if $(framework-directory:D=) = Python.framework
- {
- debug-message framework directory is \"$(framework-directory)\" ;
- }
- else
- {
- debug-message "no framework directory found; using library path" ;
- framework-directory = ;
- }
- }
-
- local dll-path = $(libraries) ;
-
- # Make sure that we can find the Python DLL on Windows.
- if ( $(target-os) = windows ) && $(exec-prefix)
- {
- dll-path += $(exec-prefix) ;
- }
-
- #
- # Prepare usage requirements.
- #
- local usage-requirements = [ system-library-dependencies $(target-os) ] ;
- usage-requirements += <include>$(includes) <python.interpreter>$(interpreter-cmd) ;
- if <python-debugging>on in $(condition)
- {
- if $(target-os) = windows
- {
- # In pyconfig.h, Py_DEBUG is set if _DEBUG is set. If we define
- # Py_DEBUG we will get multiple definition warnings.
- usage-requirements += <define>_DEBUG ;
- }
- else
- {
- usage-requirements += <define>Py_DEBUG ;
- }
- }
-
- # Global, but conditional, requirements to give access to the interpreter
- # for general utilities, like other toolsets, that run Python scripts.
- toolset.add-requirements
- $(target-requirements:J=,):<python.interpreter>$(interpreter-cmd) ;
-
- # Register the right suffix for extensions.
- register-extension-suffix $(extension-suffix) : $(target-requirements) ;
-
- #
- # Declare the "python" target. This should really be called
- # python_for_embedding.
- #
-
- if $(framework-directory)
- {
- alias python
- :
- : $(target-requirements)
- :
- : $(usage-requirements) <framework>$(framework-directory)
- ;
- }
- else
- {
- declare-libpython-target $(version) : $(target-requirements) ;
-
- # This is an evil hack. On Windows, when Python is embedded, nothing
- # seems to set up sys.path to include Python's standard library
- # (http://article.gmane.org/gmane.comp.python.general/544986). The evil
- # here, aside from the workaround necessitated by Python's bug, is that:
- #
- # a. we're guessing the location of the python standard library from the
- # location of pythonXX.lib
- #
- # b. we're hijacking the <testing.launcher> property to get the
- # environment variable set up, and the user may want to use it for
- # something else (e.g. launch the debugger).
- local set-PYTHONPATH ;
- if $(target-os) = windows
- {
- set-PYTHONPATH = [ common.prepend-path-variable-command PYTHONPATH :
- $(libraries:D)/Lib ] ;
- }
-
- alias python
- :
- : $(target-requirements)
- :
- # Why python.lib must be listed here instead of along with the
- # system libs is a mystery, but if we do not do it, on cygwin,
- # -lpythonX.Y never appears in the command line (although it does on
- # linux).
- : $(usage-requirements)
- <testing.launcher>$(set-PYTHONPATH)
- <library-path>$(libraries) <dll-path>$(dll-path) <library>python.lib
- ;
- }
-
- # On *nix, we do not want to link either Boost.Python or Python extensions
- # to libpython, because the Python interpreter itself provides all those
- # symbols. If we linked to libpython, we would get duplicate symbols. So
- # declare two targets -- one for building extensions and another for
- # embedding.
- #
- # Unlike most *nix systems, Mac OS X's linker does not permit undefined
- # symbols when linking a shared library. So, we still need to link against
- # the Python framework, even when building extensions. Note that framework
- # builds of Python always use shared libraries, so we do not need to worry
- # about duplicate Python symbols.
- if $(target-os) in windows cygwin darwin
- {
- alias python_for_extensions : python : $(target-requirements) ;
- }
- # On AIX we need Python extensions and Boost.Python to import symbols from
- # the Python interpreter. Dynamic libraries opened with dlopen() do not
- # inherit the symbols from the Python interpreter.
- else if $(target-os) = aix
- {
- alias python_for_extensions
- :
- : $(target-requirements)
- :
- : $(usage-requirements) <linkflags>-Wl,-bI:$(libraries[1])/python.exp
- ;
- }
- else
- {
- alias python_for_extensions
- :
- : $(target-requirements)
- :
- : $(usage-requirements)
- ;
- }
-}
-
-
-rule configured ( )
-{
- return $(.configured) ;
-}
-
-
-type.register PYTHON_EXTENSION : : SHARED_LIB ;
-
-
-local rule register-extension-suffix ( root : condition * )
-{
- local suffix ;
-
- switch [ feature.get-values target-os : $(condition) ]
- {
- case windows : suffix = pyd ;
- case cygwin : suffix = dll ;
- case hpux :
- {
- if [ feature.get-values python : $(condition) ] in 1.5 1.6 2.0 2.1 2.2 2.3 2.4
- {
- suffix = sl ;
- }
- else
- {
- suffix = so ;
- }
- }
- case * : suffix = so ;
- }
-
- type.set-generated-target-suffix PYTHON_EXTENSION : $(condition) : <$(root).$(suffix)> ;
-}
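-
-# Illustration (not from the original file): with <target-os>windows in the
-# condition, an extension named hello_ext is generated as hello_ext.pyd,
-# while on most other systems it becomes hello_ext.so (with no "lib" prefix,
-# see below).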
-
-
-# Unset 'lib' prefix for PYTHON_EXTENSION
-type.set-generated-target-prefix PYTHON_EXTENSION : : "" ;
-
-
-rule python-extension ( name : sources * : requirements * : default-build * :
- usage-requirements * )
-{
- if [ configured ]
- {
- requirements += <use>/python//python_for_extensions ;
- }
- requirements += <suppress-import-lib>true ;
-
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new typed-target $(name) : $(project) : PYTHON_EXTENSION
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-IMPORT python : python-extension : : python-extension ;
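-
-# A minimal usage sketch (the target and source names are hypothetical, not
-# taken from this module); once 'using python' has been run, a Jamfile can
-# declare an extension module like this:
-#
-#   python-extension hello_ext : hello.cpp /boost/python//boost_python ;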
-
-rule py2to3
-{
- common.copy $(>) $(<) ;
- 2to3 $(<) ;
-}
-
-actions 2to3
-{
- 2to3 -wn "$(<)"
- 2to3 -dwn "$(<)"
-}
-
-
-# Support for testing.
-type.register PY : py ;
-type.register RUN_PYD_OUTPUT ;
-type.register RUN_PYD : : TEST ;
-
-
-class python-test-generator : generator
-{
- import set ;
-
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- self.composing = true ;
- }
-
- rule run ( project name ? : property-set : sources * : multiple ? )
- {
- local pyversion = [ $(property-set).get <python> ] ;
- local python ;
- local other-pythons ;
-
- # Make a new target that converts the Python source with 2to3 when running with Python 3.
- local rule make-2to3-source ( source )
- {
- if $(pyversion) >= 3.0
- {
- local a = [ new action $(source) : python.py2to3 : $(property-set) ] ;
- local t = [ utility.basename [ $(s).name ] ] ;
- local p = [ new file-target $(t) : PY : $(project) : $(a) ] ;
- return $(p) ;
- }
- else
- {
- return $(source) ;
- }
- }
-
- for local s in $(sources)
- {
- if [ $(s).type ] = PY
- {
- if ! $(python)
- {
- # First Python source ends up on command line.
- python = [ make-2to3-source $(s) ] ;
-
- }
- else
- {
- # Other Python sources become dependencies.
- other-pythons += [ make-2to3-source $(s) ] ;
- }
- }
- }
-
- local extensions ;
- for local s in $(sources)
- {
- if [ $(s).type ] = PYTHON_EXTENSION
- {
- extensions += $(s) ;
- }
- }
-
- local libs ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] LIB ]
- && ! $(s) in $(extensions)
- {
- libs += $(s) ;
- }
- }
-
- local new-sources ;
- for local s in $(sources)
- {
- if [ type.is-derived [ $(s).type ] CPP ]
- {
- local name = [ utility.basename [ $(s).name ] ] ;
- if $(name) = [ utility.basename [ $(python).name ] ]
- {
- name = $(name)_ext ;
- }
- local extension = [ generators.construct $(project) $(name) :
- PYTHON_EXTENSION : $(property-set) : $(s) $(libs) ] ;
-
- # The important parts of the usage requirements returned by the
- # PYTHON_EXTENSION generator are the xdll-path properties that will
- # allow us to find the python extension at runtime.
- property-set = [ $(property-set).add $(extension[1]) ] ;
-
- # Ignore usage requirements. We're a top-level generator and
- # nobody is going to use what we generate.
- new-sources += $(extension[2-]) ;
- }
- }
-
- property-set = [ $(property-set).add-raw <dependency>$(other-pythons) ] ;
-
- return [ construct-result $(python) $(extensions) $(new-sources) :
- $(project) $(name) : $(property-set) ] ;
- }
-}
-
-
-generators.register
- [ new python-test-generator python.capture-output : : RUN_PYD_OUTPUT ] ;
-
-generators.register-standard testing.expect-success
- : RUN_PYD_OUTPUT : RUN_PYD ;
-
-
-# There are two different ways of spelling OS names. One is used for [ os.name ]
-# and the other is used for the <host-os> and <target-os> properties. Until that
-# is remedied, this sets up a crude mapping from the latter to the former, that
-# will work *for the purposes of cygwin/NT cross-builds only*. Could not think
-# of a better name than "translate".
-#
-.translate-os-windows = NT ;
-.translate-os-cygwin = CYGWIN ;
-local rule translate-os ( src-os )
-{
- local x = $(.translate-os-$(src-os)) [ os.name ] ;
- return $(x[1]) ;
-}
-
-
-# Extract the path to a single ".pyd" source. This is used to build the
-# PYTHONPATH for running bpl tests.
-#
-local rule pyd-pythonpath ( source )
-{
- return [ on $(source) return $(LOCATE) $(SEARCH) ] ;
-}
-
-
-# The flag settings on testing.capture-output do not apply to
-# python.capture-output at the moment. Redo this explicitly.
-toolset.flags python.capture-output ARGS <testing.arg> ;
-
-
-rule capture-output ( target : sources * : properties * )
-{
- # Set up a proper DLL search path. Here, $(sources[1]) is a python module
- # and $(sources[2]) is a DLL. Only $(sources[1]) is passed to
- # testing.capture-output, so RUN_PATH variable on $(sources[2]) is not
- # consulted. Move it over explicitly.
- RUN_PATH on $(sources[1]) = [ on $(sources[2-]) return $(RUN_PATH) ] ;
-
- PYTHONPATH = [ sequence.transform pyd-pythonpath : $(sources[2-]) ] ;
- PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
-
- # After test is run, we remove the Python module, but not the Python script.
- testing.capture-output $(target) : $(sources[1]) : $(properties) :
- $(sources[2-]) ;
-
- # PYTHONPATH is different; it will be interpreted by whichever Python is
- # invoked and so must follow path rules for the target os. The only OSes
- # where we can run python for other OSes currently are NT and CYGWIN so we
- # only need to handle those cases.
- local target-os = [ feature.get-values target-os : $(properties) ] ;
- # Oddly, host-os is not in properties, so grab the default value.
- local host-os = [ feature.defaults host-os ] ;
- host-os = $(host-os:G=) ;
- if $(target-os) != $(host-os)
- {
- PYTHONPATH = [ sequence.transform $(host-os)-to-$(target-os)-path :
- $(PYTHONPATH) ] ;
- }
- local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
- local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
- $(PYTHONPATH:J=$(path-separator)) ] ;
- LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
-}
-
-
-rule bpl-test ( name : sources * : requirements * )
-{
- local s ;
- sources ?= $(name).py $(name).cpp ;
- return [ testing.make-test run-pyd : $(sources) /boost/python//boost_python
- : $(requirements) : $(name) ] ;
-}
-
-
-IMPORT $(__name__) : bpl-test : : bpl-test ;
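-
-# Typical use from a Boost.Python test Jamfile (hypothetical names); when no
-# sources are given, <name>.py and <name>.cpp are assumed, as coded above:
-#
-#   bpl-test foo ;                            # uses foo.py and foo.cpp
-#   bpl-test bar : bar.py bar_module.cpp ;    # explicit sources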
diff --git a/tools/build/v2/tools/qcc.jam b/tools/build/v2/tools/qcc.jam
deleted file mode 100644
index 4f2a4fc14f..0000000000
--- a/tools/build/v2/tools/qcc.jam
+++ /dev/null
@@ -1,236 +0,0 @@
-# Copyright (c) 2001 David Abrahams.
-# Copyright (c) 2002-2003 Rene Rivera.
-# Copyright (c) 2002-2003 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : new ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import os ;
-import property ;
-import set ;
-import toolset ;
-import type ;
-import unix ;
-
-feature.extend toolset : qcc ;
-
-toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
-generators.override builtin.lib-generator : qcc.prebuilt ;
-toolset.inherit-flags qcc : unix ;
-toolset.inherit-rules qcc : unix ;
-
-# Initializes the qcc toolset for the given version. If necessary, command may
-# be used to specify where the compiler is located. The parameter 'options' is a
-# space-delimited list of options, each one being specified as
-# <option-name>option-value. Valid option names are: cxxflags, linkflags and
-# linker-type. Accepted values for linker-type are gnu and sun, gnu being the
-# default.
-#
-# Example:
-# using qcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
-#
-rule init ( version ? : command * : options * )
-{
- local condition = [ common.check-init-parameters qcc : version $(version) ] ;
- local command = [ common.get-invocation-command qcc : QCC : $(command) ] ;
- common.handle-options qcc : $(condition) : $(command) : $(options) ;
-}
-
-
-generators.register-c-compiler qcc.compile.c++ : CPP : OBJ : <toolset>qcc ;
-generators.register-c-compiler qcc.compile.c : C : OBJ : <toolset>qcc ;
-generators.register-c-compiler qcc.compile.asm : ASM : OBJ : <toolset>qcc ;
-
-
-# Declare flags for compilation.
-toolset.flags qcc.compile OPTIONS <debug-symbols>on : -gstabs+ ;
-
-# Declare flags and action for compilation.
-toolset.flags qcc.compile OPTIONS <optimization>off : -O0 ;
-toolset.flags qcc.compile OPTIONS <optimization>speed : -O3 ;
-toolset.flags qcc.compile OPTIONS <optimization>space : -Os ;
-
-toolset.flags qcc.compile OPTIONS <inlining>off : -Wc,-fno-inline ;
-toolset.flags qcc.compile OPTIONS <inlining>on : -Wc,-Wno-inline ;
-toolset.flags qcc.compile OPTIONS <inlining>full : -Wc,-finline-functions -Wc,-Wno-inline ;
-
-toolset.flags qcc.compile OPTIONS <warnings>off : -w ;
-toolset.flags qcc.compile OPTIONS <warnings>all : -Wc,-Wall ;
-toolset.flags qcc.compile OPTIONS <warnings-as-errors>on : -Wc,-Werror ;
-
-toolset.flags qcc.compile OPTIONS <profiling>on : -p ;
-
-toolset.flags qcc.compile OPTIONS <cflags> ;
-toolset.flags qcc.compile.c++ OPTIONS <cxxflags> ;
-toolset.flags qcc.compile DEFINES <define> ;
-toolset.flags qcc.compile INCLUDES <include> ;
-
-toolset.flags qcc.compile OPTIONS <link>shared : -shared ;
-
-toolset.flags qcc.compile.c++ TEMPLATE_DEPTH <c++-template-depth> ;
-
-
-rule compile.c++
-{
- # Here we want to raise the template-depth parameter value to something
- # higher than the default value of 17. Note that we could do this using the
- # feature.set-default rule but we do not want to set the default value for
- # all toolsets as well.
- #
- # TODO: This 'modified default' has been inherited from some 'older Boost
- # Build implementation' and has most likely been added to make some Boost
- # library parts compile correctly. We should see what exactly prompted this
- # and whether we can get around the problem more locally.
- local template-depth = [ on $(1) return $(TEMPLATE_DEPTH) ] ;
- if ! $(template-depth)
- {
- TEMPLATE_DEPTH on $(1) = 128 ;
- }
-}
-
-actions compile.c++
-{
- "$(CONFIG_COMMAND)" -Wc,-ftemplate-depth-$(TEMPLATE_DEPTH) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.c
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-actions compile.asm
-{
- "$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
-}
-
-
-# This class checks that we do not try to use the <runtime-link>static
-# property while creating or using a shared library, since that is not
-# supported by qcc/libc.
-#
-class qcc-linking-generator : unix-linking-generator
-{
- rule generated-targets ( sources + : property-set : project name ? )
- {
- if <runtime-link>static in [ $(property-set).raw ]
- {
- local m ;
- if [ id ] = "qcc.link.dll"
- {
- m = "on qcc, a DLL can't be built with <runtime-link>static" ;
- }
- if ! $(m)
- {
- for local s in $(sources)
- {
- local type = [ $(s).type ] ;
- if $(type) && [ type.is-derived $(type) SHARED_LIB ]
- {
- m = "on qcc, using DLLs together with the <runtime-link>static option is not possible" ;
- }
- }
- }
- if $(m)
- {
- errors.user-error $(m) : "It is suggested to use"
- "<runtime-link>static together with <link>static." ;
- }
- }
-
- return [ unix-linking-generator.generated-targets
- $(sources) : $(property-set) : $(project) $(name) ] ;
- }
-}
-
-generators.register [ new qcc-linking-generator qcc.link : LIB OBJ : EXE
- : <toolset>qcc ] ;
-
-generators.register [ new qcc-linking-generator qcc.link.dll : LIB OBJ
- : SHARED_LIB : <toolset>qcc ] ;
-
-generators.override qcc.prebuilt : builtin.prebuilt ;
-generators.override qcc.searched-lib-generator : searched-lib-generator ;
-
-
-# Declare flags for linking.
-# First, the common flags.
-toolset.flags qcc.link OPTIONS <debug-symbols>on : -gstabs+ ;
-toolset.flags qcc.link OPTIONS <profiling>on : -p ;
-toolset.flags qcc.link OPTIONS <linkflags> ;
-toolset.flags qcc.link LINKPATH <library-path> ;
-toolset.flags qcc.link FINDLIBS-ST <find-static-library> ;
-toolset.flags qcc.link FINDLIBS-SA <find-shared-library> ;
-toolset.flags qcc.link LIBRARIES <library-file> ;
-
-toolset.flags qcc.link FINDLIBS-SA : m ;
-
-# For <runtime-link>static we made sure there are no dynamic libraries in the
-# link.
-toolset.flags qcc.link OPTIONS <runtime-link>static : -static ;
-
-# Assuming this is just like with gcc.
-toolset.flags qcc.link RPATH : <dll-path> : unchecked ;
-toolset.flags qcc.link RPATH_LINK : <xdll-path> : unchecked ;
-
-
-# Declare actions for linking.
-#
-rule link ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- # Serialize execution of the 'link' action, since running N links in
- # parallel is just slower. For now, serialize only qcc links, though it might
- # be a good idea to serialize all links.
- JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
-}
-
-actions link bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
-}
-
-
-# Always remove archive and start again. Here is the rationale from Andre Hentz:
-# I had a file, say a1.c, that was included into liba.a. I moved a1.c to a2.c,
-# updated my Jamfiles and rebuilt. My program was crashing with absurd errors.
-# After some debugging I traced it back to the fact that a1.o was *still* in
-# liba.a
-RM = [ common.rm-command ] ;
-if [ os.name ] = NT
-{
- RM = "if exist \"$(<[1])\" DEL \"$(<[1])\"" ;
-}
-
-
-# Declare action for creating static libraries. The 'r' letter means to add
-# files to the archive with replacement. Since we remove the archive, we do not
-# care about replacement, but there is no option to "add without replacement".
-# The 'c' letter suppresses warnings in case the archive does not exist yet.
-# That warning is produced only on some platforms, for whatever reasons.
-#
-actions piecemeal archive
-{
- $(RM) "$(<)"
- ar rc "$(<)" "$(>)"
-}
-
-
-rule link.dll ( targets * : sources * : properties * )
-{
- SPACE on $(targets) = " " ;
- JAM_SEMAPHORE on $(targets) = <s>qcc-link-semaphore ;
-}
-
-
-# Differ from 'link' above only by -shared.
-#
-actions link.dll bind LIBRARIES
-{
- "$(CONFIG_COMMAND)" -L"$(LINKPATH)" -Wl,-R$(SPACE)-Wl,"$(RPATH)" -o "$(<)" $(HAVE_SONAME)-Wl,-h$(SPACE)-Wl,$(<[1]:D=) -shared "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-ST) -l$(FINDLIBS-SA) $(OPTIONS)
-}
diff --git a/tools/build/v2/tools/qt4.jam b/tools/build/v2/tools/qt4.jam
deleted file mode 100644
index 71d1b76207..0000000000
--- a/tools/build/v2/tools/qt4.jam
+++ /dev/null
@@ -1,724 +0,0 @@
-# Copyright 2002-2006 Vladimir Prus
-# Copyright 2005 Alo Sarv
-# Copyright 2005-2009 Juergen Hunold
-#
-# Distributed under the Boost Software License, Version 1.0. (See
-# accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Qt4 library support module
-#
-# The module attempts to auto-detect QT installation location from QTDIR
-# environment variable; failing that, installation location can be passed as
-# argument:
-#
-# toolset.using qt4 : /usr/local/Trolltech/Qt-4.0.0 ;
-#
-# The module supports code generation from .ui and .qrc files, as well as
-# running the moc preprocessor on headers. Note that you must list all your
-# moc-able headers in sources.
-#
-# Example:
-#
-# exe myapp : myapp.cpp myapp.h myapp.ui myapp.qrc
-# /qt4//QtGui /qt4//QtNetwork ;
-#
-# It's also possible to run moc on cpp sources:
-#
-# import cast ;
-#
-# exe myapp : myapp.cpp [ cast _ moccable-cpp : myapp.cpp ] /qt4//QtGui ;
-#
-# When moccing source file myapp.cpp you need to include "myapp.moc" from
-# myapp.cpp. When moccing .h files, the output of moc will be automatically
-# compiled and linked in, you don't need any includes.
-#
-# This is consistent with Qt guidelines:
-# http://doc.trolltech.com/4.0/moc.html
-
-import modules ;
-import feature ;
-import errors ;
-import type ;
-import "class" : new ;
-import generators ;
-import project ;
-import toolset : flags ;
-import os ;
-import virtual-target ;
-import scanner ;
-
-# Qt3Support control feature
-#
-# Qt4 configure defaults to building the Qt4 libraries with Qt3Support.
-# The autodetection is missing, so we default to disabling Qt3Support.
-# This prevents the user from inadvertently using a deprecated API.
-#
-# The Qt3Support library can be activated by adding
-# "<qt3support>on" to requirements
-#
-# Use "<qt3support>on:<define>QT3_SUPPORT_WARNINGS"
-# to get warnings about deprecated Qt3 support functions and classes.
-# Files ported by the "qt3to4" conversion tool contain _tons_ of
-# warnings, so this define is not set as default.
-#
-# Todo: Detect Qt3Support from Qt's configure data.
-# Or add more auto-configuration (like python).
-feature.feature qt3support : off on : propagated link-incompatible ;
-
-# The Qt version used for requirements
-# Valid values are <qt>4.4 or <qt>4.5.0.
-# Auto-detection via qmake sets '<qt>major.minor.patch'
-feature.feature qt : : propagated ;
-
-project.initialize $(__name__) ;
-project qt ;
-
-# Save the project so that we tolerate 'import + using' combo.
-.project = [ project.current ] ;
-
-# Helper utils for easy debug output
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = TRUE ;
-}
-
-local rule debug-message ( message * )
-{
- if $(.debug-configuration) = TRUE
- {
- ECHO notice: [qt4-cfg] $(message) ;
- }
-}
-
-# Capture qmake output line by line
-local rule read-output ( content )
-{
- local lines ;
- local nl = "
-" ;
- local << = "([^$(nl)]*)[$(nl)](.*)" ;
- local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
- while $(line+)
- {
- lines += $(line+[1]) ;
- line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
- }
- return $(lines) ;
-}
-
-# Capture Qt version from qmake
-local rule check-version ( bin_prefix )
-{
- full-cmd = $(bin_prefix)"/qmake -v" ;
- debug-message Running '$(full-cmd)' ;
- local output = [ SHELL $(full-cmd) ] ;
- for line in [ read-output $(output) ]
- {
- # Parse the output to get all the results.
- if [ MATCH "QMake" : $(line) ]
- {
- # Skip first line of output
- }
- else
- {
- temp = [ MATCH "([0-9]*)\\.([0-9]*)\\.([0-9]*)" : $(line) ] ;
- }
- }
- return $(temp) ;
-}
-
-# Validate the version string and extract the major/minor part we care about.
-#
-local rule split-version ( version )
-{
- local major-minor = [ MATCH ^([0-9]+)\.([0-9]+)(.*)$ : $(version) : 1 2 3 ] ;
- if ! $(major-minor[2]) || $(major-minor[3])
- {
- ECHO "Warning: 'using qt' expects a two part (major, minor) version number; got" $(version) instead ;
-
- # Add a zero to account for the missing digit if necessary.
- major-minor += 0 ;
- }
-
- return $(major-minor[1]) $(major-minor[2]) ;
-}
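-
-# For example (illustrative): "4.5" yields "4 5" with no warning, while
-# "4.5.0" triggers the warning above and is still reduced to "4 5".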
-
-# Initialize the QT support module.
-# Parameters:
-# - 'prefix' parameter tells where Qt is installed.
-# - 'full_bin' optional full path to Qt binaries (qmake,moc,uic,rcc)
-# - 'full_inc' optional full path to Qt top-level include directory
-# - 'full_lib' optional full path to Qt library directory
-# - 'version' optional version of Qt, else autodetected via 'qmake -v'
-# - 'condition' optional requirements
-rule init ( prefix : full_bin ? : full_inc ? : full_lib ? : version ? : condition * )
-{
- project.push-current $(.project) ;
-
- debug-message "==== Configuring Qt ... ====" ;
- for local v in prefix full_bin full_inc full_lib version condition
- {
- if $($(v))
- {
- debug-message " user-specified "$(v): '$($(v))' ;
- }
- }
-
- # Needed as default value
- .prefix = $(prefix) ;
-
- # Pre-build paths to detect reinitialization changes.
- local inc_prefix lib_prefix bin_prefix ;
- if $(full_inc)
- {
- inc_prefix = $(full_inc) ;
- }
- else
- {
- inc_prefix = $(prefix)/include ;
- }
- if $(full_lib)
- {
- lib_prefix = $(full_lib) ;
- }
- else
- {
- lib_prefix = $(prefix)/lib ;
- }
- if $(full_bin)
- {
- bin_prefix = $(full_bin) ;
- }
- else
- {
- bin_prefix = $(prefix)/bin ;
- }
-
- # Globally needed variables
- .incprefix = $(inc_prefix) ;
- .libprefix = $(lib_prefix) ;
- .binprefix = $(bin_prefix) ;
-
- if ! $(.initialized)
- {
- # Make sure this is initialised only once
- .initialized = true ;
-
- # Generates cpp files from header files using "moc" tool
- generators.register-standard qt4.moc : H : CPP(moc_%) : <allow>qt4 ;
-
- # The OBJ result type is a fake; 'H' is what will really be produced. See
- # comments on the generator class, defined below the 'init' function.
- generators.register [ new uic-generator qt4.uic : UI : OBJ :
- <allow>qt4 ] ;
-
- # The OBJ result type is a fake here too.
- generators.register [ new moc-h-generator
- qt4.moc.inc : MOCCABLE_CPP : OBJ : <allow>qt4 ] ;
-
- generators.register [ new moc-inc-generator
- qt4.moc.inc : MOCCABLE_H : OBJ : <allow>qt4 ] ;
-
- # Generates .cpp files from .qrc files.
- generators.register-standard qt4.rcc : QRC : CPP(qrc_%) ;
-
- # dependency scanner for wrapped files.
- type.set-scanner QRC : qrc-scanner ;
-
- # Save the value of the first occurring prefix.
- .PREFIX = $(prefix) ;
- }
-
- if $(version)
- {
- major-minor = [ split-version $(version) ] ;
- version = $(major-minor:J=.) ;
- }
- else
- {
- version = [ check-version $(bin_prefix) ] ;
- if $(version)
- {
- version = $(version:J=.) ;
- }
- debug-message Detected version '$(version)' ;
- }
-
- local target-requirements = $(condition) ;
-
- # Add the version, if any, to the target requirements.
- if $(version)
- {
- if ! $(version) in [ feature.values qt ]
- {
- feature.extend qt : $(version) ;
- }
- target-requirements += <qt>$(version:E=default) ;
- }
-
- local target-os = [ feature.get-values target-os : $(condition) ] ;
- if ! $(target-os)
- {
- target-os ?= [ feature.defaults target-os ] ;
- target-os = $(target-os:G=) ;
- target-requirements += <target-os>$(target-os) ;
- }
-
- # Build exact requirements for the tools
- local tools-requirements = $(target-requirements:J=/) ;
-
- debug-message "Details of this Qt configuration:" ;
- debug-message " prefix: " '$(prefix:E=<empty>)' ;
- debug-message " binary path: " '$(bin_prefix:E=<empty>)' ;
- debug-message " include path:" '$(inc_prefix:E=<empty>)' ;
- debug-message " library path:" '$(lib_prefix:E=<empty>)' ;
- debug-message " target requirements:" '$(target-requirements)' ;
- debug-message " tool requirements: " '$(tools-requirements)' ;
-
- # setup the paths for the tools
- toolset.flags qt4.moc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
- toolset.flags qt4.rcc .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
- toolset.flags qt4.uic .BINPREFIX $(tools-requirements) : $(bin_prefix) ;
-
- # TODO: 2009-02-12: Better support for directories
- # Most likely needed are separate getters for: include,libraries,binaries and sources.
- toolset.flags qt4.directory .PREFIX $(tools-requirements) : $(prefix) ;
-
- # Test for a buildable Qt.
- if [ glob $(.prefix)/Jamroot ]
- {
- .bjam-qt = true ;
-
- # this will declare QtCore (and qtmain on <target-os>windows)
- add-shared-library QtCore ;
- }
- else
- # Setup common pre-built Qt.
- # Special setup for QtCore on which everything depends
- {
- local usage-requirements =
- <include>$(.incprefix)
- <library-path>$(.libprefix)
- <dll-path>$(.libprefix)
- <threading>multi
- <allow>qt4 ;
-
- local suffix ;
-
- # Since Qt-4.2, debug versions on unix have to be built
- # separately and therefore have no suffix.
- .suffix_version = "" ;
- .suffix_debug = "" ;
-
- # Control flag for auto-configuration of the debug libraries.
- # This setup requires Qt 'configure -debug-and-release'.
- # Only available on some platforms.
- # ToDo: 2009-02-12: Maybe throw this away and
- # require separate setup with <variant>debug as condition.
- .have_separate_debug = FALSE ;
-
- # Setup other platforms
- if $(target-os) in windows cygwin
- {
- .have_separate_debug = TRUE ;
-
- # On NT, the libs have "4" suffix, and "d" suffix in debug builds.
- .suffix_version = "4" ;
- .suffix_debug = "d" ;
-
- # On Windows we must link against the qtmain library
- lib qtmain
- : # sources
- : # requirements
- <name>qtmain$(.suffix_debug)
- <variant>debug
- $(target-requirements)
- ;
-
- lib qtmain
- : # sources
- : # requirements
- <name>qtmain
- $(target-requirements)
- ;
- }
- else if $(target-os) = darwin
- {
- # On MacOS X, both debug and release libraries are available.
- .suffix_debug = "_debug" ;
-
- .have_separate_debug = TRUE ;
-
- alias qtmain ;
- }
- else
- {
- alias qtmain : : $(target-requirements) ;
- }
-
- lib QtCore : qtmain
- : # requirements
- <name>QtCore$(.suffix_version)
- $(target-requirements)
- : # default-build
- : # usage-requirements
- <define>QT_CORE_LIB
- <define>QT_NO_DEBUG
- <include>$(.incprefix)/QtCore
- $(usage-requirements)
- ;
-
- if $(.have_separate_debug) = TRUE
- {
- debug-message Configure debug libraries with suffix '$(.suffix_debug)' ;
-
- lib QtCore : qtmain
- : # requirements
- <name>QtCore$(.suffix_debug)$(.suffix_version)
- <variant>debug
- $(target-requirements)
- : # default-build
- : # usage-requirements
- <define>QT_CORE_LIB
- <include>$(.incprefix)/QtCore
- $(usage-requirements)
- ;
- }
- }
-
- # Initialising the remaining libraries follows a canonical pattern. The
- # parameters are 'module' : 'depends-on' : 'usage-define' : 'requirements' : 'include'.
- # 'include' is only needed for non-canonical include paths.
- add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
- add-shared-library QtNetwork : QtCore : QT_NETWORK_LIB : $(target-requirements) ;
- add-shared-library QtSql : QtCore : QT_SQL_LIB : $(target-requirements) ;
- add-shared-library QtXml : QtCore : QT_XML_LIB : $(target-requirements) ;
-
- add-shared-library Qt3Support : QtGui QtNetwork QtXml QtSql
- : QT_QT3SUPPORT_LIB QT3_SUPPORT
- : <qt3support>on $(target-requirements) ;
-
- # Dummy target to enable "<qt3support>off" and
- # "<library>/qt//Qt3Support" at the same time. This enables quick
- # switching from one to the other for test/porting purposes.
- alias Qt3Support : : <qt3support>off $(target-requirements) ;
-
- # OpenGl Support
- add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ;
-
- # SVG-Support (Qt 4.1)
- add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ;
-
- # Test-Support (Qt 4.1)
- add-shared-library QtTest : QtCore : : $(target-requirements) ;
-
- # Qt designer library
- add-shared-library QtDesigner : QtGui QtXml : : $(target-requirements) ;
- add-shared-library QtDesignerComponents : QtGui QtXml : : $(target-requirements) ;
-
- # Support for dynamic Widgets (Qt 4.1)
- add-static-library QtUiTools : QtGui QtXml : $(target-requirements) ;
-
- # DBus-Support (Qt 4.2)
- add-shared-library QtDBus : QtXml : : $(target-requirements) ;
-
- # Script-Engine (Qt 4.3)
- add-shared-library QtScript : QtGui QtXml : QT_SCRIPT_LIB : $(target-requirements) ;
-
- # Tools for the Script-Engine (Qt 4.5)
- add-shared-library QtScriptTools : QtScript : QT_SCRIPTTOOLS_LIB : $(target-requirements) ;
-
- # WebKit (Qt 4.4)
- add-shared-library QtWebKit : QtGui : QT_WEBKIT_LIB : $(target-requirements) ;
-
- # Phonon Multimedia (Qt 4.4)
- add-shared-library phonon : QtGui QtXml : QT_PHONON_LIB : $(target-requirements) ;
-
- # Multimedia engine (Qt 4.6)
- add-shared-library QtMultimedia : QtGui : QT_MULTIMEDIA_LIB : $(target-requirements) ;
-
- # XmlPatterns-Engine (Qt 4.4)
- add-shared-library QtXmlPatterns : QtNetwork : QT_XMLPATTERNS_LIB : $(target-requirements) ;
-
- # Help-Engine (Qt 4.4)
- add-shared-library QtHelp : QtGui QtSql QtXml : : $(target-requirements) ;
- add-shared-library QtCLucene : QtCore QtSql QtXml : : $(target-requirements) ;
-
- # QML-Engine (Qt 4.7)
- add-shared-library QtDeclarative : QtGui QtXml : : $(target-requirements) ;
-
- # AssistantClient Support
- # Compat library removed in 4.7.0
- # Pre-4.4 help system, use QtHelp for new programs
- if $(version) < "4.7"
- {
- add-shared-library QtAssistantClient : QtGui : : $(target-requirements) : QtAssistant ;
- }
- debug-message "==== Configured Qt-$(version) ====" ;
-
- project.pop-current ;
-}
-
-rule initialized ( )
-{
- return $(.initialized) ;
-}
-
-
-
-# This custom generator is needed because in QT4, UI files are translated only
-# into H files, and no C++ files are created. Further, the H files need not be
-# passed via MOC. The header is used only via inclusion. If we define a standard
-# UI -> H generator, Boost.Build will run MOC on H, and then compile the
-# resulting cpp. It will give a warning, since output from moc will be empty.
-#
-# This generator is declared with a UI -> OBJ signature, so it gets invoked when
-# linking generator tries to convert sources to OBJ, but it produces target of
-# type H. This is non-standard, but allowed. That header won't be mocced.
-#
-class uic-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(name)
- {
- name = [ $(sources[0]).name ] ;
- name = $(name:B) ;
- }
-
- local a = [ new action $(sources[1]) : qt4.uic : $(property-set) ] ;
-
- # The 'ui_' prefix is to match qmake's default behavior.
- local target = [ new file-target ui_$(name) : H : $(project) : $(a) ] ;
-
- local r = [ virtual-target.register $(target) ] ;
-
- # Since this generator will return a H target, the linking generator
- # won't use it at all, and won't set any dependency on it. However, we
- # need the target to be seen by bjam, so that dependency from sources to
- # this generated header is detected -- if jam does not know about this
- # target, it won't do anything.
- DEPENDS all : [ $(r).actualize ] ;
-
- return $(r) ;
- }
-}
-
-
-class moc-h-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_CPP
- {
- name = [ $(sources[0]).name ] ;
- name = $(name:B) ;
-
- local a = [ new action $(sources[1]) : qt4.moc.inc :
- $(property-set) ] ;
-
- local target = [ new file-target $(name) : MOC : $(project) : $(a)
- ] ;
-
- local r = [ virtual-target.register $(target) ] ;
-
- # Since this generator will return a H target, the linking generator
- # won't use it at all, and won't set any dependency on it. However,
- # we need the target to be seen by bjam, so that dependency from
- # sources to this generated header is detected -- if jam does not
- # know about this target, it won't do anything.
- DEPENDS all : [ $(r).actualize ] ;
-
- return $(r) ;
- }
- }
-}
-
-
-class moc-inc-generator : generator
-{
- rule __init__ ( * : * )
- {
- generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
-
- rule run ( project name ? : property-set : sources * )
- {
- if ! $(sources[2]) && [ $(sources[1]).type ] = MOCCABLE_H
- {
- name = [ $(sources[0]).name ] ;
- name = $(name:B) ;
-
- local a = [ new action $(sources[1]) : qt4.moc.inc :
- $(property-set) ] ;
-
- local target = [ new file-target moc_$(name) : CPP : $(project) :
- $(a) ] ;
-
- # Since this generator will return a H target, the linking generator
- # won't use it at all, and won't set any dependency on it. However,
- # we need the target to be seen by bjam, so that dependency from
- # sources to this generated header is detected -- if jam does not
- # know about this target, it won't do anything.
- DEPENDS all : [ $(target).actualize ] ;
-
- return [ virtual-target.register $(target) ] ;
- }
- }
-}
-
-
-# Query the installation directory. This is needed in at least two scenarios.
-# First, when re-using sources from the Qt-Tree. Second, to "install" custom Qt
-# plugins to the Qt-Tree.
-#
-rule directory
-{
- return $(.PREFIX) ;
-}
-
-# Add a shared Qt library.
-rule add-shared-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
-{
- add-library $(lib-name) : $(.suffix_version) : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
-}
-
-# Add a static Qt library.
-rule add-static-library ( lib-name : depends-on * : usage-defines * : requirements * : include ? )
-{
- add-library $(lib-name) : : $(depends-on) : $(usage-defines) : $(requirements) : $(include) ;
-}
-
-# Add a Qt library.
-# Static libs are unversioned, whereas shared libs have the major number as suffix.
-# Creates both release and debug versions on platforms where both are enabled by Qt configure.
-# Flags:
-# - lib-name Qt library Name
-# - version Qt major number used as shared library suffix (QtCore4.so)
-# - depends-on other Qt libraries
-# - usage-defines those are set by qmake, so set them when using this library
-# - requirements additional requirements
-# - include non-canonical include path. The canonical path is $(.incprefix)/$(lib-name).
-rule add-library ( lib-name : version ? : depends-on * : usage-defines * : requirements * : include ? )
-{
- if $(.bjam-qt)
- {
- # Import the Qt module.
- # Everything will be set up there.
- alias $(lib-name)
- : $(.prefix)//$(lib-name)
- :
- :
- : <allow>qt4 ;
- }
- else
- {
- local real_include ;
- real_include ?= $(include) ;
- real_include ?= $(lib-name) ;
-
- lib $(lib-name)
- : # sources
- $(depends-on)
- : # requirements
- <name>$(lib-name)$(version)
- $(requirements)
- : # default-build
- : # usage-requirements
- <define>$(usage-defines)
- <include>$(.incprefix)/$(real_include)
- ;
-
- if $(.have_separate_debug) = TRUE
- {
- lib $(lib-name)
- : # sources
- $(depends-on)
- : # requirements
- <name>$(lib-name)$(.suffix_debug)$(version)
- $(requirements)
- <variant>debug
- : # default-build
- : # usage-requirements
- <define>$(usage-defines)
- <include>$(.incprefix)/$(real_include)
- ;
- }
- }
-
- # Make library explicit so that a simple <use>qt4 will not bring in everything.
- # And some components like QtDBus/Phonon may not be available on all platforms.
- explicit $(lib-name) ;
-}
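-
-# Illustration (not part of the original file): with the Windows setup above,
-# where .suffix_version is "4" and .suffix_debug is "d",
-#
-#   add-shared-library QtGui : QtCore : QT_GUI_LIB : $(target-requirements) ;
-#
-# declares a release alternative searching for QtGui4 and a <variant>debug
-# alternative searching for QtGuid4.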
-
-# Use $(.BINPREFIX[-1]) for the paths as several tools-requirements can match.
-# The exact match is the last one.
-
-# Get <include> and <defines> from current toolset.
-flags qt4.moc INCLUDES <include> ;
-flags qt4.moc DEFINES <define> ;
-
-# need a newline for expansion of DEFINES and INCLUDES in the response file.
-.nl = "
-" ;
-
-# Processes headers to create Qt MetaObject information. Qt4's moc has its own
-# C++ parser, so pass INCLUDES and DEFINES.
-# We use a response file with one INCLUDE/DEFINE per line.
-#
-actions moc
-{
- $(.BINPREFIX[-1])/moc -f $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
-}
-
-# When moccing files for include only, we don't need -f, otherwise the generated
-# code will include the .cpp and we'll get duplicated symbols.
-#
-actions moc.inc
-{
- $(.BINPREFIX[-1])/moc $(>) -o $(<) @"@($(<).rsp:E=-D$(DEFINES)$(.nl) -I$(INCLUDES:T)$(.nl))"
-}
-
-
-# Generates source files from resource files.
-#
-actions rcc
-{
- $(.BINPREFIX[-1])/rcc $(>) -name $(>:B) -o $(<)
-}
-
-
-# Generates user-interface source from .ui files.
-#
-actions uic
-{
- $(.BINPREFIX[-1])/uic $(>) -o $(<)
-}
-
-
-# Scanner for .qrc files. Look for the CDATA section of the <file> tag. Ignore
-# the "alias" attribute. See http://doc.trolltech.com/qt/resources.html for
-# detailed documentation of the Qt Resource System.
-#
-class qrc-scanner : common-scanner
-{
- rule pattern ( )
- {
- return "<file.*>(.*)</file>" ;
- }
-}
-
-
-# Wrapped files are "included".
-scanner.register qrc-scanner : include ;
diff --git a/tools/build/v2/tools/rc.jam b/tools/build/v2/tools/rc.jam
deleted file mode 100644
index 9964d339ba..0000000000
--- a/tools/build/v2/tools/rc.jam
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-#
-# Copyright (c) 2006 Rene Rivera.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-import type ;
-import generators ;
-import feature ;
-import errors ;
-import scanner ;
-import toolset : flags ;
-
-if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
-{
- .debug-configuration = true ;
-}
-
-type.register RC : rc ;
-
-rule init ( )
-{
-}
-
-# Configures a new resource compilation command specific to a condition,
-# usually a toolset selection condition. The possible options are:
-#
-# * <rc-type>(rc|windres) - Indicates the type of options the command
-# accepts.
-#
-# Even though the arguments are all optional, only when a command, condition,
-# and at minimum the rc-type option are given will the command be configured.
-# This is so that callers don't have to check auto-configuration values
-# before calling this, while still getting build failures when the resource
-# compiler can't be found.
-#
-rule configure ( command ? : condition ? : options * )
-{
- local rc-type = [ feature.get-values <rc-type> : $(options) ] ;
-
- if $(command) && $(condition) && $(rc-type)
- {
- flags rc.compile.resource .RC $(condition) : $(command) ;
- flags rc.compile.resource .RC_TYPE $(condition) : $(rc-type:L) ;
- flags rc.compile.resource DEFINES <define> ;
- flags rc.compile.resource INCLUDES <include> ;
- if $(.debug-configuration)
- {
- ECHO notice: using rc compiler :: $(condition) :: $(command) ;
- }
- }
-}
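-
-# For instance (hypothetical invocation), a toolset module that has located
-# GNU windres could configure it like this, after which <define> and
-# <include> properties flow into the command line via the flags set above:
-#
-#   rc.configure windres : <toolset>gcc : <rc-type>windres ;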
-
-rule compile.resource ( target : sources * : properties * )
-{
- local rc-type = [ on $(target) return $(.RC_TYPE) ] ;
- rc-type ?= null ;
- compile.resource.$(rc-type) $(target) : $(sources[1]) ;
-}
-
-actions compile.resource.rc
-{
- "$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"
-}
-
-actions compile.resource.windres
-{
- "$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"
-}
-
-actions quietly compile.resource.null
-{
- as /dev/null -o "$(<)"
-}
-
-# Since it's a common practice to write
-# exe hello : hello.cpp hello.rc
-# we change the name of the object created from the RC file, to
-# avoid a conflict with hello.cpp.
-# The reason we generate OBJ and not RES is that gcc does not
-# seem to like RES files, but works OK with OBJ.
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
-#
-# Using 'register-c-compiler' adds the build directory to INCLUDES
-generators.register-c-compiler rc.compile.resource : RC : OBJ(%_res) ;
-
-# Register scanner for resources
-class res-scanner : scanner
-{
- import regex virtual-target path scanner ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
-
- self.includes = $(includes) ;
- }
-
- rule pattern ( )
- {
- return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local angle = [ regex.transform $(matches) : "#include[ ]*<([^<]+)>" ] ;
- local quoted = [ regex.transform $(matches) : "#include[ ]*\"([^\"]+)\"" ] ;
- local res = [ regex.transform $(matches) : "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)[ ]+(([^ \"]+)|\"([^\"]+)\")" : 3 4 ] ;
-
- # Icons and other includes may be referenced as
- #
- # IDR_MAINFRAME ICON "res\\icon.ico"
- #
- # so we have to replace double backslashes with forward slashes.
- res = [ regex.replace-list $(res) : "\\\\\\\\" : "/" ] ;
-
- # CONSIDER: the new scoping rule seems to defeat "on target" variables.
- local g = [ on $(target) return $(HDRGRIST) ] ;
- local b = [ NORMALIZE_PATH $(binding:D) ] ;
-
- # Attach binding of including file to included targets.
- # When target is directly created from virtual target
- # this extra information is unnecessary. But in other
- # cases, it allows us to distinguish between two headers of the
- # same name included from different places.
- # We don't need this extra information for angle includes,
- # since they should not depend on including file (we can't
- # get literal "." in include path).
- local g2 = $(g)"#"$(b) ;
-
- angle = $(angle:G=$(g)) ;
- quoted = $(quoted:G=$(g2)) ;
- res = $(res:G=$(g2)) ;
-
- local all = $(angle) $(quoted) ;
-
- INCLUDES $(target) : $(all) ;
- DEPENDS $(target) : $(res) ;
- NOCARE $(all) $(res) ;
- SEARCH on $(angle) = $(self.includes:G=) ;
- SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
- SEARCH on $(res) = $(b) $(self.includes:G=) ;
-
- # Just propagate the current scanner to includes, in the hope
- # that includes do not change scanners.
- scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
- }
-}
-
-scanner.register res-scanner : include ;
-type.set-scanner RC : res-scanner ;
diff --git a/tools/build/v2/tools/rc.py b/tools/build/v2/tools/rc.py
deleted file mode 100644
index 0b82d231d7..0000000000
--- a/tools/build/v2/tools/rc.py
+++ /dev/null
@@ -1,189 +0,0 @@
-# Status: being ported by Steven Watanabe
-# Base revision: 47077
-#
-# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-#
-# Copyright (c) 2006 Rene Rivera.
-#
-# Copyright (c) 2008 Steven Watanabe
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-##import type ;
-##import generators ;
-##import feature ;
-##import errors ;
-##import scanner ;
-##import toolset : flags ;
-
-import os
-import re
-import bjam
-
-from b2.build import type, toolset, generators, scanner, feature
-from b2.tools import builtin
-from b2.util import regex
-from b2.util.utility import ungrist
-from b2.build.toolset import flags
-from b2.manager import get_manager
-
-__debug = None
-
-def debug():
- global __debug
- if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
- return __debug
-
-type.register('RC', ['rc'])
-
-def init():
- pass
-
-def configure (command = None, condition = None, options = None):
- """
- Configures a new resource compilation command specific to a condition,
- usually a toolset selection condition. The possible options are:
-
- * <rc-type>(rc|windres) - Indicates the type of options the command
- accepts.
-
- Even though the arguments are all optional, only when a command, condition,
- and at minimum the rc-type option are given will the command be configured.
- This is so that callers don't have to check auto-configuration values
- before calling this, while still getting build failures when the resource
- compiler can't be found.
- """
- rc_type = feature.get_values('<rc-type>', options)
- if rc_type:
- assert(len(rc_type) == 1)
- rc_type = rc_type[0]
-
- if command and condition and rc_type:
- flags('rc.compile.resource', '.RC', condition, command)
- flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower())
- flags('rc.compile.resource', 'DEFINES', [], ['<define>'])
- flags('rc.compile.resource', 'INCLUDES', [], ['<include>'])
- if debug():
- print 'notice: using rc compiler ::', condition, '::', command
-
-engine = get_manager().engine()
-
-class RCAction:
- """Class representing bjam action defined from Python.
- The function must register the action to execute."""
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
- if self.function:
- self.function(targets, sources, property_set)
-
-# FIXME: What is the proper way to dispatch actions?
-def rc_register_action(action_name, function = None):
- global engine
- if engine.actions.has_key(action_name):
- raise "Bjam action %s is already defined" % action_name
- engine.actions[action_name] = RCAction(action_name, function)
-
-def rc_compile_resource(targets, sources, properties):
- rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE')
- global engine
- engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties)
-
-rc_register_action('rc.compile.resource', rc_compile_resource)
-
-
-engine.register_action(
- 'rc.compile.resource.rc',
- '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"')
-
-engine.register_action(
- 'rc.compile.resource.windres',
- '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"')
-
-# FIXME: this was originally declared quietly
-engine.register_action(
- 'compile.resource.null',
- 'as /dev/null -o "$(<)"')
-
-# Since it's a common practice to write
-# exe hello : hello.cpp hello.rc
-# we change the name of the object created from the RC file, to
-# avoid a conflict with hello.cpp.
-# The reason we generate OBJ and not RES is that gcc does not
-# seem to like RES files, but works OK with OBJ.
-# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
-#
-# Using 'register-c-compiler' adds the build directory to INCLUDES
-# FIXME: switch to generators
-builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], [])
-
-__angle_include_re = "#include[ ]*<([^<]+)>"
-
-# Register scanner for resources
-class ResScanner(scanner.Scanner):
-
- def __init__(self, includes):
- scanner.__init__ ;
- self.includes = includes
-
- def pattern(self):
- return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
- "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;
-
- def process(self, target, matches, binding):
-
- angle = regex.transform(matches, "#include[ ]*<([^<]+)>")
- quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"")
- res = regex.transform(matches,
- "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
- "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])
-
- # Icons and other includes may be referenced as
- #
- # IDR_MAINFRAME ICON "res\\icon.ico"
- #
- # so we have to replace double backslashes with forward slashes.
- res = [ re.sub(r'\\\\', '/', match) for match in res ]
-
- # CONSIDER: the new scoping rule seems to defeat "on target" variables.
- g = bjam.call('get-target-variable', target, 'HDRGRIST')
- b = os.path.normpath(os.path.dirname(binding))
-
- # Attach binding of including file to included targets.
- # When target is directly created from virtual target
- # this extra information is unnecessary. But in other
- # cases, it allows us to distinguish between two headers of the
- # same name included from different places.
- # We don't need this extra information for angle includes,
- # since they should not depend on including file (we can't
- # get literal "." in include path).
- g2 = g + "#" + b
-
- g = "<" + g + ">"
- g2 = "<" + g2 + ">"
- angle = [g + x for x in angle]
- quoted = [g2 + x for x in quoted]
- res = [g2 + x for x in res]
-
- all = angle + quoted
-
- bjam.call('mark-included', target, all)
-
- engine = get_manager().engine()
-
- engine.add_dependency(target, res)
- bjam.call('NOCARE', all + res)
- engine.set_target_variable(angle, 'SEARCH', ungrist(self.includes))
- engine.set_target_variable(quoted, 'SEARCH', b + ungrist(self.includes))
- engine.set_target_variable(res, 'SEARCH', b + ungrist(self.includes)) ;
-
- # Just propagate the current scanner to includes, in the hope
- # that includes do not change scanners.
- get_manager().scanners().propagate(self, angle + quoted)
-
-scanner.register(ResScanner, 'include')
-type.set_scanner('RC', ResScanner)
diff --git a/tools/build/v2/tools/stage.jam b/tools/build/v2/tools/stage.jam
deleted file mode 100644
index 36427447bf..0000000000
--- a/tools/build/v2/tools/stage.jam
+++ /dev/null
@@ -1,524 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'install' rule, used to copy a set of targets to a
-# single location.
-
-import targets ;
-import "class" : new ;
-import errors ;
-import type ;
-import generators ;
-import feature ;
-import project ;
-import virtual-target ;
-import path ;
-import types/register ;
-
-
-feature.feature <install-dependencies> : off on : incidental ;
-feature.feature <install-type> : : free incidental ;
-feature.feature <install-source-root> : : free path ;
-feature.feature <so-version> : : free incidental ;
-
-# If 'on', version symlinks for shared libraries will not be created. Affects
-# Unix builds only.
-feature.feature <install-no-version-symlinks> : on : optional incidental ;
-
-
-class install-target-class : basic-target
-{
- import feature ;
- import project ;
- import type ;
- import errors ;
- import generators ;
- import path ;
- import stage ;
- import "class" : new ;
- import property ;
- import property-set ;
-
- rule __init__ ( name-and-dir : project : sources * : requirements * : default-build * )
- {
- basic-target.__init__ $(name-and-dir) : $(project) : $(sources) :
- $(requirements) : $(default-build) ;
- }
-
- # If <location> is not set, sets it based on the project data.
- #
- rule update-location ( property-set )
- {
- local loc = [ $(property-set).get <location> ] ;
- if ! $(loc)
- {
- loc = [ path.root $(self.name) [ $(self.project).get location ] ] ;
- property-set = [ $(property-set).add-raw $(loc:G=<location>) ] ;
- }
-
- return $(property-set) ;
- }
-
- # Takes a target that is installed and a property set which is used when
- # installing.
- #
- rule adjust-properties ( target : build-property-set )
- {
- local ps-raw ;
- local a = [ $(target).action ] ;
- if $(a)
- {
- local ps = [ $(a).properties ] ;
- ps-raw = [ $(ps).raw ] ;
-
- # Unless <hardcode-dll-paths>true is in properties, which can happen
- # only if the user has explicitly requested it, nuke all <dll-path>
- # properties.
- if [ $(build-property-set).get <hardcode-dll-paths> ] != true
- {
- ps-raw = [ property.change $(ps-raw) : <dll-path> ] ;
- }
-
- # If any <dll-path> properties were specified for installing, add
- # them.
- local l = [ $(build-property-set).get <dll-path> ] ;
- ps-raw += $(l:G=<dll-path>) ;
-
- # Also copy <linkflags> feature from current build set, to be used
- # for relinking.
- local l = [ $(build-property-set).get <linkflags> ] ;
- ps-raw += $(l:G=<linkflags>) ;
-
- # Remove the <tag> feature on original targets.
- ps-raw = [ property.change $(ps-raw) : <tag> ] ;
-
- # And <location>. If stage target has another stage target in
- # sources, then we shall get virtual targets with the <location>
- # property set.
- ps-raw = [ property.change $(ps-raw) : <location> ] ;
- }
-
- local d = [ $(build-property-set).get <dependency> ] ;
- ps-raw += $(d:G=<dependency>) ;
-
- local d = [ $(build-property-set).get <location> ] ;
- ps-raw += $(d:G=<location>) ;
-
- local ns = [ $(build-property-set).get <install-no-version-symlinks> ] ;
- ps-raw += $(ns:G=<install-no-version-symlinks>) ;
-
- local d = [ $(build-property-set).get <install-source-root> ] ;
- # Make the path absolute: we shall use it to compute relative paths and
- # making the path absolute will help.
- if $(d)
- {
- d = [ path.root $(d) [ path.pwd ] ] ;
- ps-raw += $(d:G=<install-source-root>) ;
- }
-
- if $(ps-raw)
- {
- return [ property-set.create $(ps-raw) ] ;
- }
- else
- {
- return [ property-set.empty ] ;
- }
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- source-targets = [ targets-to-stage $(source-targets) :
- $(property-set) ] ;
-
- property-set = [ update-location $(property-set) ] ;
-
- local ename = [ $(property-set).get <name> ] ;
-
- if $(ename) && $(source-targets[2])
- {
- errors.error "When <name> property is used in 'install', only one"
- "source is allowed" ;
- }
-
- local result ;
- for local i in $(source-targets)
- {
- local staged-targets ;
-
- local new-properties = [ adjust-properties $(i) :
- $(property-set) ] ;
-
- # See if something special should be done when staging this type. It
- # is indicated by the presence of a special "INSTALLED_" type.
- local t = [ $(i).type ] ;
- if $(t) && [ type.registered INSTALLED_$(t) ]
- {
- if $(ename)
- {
- errors.error "In 'install': <name> property specified with target that requires relinking." ;
- }
- else
- {
- local targets = [ generators.construct $(self.project)
- $(name) : INSTALLED_$(t) : $(new-properties) : $(i) ] ;
- staged-targets += $(targets[2-]) ;
- }
- }
- else
- {
- staged-targets = [ stage.copy-file $(self.project) $(ename) :
- $(i) : $(new-properties) ] ;
- }
-
- if ! $(staged-targets)
- {
- errors.error "Unable to generate staged version of " [ $(source).str ] ;
- }
-
- for t in $(staged-targets)
- {
- result += [ virtual-target.register $(t) ] ;
- }
- }
-
- return [ property-set.empty ] $(result) ;
- }
-
- # Given the list of source targets explicitly passed to 'stage', returns the
- # list of targets which must be staged.
- #
- rule targets-to-stage ( source-targets * : property-set )
- {
- local result ;
-
- # Traverse the dependencies, if needed.
- if [ $(property-set).get <install-dependencies> ] = "on"
- {
- source-targets = [ collect-targets $(source-targets) ] ;
- }
-
- # Filter the target types, if needed.
- local included-types = [ $(property-set).get <install-type> ] ;
- for local r in $(source-targets)
- {
- local ty = [ $(r).type ] ;
- if $(ty)
- {
- # Do not stage searched libs.
- if $(ty) != SEARCHED_LIB
- {
- if $(included-types)
- {
- if [ include-type $(ty) : $(included-types) ]
- {
- result += $(r) ;
- }
- }
- else
- {
- result += $(r) ;
- }
- }
- }
- else if ! $(included-types)
- {
- # Don't install typeless target if there is an explicit list of
- # allowed types.
- result += $(r) ;
- }
- }
-
- return $(result) ;
- }
-
- # CONSIDER: figure out why we can not use virtual-target.traverse here.
- #
- rule collect-targets ( targets * )
- {
- # Find subvariants
- local s ;
- for local t in $(targets)
- {
- s += [ $(t).creating-subvariant ] ;
- }
- s = [ sequence.unique $(s) ] ;
-
- local result = [ new set ] ;
- $(result).add $(targets) ;
-
- for local i in $(s)
- {
- $(i).all-referenced-targets $(result) ;
- }
- local result2 ;
- for local r in [ $(result).list ]
- {
- if $(r:G) != <use>
- {
- result2 += $(r:G=) ;
- }
- }
- DELETE_MODULE $(result) ;
- return [ sequence.unique $(result2) ] ;
- }
-
- # Returns true iff 'type' is subtype of some element of 'types-to-include'.
- #
- local rule include-type ( type : types-to-include * )
- {
- local found ;
- while $(types-to-include) && ! $(found)
- {
- if [ type.is-subtype $(type) $(types-to-include[1]) ]
- {
- found = true ;
- }
- types-to-include = $(types-to-include[2-]) ;
- }
-
- return $(found) ;
- }
-}
-
-
-# Creates a copy of target 'source'. The 'properties' object should have a
-# <location> property which specifies where the target must be placed.
-#
-rule copy-file ( project name ? : source : properties )
-{
- name ?= [ $(source).name ] ;
- local relative ;
-
- local new-a = [ new non-scanning-action $(source) : common.copy :
- $(properties) ] ;
- local source-root = [ $(properties).get <install-source-root> ] ;
- if $(source-root)
- {
- # Get the real path of the target. We probably need to strip relative
- # path from the target name at construction.
- local path = [ $(source).path ] ;
- path = [ path.root $(name:D) $(path) ] ;
- # Make the path absolute. Otherwise, it would be hard to compute the
- # relative path. The 'source-root' is already absolute, see the
- # 'adjust-properties' method above.
- path = [ path.root $(path) [ path.pwd ] ] ;
-
- relative = [ path.relative-to $(source-root) $(path) ] ;
- }
-
- # Note: Using $(name:D=$(relative)) might be faster here, but then we would
- # need to explicitly check that relative is not ".", otherwise we might get
- # paths like '<prefix>/boost/.', try to create it and mkdir would obviously
- # fail.
- name = [ path.join $(relative) $(name:D=) ] ;
-
- return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
- $(new-a) ] ;
-}
-
-
-rule symlink ( name : project : source : properties )
-{
- local a = [ new action $(source) : symlink.ln : $(properties) ] ;
- return [ new file-target $(name) exact : [ $(source).type ] : $(project) :
- $(a) ] ;
-}
-
-
-rule relink-file ( project : source : property-set )
-{
- local action = [ $(source).action ] ;
- local cloned-action = [ virtual-target.clone-action $(action) : $(project) :
- "" : $(property-set) ] ;
- return [ $(cloned-action).targets ] ;
-}
-
-
-# Declare installed version of the EXE type. Generator for this type will cause
-# relinking to the new location.
-type.register INSTALLED_EXE : : EXE ;
-
-
-class installed-exe-generator : generator
-{
- import type ;
- import property-set ;
- import modules ;
- import stage ;
-
- rule __init__ ( )
- {
- generator.__init__ install-exe : EXE : INSTALLED_EXE ;
- }
-
- rule run ( project name ? : property-set : source : multiple ? )
- {
- local need-relink ;
-
- if [ $(property-set).get <os> ] in NT CYGWIN ||
- [ $(property-set).get <target-os> ] in windows cygwin
- {
- }
- else
- {
-            # See whether the dll-path properties changed during install.
-            # If they did not, just copy; there is no need to relink.
- local a = [ $(source).action ] ;
- local p = [ $(a).properties ] ;
- local original = [ $(p).get <dll-path> ] ;
- local current = [ $(property-set).get <dll-path> ] ;
-
- if $(current) != $(original)
- {
- need-relink = true ;
- }
- }
-
-
- if $(need-relink)
- {
- return [ stage.relink-file $(project)
- : $(source) : $(property-set) ] ;
- }
- else
- {
- return [ stage.copy-file $(project)
- : $(source) : $(property-set) ] ;
- }
- }
-}
-
-
-generators.register [ new installed-exe-generator ] ;
-
-
-# Installing a shared library on Unix might cause the creation of versioned symbolic
-# links.
-type.register INSTALLED_SHARED_LIB : : SHARED_LIB ;
-
-
-class installed-shared-lib-generator : generator
-{
- import type ;
- import property-set ;
- import modules ;
- import stage ;
-
- rule __init__ ( )
- {
- generator.__init__ install-shared-lib : SHARED_LIB
- : INSTALLED_SHARED_LIB ;
- }
-
- rule run ( project name ? : property-set : source : multiple ? )
- {
- if [ $(property-set).get <os> ] in NT CYGWIN ||
- [ $(property-set).get <target-os> ] in windows cygwin
- {
- local copied = [ stage.copy-file $(project) : $(source) :
- $(property-set) ] ;
- return [ virtual-target.register $(copied) ] ;
- }
- else
- {
- local a = [ $(source).action ] ;
- local copied ;
- if ! $(a)
- {
- # Non-derived file, just copy.
- copied = [ stage.copy-file $(project) : $(source) :
- $(property-set) ] ;
- }
- else
- {
- local cp = [ $(a).properties ] ;
- local current-dll-path = [ $(cp).get <dll-path> ] ;
- local new-dll-path = [ $(property-set).get <dll-path> ] ;
-
- if $(current-dll-path) != $(new-dll-path)
- {
- # Rpath changed, need to relink.
- copied = [ stage.relink-file $(project) : $(source) :
- $(property-set) ] ;
- }
- else
- {
- copied = [ stage.copy-file $(project) : $(source) :
- $(property-set) ] ;
- }
- }
-
- copied = [ virtual-target.register $(copied) ] ;
-
- local result = $(copied) ;
- # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
- # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
- # symbolic links.
- local m = [ MATCH (.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$
- : [ $(copied).name ] ] ;
- if $(m)
- {
-                # A symlink without any version at all is used to make
- # -lsome_library work.
- result += [ stage.symlink $(m[1]) : $(project) : $(copied) :
- $(property-set) ] ;
-
-                # Symlinks to libfoo.N and libfoo.N.M are used so that the
-                # library can be found at runtime, if libfoo.N.M.X has a
-                # soname of libfoo.N. That happens when the library makes some binary
- # compatibility guarantees. If not, it is possible to skip those
- # symlinks.
- local suppress =
- [ $(property-set).get <install-no-version-symlinks> ] ;
-
- if $(suppress) != "on"
- {
- result += [ stage.symlink $(m[1]).$(m[2]) : $(project)
- : $(copied) : $(property-set) ] ;
- result += [ stage.symlink $(m[1]).$(m[2]).$(m[3]) : $(project)
- : $(copied) : $(property-set) ] ;
- }
- }
-
- return $(result) ;
- }
- }
-}
-
-generators.register [ new installed-shared-lib-generator ] ;
-
-
-# Main target rule for 'install'.
-#
-rule install ( name : sources * : requirements * : default-build * )
-{
- local project = [ project.current ] ;
-
- # Unless the user has explicitly asked us to hardcode dll paths, add
- # <hardcode-dll-paths>false in requirements, to override default value.
- if ! <hardcode-dll-paths>true in $(requirements)
- {
- requirements += <hardcode-dll-paths>false ;
- }
-
- if <tag> in $(requirements:G)
- {
- errors.user-error
- "The <tag> property is not allowed for the 'install' rule" ;
- }
-
- targets.main-target-alternative
- [ new install-target-class $(name) : $(project)
- : [ targets.main-target-sources $(sources) : $(name) ]
- : [ targets.main-target-requirements $(requirements) : $(project) ]
- : [ targets.main-target-default-build $(default-build) : $(project) ]
- ] ;
-}
-
-
-IMPORT $(__name__) : install : : install ;
-IMPORT $(__name__) : install : : stage ;
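
The comments in the installed-shared-lib generator above describe the symlink scheme for versioned library names: for a file called NNN.X.Y.Z, a link named NNN is always created, and NNN.X plus NNN.X.Y are added unless <install-no-version-symlinks>on is set. A minimal illustrative sketch of that naming logic in plain Python (version_symlinks is a hypothetical name, not Boost.Build API):

import re

def version_symlinks(filename, no_version_symlinks=False):
    m = re.match(r'(.*)\.(\d+)\.(\d+)\.(\d+)$', filename)
    if not m:
        return []
    base, major, minor = m.group(1), m.group(2), m.group(3)
    links = [base]                                     # makes -lsome_library work
    if not no_version_symlinks:
        links.append(base + '.' + major)               # soname-level link
        links.append(base + '.' + major + '.' + minor)
    return links

print(version_symlinks('libboost_system.so.1.57.0'))
# ['libboost_system.so', 'libboost_system.so.1', 'libboost_system.so.1.57']
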
diff --git a/tools/build/v2/tools/stage.py b/tools/build/v2/tools/stage.py
deleted file mode 100644
index 90d3c0f976..0000000000
--- a/tools/build/v2/tools/stage.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Status: ported.
-# Base revision 64444.
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2005, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006, 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module defines the 'install' rule, used to copy a set of targets to a
-# single location.
-
-import b2.build.feature as feature
-import b2.build.targets as targets
-import b2.build.property as property
-import b2.build.property_set as property_set
-import b2.build.generators as generators
-import b2.build.virtual_target as virtual_target
-
-from b2.manager import get_manager
-from b2.util.sequence import unique
-from b2.util import bjam_signature
-
-import b2.build.type
-
-import os.path
-import re
-import types
-
-feature.feature('install-dependencies', ['off', 'on'], ['incidental'])
-feature.feature('install-type', [], ['free', 'incidental'])
-feature.feature('install-source-root', [], ['free', 'path'])
-feature.feature('so-version', [], ['free', 'incidental'])
-
-# If 'on', version symlinks for shared libraries will not be created. Affects
-# Unix builds only.
-feature.feature('install-no-version-symlinks', ['on'], ['optional', 'incidental'])
-
-class InstallTargetClass(targets.BasicTarget):
-
- def update_location(self, ps):
- """If <location> is not set, sets it based on the project data."""
-
- loc = ps.get('location')
- if not loc:
- loc = os.path.join(self.project().get('location'), self.name())
- ps = ps.add_raw(["<location>" + loc])
-
- return ps
-
- def adjust_properties(self, target, build_ps):
- a = target.action()
- properties = []
- if a:
- ps = a.properties()
- properties = ps.all()
-
- # Unless <hardcode-dll-paths>true is in properties, which can happen
- # only if the user has explicitly requested it, nuke all <dll-path>
- # properties.
-
- if build_ps.get('hardcode-dll-paths') != ['true']:
- properties = [p for p in properties if p.feature().name() != 'dll-path']
-
- # If any <dll-path> properties were specified for installing, add
- # them.
- properties.extend(build_ps.get_properties('dll-path'))
-
- # Also copy <linkflags> feature from current build set, to be used
- # for relinking.
- properties.extend(build_ps.get_properties('linkflags'))
-
- # Remove the <tag> feature on original targets.
- # And <location>. If stage target has another stage target in
- # sources, then we shall get virtual targets with the <location>
- # property set.
- properties = [p for p in properties
- if not p.feature().name() in ['tag', 'location']]
-
- properties.extend(build_ps.get_properties('dependency'))
-
- properties.extend(build_ps.get_properties('location'))
-
-
- properties.extend(build_ps.get_properties('install-no-version-symlinks'))
-
- d = build_ps.get_properties('install-source-root')
-
- # Make the path absolute: we shall use it to compute relative paths and
- # making the path absolute will help.
- if d:
- p = d[0]
- properties.append(property.Property(p.feature(), os.path.abspath(p.value())))
-
- return property_set.create(properties)
-
-
- def construct(self, name, source_targets, ps):
-
- source_targets = self.targets_to_stage(source_targets, ps)
- ps = self.update_location(ps)
-
- ename = ps.get('name')
- if ename:
- ename = ename[0]
- if ename and len(source_targets) > 1:
- get_manager().errors()("When <name> property is used in 'install', only one source is allowed")
-
- result = []
-
- for i in source_targets:
-
- staged_targets = []
- new_ps = self.adjust_properties(i, ps)
-
- # See if something special should be done when staging this type. It
- # is indicated by the presence of a special "INSTALLED_" type.
- t = i.type()
- if t and b2.build.type.registered("INSTALLED_" + t):
-
- if ename:
- get_manager().errors()("In 'install': <name> property specified with target that requires relinking.")
- else:
- (r, targets) = generators.construct(self.project(), name, "INSTALLED_" + t,
- new_ps, [i])
- assert isinstance(r, property_set.PropertySet)
- staged_targets.extend(targets)
-
- else:
- staged_targets.append(copy_file(self.project(), ename, i, new_ps))
-
- if not staged_targets:
- get_manager().errors()("Unable to generate staged version of " + i)
-
- result.extend(get_manager().virtual_targets().register(t) for t in staged_targets)
-
- return (property_set.empty(), result)
-
- def targets_to_stage(self, source_targets, ps):
- """Given the list of source targets explicitly passed to 'stage', returns the
- list of targets which must be staged."""
-
- result = []
-
- # Traverse the dependencies, if needed.
- if ps.get('install-dependencies') == ['on']:
- source_targets = self.collect_targets(source_targets)
-
- # Filter the target types, if needed.
- included_types = ps.get('install-type')
- for r in source_targets:
- ty = r.type()
- if ty:
- # Do not stage searched libs.
- if ty != "SEARCHED_LIB":
- if included_types:
- if self.include_type(ty, included_types):
- result.append(r)
- else:
- result.append(r)
- elif not included_types:
- # Don't install typeless target if there is an explicit list of
- # allowed types.
- result.append(r)
-
- return result
-
- # CONSIDER: figure out why we can not use virtual-target.traverse here.
- #
- def collect_targets(self, targets):
-
- s = [t.creating_subvariant() for t in targets]
- s = unique(s)
-
- result = set(targets)
- for i in s:
- i.all_referenced_targets(result)
-
- result2 = []
- for r in result:
- if isinstance(r, property.Property):
-
- if r.feature().name() != 'use':
- result2.append(r.value())
- else:
- result2.append(r)
- result2 = unique(result2)
- return result2
-
- # Returns true iff 'type' is subtype of some element of 'types-to-include'.
- #
- def include_type(self, type, types_to_include):
- return any(b2.build.type.is_subtype(type, ti) for ti in types_to_include)
-
-# Creates a copy of target 'source'. The 'properties' object should have a
-# <location> property which specifies where the target must be placed.
-#
-def copy_file(project, name, source, ps):
-
- if not name:
- name = source.name()
-
- relative = ""
-
- new_a = virtual_target.NonScanningAction([source], "common.copy", ps)
- source_root = ps.get('install-source-root')
- if source_root:
- source_root = source_root[0]
- # Get the real path of the target. We probably need to strip relative
- # path from the target name at construction.
- path = os.path.join(source.path(), os.path.dirname(name))
- # Make the path absolute. Otherwise, it would be hard to compute the
- # relative path. The 'source-root' is already absolute, see the
- # 'adjust-properties' method above.
- path = os.path.abspath(path)
-
- relative = os.path.relpath(path, source_root)
-
- name = os.path.join(relative, os.path.basename(name))
- return virtual_target.FileTarget(name, source.type(), project, new_a, exact=True)
-
-def symlink(name, project, source, ps):
- a = virtual_target.Action([source], "symlink.ln", ps)
- return virtual_target.FileTarget(name, source.type(), project, a, exact=True)
-
-def relink_file(project, source, ps):
- action = source[0].action()
- cloned_action = virtual_target.clone_action(action, project, "", ps)
- targets = cloned_action.targets()
- # We relink only on Unix, where exe or shared lib is always a single file.
- assert len(targets) == 1
- return targets[0]
-
-
-# Declare installed version of the EXE type. Generator for this type will cause
-# relinking to the new location.
-b2.build.type.register('INSTALLED_EXE', [], 'EXE')
-
-class InstalledExeGenerator(generators.Generator):
-
- def __init__(self):
- generators.Generator.__init__(self, "install-exe", False, ['EXE'], ['INSTALLED_EXE'])
-
- def run(self, project, name, ps, source):
-
- need_relink = False;
-
- if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
- # Never relink
- pass
- else:
-            # See whether the dll-path properties changed during install.
-            # If they did not, just copy; there is no need to relink.
- need_relink = ps.get('dll-path') != source[0].action().properties().get('dll-path')
-
- if need_relink:
- return [relink_file(project, source, ps)]
- else:
- return [copy_file(project, None, source[0], ps)]
-
-generators.register(InstalledExeGenerator())
-
-
-# Installing a shared library on Unix might cause the creation of versioned symbolic
-# links.
-b2.build.type.register('INSTALLED_SHARED_LIB', [], 'SHARED_LIB')
-
-class InstalledSharedLibGenerator(generators.Generator):
-
- def __init__(self):
- generators.Generator.__init__(self, 'install-shared-lib', False, ['SHARED_LIB'], ['INSTALLED_SHARED_LIB'])
-
- def run(self, project, name, ps, source):
-
- source = source[0]
- if ps.get('os') in ['NT', 'CYGWIN'] or ps.get('target-os') in ['windows', 'cygwin']:
- copied = copy_file(project, None, source, ps)
- return [get_manager().virtual_targets().register(copied)]
- else:
- a = source.action()
- if not a:
- # Non-derived file, just copy.
- copied = copy_file(project, source, ps)
- else:
-
- need_relink = ps.get('dll-path') != source.action().properties().get('dll-path')
-
- if need_relink:
- # Rpath changed, need to relink.
- copied = relink_file(project, source, ps)
- else:
- copied = copy_file(project, None, source, ps)
-
- result = [get_manager().virtual_targets().register(copied)]
- # If the name is in the form NNN.XXX.YYY.ZZZ, where all 'X', 'Y' and
- # 'Z' are numbers, we need to create NNN.XXX and NNN.XXX.YYY
- # symbolic links.
- m = re.match("(.*)\\.([0123456789]+)\\.([0123456789]+)\\.([0123456789]+)$",
- copied.name());
- if m:
-                # A symlink without any version at all is used to make
- # -lsome_library work.
- result.append(symlink(m.group(1), project, copied, ps))
-
-                # Symlinks to libfoo.N and libfoo.N.M are used so that the
-                # library can be found at runtime, if libfoo.N.M.X has a
-                # soname of libfoo.N. That happens when the library makes some binary
- # compatibility guarantees. If not, it is possible to skip those
- # symlinks.
- if ps.get('install-no-version-symlinks') != ['on']:
-
- result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps))
- result.append(symlink(m.group(1) + '.' + m.group(2) + '.' + m.group(3),
- project, copied, ps))
-
- return result
-
-generators.register(InstalledSharedLibGenerator())
-
-
-# Main target rule for 'install'.
-#
-@bjam_signature((["name"], ["sources", "*"], ["requirements", "*"],
- ["default_build", "*"], ["usage_requirements", "*"]))
-def install(name, sources, requirements=[], default_build=[], usage_requirements=[]):
-
- requirements = requirements[:]
- # Unless the user has explicitly asked us to hardcode dll paths, add
- # <hardcode-dll-paths>false in requirements, to override default value.
- if not '<hardcode-dll-paths>true' in requirements:
- requirements.append('<hardcode-dll-paths>false')
-
- if any(r.startswith('<tag>') for r in requirements):
- get_manager().errors()("The <tag> property is not allowed for the 'install' rule")
-
- from b2.manager import get_manager
- t = get_manager().targets()
-
- project = get_manager().projects().current()
-
- return t.main_target_alternative(
- InstallTargetClass(name, project,
- t.main_target_sources(sources, name),
- t.main_target_requirements(requirements, project),
- t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
-
-get_manager().projects().add_rule("install", install)
-get_manager().projects().add_rule("stage", install)
-
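
When <install-source-root> is given, the copy_file function above keeps each file's position relative to that root instead of flattening everything into the install directory. A rough standalone sketch of the path computation (plain Python; staged_name and the example paths are hypothetical):

import os

def staged_name(source_dir, name, source_root):
    # Make both sides absolute so the relative path can be computed reliably,
    # mirroring what adjust_properties does with <install-source-root>.
    path = os.path.abspath(os.path.join(source_dir, os.path.dirname(name)))
    source_root = os.path.abspath(source_root)
    relative = os.path.relpath(path, source_root)
    return os.path.join(relative, os.path.basename(name))

print(staged_name('libs/config/include/boost', 'config.hpp', 'libs/config/include'))
# boost/config.hpp on POSIX (boost\config.hpp on Windows)
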
diff --git a/tools/build/v2/tools/stlport.jam b/tools/build/v2/tools/stlport.jam
deleted file mode 100644
index 62eebda5f8..0000000000
--- a/tools/build/v2/tools/stlport.jam
+++ /dev/null
@@ -1,303 +0,0 @@
-# Copyright Gennadiy Rozental
-# Copyright 2006 Rene Rivera
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# STLPort is made usable by means of the 'stdlib' feature. When
-# stdlib=stlport is specified, the default version of STLPort will be used,
-# while stdlib=stlport-4.5 will use a specific version.
-# The subfeature value 'hostios' means to use host compiler's iostreams.
-#
-# The specific version of stlport is selected by features:
-# The <runtime-link> feature selects between static and shared library
-# The <runtime-debugging>on selects STLPort with debug symbols
-# and stl debugging.
-# There's no way to use STLPort with debug symbols but without
-# stl debugging.
-
-# TODO: must implement selection of different STLPort installations based
-# on used toolset.
-# Also, finish various flags:
-#
-# This is copied from V1 toolset, "+" means "implemented"
-#+flags $(CURR_TOOLSET) DEFINES <stlport-iostream>off : _STLP_NO_OWN_IOSTREAMS=1 _STLP_HAS_NO_NEW_IOSTREAMS=1 ;
-#+flags $(CURR_TOOLSET) DEFINES <stlport-extensions>off : _STLP_NO_EXTENSIONS=1 ;
-# flags $(CURR_TOOLSET) DEFINES <stlport-anachronisms>off : _STLP_NO_ANACHRONISMS=1 ;
-# flags $(CURR_TOOLSET) DEFINES <stlport-cstd-namespace>global : _STLP_VENDOR_GLOBAL_CSTD=1 ;
-# flags $(CURR_TOOLSET) DEFINES <exception-handling>off : _STLP_NO_EXCEPTIONS=1 ;
-# flags $(CURR_TOOLSET) DEFINES <stlport-debug-alloc>on : _STLP_DEBUG_ALLOC=1 ;
-#+flags $(CURR_TOOLSET) DEFINES <runtime-build>debug : _STLP_DEBUG=1 _STLP_DEBUG_UNINITIALIZED=1 ;
-#+flags $(CURR_TOOLSET) DEFINES <runtime-link>dynamic : _STLP_USE_DYNAMIC_LIB=1 ;
-
-
-import feature : feature subfeature ;
-import project ;
-import "class" : new ;
-import targets ;
-import property-set ;
-import common ;
-import type ;
-
-# Make this module into a project.
-project.initialize $(__name__) ;
-project stlport ;
-
-# The problem: how to request to use host compiler's iostreams?
-#
-# Solution 1: Global 'stlport-iostream' feature.
-# That's ugly. A subfeature makes more sense for an stlport-specific thing.
-# Solution 2: Use subfeature with two values, one of which ("use STLPort iostream")
-# is default.
-# The problem is that such a subfeature will appear in target paths, and that's ugly.
-# Solution 3: Use optional subfeature with only one value.
-
-feature.extend stdlib : stlport ;
-feature.compose <stdlib>stlport : <library>/stlport//stlport ;
-
-# STLport iostreams or native iostreams
-subfeature stdlib stlport : iostream : hostios : optional propagated ;
-
-# STLport extensions
-subfeature stdlib stlport : extensions : noext : optional propagated ;
-
-# STLport anachronisms -- NOT YET SUPPORTED
-# subfeature stdlib stlport : anachronisms : on off ;
-
-# STLport debug allocation -- NOT YET SUPPORTED
-#subfeature stdlib stlport : debug-alloc : off on ;
-
-# Declare a special target class to handle the creation of search-lib-target
-# instances for STLport. We need a special class, because otherwise we would have to:
-# - declare prebuilt targets for all possible toolsets. And by the time 'init'
-# is called we don't even know the list of toolsets that are registered
-# - when host iostreams are used, we really should produce nothing. It would
-# be hard/impossible to achieve this using prebuilt targets.
-
-class stlport-target-class : basic-target
-{
- import feature project type errors generators ;
- import set : difference ;
-
- rule __init__ ( project : headers ? : libraries * : version ? )
- {
- basic-target.__init__ stlport : $(project) ;
- self.headers = $(headers) ;
- self.libraries = $(libraries) ;
- self.version = $(version) ;
- self.version.5 = [ MATCH "^(5[.][0123456789]+).*" : $(version) ] ;
-
- local requirements ;
- requirements += <stdlib-stlport:version>$(self.version) ;
- self.requirements = [ property-set.create $(requirements) ] ;
- }
-
- rule generate ( property-set )
- {
- # Since this target is built with <stdlib>stlport, it will also
- # have <library>/stlport//stlport in requirements, which will
- # cause a loop in main target references. Remove that property
- # manually.
-
- property-set = [ property-set.create
- [ difference
- [ $(property-set).raw ] :
- <library>/stlport//stlport
- <stdlib>stlport
- ]
- ] ;
- return [ basic-target.generate $(property-set) ] ;
- }
-
- rule construct ( name : source-targets * : property-set )
- {
- # Deduce the name of stlport library, based on toolset and
- # debug setting.
- local raw = [ $(property-set).raw ] ;
- local hostios = [ feature.get-values <stdlib-stlport:iostream> : $(raw) ] ;
- local toolset = [ feature.get-values <toolset> : $(raw) ] ;
-
- if $(self.version.5)
- {
- # Version 5.x
-
-            # STLport host IO streams are no longer supported, so we always
- # need libraries.
-
- # name: stlport(stl)?[dg]?(_static)?.M.R
- local name = stlport ;
- if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
- {
- name += stl ;
- switch $(toolset)
- {
- case gcc* : name += g ;
- case darwin* : name += g ;
- case * : name += d ;
- }
- }
-
- if [ feature.get-values <runtime-link> : $(raw) ] = "static"
- {
- name += _static ;
- }
-
- # Starting with version 5.2.0, the STLport static libraries no longer
- # include a version number in their name
- local version.pre.5.2 = [ MATCH "^(5[.][01]+).*" : $(version) ] ;
- if $(version.pre.5.2) || [ feature.get-values <runtime-link> : $(raw) ] != "static"
- {
- name += .$(self.version.5) ;
- }
-
- name = $(name:J=) ;
-
- if [ feature.get-values <install-dependencies> : $(raw) ] = "on"
- {
- #~ Allow explicitly asking to install the STLport lib by
-                #~ referring to it directly: /stlport//stlport/<install-dependencies>on
- #~ This allows for install packaging of all libs one might need for
- #~ a standalone distribution.
- import path : make : path-make ;
- local runtime-link
- = [ feature.get-values <runtime-link> : $(raw) ] ;
- local lib-file.props
- = [ property-set.create $(raw) <link>$(runtime-link) ] ;
- local lib-file.prefix
- = [ type.generated-target-prefix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
- local lib-file.suffix
- = [ type.generated-target-suffix $(runtime-link:U)_LIB : $(lib-file.props) ] ;
- lib-file.prefix
- ?= "" "lib" ;
- lib-file.suffix
- ?= "" ;
- local lib-file
- = [ GLOB $(self.libraries) [ modules.peek : PATH ] :
- $(lib-file.prefix)$(name).$(lib-file.suffix) ] ;
- lib-file
- = [ new file-reference [ path-make $(lib-file[1]) ] : $(self.project) ] ;
- lib-file
- = [ $(lib-file).generate "" ] ;
- local lib-file.requirements
- = [ targets.main-target-requirements
- [ $(lib-file.props).raw ] <file>$(lib-file[-1])
- : $(self.project) ] ;
- return [ generators.construct $(self.project) $(name) : LIB : $(lib-file.requirements) ] ;
- }
- else
- {
- #~ Otherwise, it's just a regular usage of the library.
- return [ generators.construct
- $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
- }
- }
- else if ! $(hostios) && $(toolset) != msvc
- {
-            # We don't need libraries if host iostreams are used. For
- # msvc, automatic library selection will be used.
-
- # name: stlport_<toolset>(_stldebug)?
- local name = stlport ;
- name = $(name)_$(toolset) ;
- if [ feature.get-values <runtime-debugging> : $(raw) ] = "on"
- {
- name = $(name)_stldebug ;
- }
-
- return [ generators.construct
- $(self.project) $(name) : SEARCHED_LIB : $(property-set) ] ;
- }
- else
- {
- return [ property-set.empty ] ;
- }
- }
-
- rule compute-usage-requirements ( subvariant )
- {
- local usage-requirements =
- <include>$(self.headers)
- <dll-path>$(self.libraries)
- <library-path>$(self.libraries)
- ;
-
- local rproperties = [ $(subvariant).build-properties ] ;
- # CONSIDER: should this "if" sequence be replaced with
- # some use of 'property-map' class?
- if [ $(rproperties).get <runtime-debugging> ] = "on"
- {
- usage-requirements +=
- <define>_STLP_DEBUG=1
- <define>_STLP_DEBUG_UNINITIALIZED=1 ;
- }
- if [ $(rproperties).get <runtime-link> ] = "shared"
- {
- usage-requirements +=
- <define>_STLP_USE_DYNAMIC_LIB=1 ;
- }
- if [ $(rproperties).get <stdlib-stlport:extensions> ] = noext
- {
- usage-requirements +=
- <define>_STLP_NO_EXTENSIONS=1 ;
- }
- if [ $(rproperties).get <stdlib-stlport:iostream> ] = hostios
- {
- usage-requirements +=
- <define>_STLP_NO_OWN_IOSTREAMS=1
- <define>_STLP_HAS_NO_NEW_IOSTREAMS=1 ;
- }
- if $(self.version.5)
- {
- # Version 5.x
- if [ $(rproperties).get <threading> ] = "single"
- {
-                # Since STLport5 doesn't normally support single-threaded
-                # builds, we force STLport5 into multi-threaded mode. As a
-                # result, single-threaded code provided by other libs ends
-                # up linking against a multi-threaded lib.
- usage-requirements +=
- <define>_STLP_THREADS=1 ;
- }
- }
-
- return [ property-set.create $(usage-requirements) ] ;
- }
-}
-
-rule stlport-target ( headers ? : libraries * : version ? )
-{
- local project = [ project.current ] ;
-
- targets.main-target-alternative
- [ new stlport-target-class $(project) : $(headers) : $(libraries)
- : $(version)
- ] ;
-}
-
-local .version-subfeature-defined ;
-
-# Initialize stlport support.
-rule init (
- version ? :
- headers : # Location of header files
- libraries * # Location of libraries, lib and bin subdirs of STLport.
- )
-{
- # FIXME: need to use common.check-init-parameters here.
- # At the moment, that rule always tries to define subfeature
- # of the 'toolset' feature, while we need to define subfeature
- # of <stdlib>stlport, so tweaks to check-init-parameters are needed.
- if $(version)
- {
- if ! $(.version-subfeature-defined)
- {
- feature.subfeature stdlib stlport : version : : propagated ;
- .version-subfeature-defined = true ;
- }
- feature.extend-subfeature stdlib stlport : version : $(version) ;
- }
-
- # Declare the main target for this STLPort version.
- stlport-target $(headers) : $(libraries) : $(version) ;
-}
-
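
The 'construct' rule above deduces the STLport 5.x library name from the toolset, <runtime-debugging>, <runtime-link> and version features: stlport, an optional stlg/stld debug suffix, an optional _static suffix, and a .M.R version suffix that is dropped for static libraries from 5.2.0 on. A small illustrative sketch of that scheme (plain Python; stlport_lib_name and its parameters are hypothetical, and version is assumed to be the major.minor part already):

def stlport_lib_name(version, toolset, runtime_debugging, runtime_link):
    name = 'stlport'
    if runtime_debugging == 'on':
        # gcc and darwin use the 'g' debug suffix, everything else uses 'd'.
        name += 'stlg' if toolset.startswith(('gcc', 'darwin')) else 'stld'
    if runtime_link == 'static':
        name += '_static'
    pre_5_2 = version.startswith(('5.0', '5.1'))
    if pre_5_2 or runtime_link != 'static':
        name += '.' + version
    return name

print(stlport_lib_name('5.2', 'gcc', 'on', 'shared'))    # stlportstlg.5.2
print(stlport_lib_name('5.2', 'msvc', 'off', 'static'))  # stlport_static
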
diff --git a/tools/build/v2/tools/testing-aux.jam b/tools/build/v2/tools/testing-aux.jam
deleted file mode 100644
index 525dafd0cb..0000000000
--- a/tools/build/v2/tools/testing-aux.jam
+++ /dev/null
@@ -1,210 +0,0 @@
-# This module is imported by testing.py. The definitions here are
-# too tricky to do in Python
-
-# Causes the 'target' to exist after bjam invocation if and only if all the
-# dependencies were successfully built.
-#
-rule expect-success ( target : dependency + : requirements * )
-{
- **passed** $(target) : $(sources) ;
-}
-IMPORT testing : expect-success : : testing.expect-success ;
-
-# Causes the 'target' to exist after bjam invocation if and only if some of
-# the dependencies were not successfully built.
-#
-rule expect-failure ( target : dependency + : properties * )
-{
- local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
- local marker = $(dependency:G=$(grist)*fail) ;
- (failed-as-expected) $(marker) ;
- FAIL_EXPECTED $(dependency) ;
- LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
- RMOLD $(marker) ;
- DEPENDS $(marker) : $(dependency) ;
- DEPENDS $(target) : $(marker) ;
- **passed** $(target) : $(marker) ;
-}
-IMPORT testing : expect-failure : : testing.expect-failure ;
-
-# The rule/action combination used to report successful passing of a test.
-#
-rule **passed**
-{
- # Force deletion of the target, in case any dependencies failed to build.
- RMOLD $(<) ;
-}
-
-
-# Used to create test files signifying passed tests.
-#
-actions **passed**
-{
- echo passed > "$(<)"
-}
-
-
-# Used to create replacement object files that do not get created during tests
-# that are expected to fail.
-#
-actions (failed-as-expected)
-{
- echo failed as expected > "$(<)"
-}
-
-# Runs executable 'sources' and stores stdout in file 'target'. Unless
-# --preserve-test-targets command line option has been specified, removes the
-# executable. The 'targets-to-remove' parameter controls what should be removed:
-# - if 'none', does not remove anything, ever
-# - if empty, removes 'source'
-# - if non-empty and not 'none', contains a list of sources to remove.
-#
-rule capture-output ( target : source : properties * : targets-to-remove * )
-{
- output-file on $(target) = $(target:S=.output) ;
- LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
-
- # The INCLUDES kill a warning about independent target...
- INCLUDES $(target) : $(target:S=.output) ;
- # but it also puts .output into dependency graph, so we must tell jam it is
- # OK if it cannot find the target or updating rule.
- NOCARE $(target:S=.output) ;
-
-    # This has a two-fold effect. First, it adds input files to the dependency
- # graph, preventing a warning. Second, it causes input files to be bound
- # before target is created. Therefore, they are bound using SEARCH setting
- # on them and not LOCATE setting of $(target), as in other case (due to jam
- # bug).
- DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
-
- if $(targets-to-remove) = none
- {
- targets-to-remove = ;
- }
- else if ! $(targets-to-remove)
- {
- targets-to-remove = $(source) ;
- }
-
- if [ on $(target) return $(REMOVE_TEST_TARGETS) ]
- {
- TEMPORARY $(targets-to-remove) ;
- # Set a second action on target that will be executed after capture
- # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
- # always considered succeeded. This is needed for 'run-fail' test. For
- # that test the target will be marked with FAIL_EXPECTED, and without
- # 'ignore' successful execution will be negated and be reported as
- # failure. With 'ignore' we do not detect a case where removing files
- # fails, but it is not likely to happen.
- RmTemps $(target) : $(targets-to-remove) ;
- }
-}
-
-
-if [ os.name ] = NT
-{
- .STATUS = %status% ;
- .SET_STATUS = "set status=%ERRORLEVEL%" ;
- .RUN_OUTPUT_NL = "echo." ;
- .STATUS_0 = "%status% EQU 0 (" ;
- .STATUS_NOT_0 = "%status% NEQ 0 (" ;
- .VERBOSE = "%verbose% EQU 1 (" ;
- .ENDIF = ")" ;
- .SHELL_SET = "set " ;
- .CATENATE = type ;
- .CP = copy ;
-}
-else
-{
- .STATUS = "$status" ;
- .SET_STATUS = "status=$?" ;
- .RUN_OUTPUT_NL = "echo" ;
- .STATUS_0 = "test $status -eq 0 ; then" ;
- .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
- .VERBOSE = "test $verbose -eq 1 ; then" ;
- .ENDIF = "fi" ;
- .SHELL_SET = "" ;
- .CATENATE = cat ;
- .CP = cp ;
-}
-
-
-.VERBOSE_TEST = 0 ;
-if --verbose-test in [ modules.peek : ARGV ]
-{
- .VERBOSE_TEST = 1 ;
-}
-
-
-.RM = [ common.rm-command ] ;
-
-
-actions capture-output bind INPUT_FILES output-file
-{
- $(PATH_SETUP)
- $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
- $(.SET_STATUS)
- $(.RUN_OUTPUT_NL) >> "$(output-file)"
- echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
- if $(.STATUS_0)
- $(.CP) "$(output-file)" "$(<)"
- $(.ENDIF)
- $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
- if $(.STATUS_NOT_0)
- $(.SHELL_SET)verbose=1
- $(.ENDIF)
- if $(.VERBOSE)
- echo ====== BEGIN OUTPUT ======
- $(.CATENATE) "$(output-file)"
- echo ====== END OUTPUT ======
- $(.ENDIF)
- exit $(.STATUS)
-}
-
-IMPORT testing : capture-output : : testing.capture-output ;
-
-
-actions quietly updated ignore piecemeal together RmTemps
-{
- $(.RM) "$(>)"
-}
-
-
-.MAKE_FILE = [ common.file-creation-command ] ;
-
-actions unit-test
-{
- $(PATH_SETUP)
- $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
-}
-
-rule record-time ( target : source : start end user system )
-{
- local src-string = [$(source:G=:J=",")"] " ;
- USER_TIME on $(target) += $(src-string)$(user) ;
- SYSTEM_TIME on $(target) += $(src-string)$(system) ;
-}
-
-# Calling this rule requests that Boost Build time how long it takes to build the
-# 'source' target and display the results both on the standard output and in the
-# 'target' file.
-#
-rule time ( target : source : properties * )
-{
- # Set up rule for recording timing information.
- __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
-
- # Make sure that the source is rebuilt any time we need to retrieve that
- # information.
- REBUILDS $(target) : $(source) ;
-}
-
-
-actions time
-{
- echo user: $(USER_TIME)
- echo system: $(SYSTEM_TIME)
-
- echo user: $(USER_TIME)" seconds" > "$(<)"
- echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
-}
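
The portable capture-output action above is spelled out twice, once in cmd.exe idioms and once in POSIX shell idioms, but both variants do the same thing: run the test binary, keep stdout, stderr and the exit status in a .output file, and create the .test target only when the run succeeded. A rough sketch of that behaviour in plain Python (capture_output and its parameters are hypothetical; LAUNCHER and PATH_SETUP handling is omitted):

import shutil
import subprocess

def capture_output(target, exe, args=(), input_files=(), verbose=False):
    output_file = target + '.output'
    with open(output_file, 'w') as out:
        status = subprocess.call([exe, *args, *input_files],
                                 stdout=out, stderr=subprocess.STDOUT)
        out.write('\nEXIT STATUS: %d\n' % status)
    if status == 0:
        shutil.copyfile(output_file, target)   # presence of 'target' == pass
    if verbose or status != 0:
        with open(output_file) as f:
            print(f.read())
    return status
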
diff --git a/tools/build/v2/tools/testing.jam b/tools/build/v2/tools/testing.jam
deleted file mode 100644
index c42075b78a..0000000000
--- a/tools/build/v2/tools/testing.jam
+++ /dev/null
@@ -1,581 +0,0 @@
-# Copyright 2005 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module implements regression testing framework. It declares a number of
-# main target rules which perform some action and, if the results are OK,
-# creates an output file.
-#
-# The exact list of rules is:
-# 'compile' -- creates .test file if compilation of sources was
-# successful.
-# 'compile-fail' -- creates .test file if compilation of sources failed.
-#   'run'           -- creates .test file if running of executable produced from
-# sources was successful. Also leaves behind .output file
-# with the output from program run.
-# 'run-fail' -- same as above, but .test file is created if running fails.
-#
-# In all cases, presence of .test file is an indication that the test passed.
-# For more convenient reporting, you might want to use C++ Boost regression
-# testing utilities (see http://www.boost.org/more/regression.html).
-#
-# For historical reasons, a 'unit-test' rule is available which has the same
-# syntax as 'exe' and behaves just like 'run'.
-
-# Things to do:
-#  - Teach compiler_status to handle Jamfile.v2.
-# Notes:
-# - <no-warn> is not implemented, since it is Como-specific, and it is not
-# clear how to implement it
-# - std::locale-support is not implemented (it is used in one test).
-
-
-import alias ;
-import "class" ;
-import common ;
-import errors ;
-import feature ;
-import generators ;
-import os ;
-import path ;
-import project ;
-import property ;
-import property-set ;
-import regex ;
-import sequence ;
-import targets ;
-import toolset ;
-import type ;
-import virtual-target ;
-
-
-rule init ( )
-{
-}
-
-
-# Feature controlling the command used to launch test programs.
-feature.feature testing.launcher : : free optional ;
-
-feature.feature test-info : : free incidental ;
-feature.feature testing.arg : : free incidental ;
-feature.feature testing.input-file : : free dependency ;
-
-feature.feature preserve-test-targets : on off : incidental propagated ;
-
-# Register target types.
-type.register TEST : test ;
-type.register COMPILE : : TEST ;
-type.register COMPILE_FAIL : : TEST ;
-type.register RUN_OUTPUT : run ;
-type.register RUN : : TEST ;
-type.register RUN_FAIL : : TEST ;
-type.register LINK_FAIL : : TEST ;
-type.register LINK : : TEST ;
-type.register UNIT_TEST : passed : TEST ;
-
-
-# Declare the rules which create main targets. While the 'type' module already
-# creates rules with the same names for us, we need extra convenience: default
-# name of main target, so write our own versions.
-
-# Helper rule. Create a test target, using basename of first source if no target
-# name is explicitly passed. Remembers the created target in a global variable.
-#
-rule make-test ( target-type : sources + : requirements * : target-name ? )
-{
- target-name ?= $(sources[1]:D=:S=) ;
-
- # Having periods (".") in the target name is problematic because the typed
- # generator will strip the suffix and use the bare name for the file
- # targets. Even though the location-prefix averts problems most times it
- # does not prevent ambiguity issues when referring to the test targets. For
- # example when using the XML log output. So we rename the target to remove
- # the periods, and provide an alias for users.
- local real-name = [ regex.replace $(target-name) "[.]" "~" ] ;
-
- local project = [ project.current ] ;
-    # The <location-prefix> forces the build system to generate paths in the
- # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
- # post-processing tools to work.
- local t = [ targets.create-typed-target [ type.type-from-rule-name
- $(target-type) ] : $(project) : $(real-name) : $(sources) :
- $(requirements) <location-prefix>$(real-name).test ] ;
-
- # The alias to the real target, per period replacement above.
- if $(real-name) != $(target-name)
- {
- alias $(target-name) : $(t) ;
- }
-
- # Remember the test (for --dump-tests). A good way would be to collect all
-    # tests for a given project. This has some technical problems: e.g. we cannot call
- # this dump from a Jamfile since projects referred by 'build-project' are
- # not available until the whole Jamfile has been loaded.
- .all-tests += $(t) ;
- return $(t) ;
-}
-
-
-# Note: passing more than one cpp file here is known to fail. Passing a cpp file
-# and a library target works.
-#
-rule compile ( sources + : requirements * : target-name ? )
-{
- return [ make-test compile : $(sources) : $(requirements) : $(target-name) ]
- ;
-}
-
-
-rule compile-fail ( sources + : requirements * : target-name ? )
-{
- return [ make-test compile-fail : $(sources) : $(requirements) :
- $(target-name) ] ;
-}
-
-
-rule link ( sources + : requirements * : target-name ? )
-{
- return [ make-test link : $(sources) : $(requirements) : $(target-name) ] ;
-}
-
-
-rule link-fail ( sources + : requirements * : target-name ? )
-{
- return [ make-test link-fail : $(sources) : $(requirements) : $(target-name)
- ] ;
-}
-
-
-rule handle-input-files ( input-files * )
-{
- if $(input-files[2])
- {
- # Check that sorting made when creating property-set instance will not
- # change the ordering.
- if [ sequence.insertion-sort $(input-files) ] != $(input-files)
- {
- errors.user-error "Names of input files must be sorted alphabetically"
- : "due to internal limitations" ;
- }
- }
- return <testing.input-file>$(input-files) ;
-}
-
-
-rule run ( sources + : args * : input-files * : requirements * : target-name ? :
- default-build * )
-{
- requirements += <testing.arg>$(args:J=" ") ;
- requirements += [ handle-input-files $(input-files) ] ;
- return [ make-test run : $(sources) : $(requirements) : $(target-name) ] ;
-}
-
-
-rule run-fail ( sources + : args * : input-files * : requirements * :
- target-name ? : default-build * )
-{
- requirements += <testing.arg>$(args:J=" ") ;
- requirements += [ handle-input-files $(input-files) ] ;
- return [ make-test run-fail : $(sources) : $(requirements) : $(target-name)
- ] ;
-}
-
-
-# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
-IMPORT : alias : : test-suite ;
-
-
-# For all main targets in 'project-module', which are typed targets with type
-# derived from 'TEST', produce some interesting information.
-#
-rule dump-tests
-{
- for local t in $(.all-tests)
- {
- dump-test $(t) ;
- }
-}
-
-
-# Given a project location in normalized form (slashes are forward), compute the
-# name of the Boost library.
-#
-local rule get-library-name ( path )
-{
- # Path is in normalized form, so all slashes are forward.
- local match1 = [ MATCH /(tools|libs)/(.*)/(test|example) : $(path) ] ;
- local match2 = [ MATCH /(tools|libs)/(.*)$ : $(path) ] ;
- local match3 = [ MATCH (/status$) : $(path) ] ;
-
- if $(match1) { return $(match1[2]) ; }
- else if $(match2) { return $(match2[2]) ; }
- else if $(match3) { return "" ; }
- else if --dump-tests in [ modules.peek : ARGV ]
- {
- # The 'run' rule and others might be used outside boost. In that case,
- # just return the path, since the 'library name' makes no sense.
- return $(path) ;
- }
-}
-
-
-# Was an XML dump requested?
-.out-xml = [ MATCH --out-xml=(.*) : [ modules.peek : ARGV ] ] ;
-
-
-# Takes a target (instance of 'basic-target') and prints
-# - its type
-# - its name
-# - comments specified via the <test-info> property
-# - relative location of all source from the project root.
-#
-rule dump-test ( target )
-{
- local type = [ $(target).type ] ;
- local name = [ $(target).name ] ;
- local project = [ $(target).project ] ;
-
- local project-root = [ $(project).get project-root ] ;
- local library = [ get-library-name [ path.root [ $(project).get location ]
- [ path.pwd ] ] ] ;
- if $(library)
- {
- name = $(library)/$(name) ;
- }
-
- local sources = [ $(target).sources ] ;
- local source-files ;
- for local s in $(sources)
- {
- if [ class.is-a $(s) : file-reference ]
- {
- local location = [ path.root [ path.root [ $(s).name ]
- [ $(s).location ] ] [ path.pwd ] ] ;
-
- source-files += [ path.relative-to [ path.root $(project-root)
- [ path.pwd ] ] $(location) ] ;
- }
- }
-
- local target-name = [ $(project).get location ] // [ $(target).name ] .test
- ;
- target-name = $(target-name:J=) ;
-
- local r = [ $(target).requirements ] ;
- # Extract values of the <test-info> feature.
- local test-info = [ $(r).get <test-info> ] ;
-
- # If the user requested XML output on the command-line, add the test info to
- # that XML file rather than dumping them to stdout.
- if $(.out-xml)
- {
- local nl = "
-" ;
- .contents on $(.out-xml) +=
- "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
- "$(nl) <target><![CDATA[$(target-name)]]></target>"
- "$(nl) <info><![CDATA[$(test-info)]]></info>"
- "$(nl) <source><![CDATA[$(source-files)]]></source>"
- "$(nl) </test>"
- ;
- }
- else
- {
- # Format them into a single string of quoted strings.
- test-info = \"$(test-info:J=\"\ \")\" ;
-
- ECHO boost-test($(type)) \"$(name)\" [$(test-info)] ":"
- \"$(source-files)\" ;
- }
-}
-
-
-# Register generators. Depending on target type, either 'expect-success' or
-# 'expect-failure' rule will be used.
-generators.register-standard testing.expect-success : OBJ : COMPILE ;
-generators.register-standard testing.expect-failure : OBJ : COMPILE_FAIL ;
-generators.register-standard testing.expect-success : RUN_OUTPUT : RUN ;
-generators.register-standard testing.expect-failure : RUN_OUTPUT : RUN_FAIL ;
-generators.register-standard testing.expect-failure : EXE : LINK_FAIL ;
-generators.register-standard testing.expect-success : EXE : LINK ;
-
-# Generator which runs an EXE and captures output.
-generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
-
-# Generator which creates a target if sources run successfully. Differs from RUN
-# in that run output is not captured. The reason why it exists is that the 'run'
-# rule is much better for automated testing, but is not user-friendly (see
-# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
-generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
-
-
-# The action rules called by generators.
-
-# Causes the 'target' to exist after bjam invocation if and only if all the
-# dependencies were successfully built.
-#
-rule expect-success ( target : dependency + : requirements * )
-{
- **passed** $(target) : $(sources) ;
-}
-
-
-# Causes the 'target' to exist after bjam invocation if and only if some of
-# the dependencies were not successfully built.
-#
-rule expect-failure ( target : dependency + : properties * )
-{
- local grist = [ MATCH ^<(.*)> : $(dependency:G) ] ;
- local marker = $(dependency:G=$(grist)*fail) ;
- (failed-as-expected) $(marker) ;
- FAIL_EXPECTED $(dependency) ;
- LOCATE on $(marker) = [ on $(dependency) return $(LOCATE) ] ;
- RMOLD $(marker) ;
- DEPENDS $(marker) : $(dependency) ;
- DEPENDS $(target) : $(marker) ;
- **passed** $(target) : $(marker) ;
-}
-
-
-# The rule/action combination used to report successful passing of a test.
-#
-rule **passed**
-{
- # Dump all the tests, if needed. We do it here, since dump should happen
- # only after all Jamfiles have been read, and there is no such place
- # currently defined (but there should be).
- if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
- {
- .dumped-tests = true ;
- dump-tests ;
- }
-
- # Force deletion of the target, in case any dependencies failed to build.
- RMOLD $(<) ;
-}
-
-
-# Used to create test files signifying passed tests.
-#
-actions **passed**
-{
- echo passed > "$(<)"
-}
-
-
-# Used to create replacement object files that do not get created during tests
-# that are expected to fail.
-#
-actions (failed-as-expected)
-{
- echo failed as expected > "$(<)"
-}
-
-
-rule run-path-setup ( target : source : properties * )
-{
- # For testing, we need to make sure that all dynamic libraries needed by the
- # test are found. So, we collect all paths from dependency libraries (via
- # xdll-path property) and add whatever explicit dll-path user has specified.
- # The resulting paths are added to the environment on each test invocation.
- local dll-paths = [ feature.get-values <dll-path> : $(properties) ] ;
- dll-paths += [ feature.get-values <xdll-path> : $(properties) ] ;
- dll-paths += [ on $(source) return $(RUN_PATH) ] ;
- dll-paths = [ sequence.unique $(dll-paths) ] ;
- if $(dll-paths)
- {
- dll-paths = [ sequence.transform path.native : $(dll-paths) ] ;
- PATH_SETUP on $(target) = [ common.prepend-path-variable-command
- [ os.shared-library-path-variable ] : $(dll-paths) ] ;
- }
-}
-
-
-local argv = [ modules.peek : ARGV ] ;
-
-toolset.flags testing.capture-output ARGS <testing.arg> ;
-toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
-toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
-
-
-# Runs executable 'sources' and stores stdout in file 'target'. Unless
-# --preserve-test-targets command line option has been specified, removes the
-# executable. The 'targets-to-remove' parameter controls what should be removed:
-# - if 'none', does not remove anything, ever
-# - if empty, removes 'source'
-# - if non-empty and not 'none', contains a list of sources to remove.
-#
-rule capture-output ( target : source : properties * : targets-to-remove * )
-{
- output-file on $(target) = $(target:S=.output) ;
- LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
-
- # The INCLUDES kill a warning about independent target...
- INCLUDES $(target) : $(target:S=.output) ;
- # but it also puts .output into dependency graph, so we must tell jam it is
- # OK if it cannot find the target or updating rule.
- NOCARE $(target:S=.output) ;
-
-    # This has a two-fold effect. First, it adds input files to the dependency
- # graph, preventing a warning. Second, it causes input files to be bound
- # before target is created. Therefore, they are bound using SEARCH setting
- # on them and not LOCATE setting of $(target), as in other case (due to jam
- # bug).
- DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
-
- if $(targets-to-remove) = none
- {
- targets-to-remove = ;
- }
- else if ! $(targets-to-remove)
- {
- targets-to-remove = $(source) ;
- }
-
- run-path-setup $(target) : $(source) : $(properties) ;
-
- if [ feature.get-values preserve-test-targets : $(properties) ] = off
- {
- TEMPORARY $(targets-to-remove) ;
- # Set a second action on target that will be executed after capture
- # output action. The 'RmTemps' rule has the 'ignore' modifier so it is
- # always considered succeeded. This is needed for 'run-fail' test. For
- # that test the target will be marked with FAIL_EXPECTED, and without
- # 'ignore' successful execution will be negated and be reported as
- # failure. With 'ignore' we do not detect a case where removing files
- # fails, but it is not likely to happen.
- RmTemps $(target) : $(targets-to-remove) ;
- }
-}
-
-
-if [ os.name ] = NT
-{
- .STATUS = %status% ;
- .SET_STATUS = "set status=%ERRORLEVEL%" ;
- .RUN_OUTPUT_NL = "echo." ;
- .STATUS_0 = "%status% EQU 0 (" ;
- .STATUS_NOT_0 = "%status% NEQ 0 (" ;
- .VERBOSE = "%verbose% EQU 1 (" ;
- .ENDIF = ")" ;
- .SHELL_SET = "set " ;
- .CATENATE = type ;
- .CP = copy ;
-}
-else
-{
- .STATUS = "$status" ;
- .SET_STATUS = "status=$?" ;
- .RUN_OUTPUT_NL = "echo" ;
- .STATUS_0 = "test $status -eq 0 ; then" ;
- .STATUS_NOT_0 = "test $status -ne 0 ; then" ;
- .VERBOSE = "test $verbose -eq 1 ; then" ;
- .ENDIF = "fi" ;
- .SHELL_SET = "" ;
- .CATENATE = cat ;
- .CP = cp ;
-}
-
-
-.VERBOSE_TEST = 0 ;
-if --verbose-test in [ modules.peek : ARGV ]
-{
- .VERBOSE_TEST = 1 ;
-}
-
-
-.RM = [ common.rm-command ] ;
-
-
-actions capture-output bind INPUT_FILES output-file
-{
- $(PATH_SETUP)
- $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1
- $(.SET_STATUS)
- $(.RUN_OUTPUT_NL) >> "$(output-file)"
- echo EXIT STATUS: $(.STATUS) >> "$(output-file)"
- if $(.STATUS_0)
- $(.CP) "$(output-file)" "$(<)"
- $(.ENDIF)
- $(.SHELL_SET)verbose=$(.VERBOSE_TEST)
- if $(.STATUS_NOT_0)
- $(.SHELL_SET)verbose=1
- $(.ENDIF)
- if $(.VERBOSE)
- echo ====== BEGIN OUTPUT ======
- $(.CATENATE) "$(output-file)"
- echo ====== END OUTPUT ======
- $(.ENDIF)
- exit $(.STATUS)
-}
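
The action above expands to a platform-specific shell fragment. As a rough Python sketch of the same behaviour (the names are hypothetical and the verbose echoing is omitted): run the program with its arguments and input files, capture stdout and stderr into the .output file, append the exit status, and copy the output to the target only if the run succeeded.

# Rough Python sketch of the capture-output action's behaviour.
import shutil
import subprocess

def capture_output(exe, output_file, target, args=(), input_files=()):
    with open(output_file, "w") as out:
        status = subprocess.call([exe] + list(args) + list(input_files),
                                 stdout=out, stderr=subprocess.STDOUT)
        out.write("\nEXIT STATUS: %d\n" % status)
    if status == 0:
        # Success: the captured output becomes the RUN_OUTPUT target.
        shutil.copyfile(output_file, target)
    return status
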
-
-
-actions quietly updated ignore piecemeal together RmTemps
-{
- $(.RM) "$(>)"
-}
-
-
-.MAKE_FILE = [ common.file-creation-command ] ;
-
-toolset.flags testing.unit-test LAUNCHER <testing.launcher> ;
-toolset.flags testing.unit-test ARGS <testing.arg> ;
-
-
-rule unit-test ( target : source : properties * )
-{
- run-path-setup $(target) : $(source) : $(properties) ;
-}
-
-
-actions unit-test
-{
- $(PATH_SETUP)
- $(LAUNCHER) $(>) $(ARGS) && $(.MAKE_FILE) $(<)
-}
-
-
-IMPORT $(__name__) : compile compile-fail run run-fail link link-fail
- : : compile compile-fail run run-fail link link-fail ;
-
-
-type.register TIME : time ;
-generators.register-standard testing.time : : TIME ;
-
-
-rule record-time ( target : source : start end user system )
-{
- local src-string = [$(source:G=:J=",")"] " ;
- USER_TIME on $(target) += $(src-string)$(user) ;
- SYSTEM_TIME on $(target) += $(src-string)$(system) ;
-}
-
-
-IMPORT testing : record-time : : testing.record-time ;
-
-
-# Calling this rule requests that Boost.Build time how long it takes to build
-# the 'source' target and display the results both on the standard output and
-# in the 'target' file.
-#
-rule time ( target : source : properties * )
-{
- # Set up rule for recording timing information.
- __TIMING_RULE__ on $(source) = testing.record-time $(target) ;
-
- # Make sure that the source is rebuilt any time we need to retrieve that
- # information.
- REBUILDS $(target) : $(source) ;
-}
-
-
-actions time
-{
- echo user: $(USER_TIME)
- echo system: $(SYSTEM_TIME)
-
- echo user: $(USER_TIME)" seconds" > "$(<)"
- echo system: $(SYSTEM_TIME)" seconds" >> "$(<)"
-}
diff --git a/tools/build/v2/tools/testing.py b/tools/build/v2/tools/testing.py
deleted file mode 100644
index 3b53500cc2..0000000000
--- a/tools/build/v2/tools/testing.py
+++ /dev/null
@@ -1,342 +0,0 @@
-# Status: ported, except for --out-xml
-# Base revision: 64488
-#
-# Copyright 2005 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# This module implements a regression testing framework. It declares a number
-# of main target rules which perform some action and, if the results are OK,
-# create an output file.
-#
-# The exact list of rules is:
-# 'compile'       -- creates a .test file if compilation of the sources was
-#                    successful.
-# 'compile-fail'  -- creates a .test file if compilation of the sources failed.
-# 'run'           -- creates a .test file if running the executable produced
-#                    from the sources was successful. Also leaves behind a
-#                    .output file with the output from the program run.
-# 'run-fail'      -- same as above, but the .test file is created if running
-#                    fails.
-#
-# In all cases, the presence of the .test file is an indication that the test
-# passed. For more convenient reporting, you might want to use the C++ Boost
-# regression testing utilities (see http://www.boost.org/more/regression.html).
-#
-# For historical reasons, a 'unit-test' rule is available which has the same
-# syntax as 'exe' and behaves just like 'run'.
-
-# Things to do:
-#  - Teach compiler_status to handle Jamfile.v2.
-# Notes:
-# - <no-warn> is not implemented, since it is Como-specific, and it is not
-# clear how to implement it
-# - std::locale-support is not implemented (it is used in one test).
-
-import b2.build.feature as feature
-import b2.build.type as type
-import b2.build.targets as targets
-import b2.build.generators as generators
-import b2.build.toolset as toolset
-import b2.tools.common as common
-import b2.util.option as option
-import b2.build_system as build_system
-
-
-
-from b2.manager import get_manager
-from b2.util import stem, bjam_signature
-from b2.util.sequence import unique
-
-import bjam
-
-import re
-import os.path
-import sys
-
-def init():
- pass
-
-# Feature controlling the command used to launch test programs.
-feature.feature("testing.launcher", [], ["free", "optional"])
-
-feature.feature("test-info", [], ["free", "incidental"])
-feature.feature("testing.arg", [], ["free", "incidental"])
-feature.feature("testing.input-file", [], ["free", "dependency"])
-
-feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"])
-
-# Register target types.
-type.register("TEST", ["test"])
-type.register("COMPILE", [], "TEST")
-type.register("COMPILE_FAIL", [], "TEST")
-
-type.register("RUN_OUTPUT", ["run"])
-type.register("RUN", [], "TEST")
-type.register("RUN_FAIL", [], "TEST")
-
-type.register("LINK", [], "TEST")
-type.register("LINK_FAIL", [], "TEST")
-type.register("UNIT_TEST", ["passed"], "TEST")
-
-__all_tests = []
-
-# Declare the rules which create main targets. While the 'type' module already
-# creates rules with the same names for us, we need some extra convenience (a
-# default name for the main target), so we write our own versions.
-
-# Helper rule. Creates a test target, using the basename of the first source if
-# no target name is explicitly passed. Remembers the created target in a global
-# variable.
-def make_test(target_type, sources, requirements, target_name=None):
-
- if not target_name:
- target_name = stem(os.path.basename(sources[0]))
-
-    # Having periods (".") in the target name is problematic because the typed
-    # generator will strip the suffix and use the bare name for the file
-    # targets. Even though the location-prefix averts problems most of the
-    # time, it does not prevent ambiguity issues when referring to the test
-    # targets, for example when using the XML log output. So we rename the
-    # target to remove the periods and provide an alias for users.
- real_name = target_name.replace(".", "~")
-
- project = get_manager().projects().current()
-    # The <location-prefix> forces the build system to generate paths in the
-    # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow
-    # post-processing tools to work.
- t = get_manager().targets().create_typed_target(
- type.type_from_rule_name(target_type), project, real_name, sources,
- requirements + ["<location-prefix>" + real_name + ".test"], [], [])
-
- # The alias to the real target, per period replacement above.
- if real_name != target_name:
- get_manager().projects().project_rules().all_names_["alias"](
- target_name, [t])
-
-    # Remember the test (for --dump-tests). A better way would be to collect
-    # all tests for a given project. This has some technical problems: e.g. we
-    # cannot call this dump from a Jamfile since projects referred to by
-    # 'build-project' are not available until the whole Jamfile has been
-    # loaded.
- __all_tests.append(t)
- return t
-
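
The naming scheme described in the comments above comes down to two small string operations. A hedged Python illustration follows; the file name is hypothetical, and os.path.splitext is used here only as a stand-in for b2.util.stem.

# Illustration of the default test target naming used by make_test.
import os

source = "libs/config/test/config.info.cpp"                  # hypothetical
target_name = os.path.splitext(os.path.basename(source))[0]  # "config.info"
real_name = target_name.replace(".", "~")                    # "config~info"
location_prefix = "<location-prefix>" + real_name + ".test"
print("%s -> %s (%s)" % (target_name, real_name, location_prefix))
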
-
-# Note: passing more than one cpp file here is known to fail. Passing a cpp
-# file and a library target works.
-#
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def compile(sources, requirements, target_name=None):
- return make_test("compile", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def compile_fail(sources, requirements, target_name=None):
- return make_test("compile-fail", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def link(sources, requirements, target_name=None):
- return make_test("link", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"]))
-def link_fail(sources, requirements, target_name=None):
- return make_test("link-fail", sources, requirements, target_name)
-
-def handle_input_files(input_files):
- if len(input_files) > 1:
-        # Check that the sorting done when creating the property-set instance
-        # will not change the ordering.
- if sorted(input_files) != input_files:
- get_manager().errors()("Names of input files must be sorted alphabetically\n" +
- "due to internal limitations")
- return ["<testing.input-file>" + f for f in input_files]
-
-@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
- ["requirements", "*"], ["target_name", "?"],
- ["default_build", "*"]))
-def run(sources, args, input_files, requirements, target_name=None, default_build=[]):
- if args:
- requirements.append("<testing.arg>" + " ".join(args))
- requirements.extend(handle_input_files(input_files))
- return make_test("run", sources, requirements, target_name)
-
-@bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"],
- ["requirements", "*"], ["target_name", "?"],
- ["default_build", "*"]))
-def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]):
- if args:
- requirements.append("<testing.arg>" + " ".join(args))
- requirements.extend(handle_input_files(input_files))
- return make_test("run-fail", sources, requirements, target_name)
-
-# Register all the rules
-for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]:
- get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_")))
-
-# Use 'test-suite' as a synonym for 'alias', for backward compatibility.
-from b2.build.alias import alias
-get_manager().projects().add_rule("test-suite", alias)
-
-# For all recorded main test targets (typed targets with a type derived from
-# 'TEST'), produce some interesting information.
-#
-def dump_tests():
- for t in __all_tests:
- dump_test(t)
-
-# Given a project location in normalized form (slashes are forward), compute the
-# name of the Boost library.
-#
-__ln1 = re.compile("/(tools|libs)/(.*)/(test|example)")
-__ln2 = re.compile("/(tools|libs)/(.*)$")
-__ln3 = re.compile("(/status$)")
-def get_library_name(path):
-
- path = path.replace("\\", "/")
- match1 = __ln1.match(path)
- match2 = __ln2.match(path)
- match3 = __ln3.match(path)
-
- if match1:
- return match1.group(2)
- elif match2:
- return match2.group(2)
- elif match3:
- return ""
- elif option.get("dump-tests", False, True):
-        # The 'run' rule and others might be used outside Boost. In that case,
-        # just return the path, since the 'library name' makes no sense.
- return path
-
-# Was an XML dump requested?
-__out_xml = option.get("out-xml", False, True)
-
-# Takes a target (instance of 'basic-target') and prints
-# - its type
-# - its name
-# - comments specified via the <test-info> property
-#  - relative location of all sources from the project root.
-#
-def dump_test(target):
- type = target.type()
- name = target.name()
- project = target.project()
-
- project_root = project.get('project-root')
- library = get_library_name(os.path.abspath(project.get('location')))
- if library:
- name = library + "/" + name
-
- sources = target.sources()
- source_files = []
- for s in sources:
- if isinstance(s, targets.FileReference):
- location = os.path.abspath(os.path.join(s.location(), s.name()))
- source_files.append(os.path.relpath(location, os.path.abspath(project_root)))
-
- target_name = project.get('location') + "//" + target.name() + ".test"
-
- test_info = target.requirements().get('test-info')
- test_info = " ".join('"' + ti + '"' for ti in test_info)
-
-    # If the user requested XML output on the command line, add the test info
-    # to that XML file rather than dumping it to stdout.
- #if $(.out-xml)
- #{
-# local nl = "
-#" ;
-# .contents on $(.out-xml) +=
-# "$(nl) <test type=\"$(type)\" name=\"$(name)\">"
-# "$(nl) <target><![CDATA[$(target-name)]]></target>"
-# "$(nl) <info><![CDATA[$(test-info)]]></info>"
-# "$(nl) <source><![CDATA[$(source-files)]]></source>"
-# "$(nl) </test>"
-# ;
-# }
-# else
-
- source_files = " ".join('"' + s + '"' for s in source_files)
- if test_info:
- print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files)
- else:
- print 'boost-test(%s) "%s" : %s' % (type, name, source_files)
-
-# Register generators. Depending on target type, either 'expect-success' or
-# 'expect-failure' rule will be used.
-generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"])
-generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"])
-generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"])
-generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"])
-generators.register_standard("testing.expect-success", ["EXE"], ["LINK"])
-generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"])
-
-# Generator which runs an EXE and captures output.
-generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"])
-
-# Generator which creates a target if the sources run successfully. Differs
-# from RUN in that the run output is not captured. It exists because the 'run'
-# rule is much better for automated testing, but is not as user-friendly (see
-# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
-generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"])
-
-# FIXME: if those calls are after bjam.call, then bjam will crash
-# when toolset.flags calls bjam.caller.
-toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"])
-toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"])
-toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"])
-
-toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"])
-toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"])
-
-type.register("TIME", ["time"])
-generators.register_standard("testing.time", [], ["TIME"])
-
-
-# The following code sets up actions for this module. It is pretty convoluted,
-# but the basic point is that most of the actions are defined by Jam code
-# contained in testing-aux.jam, which we load into a Jam module named 'testing'.
-
-def run_path_setup(target, sources, ps):
-
-    # For testing, we need to make sure that all dynamic libraries needed by
-    # the test are found. So, we collect all paths from dependency libraries
-    # (via the xdll-path property) and add whatever explicit dll-path the user
-    # has specified. The resulting paths are added to the environment on each
-    # test invocation.
- dll_paths = ps.get('dll-path')
- dll_paths.extend(ps.get('xdll-path'))
- dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH"))
- dll_paths = unique(dll_paths)
- if dll_paths:
- bjam.call("set-target-variable", target, "PATH_SETUP",
- common.prepend_path_variable_command(
- common.shared_library_path_variable(), dll_paths))
-
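
In effect, PATH_SETUP just prepends the collected directories to the platform's shared-library search path before each test run. A minimal Python sketch, assuming a POSIX-style LD_LIBRARY_PATH; the variable name and the example paths are illustrative only.

# Minimal sketch of the PATH_SETUP mechanism: prepend the collected dll-path
# and xdll-path directories to the shared library search path.
import os

def prepend_library_paths(env, paths, var="LD_LIBRARY_PATH"):
    # On Windows this would be PATH and on OS X DYLD_LIBRARY_PATH; using
    # LD_LIBRARY_PATH here is just an assumption for the example.
    env = dict(env)
    existing = env.get(var, "")
    parts = list(paths) + ([existing] if existing else [])
    env[var] = os.pathsep.join(parts)
    return env

test_env = prepend_library_paths(os.environ, ["/tmp/build/lib", "/opt/lib"])
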
-def capture_output_setup(target, sources, ps):
- run_path_setup(target, sources, ps)
-
- if ps.get('preserve-test-targets') == ['off']:
- bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1")
-
-get_manager().engine().register_bjam_action("testing.capture-output",
- capture_output_setup)
-
-
-path = os.path.dirname(get_manager().projects().loaded_tool_module_path_[__name__])
-import b2.util.os_j
-get_manager().projects().project_rules()._import_rule("testing", "os.name",
- b2.util.os_j.name)
-import b2.tools.common
-get_manager().projects().project_rules()._import_rule("testing", "common.rm-command",
- b2.tools.common.rm_command)
-get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command",
- b2.tools.common.file_creation_command)
-
-bjam.call("load", "testing", os.path.join(path, "testing-aux.jam"))
-
-
-for name in ["expect-success", "expect-failure", "time"]:
- get_manager().engine().register_bjam_action("testing." + name)
-
-get_manager().engine().register_bjam_action("testing.unit-test",
- run_path_setup)
-
-if option.get("dump-tests", False, True):
- build_system.add_pre_build_hook(dump_tests)
diff --git a/tools/build/v2/tools/types/asm.py b/tools/build/v2/tools/types/asm.py
deleted file mode 100644
index b4e1c30e78..0000000000
--- a/tools/build/v2/tools/types/asm.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright Craig Rodrigues 2005.
-# Copyright (c) 2008 Steven Watanabe
-#
-# Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register():
- type.register_type('ASM', ['s', 'S', 'asm'])
-
-register()
diff --git a/tools/build/v2/tools/types/cpp.jam b/tools/build/v2/tools/types/cpp.jam
deleted file mode 100644
index 3159cdd772..0000000000
--- a/tools/build/v2/tools/types/cpp.jam
+++ /dev/null
@@ -1,86 +0,0 @@
-# Copyright David Abrahams 2004.
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Copyright 2010 Rene Rivera
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-import type ;
-import scanner ;
-
-class c-scanner : scanner
-{
- import path ;
- import regex ;
- import scanner ;
- import sequence ;
- import virtual-target ;
-
- rule __init__ ( includes * )
- {
- scanner.__init__ ;
-
- for local i in $(includes)
- {
- self.includes += [ sequence.transform path.native
- : [ regex.split $(i:G=) "&&" ] ] ;
- }
- }
-
- rule pattern ( )
- {
- return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
- angle = [ sequence.transform path.native : $(angle) ] ;
- local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
- quoted = [ sequence.transform path.native : $(quoted) ] ;
-
-        # CONSIDER: the new scoping rules seem to defeat "on target" variables.
- local g = [ on $(target) return $(HDRGRIST) ] ;
- local b = [ NORMALIZE_PATH $(binding:D) ] ;
-
-        # Attach the binding of the including file to the included targets.
-        # When a target is directly created from a virtual target this extra
-        # information is unnecessary. But in other cases, it allows us to
-        # distinguish between two headers of the same name included from
-        # different places. We do not need this extra information for angle
-        # includes, since they should not depend on the including file (we
-        # cannot get a literal "." in the include path).
- local g2 = $(g)"#"$(b) ;
-
- angle = $(angle:G=$(g)) ;
- quoted = $(quoted:G=$(g2)) ;
-
- local all = $(angle) $(quoted) ;
-
- INCLUDES $(target) : $(all) ;
- NOCARE $(all) ;
- SEARCH on $(angle) = $(self.includes:G=) ;
- SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
-
-        # Just propagate the current scanner to the includes in the hope that
-        # the includes do not change scanners.
- scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
-
- ISFILE $(angle) $(quoted) ;
- }
-}
-
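
The scanner's pattern boils down to telling angle-bracket includes apart from quoted ones, since the two are resolved against different search paths. A small Python sketch of that classification follows; the regular expression here is a simplified stand-in for the one in the pattern rule above.

# Sketch of the include classification performed by c-scanner.
import re

_include = re.compile(r'#[ \t]*include[ ]*(?:<([^>]*)>|"([^"]*)")')

def classify_includes(text):
    angle, quoted = [], []
    for m in _include.finditer(text):
        if m.group(1) is not None:
            angle.append(m.group(1))   # searched in <include> directories only
        else:
            quoted.append(m.group(2))  # searched relative to the including
                                       # file first, then <include> directories
    return angle, quoted

print(classify_includes('#include <vector>\n#include "config.hpp"\n'))
# -> (['vector'], ['config.hpp'])
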
-scanner.register c-scanner : include ;
-
-type.register CPP : cpp cxx cc ;
-type.register H : h ;
-type.register HPP : hpp : H ;
-type.register C : c ;
-
-# In most cases where a CPP file or an H file is the source of some action, we
-# should rebuild the result if any of the files included by the CPP/H file
-# change. One case when this is not needed is installation, which is handled
-# specifically.
-type.set-scanner CPP : c-scanner ;
-type.set-scanner C : c-scanner ;
-# One case where scanning of H/HPP files is necessary is PCH generation -- if
-# any header included by the HPP being precompiled changes, we need to
-# recompile the header.
-type.set-scanner H : c-scanner ;
-type.set-scanner HPP : c-scanner ;
diff --git a/tools/build/v2/tools/types/cpp.py b/tools/build/v2/tools/types/cpp.py
deleted file mode 100644
index a6703255c6..0000000000
--- a/tools/build/v2/tools/types/cpp.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright David Abrahams 2004. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-from b2.build import type
-
-def register ():
- type.register_type('CPP', ['cpp', 'cxx', 'cc'])
- type.register_type('H', ['h'])
- type.register_type('HPP', ['hpp'], 'H')
- type.register_type('C', ['c'])
-
-register ()
diff --git a/tools/build/v2/tools/types/qt.jam b/tools/build/v2/tools/types/qt.jam
deleted file mode 100644
index 6d1dfbd42f..0000000000
--- a/tools/build/v2/tools/types/qt.jam
+++ /dev/null
@@ -1,10 +0,0 @@
-# Copyright Vladimir Prus 2005. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying
-# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-type UI : ui ;
-type QRC : qrc ;
-type MOCCABLE_CPP ;
-type MOCCABLE_H ;
-# Result of running moc.
-type MOC : moc : H ;
diff --git a/tools/build/v2/tools/xsltproc-config.jam b/tools/build/v2/tools/xsltproc-config.jam
deleted file mode 100644
index de54a2eb37..0000000000
--- a/tools/build/v2/tools/xsltproc-config.jam
+++ /dev/null
@@ -1,37 +0,0 @@
-#~ Copyright 2005 Rene Rivera.
-#~ Distributed under the Boost Software License, Version 1.0.
-#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Automatic configuration for the xsltproc tool. To use, just import this module.
-
-import os ;
-import toolset : using ;
-
-if [ os.name ] = NT
-{
- local xsltproc-path = [ GLOB [ modules.peek : PATH ] "C:\\Boost\\bin" : xsltproc\.exe ] ;
- xsltproc-path = $(xsltproc-path[1]) ;
-
- if $(xsltproc-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
- }
- using xsltproc : $(xsltproc-path) ;
- }
-}
-else
-{
- local xsltproc-path = [ GLOB [ modules.peek : PATH ] : xsltproc ] ;
- xsltproc-path = $(xsltproc-path[1]) ;
-
- if $(xsltproc-path)
- {
- if --debug-configuration in [ modules.peek : ARGV ]
- {
- ECHO "notice:" using xsltproc ":" $(xsltproc-path) ;
- }
- using xsltproc : $(xsltproc-path) ;
- }
-}
diff --git a/tools/build/v2/tools/xsltproc.jam b/tools/build/v2/tools/xsltproc.jam
deleted file mode 100644
index 96f5170bed..0000000000
--- a/tools/build/v2/tools/xsltproc.jam
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright (C) 2003 Doug Gregor. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# This module defines rules to apply an XSLT stylesheet to an XML file using the
-# xsltproc driver, part of libxslt.
-#
-# Note: except for 'init', this module does not provide any rules for end
-# users.
-
-import feature ;
-import regex ;
-import sequence ;
-import common ;
-import os ;
-import modules ;
-import path ;
-import errors ;
-
-feature.feature xsl:param : : free ;
-feature.feature xsl:path : : free ;
-feature.feature catalog : : free ;
-
-
-# Initialize xsltproc support. The parameters are:
-# xsltproc: The xsltproc executable
-#
-rule init ( xsltproc ? )
-{
- if $(xsltproc)
- {
- modify-config ;
- .xsltproc = $(xsltproc) ;
- check-xsltproc ;
- }
-}
-
-rule freeze-config ( )
-{
- if ! $(.config-frozen)
- {
- .config-frozen = true ;
- .xsltproc ?= [ modules.peek : XSLTPROC ] ;
- .xsltproc ?= xsltproc ;
- check-xsltproc ;
- .is-cygwin = [ .is-cygwin $(.xsltproc) ] ;
- }
-}
-
-rule modify-config
-{
- if $(.config-frozen)
- {
- errors.user-error "xsltproc: Cannot change xsltproc command after it has been used." ;
- }
-}
-
-rule check-xsltproc ( )
-{
- if $(.xsltproc)
- {
- local status = [ SHELL "\"$(.xsltproc)\" -V" : no-output : exit-status ] ;
- if $(status[2]) != "0"
- {
- errors.user-error "xsltproc: Could not run \"$(.xsltproc)\" -V." ;
- }
- }
-}
-
-# Returns a non-empty string if a cygwin xsltproc binary was specified.
-rule is-cygwin ( )
-{
- freeze-config ;
- return $(.is-cygwin) ;
-}
-
-rule .is-cygwin ( xsltproc )
-{
- if [ os.on-windows ]
- {
- local file = [ path.make [ modules.binding $(__name__) ] ] ;
- local dir = [ path.native
- [ path.join [ path.parent $(file) ] xsltproc ] ] ;
- if [ os.name ] = CYGWIN
- {
- dir = $(dir:W) ;
- }
- local command =
- "\"$(xsltproc)\" \"$(dir)\\test.xsl\" \"$(dir)\\test.xml\" 2>&1" ;
- local status = [ SHELL $(command) : no-output : exit-status ] ;
- if $(status[2]) != "0"
- {
- return true ;
- }
- }
-}
-
-rule compute-xslt-flags ( target : properties * )
-{
- local flags ;
-
- # Raw flags.
- flags += [ feature.get-values <flags> : $(properties) ] ;
-
- # Translate <xsl:param> into command line flags.
- for local param in [ feature.get-values <xsl:param> : $(properties) ]
- {
- local namevalue = [ regex.split $(param) "=" ] ;
- flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
- }
-
- # Translate <xsl:path>.
- for local path in [ feature.get-values <xsl:path> : $(properties) ]
- {
- flags += --path \"$(path:G=)\" ;
- }
-
- # Take care of implicit dependencies.
- local other-deps ;
- for local dep in [ feature.get-values <implicit-dependency> : $(properties) ]
- {
- other-deps += [ $(dep:G=).creating-subvariant ] ;
- }
-
- local implicit-target-directories ;
- for local dep in [ sequence.unique $(other-deps) ]
- {
- implicit-target-directories += [ $(dep).all-target-directories ] ;
- }
-
- for local dir in $(implicit-target-directories)
- {
- flags += --path \"$(dir:T)\" ;
- }
-
- return $(flags) ;
-}
-
-
-local rule .xsltproc ( target : source stylesheet : properties * : dirname ? : action )
-{
- freeze-config ;
- STYLESHEET on $(target) = $(stylesheet) ;
- FLAGS on $(target) += [ compute-xslt-flags $(target) : $(properties) ] ;
- NAME on $(target) = $(.xsltproc) ;
-
- for local catalog in [ feature.get-values <catalog> : $(properties) ]
- {
- CATALOG = [ common.variable-setting-command XML_CATALOG_FILES : $(catalog:T) ] ;
- }
-
- if [ os.on-windows ] && ! [ is-cygwin ]
- {
- action = $(action).windows ;
- }
-
- $(action) $(target) : $(source) ;
-}
-
-
-rule xslt ( target : source stylesheet : properties * )
-{
- return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : : xslt-xsltproc ] ;
-}
-
-
-rule xslt-dir ( target : source stylesheet : properties * : dirname )
-{
- return [ .xsltproc $(target) : $(source) $(stylesheet) : $(properties) : $(dirname) : xslt-xsltproc-dir ] ;
-}
-
-actions xslt-xsltproc.windows
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
-}
-
-
-actions xslt-xsltproc bind STYLESHEET
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
-}
-
-
-actions xslt-xsltproc-dir.windows bind STYLESHEET
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
-}
-
-
-actions xslt-xsltproc-dir bind STYLESHEET
-{
- $(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
-}
diff --git a/tools/build/v2/tools/zlib.jam b/tools/build/v2/tools/zlib.jam
deleted file mode 100644
index f9138fd573..0000000000
--- a/tools/build/v2/tools/zlib.jam
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2010 Vladimir Prus.
-#
-# Use, modification and distribution is subject to the Boost Software
-# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Supports the zlib library
-#
-# After 'using zlib', the following targets are available:
-#
-# /zlib//zlib -- The zlib library
-
-
-# In addition to its direct purpose of supporting zlib, this module also
-# serves as a canonical example of how third-party configuration works
-# in Boost.Build. The operation is as follows:
-#
-# - For each 'using zlib : condition ... : ...' we create a target alternative
-# for zlib, with the specified condition.
-# - There's one target alternative for 'zlib' with no specific condition
-# properties.
-#
-# Two invocations of 'using zlib' with the same condition but different
-# properties are not permitted, e.g.:
-#
-# using zlib : condition <target-os>windows : include foo ;
-# using zlib : condition <target-os>windows : include bar ;
-#
-# is in error. One exception is the empty condition: 'using' without any
-# parameters is overridable. That is:
-#
-#   using zlib ;
-#   using zlib : include foo ;
-#
-# is OK, and the first 'using' is ignored. Likewise if the order of the
-# statements is reversed.
-#
-# When the 'zlib' target is built, a target alternative is selected as usual
-# for Boost.Build. The selected alternative is a custom target class, which:
-#
-# - calls ac.find-include-path to find the header path. If an explicit path is
-#   provided in 'using', only that path is checked, and if no header is found
-#   there, an error is emitted. Otherwise, we check the directory specified by
-#   the ZLIB_INCLUDE environment variable and, failing that, the standard
-#   directories.
-# [TODO: document sysroot handling]
-# - calls ac.find-library to find the library, in an identical fashion.
-#
-
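
The configuration protocol spelled out above (one alternative per condition, conflicting redefinitions rejected, the unconditioned default freely overridable) can be modelled in a few lines of Python. This is only a sketch of the stated rules, with hypothetical class and method names, not Boost.Build code.

# Model of the 'using zlib' rules described above: one alternative per
# condition; redefining a non-empty condition with different properties is an
# error; the unconditioned default may always be overridden.
class LibraryConfig(object):
    def __init__(self):
        self.alternatives = {}                 # condition -> properties

    def use(self, condition=None, **properties):
        key = condition or ""                  # "" is the unconditioned default
        if key == "":
            self.alternatives[key] = properties
        elif key in self.alternatives and self.alternatives[key] != properties:
            raise ValueError("conflicting 'using' for condition %r" % condition)
        else:
            self.alternatives[key] = properties

zlib = LibraryConfig()
zlib.use()                                         # bare 'using zlib ;'
zlib.use(include="/opt/zlib/include")              # overrides the default
zlib.use(condition="<target-os>windows", include="foo")
# zlib.use(condition="<target-os>windows", include="bar")   # would raise
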
-import project ;
-import ac ;
-import errors ;
-import "class" : new ;
-import targets ;
-
-project.initialize $(__name__) ;
-project = [ project.current ] ;
-project zlib ;
-
-header = zlib.h ;
-names = z zlib zll zdll ;
-
-.default-alternative = [ new ac-library zlib : $(project) ] ;
-$(.default-alternative).set-header $(header) ;
-$(.default-alternative).set-default-names $(names) ;
-targets.main-target-alternative $(.default-alternative) ;
-
-rule init ( * : * )
-{
- if ! $(condition)
- {
- # Special case the no-condition case so that 'using' without parameters
- # can mix with more specific 'using'.
- $(.default-alternative).reconfigure $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
- }
- else
- {
- # FIXME: consider if we should allow overriding definitions for a given
- # condition -- e.g. project-config.jam might want to override whatever is
- # in user-config.jam.
- local mt = [ new ac-library zlib : $(project)
- : $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
- $(mt).set-header $(header) ;
- $(mt).set-default-names $(names) ;
- targets.main-target-alternative $(mt) ;
- }
-}
-
-
-
-
-
-
diff --git a/tools/build/v2/util/assert.jam b/tools/build/v2/util/assert.jam
deleted file mode 100644
index abedad5259..0000000000
--- a/tools/build/v2/util/assert.jam
+++ /dev/null
@@ -1,336 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import errors ;
-import modules ;
-
-
-################################################################################
-#
-# Private implementation details.
-#
-################################################################################
-
-# Rule added as a replacement for the regular Jam = operator but which does not
-# ignore trailing empty string elements.
-#
-local rule exact-equal-test ( lhs * : rhs * )
-{
- local lhs_extended = $(lhs) xxx ;
- local rhs_extended = $(rhs) xxx ;
- if $(lhs_extended) = $(rhs_extended)
- {
- return true ;
- }
-}
-
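
The trick above works by appending a sentinel element to both lists so that trailing empty strings, which Jam's '=' otherwise disregards, become significant. A Python model of the idea follows; the exact semantics of Jam's '=' are only approximated here.

# Model of the sentinel trick used by exact-equal-test.
def _drop_trailing_empties(l):
    l = list(l)
    while l and l[-1] == "":
        l.pop()
    return l

def jam_equal(lhs, rhs):
    # Approximation of Jam's '=' which ignores trailing empty string elements.
    return _drop_trailing_empties(lhs) == _drop_trailing_empties(rhs)

def exact_equal(lhs, rhs):
    # Appending the sentinel makes trailing empties significant again.
    return jam_equal(lhs + ["xxx"], rhs + ["xxx"])

assert jam_equal(["a", ""], ["a"])           # trailing "" ignored by '='
assert not exact_equal(["a", ""], ["a"])     # ... but caught with the sentinel
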
-
-# Two lists are considered set-equal if they contain the same elements, ignoring
-# duplicates and ordering.
-#
-local rule set-equal-test ( set1 * : set2 * )
-{
- if ( $(set1) in $(set2) ) && ( $(set2) in $(set1) )
- {
- return true ;
- }
-}
-
-
-################################################################################
-#
-# Public interface.
-#
-################################################################################
-
-# Assert the equality of A and B, ignoring trailing empty string elements.
-#
-rule equal ( a * : b * )
-{
- if $(a) != $(b)
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\"
- (ignoring trailing empty strings) ;
- }
-}
-
-
-# Assert that the result of calling RULE-NAME on the given arguments has a false
-# logical value (is either an empty list or all empty strings).
-#
-rule false ( rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
- : $(7) : $(8) : $(9) ] ;
- }
-
- if $(result)
- {
- errors.error-skip-frames 3 assertion failure: Expected false result from
- "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
- $(5) : $(6) : $(7) : $(8) : $(9) ] "]" : Got: "[" \"$(result)\" "]" ;
- }
-}
-
-
-# Assert that ELEMENT is present in LIST.
-#
-rule "in" ( element : list * )
-{
- if ! $(element) in $(list)
- {
- errors.error-skip-frames 3 assertion failure: Expected \"$(element)\" in
- "[" \"$(list)\" "]" ;
- }
-}
-
-
-# Assert the inequality of A and B, ignoring trailing empty string elements.
-#
-rule not-equal ( a * : b * )
-{
- if $(a) = $(b)
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\"
- (ignoring trailing empty strings) ;
- }
-}
-
-
-# Assert that ELEMENT is not present in LIST.
-#
-rule not-in ( element : list * )
-{
- if $(element) in $(list)
- {
- errors.error-skip-frames 3 assertion failure: Did not expect
- \"$(element)\" in "[" \"$(list)\" "]" ;
- }
-}
-
-
-# Assert the inequality of A and B as sets.
-#
-rule not-set-equal ( a * : b * )
-{
- if [ set-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
- and "[" \"$(b)\" "]" to not be equal as sets ;
- }
-}
-
-
-# Assert that A and B are not exactly equal, not ignoring trailing empty string
-# elements.
-#
-rule not-exact-equal ( a * : b * )
-{
- if [ exact-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "!=" \"$(b)\" ;
- }
-}
-
-
-# Assert that EXPECTED is the result of calling RULE-NAME with the given
-# arguments.
-#
-rule result ( expected * : rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
- : $(8) : $(9) ] ;
- }
-
- if ! [ exact-equal-test $(result) : $(expected) ]
- {
- errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
- errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
- $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
- \"$(result)\" "]" ;
- }
-}
-
-
-# Assert that EXPECTED is set-equal (i.e. duplicates and ordering are ignored)
-# to the result of calling RULE-NAME with the given arguments. Note that rules
-# called this way may accept at most 8 parameters.
-#
-rule result-set-equal ( expected * : rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(2) : $(3) : $(4) : $(5) : $(6) : $(7)
- : $(8) : $(9) ] ;
- }
-
- if ! [ set-equal-test $(result) : $(expected) ]
- {
- errors.error-skip-frames 3 assertion failure: "[" $(rule-name) [
- errors.lol->list $(args) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) :
- $(9) ] "]" : Expected: "[" \"$(expected)\" "]" : Got: "["
- \"$(result)\" "]" ;
- }
-}
-
-
-# Assert the equality of A and B as sets.
-#
-rule set-equal ( a * : b * )
-{
- if ! [ set-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: Expected "[" \"$(a)\" "]"
- and "[" \"$(b)\" "]" to be equal as sets ;
- }
-}
-
-
-# Assert that the result of calling RULE-NAME on the given arguments has a true
-# logical value (is neither an empty list nor all empty strings).
-#
-rule true ( rule-name args * : * )
-{
- local result ;
- module [ CALLER_MODULE ]
- {
- modules.poke assert : result : [ $(1) : $(2) : $(3) : $(4) : $(5) : $(6)
- : $(7) : $(8) : $(9) ] ;
- }
-
- if ! $(result)
- {
- errors.error-skip-frames 3 assertion failure: Expected true result from
- "[" $(rule-name) [ errors.lol->list $(args) : $(2) : $(3) : $(4) :
- $(5) : $(6) : $(7) : $(8) : $(9) ] "]" ;
- }
-}
-
-
-# Assert the exact equality of A and B, not ignoring trailing empty string
-# elements.
-#
-rule exact-equal ( a * : b * )
-{
- if ! [ exact-equal-test $(a) : $(b) ]
- {
- errors.error-skip-frames 3 assertion failure: \"$(a)\" "==" \"$(b)\" ;
- }
-}
-
-
-# Assert that the given variable is not an empty list.
-#
-rule variable-not-empty ( name )
-{
- local value = [ modules.peek [ CALLER_MODULE ] : $(name) ] ;
- if ! $(value)-is-not-empty
- {
- errors.error-skip-frames 3 assertion failure: Expected variable
- \"$(name)\" not to be an empty list ;
- }
-}
-
-
-rule __test__ ( )
-{
- # Helper rule used to avoid test duplication related to different list
- # equality test rules.
- #
- local rule run-equality-test ( equality-assert : ignore-trailing-empty-strings ? )
- {
- local not-equality-assert = not-$(equality-assert) ;
-
- # When the given equality test is expected to ignore trailing empty
- # strings some of the test results should be inverted.
- local not-equality-assert-i = not-$(equality-assert) ;
- if $(ignore-trailing-empty-strings)
- {
- not-equality-assert-i = $(equality-assert) ;
- }
-
- $(equality-assert) : ;
- $(equality-assert) "" "" : "" "" ;
- $(not-equality-assert-i) : "" "" ;
- $(equality-assert) x : x ;
- $(not-equality-assert) : x ;
- $(not-equality-assert) "" : x ;
- $(not-equality-assert) "" "" : x ;
- $(not-equality-assert-i) x : x "" ;
- $(equality-assert) x "" : x "" ;
- $(not-equality-assert) x : "" x ;
- $(equality-assert) "" x : "" x ;
-
- $(equality-assert) 1 2 3 : 1 2 3 ;
- $(not-equality-assert) 1 2 3 : 3 2 1 ;
- $(not-equality-assert) 1 2 3 : 1 5 3 ;
- $(not-equality-assert) 1 2 3 : 1 "" 3 ;
- $(not-equality-assert) 1 2 3 : 1 1 2 3 ;
- $(not-equality-assert) 1 2 3 : 1 2 2 3 ;
- $(not-equality-assert) 1 2 3 : 5 6 7 ;
-
- # Extra variables used here just to make sure Boost Jam or Boost Build
- # do not handle lists with empty strings differently depending on
- # whether they are literals or stored in variables.
-
- local empty = ;
- local empty-strings = "" "" ;
- local x-empty-strings = x "" "" ;
- local empty-strings-x = "" "" x ;
-
- $(equality-assert) : $(empty) ;
- $(not-equality-assert-i) "" : $(empty) ;
- $(not-equality-assert-i) "" "" : $(empty) ;
- $(not-equality-assert-i) : $(empty-strings) ;
- $(not-equality-assert-i) "" : $(empty-strings) ;
- $(equality-assert) "" "" : $(empty-strings) ;
- $(equality-assert) $(empty) : $(empty) ;
- $(equality-assert) $(empty-strings) : $(empty-strings) ;
- $(not-equality-assert-i) $(empty) : $(empty-strings) ;
- $(equality-assert) $(x-empty-strings) : $(x-empty-strings) ;
- $(equality-assert) $(empty-strings-x) : $(empty-strings-x) ;
- $(not-equality-assert) $(empty-strings-x) : $(x-empty-strings) ;
- $(not-equality-assert-i) x : $(x-empty-strings) ;
- $(not-equality-assert) x : $(empty-strings-x) ;
- $(not-equality-assert-i) x : $(x-empty-strings) ;
- $(not-equality-assert-i) x "" : $(x-empty-strings) ;
- $(equality-assert) x "" "" : $(x-empty-strings) ;
- $(not-equality-assert) x : $(empty-strings-x) ;
- $(not-equality-assert) "" x : $(empty-strings-x) ;
- $(equality-assert) "" "" x : $(empty-strings-x) ;
- }
-
-
- # ---------------
- # Equality tests.
- # ---------------
-
- run-equality-test equal : ignore-trailing-empty-strings ;
- run-equality-test exact-equal ;
-
-
- # -------------------------
- # assert.set-equal() tests.
- # -------------------------
-
- set-equal : ;
- not-set-equal "" "" : ;
- set-equal "" "" : "" ;
- set-equal "" "" : "" "" ;
- set-equal a b c : a b c ;
- set-equal a b c : b c a ;
- set-equal a b c a : a b c ;
- set-equal a b c : a b c a ;
- not-set-equal a b c : a b c d ;
- not-set-equal a b c d : a b c ;
-}
diff --git a/tools/build/v2/util/doc.jam b/tools/build/v2/util/doc.jam
deleted file mode 100644
index 6e89d075ee..0000000000
--- a/tools/build/v2/util/doc.jam
+++ /dev/null
@@ -1,997 +0,0 @@
-# Copyright 2002, 2005 Dave Abrahams
-# Copyright 2002, 2003, 2006 Rene Rivera
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Documentation system, handles --help requests.
-# It defines rules that attach documentation to modules, rules, and variables.
-# Collects and generates documentation for the various parts of the build
-# system. The documentation is collected from comments integrated into the code.
-
-import modules ;
-import print ;
-import set ;
-import container ;
-import "class" ;
-import sequence ;
-import path ;
-
-
-# The type of output to generate.
-# "console" is formatted text echoed to the console (the default);
-# "text" is formatted text appended to the output file;
-# "html" is HTML output to the file.
-#
-help-output = console ;
-
-
-# The file to output documentation to when generating "text" or "html" help.
-# This is without extension as the extension is determined by the type of
-# output.
-#
-help-output-file = help ;
-
-# Whether to include local rules in help output.
-#
-.option.show-locals ?= ;
-
-# When showing documentation for a module, whether to also automatically
-# generate the detailed docs for each item in the module.
-#
-.option.detailed ?= ;
-
-# Generate debug output as the help is generated and modules are parsed.
-#
-.option.debug ?= ;
-
-# Enable or disable a documentation option.
-#
-local rule set-option (
- option # The option name.
- : value ? # Enabled (non-empty), or disabled (empty)
-)
-{
- .option.$(option) = $(value) ;
-}
-
-
-# Set the type of output.
-#
-local rule set-output ( type )
-{
- help-output = $(type) ;
-}
-
-
-# Set the output to a file.
-#
-local rule set-output-file ( file )
-{
- help-output-file = $(file) ;
-}
-
-
-# Extracts the brief comment from a complete comment. The brief comment is the
-# first sentence.
-#
-local rule brief-comment (
- docs * # The comment documentation.
-)
-{
- local d = $(docs:J=" ") ;
- local p = [ MATCH ".*([.])$" : $(d) ] ;
- if ! $(p) { d = $(d)"." ; }
- d = $(d)" " ;
- local m = [ MATCH "^([^.]+[.])(.*)" : $(d) ] ;
- local brief = $(m[1]) ;
- while $(m[2]) && [ MATCH "^([^ ])" : $(m[2]) ]
- {
- m = [ MATCH "^([^.]+[.])(.*)" : $(m[2]) ] ;
- brief += $(m[1]) ;
- }
- return $(brief:J="") ;
-}
-
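
In other words, the brief documentation is everything up to the first period that is followed by whitespace. A compact Python sketch of the same extraction follows; it is illustrative only and simplifies the handling of abbreviations.

# Sketch of the brief-comment extraction: keep the first sentence of the docs.
import re

def brief_comment(doc_lines):
    text = " ".join(doc_lines).strip()
    if not text.endswith("."):
        text += "."
    m = re.match(r"(.+?\.)(\s|$)", text)
    return m.group(1) if m else text

print(brief_comment(["Runs the test.", "Also captures its output."]))
# -> "Runs the test."
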
-
-# Specifies the documentation for the current module.
-#
-local rule set-module-doc (
- module-name ? # The name of the module to document.
- : docs * # The documentation for the module.
-)
-{
- module-name ?= * ;
-
- $(module-name).brief = [ brief-comment $(docs) ] ;
- $(module-name).docs = $(docs) ;
-
- if ! $(module-name) in $(documented-modules)
- {
- documented-modules += $(module-name) ;
- }
-}
-
-
-# Specifies the copyright for the current module.
-#
-local rule set-module-copyright (
- module-name ? # The name of the module to document.
- : copyright * # The copyright for the module.
-)
-{
- module-name ?= * ;
-
- $(module-name).copy-brief = [ brief-comment $(copyright) ] ;
-    $(module-name).copy-docs = $(copyright) ;
-
- if ! $(module-name) in $(documented-modules)
- {
- documented-modules += $(module-name) ;
- }
-}
-
-
-# Specifies the documentation for a rule in the current module. If called in the
-# global module, this documents a global rule.
-#
-local rule set-rule-doc (
- name # The name of the rule.
- module-name ? # The name of the module to document.
- is-local ? # Whether the rule is local to the module.
- : docs * # The documentation for the rule.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
- $(module-name).$(name).docs = $(docs) ;
- $(module-name).$(name).is-local = $(is-local) ;
-
- if ! $(name) in $($(module-name).rules)
- {
- $(module-name).rules += $(name) ;
- }
-}
-
-
-# Specifies a class; this will turn a rule into a class.
-#
-local rule set-class-doc (
- name # The name of the class.
- module-name ? # The name of the module to document.
- : super-name ? # The super class name.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).is-class = true ;
- $(module-name).$(name).super-name = $(super-name) ;
- $(module-name).$(name).class-rules =
- [ MATCH "^($(name)[.].*)" : $($(module-name).rules) ] ;
- $(module-name).$($(module-name).$(name).class-rules).is-class-rule = true ;
-
- $(module-name).classes += $(name) ;
- $(module-name).class-rules += $($(module-name).$(name).class-rules) ;
- $(module-name).rules =
- [ set.difference $($(module-name).rules) :
- $(name) $($(module-name).$(name).class-rules) ] ;
-}
-
-
-# Set the argument call signature of a rule.
-#
-local rule set-rule-arguments-signature (
- name # The name of the rule.
- module-name ? # The name of the module to document.
- : signature * # The arguments signature.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).signature = $(signature) ;
-}
-
-
-# Specifies the documentation for an argument of a rule.
-#
-local rule set-argument-doc (
- name # The name of the argument.
- qualifier # Argument syntax qualifier, "*", "+", etc.
- rule-name # The name of the rule.
-    module-name ?   # The optional name of the module.
- : docs * # The documentation.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(rule-name).args.$(name).qualifier = $(qualifier) ;
- $(module-name).$(rule-name).args.$(name).docs = $(docs) ;
-
- if ! $(name) in $($(module-name).$(rule-name).args)
- {
- $(module-name).$(rule-name).args += $(name) ;
- }
-}
-
-
-# Specifies the documentation for a variable in the current module. If called in
-# the global module, the global variable is documented.
-#
-local rule set-variable-doc (
- name # The name of the variable.
- default # The default value.
- initial # The initial value.
- module-name ? # The name of the module to document.
- : docs * # The documentation for the variable.
-)
-{
- module-name ?= * ;
-
- $(module-name).$(name).brief = [ brief-comment $(docs) ] ;
- $(module-name).$(name).default = $(default) ;
- $(module-name).$(name).initial = $(initial) ;
- $(module-name).$(name).docs = $(docs) ;
-
- if ! $(name) in $($(module-name).variables)
- {
- $(module-name).variables += $(name) ;
- }
-}
-
-
-# Generates a general description of the documentation and help system.
-#
-local rule print-help-top ( )
-{
- print.section "General command line usage" ;
-
- print.text " bjam [options] [properties] [targets]
-
- Options, properties and targets can be specified in any order.
- " ;
-
- print.section "Important Options" ;
-
- print.list-start ;
- print.list-item "--clean Remove targets instead of building" ;
- print.list-item "-a Rebuild everything" ;
- print.list-item "-n Don't execute the commands, only print them" ;
- print.list-item "-d+2 Show commands as they are executed" ;
-    print.list-item "-d0      Suppress all informational messages" ;
- print.list-item "-q Stop at first error" ;
- print.list-item "--debug-configuration Diagnose configuration" ;
- print.list-item "--debug-building Report which targets are built with what properties" ;
- print.list-item "--debug-generator Diagnose generator search/execution" ;
- print.list-end ;
-
- print.section "Further Help"
- The following options can be used to obtain additional documentation.
- ;
-
- print.list-start ;
- print.list-item "--help-options Print more obscure command line options." ;
- print.list-item "--help-internal Boost.Build implementation details." ;
- print.list-item "--help-doc-options Implementation details doc formatting." ;
- print.list-end ;
-}
-
-
-# Generate Jam/Boost.Jam command usage information.
-#
-local rule print-help-usage ( )
-{
- print.section "Boost.Jam Usage"
- "bjam [ options... ] targets..."
- ;
- print.list-start ;
- print.list-item -a;
- Build all targets, even if they are current. ;
- print.list-item -fx;
- Read '"x"' as the Jamfile for building instead of searching for the
- Boost.Build system. ;
- print.list-item -jx;
- Run up to '"x"' commands concurrently. ;
- print.list-item -n;
- Do not execute build commands. Instead print out the commands as they
- would be executed if building. ;
- print.list-item -ox;
- Output the used build commands to file '"x"'. ;
- print.list-item -q;
- Quit as soon as a build failure is encountered. Without this option
-        Boost.Jam will continue building as many targets as it can. ;
- print.list-item -sx=y;
- Sets a Jam variable '"x"' to the value '"y"', overriding any value that
- variable would have from the environment. ;
- print.list-item -tx;
- Rebuild the target '"x"', even if it is up-to-date. ;
- print.list-item -v;
- Display the version of bjam. ;
- print.list-item --x;
- Any option not explicitly handled by Boost.Jam remains available to
- build scripts using the '"ARGV"' variable. ;
- print.list-item -dn;
- Enables output of diagnostic messages. The debug level '"n"' and all
- below it are enabled by this option. ;
- print.list-item -d+n;
- Enables output of diagnostic messages. Only the output for debug level
- '"n"' is enabled. ;
- print.list-end ;
- print.section "Debug Levels"
-        Each debug level shows a different set of information, usually with
-        higher levels producing more verbose information. The following levels
-        are supported: ;
- print.list-start ;
- print.list-item 0;
- Turn off all diagnostic output. Only errors are reported. ;
- print.list-item 1;
- Show the actions taken for building targets, as they are executed. ;
- print.list-item 2;
- Show "quiet" actions and display all action text, as they are executed. ;
- print.list-item 3;
- Show dependency analysis, and target/source timestamps/paths. ;
- print.list-item 4;
- Show arguments of shell invocations. ;
- print.list-item 5;
- Show rule invocations and variable expansions. ;
- print.list-item 6;
- Show directory/header file/archive scans, and attempts at binding to targets. ;
- print.list-item 7;
- Show variable settings. ;
- print.list-item 8;
- Show variable fetches, variable expansions, and evaluation of '"if"' expressions. ;
- print.list-item 9;
- Show variable manipulation, scanner tokens, and memory usage. ;
- print.list-item 10;
- Show execution times for rules. ;
- print.list-item 11;
- Show parsing progress of Jamfiles. ;
- print.list-item 12;
- Show graph for target dependencies. ;
- print.list-item 13;
- Show changes in target status (fate). ;
- print.list-end ;
-}
-
-
-# Generates description of options controlling the help system. This
-# automatically reads the options as all variables in the doc module of the form
-# ".option.*".
-#
-local rule print-help-options (
- module-name # The doc module.
-)
-{
- print.section "Help Options"
-        These are all the options available for controlling the help system in
-        various ways. Options can be enabled or disabled with
-        '"--help-enable-<option>"' and '"--help-disable-<option>"'
-        respectively.
- ;
- local options-to-list = [ MATCH ^[.]option[.](.*) : $($(module-name).variables) ] ;
- if $(options-to-list)
- {
- print.list-start ;
- for local option in [ sequence.insertion-sort $(options-to-list) ]
- {
- local def = disabled ;
- if $($(module-name)..option.$(option).default) != "(empty)"
- {
- def = enabled ;
- }
- print.list-item $(option): $($(module-name)..option.$(option).docs)
- Default is $(def). ;
- }
- print.list-end ;
- }
-}
-
-
-# Generate brief documentation for all the known items in the section for a
-# module. Possible sections are: "rules", and "variables".
-#
-local rule print-help-module-section (
- module # The module name.
- section # rules or variables.
- : section-head # The title of the section.
- section-description * # The detailed description of the section.
-)
-{
- if $($(module).$(section))
- {
- print.section $(section-head) $(section-description) ;
- print.list-start ;
- for local item in [ sequence.insertion-sort $($(module).$(section)) ]
- {
- local show = ;
- if ! $($(module).$(item).is-local)
- {
- show = yes ;
- }
- if $(.option.show-locals)
- {
- show = yes ;
- }
- if $(show)
- {
- print.list-item $(item): $($(module).$(item).brief) ;
- }
- }
- print.list-end ;
- }
-}
-
-
-# Generate documentation for all possible modules. We attempt to list all known
-# modules together with a brief description of each.
-#
-local rule print-help-all (
- ignored # Usually the module name, but is ignored here.
-)
-{
- print.section "Modules"
- "These are all the known modules. Use --help <module> to get more"
- "detailed information."
- ;
- if $(documented-modules)
- {
- print.list-start ;
- for local module-name in [ sequence.insertion-sort $(documented-modules) ]
- {
- # The brief docs for each module.
- print.list-item $(module-name): $($(module-name).brief) ;
- }
- print.list-end ;
- }
- # The documentation for each module when details are requested.
- if $(documented-modules) && $(.option.detailed)
- {
- for local module-name in [ sequence.insertion-sort $(documented-modules) ]
- {
-            # The detailed docs for each module.
- print-help-module $(module-name) ;
- }
- }
-}
-
-
-# Generate documentation for a module. Basic information about the module is
-# generated.
-#
-local rule print-help-module (
- module-name # The module to generate docs for.
-)
-{
- # Print the docs.
- print.section "Module '$(module-name)'" $($(module-name).docs) ;
-
- # Print out the documented classes.
- print-help-module-section $(module-name) classes : "Module '$(module-name)' classes"
- Use --help $(module-name).<class-name> to get more information. ;
-
- # Print out the documented rules.
- print-help-module-section $(module-name) rules : "Module '$(module-name)' rules"
- Use --help $(module-name).<rule-name> to get more information. ;
-
- # Print out the documented variables.
- print-help-module-section $(module-name) variables : "Module '$(module-name)' variables"
- Use --help $(module-name).<variable-name> to get more information. ;
-
-    # Print out all the same information, but in detailed form.
- if $(.option.detailed)
- {
- print-help-classes $(module-name) ;
- print-help-rules $(module-name) ;
- print-help-variables $(module-name) ;
- }
-}
-
-
-# Generate documentation for a set of rules in a module.
-#
-local rule print-help-rules (
- module-name # Module of the rules.
- : name * # Optional list of rules to describe.
-)
-{
- name ?= $($(module-name).rules) ;
- if [ set.intersection $(name) : $($(module-name).rules) $($(module-name).class-rules) ]
- {
- # Print out the given rules.
- for local rule-name in [ sequence.insertion-sort $(name) ]
- {
- if $(.option.show-locals) || ! $($(module-name).$(rule-name).is-local)
- {
- local signature = $($(module-name).$(rule-name).signature:J=" ") ;
- signature ?= "" ;
- print.section "Rule '$(module-name).$(rule-name) ( $(signature) )'"
- $($(module-name).$(rule-name).docs) ;
- if $($(module-name).$(rule-name).args)
- {
- print.list-start ;
- for local arg-name in $($(module-name).$(rule-name).args)
- {
- print.list-item $(arg-name): $($(module-name).$(rule-name).args.$(arg-name).docs) ;
- }
- print.list-end ;
- }
- }
- }
- }
-}
-
-
-# Generate documentation for a set of classes in a module.
-#
-local rule print-help-classes (
- module-name # Module of the classes.
- : name * # Optional list of classes to describe.
-)
-{
- name ?= $($(module-name).classes) ;
- if [ set.intersection $(name) : $($(module-name).classes) ]
- {
- # Print out the given classes.
- for local class-name in [ sequence.insertion-sort $(name) ]
- {
- if $(.option.show-locals) || ! $($(module-name).$(class-name).is-local)
- {
- local signature = $($(module-name).$(class-name).signature:J=" ") ;
- signature ?= "" ;
- print.section "Class '$(module-name).$(class-name) ( $(signature) )'"
- $($(module-name).$(class-name).docs)
- "Inherits from '"$($(module-name).$(class-name).super-name)"'." ;
- if $($(module-name).$(class-name).args)
- {
- print.list-start ;
- for local arg-name in $($(module-name).$(class-name).args)
- {
- print.list-item $(arg-name): $($(module-name).$(class-name).args.$(arg-name).docs) ;
- }
- print.list-end ;
- }
- }
-
- # Print out the documented rules of the class.
- print-help-module-section $(module-name) $(class-name).class-rules : "Class '$(module-name).$(class-name)' rules"
- Use --help $(module-name).<rule-name> to get more information. ;
-
- # Print out all the rules if details are requested.
- if $(.option.detailed)
- {
- print-help-rules $(module-name) : $($(module-name).$(class-name).class-rules) ;
- }
- }
- }
-}
-
-
-# Generate documentation for a set of variables in a module.
-#
-local rule print-help-variables (
- module-name ? # Module of the variables.
- : name * # Optional list of variables to describe.
-)
-{
- name ?= $($(module-name).variables) ;
- if [ set.intersection $(name) : $($(module-name).variables) ]
- {
- # Print out the given variables.
- for local variable-name in [ sequence.insertion-sort $(name) ]
- {
- print.section "Variable '$(module-name).$(variable-name)'" $($(module-name).$(variable-name).docs) ;
- if $($(module-name).$(variable-name).default) ||
- $($(module-name).$(variable-name).initial)
- {
- print.list-start ;
- if $($(module-name).$(variable-name).default)
- {
- print.list-item "default value:" '$($(module-name).$(variable-name).default:J=" ")' ;
- }
- if $($(module-name).$(variable-name).initial)
- {
- print.list-item "initial value:" '$($(module-name).$(variable-name).initial:J=" ")' ;
- }
- print.list-end ;
- }
- }
- }
-}
-
-
-# Generate documentation for a project.
-#
-local rule print-help-project (
- unused ?
- : jamfile * # The project Jamfile.
-)
-{
- if $(jamfile<$(jamfile)>.docs)
- {
- # Print the docs.
- print.section "Project-specific help"
- Project has jamfile at $(jamfile) ;
-
- print.lines $(jamfile<$(jamfile)>.docs) "" ;
- }
-}
-
-
-# Generate documentation for a config file.
-#
-local rule print-help-config (
- unused ?
- : type # The type of configuration file: user or site.
- config-file # The configuration Jamfile.
-)
-{
- if $(jamfile<$(config-file)>.docs)
- {
- # Print the docs.
- print.section "Configuration help"
- Configuration file at $(config-file) ;
-
- print.lines $(jamfile<$(config-file)>.docs) "" ;
- }
-}
-
-
-ws = " " ;
-
-# Extract the text from a block of comments.
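-#
-# For example, if $(lines) starts with "# Frobs the widget." followed by
-# "rule frob ( widget )" (hypothetical content), then [ extract-comment lines ]
-# would return "Frobs the widget." and advance $(lines) to the rule line.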
-#
-local rule extract-comment (
- var # The name of the variable to extract from.
-)
-{
- local comment = ;
- local line = $($(var)[1]) ;
- local l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
- while $(l[1]) && $($(var))
- {
- if $(l[2]) { comment += [ MATCH "^[$(ws)]?(.*)$" : $(l[2]) ] ; }
- else { comment += "" ; }
- $(var) = $($(var)[2-]) ;
- line = $($(var)[1]) ;
- l = [ MATCH "^[$(ws)]*(#)(.*)$" : $(line) ] ;
- }
- return $(comment) ;
-}
-
-
-# Extract a single line of Jam syntax, ignoring any comments.
-#
-local rule extract-syntax (
- var # The name of the variable to extract from.
-)
-{
- local syntax = ;
- local line = $($(var)[1]) ;
- while ! $(syntax) && ! [ MATCH "^[$(ws)]*(#)" : $(line) ] && $($(var))
- {
- local m = [ MATCH "^[$(ws)]*(.*)$" : $(line) ] ;
- if $(m)
- {
- syntax = $(m) ;
- }
- $(var) = $($(var)[2-]) ;
- line = $($(var)[1]) ;
- }
- return $(syntax) ;
-}
-
-
-# Extract the next token. This is either a single Jam construct or a whole
-# comment treated as a single token.
-#
-local rule extract-token (
- var # The name of the variable to extract from.
-)
-{
- local parts = ;
- while ! $(parts)
- {
- parts = [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]*(.*)" : $($(var)[1]) ] ;
- if ! $(parts)
- {
- $(var) = $($(var)[2-]) ;
- }
- }
- local token = ;
- if [ MATCH "^(#)" : $(parts[1]) ]
- {
- token = $(parts:J=" ") ;
- $(var) = $($(var)[2-]) ;
- }
- else
- {
- token = $(parts[1]) ;
- $(var) = $(parts[2-]:J=" ") $($(var)[2-]) ;
- }
- return $(token) ;
-}
-
-
-# Scan for a rule declaration as the next item in the variable.
-#
-local rule scan-rule (
- syntax ? # The first part of the text which contains the rule declaration.
- : var # The name of the variable to extract from.
-)
-{
- local rule-parts =
- [ MATCH "^[$(ws)]*(rule|local[$(ws)]*rule)[$(ws)]+([^$(ws)]+)[$(ws)]*(.*)" : $(syntax:J=" ") ] ;
- if $(rule-parts[1])
- {
- # Mark as doc for rule.
- local rule-name = $(rule-parts[2]) ;
- if $(scope-name)
- {
- rule-name = $(scope-name).$(rule-name) ;
- }
- local is-local = [ MATCH "^(local).*" : $(rule-parts[1]) ] ;
- if $(comment-block)
- {
- set-rule-doc $(rule-name) $(module-name) $(is-local) : $(comment-block) ;
- }
- # Parse args of rule.
- $(var) = $(rule-parts[3-]) $($(var)) ;
- set-rule-arguments-signature $(rule-name) $(module-name) : [ scan-rule-arguments $(var) ] ;
- # Scan within this rule's scope.
- local scope-level = [ extract-token $(var) ] ;
- local scope-name = $(rule-name) ;
- while $(scope-level)
- {
- local comment-block = [ extract-comment $(var) ] ;
- local syntax-block = [ extract-syntax $(var) ] ;
- if [ scan-rule $(syntax-block) : $(var) ]
- {
- }
- else if [ MATCH "^(\\{)" : $(syntax-block) ]
- {
- scope-level += "{" ;
- }
- else if [ MATCH "^[^\\}]*([\\}])[$(ws)]*$" : $(syntax-block) ]
- {
- scope-level = $(scope-level[2-]) ;
- }
- }
-
- return true ;
- }
-}
-
-
-# Scan the arguments of a rule.
-#
-local rule scan-rule-arguments (
- var # The name of the variable to extract from.
-)
-{
- local arg-syntax = ;
- local token = [ extract-token $(var) ] ;
- while $(token) != "(" && $(token) != "{"
- {
- token = [ extract-token $(var) ] ;
- }
- if $(token) != "{"
- {
- token = [ extract-token $(var) ] ;
- }
- local arg-signature = ;
- while $(token) != ")" && $(token) != "{"
- {
- local arg-name = ;
- local arg-qualifier = " " ;
- local arg-doc = ;
- if $(token) = ":"
- {
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- }
- arg-name = $(token) ;
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- if [ MATCH "^([\\*\\+\\?])" : $(token) ]
- {
- arg-qualifier = $(token) ;
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- }
- if $(token) = ":"
- {
- arg-signature += $(token) ;
- token = [ extract-token $(var) ] ;
- }
- if [ MATCH "^(#)" : $(token) ]
- {
- $(var) = $(token) $($(var)) ;
- arg-doc = [ extract-comment $(var) ] ;
- token = [ extract-token $(var) ] ;
- }
- set-argument-doc $(arg-name) $(arg-qualifier) $(rule-name) $(module-name) : $(arg-doc) ;
- }
- while $(token) != "{"
- {
- token = [ extract-token $(var) ] ;
- }
- $(var) = "{" $($(var)) ;
- arg-signature ?= "" ;
- return $(arg-signature) ;
-}
-
-
-# Scan for a variable declaration.
-#
-local rule scan-variable (
- syntax ? # The first part of the text which contains the variable declaration.
- : var # The name of the variable to extract from.
-)
-{
- # [1] = name, [2] = assignment operator, [3] = value(s)
- local var-parts =
- [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([\\?\\=]*)[$(ws)]+([^\\;]*)\\;" : $(syntax) ] ;
- if $(var-parts)
- {
- local value = [ MATCH "^(.*)[ ]$" : $(var-parts[3-]:J=" ") ] ;
- local default-value = "" ;
- local initial-value = "" ;
- if $(var-parts[2]) = "?="
- {
- default-value = $(value) ;
- default-value ?= "(empty)" ;
- }
- else
- {
- initial-value = $(value) ;
- initial-value ?= "(empty)" ;
- }
- if $(comment-block)
- {
- set-variable-doc $(var-parts[1]) $(default-value) $(initial-value) $(module-name) : $(comment-block) ;
- }
- return true ;
- }
-}
-
-
-# Scan a class declaration.
-#
-local rule scan-class (
- syntax ? # The syntax text for the class declaration.
-)
-{
- # [1] = class?, [2] = name, [3] = superclass
- local class-parts =
- [ MATCH "^[$(ws)]*([^$(ws)]+)[$(ws)]+([^$(ws)]+)[$(ws)]+:*[$(ws)]*([^$(ws);]*)" : $(syntax) ] ;
- if $(class-parts[1]) = "class" || $(class-parts[1]) = "class.class"
- {
- set-class-doc $(class-parts[2]) $(module-name) : $(class-parts[3]) ;
- }
-}
-
-
-# Scan a module file for documentation comments. This also invokes any actions
-# assigned to the module. The actions are the rules that do the actual output of
-# the documentation. This rule is invoked as the header scan rule for the module
-# file.
-#
-rule scan-module (
- target # The module file.
- : text * # The text in the file, one item per line.
- : action * # Rule to call to output docs for the module.
-)
-{
- if $(.option.debug) { ECHO "HELP:" scanning module target '$(target)' ; }
- local module-name = $(target:B) ;
- local module-documented = ;
- local comment-block = ;
- local syntax-block = ;
- # This is a hack because we can not get the last line of a file if it
- # happens not to have a new-line termination.
- text += "}" ;
- while $(text)
- {
- comment-block = [ extract-comment text ] ;
- syntax-block = [ extract-syntax text ] ;
- if $(.option.debug)
- {
- ECHO "HELP:" comment block; '$(comment-block)' ;
- ECHO "HELP:" syntax block; '$(syntax-block)' ;
- }
- if [ scan-rule $(syntax-block) : text ] { }
- else if [ scan-variable $(syntax-block) : text ] { }
- else if [ scan-class $(syntax-block) ] { }
- else if [ MATCH .*([cC]opyright).* : $(comment-block:J=" ") ]
- {
- # mark as the copyright notice for the module.
- set-module-copyright $(module-name) : $(comment-block) ;
- }
- else if $(action[1]) in "print-help-project" "print-help-config"
- && ! $(jamfile<$(target)>.docs)
- {
- # special module docs for the project jamfile.
- jamfile<$(target)>.docs = $(comment-block) ;
- }
- else if ! $(module-documented)
- {
- # document the module.
- set-module-doc $(module-name) : $(comment-block) ;
- module-documented = true ;
- }
- }
- if $(action)
- {
- $(action[1]) $(module-name) : $(action[2-]) ;
- }
-}
-
-
-# Import scan-module to global scope, so that it is available during header
-# scanning phase.
-#
-IMPORT $(__name__) : scan-module : : doc.scan-module ;
-
-
-# Read in a file using the SHELL builtin and return the individual lines as
-# would be done for header scanning.
-#
-local rule read-file (
- file # The file to read in.
-)
-{
- file = [ path.native [ path.root [ path.make $(file) ] [ path.pwd ] ] ] ;
- if ! $(.file<$(file)>.lines)
- {
- local content ;
- switch [ modules.peek : OS ]
- {
- case NT :
- content = [ SHELL "TYPE \"$(file)\"" ] ;
-
- case * :
- content = [ SHELL "cat \"$(file)\"" ] ;
- }
- local lines ;
- local nl = "
-" ;
- local << = "([^$(nl)]*)[$(nl)](.*)" ;
- local line+ = [ MATCH "$(<<)" : "$(content)" ] ;
- while $(line+)
- {
- lines += $(line+[1]) ;
- line+ = [ MATCH "$(<<)" : "$(line+[2])" ] ;
- }
- .file<$(file)>.lines = $(lines) ;
- }
- return $(.file<$(file)>.lines) ;
-}
-
-
-# Add a scan action used to generate the help documentation. The action rule
-# is passed the name of the module as its first argument. Any remaining
-# arguments are optional and are passed on exactly as specified here.
-#
-local rule do-scan (
- modules + # The modules to scan and perform the action on.
- : action * # The action rule, plus the secondary arguments to pass to the action rule.
-)
-{
- if $(help-output) = text
- {
- print.output $(help-output-file).txt plain ;
- ALWAYS $(help-output-file).txt ;
- DEPENDS all : $(help-output-file).txt ;
- }
- if $(help-output) = html
- {
- print.output $(help-output-file).html html ;
- ALWAYS $(help-output-file).html ;
- DEPENDS all : $(help-output-file).html ;
- }
- for local module-file in $(modules[1--2])
- {
- scan-module $(module-file) : [ read-file $(module-file) ] ;
- }
- scan-module $(modules[-1]) : [ read-file $(modules[-1]) ] : $(action) ;
-}
diff --git a/tools/build/v2/util/indirect.jam b/tools/build/v2/util/indirect.jam
deleted file mode 100644
index ec63f1920b..0000000000
--- a/tools/build/v2/util/indirect.jam
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2003 Dave Abrahams
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import modules ;
-import numbers ;
-
-
-# The pattern that indirect rules must match: module%rule
-.pattern = ^([^%]*)%([^%]+)$ ;
-
-
-#
-# Type checking rules.
-#
-local rule indirect-rule ( x )
-{
- if ! [ MATCH $(.pattern) : $(x) ]
- {
- return "expected a string of the form module%rule, but got \""$(x)"\" for argument" ;
- }
-}
-
-
-# Make an indirect rule which calls the given rule. If context is supplied it is
-# expected to be the module in which to invoke the rule by the 'call' rule
-# below. Otherwise, the rule will be invoked in the module of this rule's
-# caller.
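-#
-# For example, from a module which imports 'indirect', with a hypothetical rule
-# 'frob' defined in a hypothetical module 'my-module':
-#
-#   local r = [ indirect.make frob : my-module ] ;  # yields "my-module%frob"
-#   indirect.call $(r) some-arg ;                   # runs my-module.frob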
-#
-rule make ( rulename bound-args * : context ? )
-{
- context ?= [ CALLER_MODULE ] ;
- context ?= "" ;
- return $(context)%$(rulename) $(bound-args) ;
-}
-
-
-# Make an indirect rule which calls the given rule. 'rulename' may be a
-# qualified rule; if so, it is returned unchanged. Otherwise, if frames is not
-# supplied, the result will be invoked (by 'call', below) in the module of the
-# caller; if frames is supplied, a value greater than 1 specifies how many
-# additional call frames to back up in order to find the module context.
-#
-rule make-qualified ( rulename bound-args * : frames ? )
-{
- if [ MATCH $(.pattern) : $(rulename) ]
- {
- return $(rulename) $(bound-args) ;
- }
- else
- {
- frames ?= 1 ;
- # If the rule name includes a Jamfile module, grab it.
- local module-context = [ MATCH ^(Jamfile<[^>]*>)\\..* : $(rulename) ] ;
-
- if ! $(module-context)
- {
- # Take the first dot-separated element as module name. This disallows
- # module names with dots, but allows rule names with dots.
- module-context = [ MATCH ^([^.]*)\\..* : $(rulename) ] ;
- }
- module-context ?= [ CALLER_MODULE $(frames) ] ;
- return [ make $(rulename) $(bound-args) : $(module-context) ] ;
- }
-}
-
-
-# Returns the module name in which the given indirect rule will be invoked.
-#
-rule get-module ( [indirect-rule] x )
-{
- local m = [ MATCH $(.pattern) : $(x) ] ;
- if ! $(m[1])
- {
- m = ;
- }
- return $(m[1]) ;
-}
-
-
-# Returns the rulename that will be called when x is invoked.
-#
-rule get-rule ( [indirect-rule] x )
-{
- local m = [ MATCH $(.pattern) : $(x) ] ;
- return $(m[2]) ;
-}
-
-
-# Invoke the given indirect-rule.
-#
-rule call ( [indirect-rule] r args * : * )
-{
- return [ modules.call-in [ get-module $(r) ] : [ get-rule $(r) ] $(args)
- : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ] ;
-}
-
-
-rule __test__
-{
- import assert ;
-
- rule foo-barr! ( x )
- {
- assert.equal $(x) : x ;
- }
-
- assert.equal [ get-rule [ make foo-barr! ] ] : foo-barr! ;
- assert.equal [ get-module [ make foo-barr! ] ] : [ CALLER_MODULE ] ;
-
- call [ make foo-barr! ] x ;
- call [ make foo-barr! x ] ;
- call [ make foo-barr! : [ CALLER_MODULE ] ] x ;
-}
diff --git a/tools/build/v2/util/path.jam b/tools/build/v2/util/path.jam
deleted file mode 100644
index ea26b816bc..0000000000
--- a/tools/build/v2/util/path.jam
+++ /dev/null
@@ -1,934 +0,0 @@
-# Copyright Vladimir Prus 2002-2006.
-# Copyright Dave Abrahams 2003-2004.
-# Copyright Rene Rivera 2003-2006.
-#
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or copy at
-# http://www.boost.org/LICENSE_1_0.txt)
-
-# Performs various path manipulations. Paths are always in a 'normalized'
-# representation. In it, a path may be either:
-#
-# - '.', or
-#
-# - ['/'] [ ( '..' '/' )* (token '/')* token ]
-#
-# In plain English, a path can be rooted, '..' elements are allowed only at the
-# beginning, and it never ends in a slash, except for the path consisting of a
-# slash only.
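-#
-# For example, "/foo/bar", "../../src", "." and "/" are all in normalized form,
-# while "foo/bar/" (trailing slash) and "foo/../bar" ('..' not at the
-# beginning) are not.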
-
-import errors ;
-import modules ;
-import regex ;
-import sequence ;
-import set ;
-import version ;
-
-
-os = [ modules.peek : OS ] ;
-if [ modules.peek : UNIX ]
-{
- local uname = [ modules.peek : JAMUNAME ] ;
- switch $(uname)
- {
- case CYGWIN* : os = CYGWIN ;
- case * : os = UNIX ;
- }
-}
-
-
-# Converts the native path into normalized form.
-#
-rule make ( native )
-{
- return [ make-$(os) $(native) ] ;
-}
-
-
-# Builds native representation of the path.
-#
-rule native ( path )
-{
- return [ native-$(os) $(path) ] ;
-}
-
-
-# Tests if a path is rooted.
-#
-rule is-rooted ( path )
-{
- return [ MATCH "^(/)" : $(path) ] ;
-}
-
-
-# Tests if a path has a parent.
-#
-rule has-parent ( path )
-{
- if $(path) != /
- {
- return 1 ;
- }
- else
- {
- return ;
- }
-}
-
-
-# Returns the path without any directory components.
-#
-rule basename ( path )
-{
- return [ MATCH "([^/]+)$" : $(path) ] ;
-}
-
-
-# Returns the parent directory of the path. If no parent exists, an error is
-# issued.
-#
-rule parent ( path )
-{
- if [ has-parent $(path) ]
- {
- if $(path) = .
- {
- return .. ;
- }
- else
- {
- # Strip everything at the end of path up to and including the last
- # slash.
- local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
-
- # Did we strip what we shouldn't?
- if $(result[2]) = ".."
- {
- return $(path)/.. ;
- }
- else
- {
- if ! $(result[1])
- {
- if [ is-rooted $(path) ]
- {
- result = / ;
- }
- else
- {
- result = . ;
- }
- }
- return $(result[1]) ;
- }
- }
- }
- else
- {
- errors.error "Path '$(path)' has no parent" ;
- }
-}
-
-
-# Returns path2 such that "[ join path path2 ] = .". The path may not contain
-# ".." element or be rooted.
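-#
-# For example, [ reverse "foo/bar/giz" ] returns "../../..".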
-#
-rule reverse ( path )
-{
- if $(path) = .
- {
- return $(path) ;
- }
- else
- {
- local tokens = [ regex.split $(path) "/" ] ;
- local tokens2 ;
- for local i in $(tokens)
- {
- tokens2 += .. ;
- }
- return [ sequence.join $(tokens2) : "/" ] ;
- }
-}
-
-
-# Concatenates the passed path elements. Generates an error if any element other
-# than the first one is rooted. Skips any empty or undefined path elements.
-#
-rule join ( elements + )
-{
- if ! $(elements[2-])
- {
- return $(elements[1]) ;
- }
- else
- {
- for local e in $(elements[2-])
- {
- if [ is-rooted $(e) ]
- {
- errors.error only the first element may be rooted ;
- }
- }
- if [ version.check-jam-version 3 1 17 ]
- {
- return [ NORMALIZE_PATH "$(elements)" ] ;
- }
- else
- {
- # Boost Jam prior to version 3.1.17 had problems with its
- # NORMALIZE_PATH rule in case you passed it a leading backslash
- # instead of a slash, in some cases when you sent it an empty
- # initial path element and possibly some others. At least some of
- # those cases were being hit and relied upon when calling this rule
- # from the path.make-NT rule.
- if ! $(elements[1]) && $(elements[2])
- {
- return [ NORMALIZE_PATH "/" "$(elements[2-])" ] ;
- }
- else
- {
- return [ NORMALIZE_PATH "$(elements)" ] ;
- }
- }
- }
-}
-
-
-# If 'path' is relative, it is rooted at 'root'. Otherwise, it is unchanged.
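-#
-# For example, [ root "foo/bar" "/usr" ] would return "/usr/foo/bar", while a
-# rooted path such as "/tmp/x" would be returned unchanged.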
-#
-rule root ( path root )
-{
- if [ is-rooted $(path) ]
- {
- return $(path) ;
- }
- else
- {
- return [ join $(root) $(path) ] ;
- }
-}
-
-
-# Returns the current working directory.
-#
-rule pwd ( )
-{
- if ! $(.pwd)
- {
- .pwd = [ make [ PWD ] ] ;
- }
- return $(.pwd) ;
-}
-
-
-# Returns the list of files matching the given pattern in the specified
-# directory. Both directories and patterns are supplied as portable paths. Each
-# pattern should be a non-absolute path, and can't contain "." or ".." elements.
-# Each slash separated element of pattern can contain the following special
-# characters:
-# - '?', which matches any character
-# - '*', which matches an arbitrary number of characters.
-# A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and
-# only if e1 matches p1, e2 matches p2 and so on.
-#
-# For example:
-# [ glob . : *.cpp ]
-# [ glob . : */build/Jamfile ]
-#
-rule glob ( dirs * : patterns + : exclude-patterns * )
-{
- local result ;
- local real-patterns ;
- local real-exclude-patterns ;
- for local d in $(dirs)
- {
- for local p in $(patterns)
- {
- local pattern = [ path.root $(p) $(d) ] ;
- real-patterns += [ path.native $(pattern) ] ;
- }
-
- for local p in $(exclude-patterns)
- {
- local pattern = [ path.root $(p) $(d) ] ;
- real-exclude-patterns += [ path.native $(pattern) ] ;
- }
- }
-
- local inc = [ GLOB-RECURSIVELY $(real-patterns) ] ;
- inc = [ sequence.transform NORMALIZE_PATH : $(inc) ] ;
- local exc = [ GLOB-RECURSIVELY $(real-exclude-patterns) ] ;
- exc = [ sequence.transform NORMALIZE_PATH : $(exc) ] ;
-
- return [ sequence.transform path.make : [ set.difference $(inc) : $(exc) ] ]
- ;
-}
-
-
-# Recursive version of GLOB. Builds the glob of files while also searching in
-# the subdirectories of the given roots. An optional set of exclusion patterns
-# will filter out the matching entries from the result. The exclusions also
-# apply to the subdirectory scanning, such that directories that match the
-# exclusion patterns will not be searched.
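-#
-# For example, a hypothetical call such as
-#   [ glob-tree src : *.cpp *.hpp : .svn ]
-# would collect the C++ sources under 'src' while skipping '.svn' directories.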
-#
-rule glob-tree ( roots * : patterns + : exclude-patterns * )
-{
- return [ sequence.transform path.make : [ .glob-tree [ sequence.transform
- path.native : $(roots) ] : $(patterns) : $(exclude-patterns) ] ] ;
-}
-
-
-local rule .glob-tree ( roots * : patterns * : exclude-patterns * )
-{
- local excluded ;
- if $(exclude-patterns)
- {
- excluded = [ GLOB $(roots) : $(exclude-patterns) ] ;
- }
- local result = [ set.difference [ GLOB $(roots) : $(patterns) ] :
- $(excluded) ] ;
- local subdirs ;
- for local d in [ set.difference [ GLOB $(roots) : * ] : $(excluded) ]
- {
- if ! ( $(d:D=) in . .. ) && ! [ CHECK_IF_FILE $(d) ]
- {
- subdirs += $(d) ;
- }
- }
- if $(subdirs)
- {
- result += [ .glob-tree $(subdirs) : $(patterns) : $(exclude-patterns) ]
- ;
- }
- return $(result) ;
-}
-
-
-# Returns true if the specified file exists.
-#
-rule exists ( file )
-{
- return [ path.glob $(file:D) : $(file:D=) ] ;
-}
-NATIVE_RULE path : exists ;
-
-
-# Finds the absolute name of the path and returns the list of all its parents,
-# starting with the immediate one. Parents are returned as relative names. If
-# 'upper_limit' is specified, directories above it will be pruned.
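-#
-# For example, with the current directory at "/home/ghost/build", the call
-# [ all-parents "Jamfile" : "" ] would return ". .. ../.. ../../.." (see the
-# __test__ rule below).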
-#
-rule all-parents ( path : upper_limit ? : cwd ? )
-{
- cwd ?= [ pwd ] ;
- local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ;
-
- if ! $(upper_limit)
- {
- upper_limit = / ;
- }
- local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ;
-
- # Leave only elements in 'path_ele' below 'upper_ele'.
- while $(path_ele) && ( $(upper_ele[1]) = $(path_ele[1]) )
- {
- upper_ele = $(upper_ele[2-]) ;
- path_ele = $(path_ele[2-]) ;
- }
-
- # Have all upper elements been removed ?
- if $(upper_ele)
- {
- errors.error "$(upper_limit) is not prefix of $(path)" ;
- }
-
- # Create the relative paths to the parents, one per element in 'path_ele'.
- local result ;
- for local i in $(path_ele)
- {
- path = [ parent $(path) ] ;
- result += $(path) ;
- }
- return $(result) ;
-}
-
-
-# Search for 'pattern' in parent directories of 'dir', up till and including
-# 'upper_limit', if it is specified, or till the filesystem root otherwise.
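-#
-# For example, a hypothetical call such as [ glob-in-parents src : Jamroot ]
-# would return the first Jamroot found in the parent directories of 'src'.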
-#
-rule glob-in-parents ( dir : patterns + : upper-limit ? )
-{
- local result ;
- local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
-
- while $(parent-dirs) && ! $(result)
- {
- result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
- parent-dirs = $(parent-dirs[2-]) ;
- }
- return $(result) ;
-}
-
-
-# Assuming 'child' is a subdirectory of 'parent', return the relative path from
-# 'parent' to 'child'.
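-#
-# For example, [ relative "a/b/c/d" "a/b" ] returns "c/d".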
-#
-rule relative ( child parent : no-error ? )
-{
- local not-a-child ;
- if $(parent) = "."
- {
- return $(child) ;
- }
- else
- {
- local split1 = [ regex.split $(parent) / ] ;
- local split2 = [ regex.split $(child) / ] ;
-
- while $(split1)
- {
- if $(split1[1]) = $(split2[1])
- {
- split1 = $(split1[2-]) ;
- split2 = $(split2[2-]) ;
- }
- else
- {
- not-a-child = true ;
- split1 = ;
- }
- }
- if $(split2)
- {
- if $(not-a-child)
- {
- if $(no-error)
- {
- return not-a-child ;
- }
- else
- {
- errors.error $(child) is not a subdir of $(parent) ;
- }
- }
- else
- {
- return [ join $(split2) ] ;
- }
- }
- else
- {
- return "." ;
- }
- }
-}
-
-
-# Returns the minimal path to path2 that is relative to path1.
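-#
-# For example, [ relative-to "a/b" "a/c" ] would return "../c".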
-#
-rule relative-to ( path1 path2 )
-{
- local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
- local split1 = [ regex.split $(path1) / ] ;
- local split2 = [ regex.split $(path2) / ] ;
-
- while $(split1) && $(root_1)
- {
- if $(split1[1]) = $(split2[1])
- {
- root_1 = $(root_1[2-]) ;
- split1 = $(split1[2-]) ;
- split2 = $(split2[2-]) ;
- }
- else
- {
- split1 = ;
- }
- }
- return [ join . $(root_1) $(split2) ] ;
-}
-
-
-# Returns the list of paths which are used by the operating system for looking
-# up programs.
-#
-rule programs-path ( )
-{
- local result ;
- local raw = [ modules.peek : PATH Path path ] ;
- for local p in $(raw)
- {
- if $(p)
- {
- result += [ path.make $(p) ] ;
- }
- }
- return $(result) ;
-}
-
-rule makedirs ( path )
-{
- local result = true ;
- local native = [ native $(path) ] ;
- if ! [ exists $(native) ]
- {
- if [ makedirs [ parent $(path) ] ]
- {
- if ! [ MAKEDIR $(native) ]
- {
- errors.error "Could not create directory '$(path)'" ;
- result = ;
- }
- }
- }
- return $(result) ;
-}
-
-# Converts native Windows paths into our internal canonic path representation.
-# Supports 'invalid' paths containing multiple successive path separator
-# characters.
-#
-# TODO: Check and if needed add support for Windows 'X:file' path format where
-# the file is located in the current folder on drive X.
-#
-rule make-NT ( native )
-{
- local result ;
-
- if [ version.check-jam-version 3 1 17 ]
- {
- result = [ NORMALIZE_PATH $(native) ] ;
- }
- else
- {
- # This old implementation is really fragile due to the not so clear way the
- # NORMALIZE_PATH rule worked in Boost.Jam versions prior to 3.1.17. E.g.
- # path.join would mostly ignore empty path elements but would root the
- # joined path in case the initial two path elements were empty or some
- # similar accidental weirdness.
- result = [ path.join [ regex.split $(native) "[/\\]" ] ] ;
- }
-
- # We need to add an extra '/' in front in case this is a rooted Windows path
- # starting with a drive letter and not a path separator character since the
- # builtin NORMALIZE_PATH rule has no knowledge of this leading drive letter
- # and treats it as a regular folder name.
- if [ regex.match "(^.:)" : $(native) ]
- {
- result = /$(result) ;
- }
-
- return $(result) ;
-}
-
-
-rule native-NT ( path )
-{
- local result ;
- if [ is-rooted $(path) ] && ! [ regex.match "^/(.:)" : $(path) ]
- {
- result = $(path) ;
- }
- else
- {
- result = [ MATCH "^/?(.*)" : $(path) ] ;
- }
- result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
- return $(result) ;
-}
-
-
-rule make-UNIX ( native )
-{
- # VP: I have no idea how 'native' can be empty here! But it can!
- if ! $(native)
- {
- errors.error "Empty path passed to 'make-UNIX'" ;
- }
- else
- {
- return [ NORMALIZE_PATH $(native:T) ] ;
- }
-}
-
-
-rule native-UNIX ( path )
-{
- return $(path) ;
-}
-
-
-rule make-CYGWIN ( path )
-{
- return [ make-NT $(path) ] ;
-}
-
-
-rule native-CYGWIN ( path )
-{
- local result = $(path) ;
- if [ regex.match "(^/.:)" : $(path) ] # Windows absolute path.
- {
- result = [ MATCH "^/?(.*)" : $(path) ] ; # Remove leading '/'.
- }
- return [ native-UNIX $(result) ] ;
-}
-
-
-# split-path-VMS: splits input native path into device dir file (each part is
-# optional).
-#
-# example:
-#
-# dev:[dir]file.c => dev: [dir] file.c
-#
-rule split-path-VMS ( native )
-{
- local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
- local device = $(matches[1]) ;
- local dir = $(matches[2]) ;
- local file = $(matches[3]) ;
-
- return $(device) $(dir) $(file) ;
-}
-
-
-# Converts a native VMS path into a portable path spec.
-#
-# Does not handle current-device absolute paths such as "[dir]File.c" as it is
-# not clear how to represent them in the portable path notation.
-#
-# Adds a trailing dot (".") to the file part if no extension is present (helps
-# when converting it back into native path).
-#
-rule make-VMS ( native )
-{
- if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
- {
- errors.error "Can't handle default-device absolute paths: " $(native) ;
- }
-
- local parts = [ split-path-VMS $(native) ] ;
- local device = $(parts[1]) ;
- local dir = $(parts[2]) ;
- local file = $(parts[3]) ;
- local elems ;
-
- if $(device)
- {
- #
- # rooted
- #
- elems = /$(device) ;
- }
-
- if $(dir) = "[]"
- {
- #
- # Special case: current directory
- #
- elems = $(elems) "." ;
- }
- else if $(dir)
- {
- dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
- local dir_parts = [ regex.split $(dir) \\. ] ;
-
- if $(dir_parts[1]) = ""
- {
- #
- # Relative path
- #
- dir_parts = $(dir_parts[2--1]) ;
- }
-
- #
- # replace "parent-directory" parts (- => ..)
- #
- dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
-
- elems = $(elems) $(dir_parts) ;
- }
-
- if $(file)
- {
- if ! [ MATCH (\\.) : $(file) ]
- {
- #
- # Always add "." to end of non-extension file.
- #
- file = $(file). ;
- }
- elems = $(elems) $(file) ;
- }
-
- local portable = [ path.join $(elems) ] ;
-
- return $(portable) ;
-}
-
-
-# Converts a portable path spec into a native VMS path.
-#
-# Relies on having at least one dot (".") included in the file name to be able
-# to differentiate it from the directory part.
-#
-rule native-VMS ( path )
-{
- local device = "" ;
- local dir = $(path) ;
- local file = "" ;
- local native ;
- local split ;
-
- #
- # Has device ?
- #
- if [ is-rooted $(dir) ]
- {
- split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
- device = $(split[1]) ;
- dir = $(split[2]) ;
- }
-
- #
- # Has file ?
- #
- # This is no exact science, just guess work:
- #
- # If the last part of the current path spec
- # includes some chars, followed by a dot,
- # optionally followed by more chars -
- # then it is a file (keep your fingers crossed).
- #
- split = [ regex.split $(dir) / ] ;
- local maybe_file = $(split[-1]) ;
-
- if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
- {
- file = $(maybe_file) ;
- dir = [ sequence.join $(split[1--2]) : / ] ;
- }
-
- #
- # Has dir spec ?
- #
- if $(dir) = "."
- {
- dir = "[]" ;
- }
- else if $(dir)
- {
- dir = [ regex.replace $(dir) \\.\\. - ] ;
- dir = [ regex.replace $(dir) / . ] ;
-
- if $(device) = ""
- {
- #
- # Relative directory
- #
- dir = "."$(dir) ;
- }
- dir = "["$(dir)"]" ;
- }
-
- native = [ sequence.join $(device) $(dir) $(file) ] ;
-
- return $(native) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import errors : try catch ;
-
- assert.true is-rooted "/" ;
- assert.true is-rooted "/foo" ;
- assert.true is-rooted "/foo/bar" ;
- assert.result : is-rooted "." ;
- assert.result : is-rooted "foo" ;
- assert.result : is-rooted "foo/bar" ;
-
- assert.true has-parent "foo" ;
- assert.true has-parent "foo/bar" ;
- assert.true has-parent "." ;
- assert.result : has-parent "/" ;
-
- assert.result "." : basename "." ;
- assert.result ".." : basename ".." ;
- assert.result "foo" : basename "foo" ;
- assert.result "foo" : basename "bar/foo" ;
- assert.result "foo" : basename "gaz/bar/foo" ;
- assert.result "foo" : basename "/gaz/bar/foo" ;
-
- assert.result "." : parent "foo" ;
- assert.result "/" : parent "/foo" ;
- assert.result "foo/bar" : parent "foo/bar/giz" ;
- assert.result ".." : parent "." ;
- assert.result ".." : parent "../foo" ;
- assert.result "../../foo" : parent "../../foo/bar" ;
-
- assert.result "." : reverse "." ;
- assert.result ".." : reverse "foo" ;
- assert.result "../../.." : reverse "foo/bar/giz" ;
-
- assert.result "foo" : join "foo" ;
- assert.result "/foo" : join "/" "foo" ;
- assert.result "foo/bar" : join "foo" "bar" ;
- assert.result "foo/bar" : join "foo/giz" "../bar" ;
- assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
- assert.result ".." : join "." ".." ;
- assert.result ".." : join "foo" "../.." ;
- assert.result "../.." : join "../foo" "../.." ;
- assert.result "/foo" : join "/bar" "../foo" ;
- assert.result "foo/giz" : join "foo/giz" "." ;
- assert.result "." : join lib2 ".." ;
- assert.result "/" : join "/a" ".." ;
-
- assert.result /a/b : join /a/b/c .. ;
-
- assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
- assert.result "giz" : join "foo" ".." "giz" ;
- assert.result "foo/giz" : join "foo" "." "giz" ;
-
- try ;
- {
- join "a" "/b" ;
- }
- catch only first element may be rooted ;
-
- local CWD = "/home/ghost/build" ;
- assert.result : all-parents . : . : $(CWD) ;
- assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
- assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
- assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
-
- local CWD = "/home/ghost" ;
- assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
- assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
-
- assert.result "c/d" : relative "a/b/c/d" "a/b" ;
- assert.result "foo" : relative "foo" "." ;
-
- local save-os = [ modules.peek path : os ] ;
- modules.poke path : os : NT ;
-
- assert.result "foo/bar/giz" : make "foo/bar/giz" ;
- assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
- assert.result "foo" : make "foo/" ;
- assert.result "foo" : make "foo\\" ;
- assert.result "foo" : make "foo/." ;
- assert.result "foo" : make "foo/bar/.." ;
- assert.result "foo" : make "foo/bar/../" ;
- assert.result "foo" : make "foo/bar/..\\" ;
- assert.result "foo/bar" : make "foo/././././bar" ;
- assert.result "/foo" : make "\\foo" ;
- assert.result "/D:/My Documents" : make "D:\\My Documents" ;
- assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
-
- # Test processing 'invalid' paths containing multiple successive path
- # separators.
- assert.result "foo" : make "foo//" ;
- assert.result "foo" : make "foo///" ;
- assert.result "foo" : make "foo\\\\" ;
- assert.result "foo" : make "foo\\\\\\" ;
- assert.result "/foo" : make "//foo" ;
- assert.result "/foo" : make "///foo" ;
- assert.result "/foo" : make "\\\\foo" ;
- assert.result "/foo" : make "\\\\\\foo" ;
- assert.result "/foo" : make "\\/\\/foo" ;
- assert.result "foo/bar" : make "foo//\\//\\\\bar//\\//\\\\\\//\\//\\\\" ;
- assert.result "foo" : make "foo/bar//.." ;
- assert.result "foo/bar" : make "foo/bar/giz//.." ;
- assert.result "foo/giz" : make "foo//\\//\\\\bar///\\\\//\\\\////\\/..///giz\\//\\\\\\//\\//\\\\" ;
- assert.result "../../../foo" : make "..///.//..///.//..////foo///" ;
-
- # Test processing 'invalid' rooted paths with too many '..' path elements
- # that would place them before the root.
- assert.result : make "/.." ;
- assert.result : make "/../" ;
- assert.result : make "/../." ;
- assert.result : make "/.././" ;
- assert.result : make "/foo/../bar/giz/.././././../../." ;
- assert.result : make "/foo/../bar/giz/.././././../.././" ;
- assert.result : make "//foo/../bar/giz/.././././../../." ;
- assert.result : make "//foo/../bar/giz/.././././../.././" ;
- assert.result : make "\\\\foo/../bar/giz/.././././../../." ;
- assert.result : make "\\\\foo/../bar/giz/.././././../.././" ;
- assert.result : make "/..///.//..///.//..////foo///" ;
-
- assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
- assert.result "foo" : native "foo" ;
- assert.result "\\foo" : native "/foo" ;
- assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
-
- modules.poke path : os : UNIX ;
-
- assert.result "foo/bar/giz" : make "foo/bar/giz" ;
- assert.result "/sub1" : make "/sub1/." ;
- assert.result "/sub1" : make "/sub1/sub2/.." ;
- assert.result "sub1" : make "sub1/." ;
- assert.result "sub1" : make "sub1/sub2/.." ;
- assert.result "/foo/bar" : native "/foo/bar" ;
-
- modules.poke path : os : VMS ;
-
- #
- # Don't really need to poke os before these
- #
- assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
- assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
- assert.result "disk:" "" "" : split-path-VMS "disk:" ;
- assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
- assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
- assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
- assert.result "" "" "file" : split-path-VMS "file" ;
- assert.result "" "" "" : split-path-VMS "" ;
-
- #
- # Special case: current directory
- #
- assert.result "" "[]" "" : split-path-VMS "[]" ;
- assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
- assert.result "" "[]" "file" : split-path-VMS "[]file" ;
- assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
-
- #
- # Make portable paths
- #
- assert.result "/disk:" : make "disk:" ;
- assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
- assert.result "foo" : make "[.foo]" ;
- assert.result "foo" : make "[.foo.bar.-]" ;
- assert.result ".." : make "[.-]" ;
- assert.result ".." : make "[-]" ;
- assert.result "." : make "[]" ;
- assert.result "giz.h" : make "giz.h" ;
- assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
- assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
- assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
-
- #
- # Special case (adds '.' to end of file w/o extension to
- # disambiguate from directory in portable path spec).
- #
- assert.result "Jamfile." : make "Jamfile" ;
- assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
- assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
-
- #
- # Make native paths
- #
- assert.result "disk:" : native "/disk:" ;
- assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
- assert.result "[.foo]" : native "foo" ;
- assert.result "[.-]" : native ".." ;
- assert.result "[.foo.-]" : native "foo/.." ;
- assert.result "[]" : native "." ;
- assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
- assert.result "giz.h" : native "giz.h" ;
- assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
- assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
-
- modules.poke path : os : $(save-os) ;
-}
diff --git a/tools/build/v2/util/path.py b/tools/build/v2/util/path.py
deleted file mode 100644
index 222b96bfe2..0000000000
--- a/tools/build/v2/util/path.py
+++ /dev/null
@@ -1,904 +0,0 @@
-# Status: this module is ported on demand by whoever needs something
-# from it. Functionality that is not needed by the Python port will
-# be dropped.
-
-# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-# Performs various path manipulations. Paths are always in a 'normalized'
-# representation. In it, a path may be either:
-#
-# - '.', or
-#
-# - ['/'] [ ( '..' '/' )* (token '/')* token ]
-#
-# In plain English, a path can be rooted, '..' elements are allowed only
-# at the beginning, and it never ends in a slash, except for the path
-# consisting of a slash only.
-
-import os.path
-from utility import to_seq
-from glob import glob as builtin_glob
-
-from b2.util import bjam_signature
-
-@bjam_signature((["path", "root"],))
-def root (path, root):
- """ If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
- """
- if os.path.isabs (path):
- return path
- else:
- return os.path.join (root, path)
-
-@bjam_signature((["native"],))
-def make (native):
- """ Converts the native path into normalized form.
- """
- # TODO: make os selection here.
- return make_UNIX (native)
-
-def make_UNIX (native):
-
- # VP: I have no idea how 'native' can be empty here! But it can!
- assert (native)
-
- return os.path.normpath (native)
-
-@bjam_signature((["path"],))
-def native (path):
- """ Builds a native representation of the path.
- """
- # TODO: make os selection here.
- return native_UNIX (path)
-
-def native_UNIX (path):
- return path
-
-
-def pwd ():
- """ Returns the current working directory.
- TODO: is it a good idea to use the current dir? Some use-cases
- may not allow us to depend on the current dir.
- """
- return make (os.getcwd ())
-
-def is_rooted (path):
- """ Tests if a path is rooted.
- """
- return path and path [0] == '/'
-
-
-###################################################################
-# Still to port.
-# Original lines are prefixed with "# "
-#
-# # Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
-# # distribute this software is granted provided this copyright notice appears in
-# # all copies. This software is provided "as is" without express or implied
-# # warranty, and with no claim as to its suitability for any purpose.
-#
-# # Performs various path manipulations. Path are always in a 'normilized'
-# # representation. In it, a path may be either:
-# #
-# # - '.', or
-# #
-# # - ['/'] [ ( '..' '/' )* (token '/')* token ]
-# #
-# # In plain english, path can be rooted, '..' elements are allowed only
-# # at the beginning, and it never ends in slash, except for path consisting
-# # of slash only.
-#
-# import modules ;
-# import sequence ;
-# import regex ;
-# import errors : error ;
-#
-#
-# os = [ modules.peek : OS ] ;
-# if [ modules.peek : UNIX ]
-# {
-# local uname = [ modules.peek : JAMUNAME ] ;
-# switch $(uname)
-# {
-# case CYGWIN* :
-# os = CYGWIN ;
-#
-# case * :
-# os = UNIX ;
-# }
-# }
-#
-# #
-# # Tests if a path is rooted.
-# #
-# rule is-rooted ( path )
-# {
-# return [ MATCH "^(/)" : $(path) ] ;
-# }
-#
-# #
-# # Tests if a path has a parent.
-# #
-# rule has-parent ( path )
-# {
-# if $(path) != / {
-# return 1 ;
-# } else {
-# return ;
-# }
-# }
-#
-# #
-# # Returns the path without any directory components.
-# #
-# rule basename ( path )
-# {
-# return [ MATCH "([^/]+)$" : $(path) ] ;
-# }
-#
-# #
-# # Returns parent directory of the path. If no parent exists, error is issued.
-# #
-# rule parent ( path )
-# {
-# if [ has-parent $(path) ] {
-#
-# if $(path) = . {
-# return .. ;
-# } else {
-#
-# # Strip everything at the end of path up to and including
-# # the last slash
-# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
-#
-# # Did we strip what we shouldn't?
-# if $(result[2]) = ".." {
-# return $(path)/.. ;
-# } else {
-# if ! $(result[1]) {
-# if [ is-rooted $(path) ] {
-# result = / ;
-# } else {
-# result = . ;
-# }
-# }
-# return $(result[1]) ;
-# }
-# }
-# } else {
-# error "Path '$(path)' has no parent" ;
-# }
-# }
-#
-# #
-# # Returns path2 such that "[ join path path2 ] = .".
-# # The path may not contain ".." element or be rooted.
-# #
-# rule reverse ( path )
-# {
-# if $(path) = .
-# {
-# return $(path) ;
-# }
-# else
-# {
-# local tokens = [ regex.split $(path) "/" ] ;
-# local tokens2 ;
-# for local i in $(tokens) {
-# tokens2 += .. ;
-# }
-# return [ sequence.join $(tokens2) : "/" ] ;
-# }
-# }
-#
-# #
-# # Auxillary rule: does all the semantic of 'join', except for error cheching.
-# # The error checking is separated because this rule is recursive, and I don't
-# # like the idea of checking the same input over and over.
-# #
-# local rule join-imp ( elements + )
-# {
-# return [ NORMALIZE_PATH $(elements:J="/") ] ;
-# }
-#
-# #
-# # Contanenates the passed path elements. Generates an error if
-# # any element other than the first one is rooted.
-# #
-# rule join ( elements + )
-# {
-# if ! $(elements[2])
-# {
-# return $(elements[1]) ;
-# }
-# else
-# {
-# for local e in $(elements[2-])
-# {
-# if [ is-rooted $(e) ]
-# {
-# error only first element may be rooted ;
-# }
-# }
-# return [ join-imp $(elements) ] ;
-# }
-# }
-
-
-def glob (dirs, patterns):
- """ Returns the list of files matching the given pattern in the
- specified directory. Both directories and patterns are
- supplied as portable paths. Each pattern should be a non-absolute
- path, and can't contain "." or ".." elements. Each slash separated
- element of pattern can contain the following special characters:
- - '?', which matches any character
- - '*', which matches an arbitrary number of characters.
- A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
- if and only if e1 matches p1, e2 matches p2 and so on.
-
- For example:
- [ glob . : *.cpp ]
- [ glob . : */build/Jamfile ]
- """
-# {
-# local result ;
-# if $(patterns:D)
-# {
-# # When a pattern has a directory element, we first glob for
-# # directory, and then glob for file name is the found directories.
-# for local p in $(patterns)
-# {
-# # First glob for directory part.
-# local globbed-dirs = [ glob $(dirs) : $(p:D) ] ;
-# result += [ glob $(globbed-dirs) : $(p:D="") ] ;
-# }
-# }
-# else
-# {
-# # When a pattern has not directory, we glob directly.
-# # Take care of special ".." value. The "GLOB" rule simply ignores
-# # the ".." element (and ".") element in directory listings. This is
-# # needed so that
-# #
-# # [ glob libs/*/Jamfile ]
-# #
-# # don't return
-# #
-# # libs/../Jamfile (which is the same as ./Jamfile)
-# #
-# # On the other hand, when ".." is explicitly present in the pattern
-# # we need to return it.
-# #
-# for local dir in $(dirs)
-# {
-# for local p in $(patterns)
-# {
-# if $(p) != ".."
-# {
-# result += [ sequence.transform make
-# : [ GLOB [ native $(dir) ] : $(p) ] ] ;
-# }
-# else
-# {
-# result += [ path.join $(dir) .. ] ;
-# }
-# }
-# }
-# }
-# return $(result) ;
-# }
-#
-
-# TODO: (PF) I replaced the code above by this. I think it should work but needs to be tested.
- result = []
- dirs = to_seq (dirs)
- patterns = to_seq (patterns)
-
- splitdirs = []
- for dir in dirs:
- splitdirs += dir.split (os.pathsep)
-
- for dir in splitdirs:
- for pattern in patterns:
- p = os.path.join (dir, pattern)
- import glob
- result.extend (glob.glob (p))
- return result
-
-#
-# Finds the absolute name of the path and returns the list of all its parents,
-# starting with the immediate one. Parents are returned as relative names.
-# If 'upper_limit' is specified, directories above it will be pruned.
-#
-def all_parents(path, upper_limit=None, cwd=None):
-
- if not cwd:
- cwd = os.getcwd()
-
- path_abs = os.path.join(cwd, path)
-
- if upper_limit:
- upper_limit = os.path.join(cwd, upper_limit)
-
- result = []
- while path_abs and path_abs != upper_limit:
- (head, tail) = os.path.split(path)
- path = os.path.join(path, "..")
- result.append(path)
- path_abs = head
-
- if upper_limit and path_abs != upper_limit:
- raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path))
-
- return result
-
-# Search for 'pattern' in parent directories of 'dir', up till and including
-# 'upper_limit', if it is specified, or till the filesystem root otherwise.
-#
-def glob_in_parents(dir, patterns, upper_limit=None):
-
- result = []
- parent_dirs = all_parents(dir, upper_limit)
-
- for p in parent_dirs:
- result = glob(p, patterns)
- if result: break
-
- return result
-
-#
-# #
-# # Assuming 'child' is a subdirectory of 'parent', return the relative
-# # path from 'parent' to 'child'
-# #
-# rule relative ( child parent )
-# {
-# if $(parent) = "."
-# {
-# return $(child) ;
-# }
-# else
-# {
-# local split1 = [ regex.split $(parent) / ] ;
-# local split2 = [ regex.split $(child) / ] ;
-#
-# while $(split1)
-# {
-# if $(split1[1]) = $(split2[1])
-# {
-# split1 = $(split1[2-]) ;
-# split2 = $(split2[2-]) ;
-# }
-# else
-# {
-# errors.error $(child) is not a subdir of $(parent) ;
-# }
-# }
-# return [ join $(split2) ] ;
-# }
-# }
-#
-# # Returns the minimal path to path2 that is relative path1.
-# #
-# rule relative-to ( path1 path2 )
-# {
-# local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
-# local split1 = [ regex.split $(path1) / ] ;
-# local split2 = [ regex.split $(path2) / ] ;
-#
-# while $(split1) && $(root_1)
-# {
-# if $(split1[1]) = $(split2[1])
-# {
-# root_1 = $(root_1[2-]) ;
-# split1 = $(split1[2-]) ;
-# split2 = $(split2[2-]) ;
-# }
-# else
-# {
-# split1 = ;
-# }
-# }
-# return [ join . $(root_1) $(split2) ] ;
-# }
-
-# Returns the list of paths which are used by the operating system
-# for looking up programs
-def programs_path ():
- raw = []
- names = ['PATH', 'Path', 'path']
-
- for name in names:
- raw.append(os.environ.get (name, ''))
-
- result = []
- for elem in raw:
- if elem:
- for p in elem.split(os.path.pathsep):
- result.append(make(p))
-
- return result
-
-# rule make-NT ( native )
-# {
-# local tokens = [ regex.split $(native) "[/\\]" ] ;
-# local result ;
-#
-# # Handle paths ending with slashes
-# if $(tokens[-1]) = ""
-# {
-# tokens = $(tokens[1--2]) ; # discard the empty element
-# }
-#
-# result = [ path.join $(tokens) ] ;
-#
-# if [ regex.match "(^.:)" : $(native) ]
-# {
-# result = /$(result) ;
-# }
-#
-# if $(native) = ""
-# {
-# result = "." ;
-# }
-#
-# return $(result) ;
-# }
-#
-# rule native-NT ( path )
-# {
-# local result = [ MATCH "^/?(.*)" : $(path) ] ;
-# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
-# return $(result) ;
-# }
-#
-# rule make-CYGWIN ( path )
-# {
-# return [ make-NT $(path) ] ;
-# }
-#
-# rule native-CYGWIN ( path )
-# {
-# local result = $(path) ;
-# if [ regex.match "(^/.:)" : $(path) ] # win absolute
-# {
-# result = [ MATCH "^/?(.*)" : $(path) ] ; # remove leading '/'
-# }
-# return [ native-UNIX $(result) ] ;
-# }
-#
-# #
-# # split-VMS: splits input native path into
-# # device dir file (each part is optional),
-# # example:
-# #
-# # dev:[dir]file.c => dev: [dir] file.c
-# #
-# rule split-path-VMS ( native )
-# {
-# local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
-# local device = $(matches[1]) ;
-# local dir = $(matches[2]) ;
-# local file = $(matches[3]) ;
-#
-# return $(device) $(dir) $(file) ;
-# }
-#
-# #
-# # Converts a native VMS path into a portable path spec.
-# #
-# # Does not handle current-device absolute paths such
-# # as "[dir]File.c" as it is not clear how to represent
-# # them in the portable path notation.
-# #
-# # Adds a trailing dot (".") to the file part if no extension
-# # is present (helps when converting it back into native path).
-# #
-# rule make-VMS ( native )
-# {
-# if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
-# {
-# errors.error "Can't handle default-device absolute paths: " $(native) ;
-# }
-#
-# local parts = [ split-path-VMS $(native) ] ;
-# local device = $(parts[1]) ;
-# local dir = $(parts[2]) ;
-# local file = $(parts[3]) ;
-# local elems ;
-#
-# if $(device)
-# {
-# #
-# # rooted
-# #
-# elems = /$(device) ;
-# }
-#
-# if $(dir) = "[]"
-# {
-# #
-# # Special case: current directory
-# #
-# elems = $(elems) "." ;
-# }
-# else if $(dir)
-# {
-# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
-# local dir_parts = [ regex.split $(dir) \\. ] ;
-#
-# if $(dir_parts[1]) = ""
-# {
-# #
-# # Relative path
-# #
-# dir_parts = $(dir_parts[2--1]) ;
-# }
-#
-# #
-# # replace "parent-directory" parts (- => ..)
-# #
-# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
-#
-# elems = $(elems) $(dir_parts) ;
-# }
-#
-# if $(file)
-# {
-# if ! [ MATCH (\\.) : $(file) ]
-# {
-# #
-# # Always add "." to end of non-extension file
-# #
-# file = $(file). ;
-# }
-# elems = $(elems) $(file) ;
-# }
-#
-# local portable = [ path.join $(elems) ] ;
-#
-# return $(portable) ;
-# }
-#
-# #
-# # Converts a portable path spec into a native VMS path.
-# #
-# # Relies on having at least one dot (".") included in the file
-# # name to be able to differentiate it ftom the directory part.
-# #
-# rule native-VMS ( path )
-# {
-# local device = "" ;
-# local dir = $(path) ;
-# local file = "" ;
-# local native ;
-# local split ;
-#
-# #
-# # Has device ?
-# #
-# if [ is-rooted $(dir) ]
-# {
-# split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
-# device = $(split[1]) ;
-# dir = $(split[2]) ;
-# }
-#
-# #
-# # Has file ?
-# #
-# # This is no exact science, just guess work:
-# #
-# # If the last part of the current path spec
-# # includes some chars, followed by a dot,
-# # optionally followed by more chars -
-# # then it is a file (keep your fingers crossed).
-# #
-# split = [ regex.split $(dir) / ] ;
-# local maybe_file = $(split[-1]) ;
-#
-# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
-# {
-# file = $(maybe_file) ;
-# dir = [ sequence.join $(split[1--2]) : / ] ;
-# }
-#
-# #
-# # Has dir spec ?
-# #
-# if $(dir) = "."
-# {
-# dir = "[]" ;
-# }
-# else if $(dir)
-# {
-# dir = [ regex.replace $(dir) \\.\\. - ] ;
-# dir = [ regex.replace $(dir) / . ] ;
-#
-# if $(device) = ""
-# {
-# #
-# # Relative directory
-# #
-# dir = "."$(dir) ;
-# }
-# dir = "["$(dir)"]" ;
-# }
-#
-# native = [ sequence.join $(device) $(dir) $(file) ] ;
-#
-# return $(native) ;
-# }
-#
-#
-# rule __test__ ( ) {
-#
-# import assert ;
-# import errors : try catch ;
-#
-# assert.true is-rooted "/" ;
-# assert.true is-rooted "/foo" ;
-# assert.true is-rooted "/foo/bar" ;
-# assert.result : is-rooted "." ;
-# assert.result : is-rooted "foo" ;
-# assert.result : is-rooted "foo/bar" ;
-#
-# assert.true has-parent "foo" ;
-# assert.true has-parent "foo/bar" ;
-# assert.true has-parent "." ;
-# assert.result : has-parent "/" ;
-#
-# assert.result "." : basename "." ;
-# assert.result ".." : basename ".." ;
-# assert.result "foo" : basename "foo" ;
-# assert.result "foo" : basename "bar/foo" ;
-# assert.result "foo" : basename "gaz/bar/foo" ;
-# assert.result "foo" : basename "/gaz/bar/foo" ;
-#
-# assert.result "." : parent "foo" ;
-# assert.result "/" : parent "/foo" ;
-# assert.result "foo/bar" : parent "foo/bar/giz" ;
-# assert.result ".." : parent "." ;
-# assert.result ".." : parent "../foo" ;
-# assert.result "../../foo" : parent "../../foo/bar" ;
-#
-#
-# assert.result "." : reverse "." ;
-# assert.result ".." : reverse "foo" ;
-# assert.result "../../.." : reverse "foo/bar/giz" ;
-#
-# assert.result "foo" : join "foo" ;
-# assert.result "/foo" : join "/" "foo" ;
-# assert.result "foo/bar" : join "foo" "bar" ;
-# assert.result "foo/bar" : join "foo/giz" "../bar" ;
-# assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
-# assert.result ".." : join "." ".." ;
-# assert.result ".." : join "foo" "../.." ;
-# assert.result "../.." : join "../foo" "../.." ;
-# assert.result "/foo" : join "/bar" "../foo" ;
-# assert.result "foo/giz" : join "foo/giz" "." ;
-# assert.result "." : join lib2 ".." ;
-# assert.result "/" : join "/a" ".." ;
-#
-# assert.result /a/b : join /a/b/c .. ;
-#
-# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
-# assert.result "giz" : join "foo" ".." "giz" ;
-# assert.result "foo/giz" : join "foo" "." "giz" ;
-#
-# try ;
-# {
-# join "a" "/b" ;
-# }
-# catch only first element may be rooted ;
-#
-# local CWD = "/home/ghost/build" ;
-# assert.result : all-parents . : . : $(CWD) ;
-# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
-# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
-# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
-#
-# local CWD = "/home/ghost" ;
-# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
-# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
-#
-# assert.result "c/d" : relative "a/b/c/d" "a/b" ;
-# assert.result "foo" : relative "foo" "." ;
-#
-# local save-os = [ modules.peek path : os ] ;
-# modules.poke path : os : NT ;
-#
-# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
-# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
-# assert.result "foo" : make "foo/." ;
-# assert.result "foo" : make "foo/bar/.." ;
-# assert.result "/D:/My Documents" : make "D:\\My Documents" ;
-# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
-#
-# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
-# assert.result "foo" : native "foo" ;
-# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
-#
-# modules.poke path : os : UNIX ;
-#
-# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
-# assert.result "/sub1" : make "/sub1/." ;
-# assert.result "/sub1" : make "/sub1/sub2/.." ;
-# assert.result "sub1" : make "sub1/." ;
-# assert.result "sub1" : make "sub1/sub2/.." ;
-# assert.result "/foo/bar" : native "/foo/bar" ;
-#
-# modules.poke path : os : VMS ;
-#
-# #
-# # Don't really need to poke os before these
-# #
-# assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
-# assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
-# assert.result "disk:" "" "" : split-path-VMS "disk:" ;
-# assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
-# assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
-# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
-# assert.result "" "" "file" : split-path-VMS "file" ;
-# assert.result "" "" "" : split-path-VMS "" ;
-#
-# #
-# # Special case: current directory
-# #
-# assert.result "" "[]" "" : split-path-VMS "[]" ;
-# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
-# assert.result "" "[]" "file" : split-path-VMS "[]file" ;
-# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
-#
-# #
-# # Make portable paths
-# #
-# assert.result "/disk:" : make "disk:" ;
-# assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
-# assert.result "foo" : make "[.foo]" ;
-# assert.result "foo" : make "[.foo.bar.-]" ;
-# assert.result ".." : make "[.-]" ;
-# assert.result ".." : make "[-]" ;
-# assert.result "." : make "[]" ;
-# assert.result "giz.h" : make "giz.h" ;
-# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
-# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
-# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
-#
-# #
-# # Special case (adds '.' to end of file w/o extension to
-# # disambiguate from directory in portable path spec).
-# #
-# assert.result "Jamfile." : make "Jamfile" ;
-# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
-# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
-#
-# #
-# # Make native paths
-# #
-# assert.result "disk:" : native "/disk:" ;
-# assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
-# assert.result "[.foo]" : native "foo" ;
-# assert.result "[.-]" : native ".." ;
-# assert.result "[.foo.-]" : native "foo/.." ;
-# assert.result "[]" : native "." ;
-# assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
-# assert.result "giz.h" : native "giz.h" ;
-# assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
-# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
-#
-# modules.poke path : os : $(save-os) ;
-#
-# }
-
-#
-
-
-#def glob(dir, patterns):
-# result = []
-# for pattern in patterns:
-# result.extend(builtin_glob(os.path.join(dir, pattern)))
-# return result
-
-def glob(dirs, patterns, exclude_patterns=None):
- """Returns the list of files matching the given pattern in the
- specified directory. Both directories and patterns are
- supplied as portable paths. Each pattern should be a non-absolute
- path, and can't contain '.' or '..' elements. Each slash separated
- element of pattern can contain the following special characters:
- - '?', which matches any character
- - '*', which matches an arbitrary number of characters.
- A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3
- if and only if e1 matches p1, e2 matches p2 and so on.
- For example:
- [ glob . : *.cpp ]
- [ glob . : */build/Jamfile ]
- """
-
- assert(isinstance(patterns, list))
- assert(isinstance(dirs, list))
-
- if not exclude_patterns:
- exclude_patterns = []
- else:
- assert(isinstance(exclude_patterns, list))
-
- real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
- real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns
- for d in dirs]
-
- inc = [os.path.normpath(name) for p in real_patterns
- for name in builtin_glob(p)]
- exc = [os.path.normpath(name) for p in real_exclude_patterns
- for name in builtin_glob(p)]
- return [x for x in inc if x not in exc]
-
-def glob_tree(roots, patterns, exclude_patterns=None):
- """Recursive version of GLOB. Builds the glob of files while
- also searching in the subdirectories of the given roots. An
- optional set of exclusion patterns will filter out the
- matching entries from the result. The exclusions also apply
- to the subdirectory scanning, such that directories that
- match the exclusion patterns will not be searched."""
-
- if not exclude_patterns:
- exclude_patterns = []
-
- result = glob(roots, patterns, exclude_patterns)
- subdirs = [s for s in glob(roots, ["*"]) if s != "." and s != ".." and os.path.isdir(s)]
- if subdirs:
- result.extend(glob_tree(subdirs, patterns, exclude_patterns))
-
- return result
-
-def glob_in_parents(dir, patterns, upper_limit=None):
-    """Recursive version of GLOB which globs all parent directories
-    of dir until the first match is found. Returns an empty result if no match
-    is found."""
-
- assert(isinstance(dir, str))
- assert(isinstance(patterns, list))
-
- result = []
-
- absolute_dir = os.path.join(os.getcwd(), dir)
- absolute_dir = os.path.normpath(absolute_dir)
- while absolute_dir:
- new_dir = os.path.split(absolute_dir)[0]
- if new_dir == absolute_dir:
- break
- result = glob([new_dir], patterns)
- if result:
- break
- absolute_dir = new_dir
-
- return result
-
-
-# The relpath functionality is written by
-# Cimarron Taylor
-def split(p, rest=[]):
- (h,t) = os.path.split(p)
- if len(h) < 1: return [t]+rest
- if len(t) < 1: return [h]+rest
- return split(h,[t]+rest)
-
-def commonpath(l1, l2, common=[]):
- if len(l1) < 1: return (common, l1, l2)
- if len(l2) < 1: return (common, l1, l2)
- if l1[0] != l2[0]: return (common, l1, l2)
- return commonpath(l1[1:], l2[1:], common+[l1[0]])
-
-def relpath(p1, p2):
- (common,l1,l2) = commonpath(split(p1), split(p2))
- p = []
- if len(l1) > 0:
- p = [ '../' * len(l1) ]
- p = p + l2
- if p:
- return os.path.join( *p )
- else:
- return "."
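
The relpath helper removed above splits both paths into components, strips the common prefix, and walks up with '../' for whatever remains of the first path. A small usage sketch, assuming the split/commonpath/relpath definitions shown in this hunk are in scope (the paths themselves are purely illustrative):

    # Assumes split/commonpath/relpath from the deleted path.py above.
    # relpath(p1, p2) answers: starting from directory p1, how do I reach p2?
    print(relpath("/a/b/c/d", "/a/b/e/f"))  # -> "../../e/f"
    print(relpath("/a/b", "/a/b/c"))        # -> "c"
    print(relpath("/a/b/c", "/a/b/c"))      # -> "."
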
diff --git a/tools/build/v2/util/print.jam b/tools/build/v2/util/print.jam
deleted file mode 100644
index 708d21abaa..0000000000
--- a/tools/build/v2/util/print.jam
+++ /dev/null
@@ -1,488 +0,0 @@
-# Copyright 2003 Douglas Gregor
-# Copyright 2002, 2003, 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-# Utilities for generating format-independent output. Using these
-# helps in generating documentation for, at minimum, plain/console
-# and html output.
-
-import modules ;
-import numbers ;
-import string ;
-import regex ;
-import "class" ;
-import scanner ;
-import path ;
-
-# The current output target. Defaults to console.
-output-target = console ;
-
-# The current output type. Defaults to plain. Other possible values are "html".
-output-type = plain ;
-
-# Whitespace.
-.whitespace = [ string.whitespace ] ;
-
-
-# Set the target and type of output to generate. This sets both the destination
-# output and the type of docs to generate to that output. The target can be
-# either a file or "console" for echoing to the console. If the type of output
-# is not specified it defaults to plain text.
-#
-rule output (
- target # The target file or device; file or "console".
- type ? # The type of output; "plain" or "html".
-)
-{
- type ?= plain ;
- if $(output-target) != $(target)
- {
- output-target = $(target) ;
- output-type = $(type) ;
- if $(output-type) = html
- {
- text
- "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">"
- "<html>"
- "<head>"
- "</head>"
- "<body link=\"#0000ff\" vlink=\"#800080\">"
- : true
- : prefix ;
- text
- "</body>"
- "</html>"
- :
- : suffix ;
- }
- }
-}
-
-
-# Generate a section with a description. The type of output can be controlled by
-# the value of the 'output-type' variable.
-#
-rule section (
- name # The name of the section.
- description * # A number of description lines.
-)
-{
- if $(output-type) = plain
- {
- lines [ split-at-words $(name): ] ;
- lines ;
- }
- else if $(output-type) = html
- {
- name = [ escape-html $(name) ] ;
- text <h3>$(name)</h3> <p> ;
- }
- local pre = ;
- while $(description)
- {
- local paragraph = ;
- while $(description) && [ string.is-whitespace $(description[1]) ] { description = $(description[2-]) ; }
- if $(pre)
- {
- while $(description) && (
- $(pre) = " $(description[1])" ||
- ( $(pre) < [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(description[1])" ] ] )
- )
- { paragraph += $(description[1]) ; description = $(description[2-]) ; }
- while [ string.is-whitespace $(paragraph[-1]) ] { paragraph = $(paragraph[1--2]) ; }
- pre = ;
- if $(output-type) = plain
- {
- lines $(paragraph) "" : " " " " ;
- }
- else if $(output-type) = html
- {
- text <blockquote> ;
- lines $(paragraph) ;
- text </blockquote> ;
- }
- }
- else
- {
- while $(description) && ! [ string.is-whitespace $(description[1]) ]
- { paragraph += $(description[1]) ; description = $(description[2-]) ; }
- if $(paragraph[1]) = :: && ! $(paragraph[2])
- {
- pre = " " ;
- }
- if $(paragraph[1]) = ::
- {
- if $(output-type) = plain
- {
- lines $(paragraph[2-]) "" : " " " " ;
- lines ;
- }
- else if $(output-type) = html
- {
- text <blockquote> ;
- lines $(paragraph[2-]) ;
- text </blockquote> ;
- }
- }
- else
- {
- local p = [ MATCH "(.*)(::)$" : $(paragraph[-1]) ] ;
- local pws = [ MATCH "([ ]*)$" : $(p[1]) ] ;
- p = [ MATCH "(.*)($(pws))($(p[2]))$" : $(paragraph[-1]) ] ;
- if $(p[3]) = ::
- {
- pre = [ string.chars [ MATCH "^([$(.whitespace)]*)" : " $(p[1])" ] ] ;
- if ! $(p[2]) || $(p[2]) = "" { paragraph = $(paragraph[1--2]) $(p[1]): ; }
- else { paragraph = $(paragraph[1--2]) $(p[1]) ; }
- if $(output-type) = plain
- {
- lines [ split-at-words " " $(paragraph) ] : " " " " ;
- lines ;
- }
- else if $(output-type) = html
- {
- text </p> <p> [ escape-html $(paragraph) ] ;
- }
- }
- else
- {
- if $(output-type) = plain
- {
- lines [ split-at-words " " $(paragraph) ] : " " " " ;
- lines ;
- }
- else if $(output-type) = html
- {
- text </p> <p> [ escape-html $(paragraph) ] ;
- }
- }
- }
- }
- }
- if $(output-type) = html
- {
- text </p> ;
- }
-}
-
-
-# Generate the start of a list of items. The type of output can be controlled by
-# the value of the 'output-type' variable.
-#
-rule list-start ( )
-{
- if $(output-type) = plain
- {
- }
- else if $(output-type) = html
- {
- text <ul> ;
- }
-}
-
-
-# Generate an item in a list. The type of output can be controlled by the value
-# of the 'output-type' variable.
-#
-rule list-item (
- item + # The item to list.
-)
-{
- if $(output-type) = plain
- {
- lines [ split-at-words "*" $(item) ] : " " " " ;
- }
- else if $(output-type) = html
- {
- text <li> [ escape-html $(item) ] </li> ;
- }
-}
-
-
-# Generate the end of a list of items. The type of output can be controlled by
-# the value of the 'output-type' variable.
-#
-rule list-end ( )
-{
- if $(output-type) = plain
- {
- lines ;
- }
- else if $(output-type) = html
- {
- text </ul> ;
- }
-}
-
-
-# Split the given text into separate lines, word-wrapping to a margin. The
-# default margin is 78 characters.
-#
-rule split-at-words (
- text + # The text to split.
- : margin ? # An optional margin, default is 78.
-)
-{
- local lines = ;
- text = [ string.words $(text:J=" ") ] ;
- text = $(text:J=" ") ;
- margin ?= 78 ;
- local char-match-1 = ".?" ;
- local char-match = "" ;
- while $(margin) != 0
- {
- char-match = $(char-match)$(char-match-1) ;
- margin = [ numbers.decrement $(margin) ] ;
- }
- while $(text)
- {
- local s = "" ;
- local t = "" ;
- # divide s into the first X characters and the rest
- s = [ MATCH "^($(char-match))(.*)" : $(text) ] ;
-
- if $(s[2])
- {
- # split the first half at a space
- t = [ MATCH "^(.*)[\\ ]([^\\ ]*)$" : $(s[1]) ] ;
- }
- else
- {
- t = $(s) ;
- }
-
- if ! $(t[2])
- {
- t += "" ;
- }
-
- text = $(t[2])$(s[2]) ;
- lines += $(t[1]) ;
- }
- return $(lines) ;
-}
-
-
-# Generate a set of fixed lines. Each item passed in is output on a
-# separate line. For console output this just echoes each line, but for html it
-# separates them with <br>.
-#
-rule lines (
- text * # The lines of text.
- : indent ? # Optional indentation prepended to each line after the first one.
- outdent ? # Optional indentation to prepend to the first line.
-)
-{
- text ?= "" ;
- indent ?= "" ;
- outdent ?= "" ;
- if $(output-type) = plain
- {
- text $(outdent)$(text[1]) $(indent)$(text[2-]) ;
- }
- else if $(output-type) = html
- {
- local indent-chars = [ string.chars $(indent) ] ;
- indent = "" ;
- for local c in $(indent-chars)
- {
- if $(c) = " " { c = "&nbsp;" ; }
- else if $(c) = " " { c = "&nbsp;&nbsp;&nbsp;&nbsp;" ; }
- indent = $(indent)$(c) ;
- }
- local html-text = [ escape-html $(text) : "&nbsp;" ] ;
- text $(html-text[1])<br> $(indent)$(html-text[2-])<br> ;
- }
-}
-
-
-# Output text directly to the current target. When outputting to a file, one
-# can indicate whether the text should form the "prefix", the "body"
-# (default), or the "suffix" of the file. This is independent of the actual
-# execution order of the text rule. This rule invokes a single action, run
-# only once, which builds the file. Therefore actions on the
-# target outside of this rule will happen entirely before and/or after all
-# output using this rule.
-#
-rule text (
- strings * # The strings of text to output.
- : overwrite ? # true to overwrite the output (if it is a file)
- : prefix-body-suffix ? # Indication to output prefix, body, or suffix (for a file).
-)
-{
- prefix-body-suffix ?= body ;
- if $(output-target) = console
- {
- if ! $(strings)
- {
- ECHO ;
- }
- else
- {
- for local s in $(strings)
- {
- ECHO $(s) ;
- }
- }
- }
- if ! $($(output-target).did-action)
- {
- $(output-target).did-action = yes ;
- $(output-target).text-prefix = ;
- $(output-target).text-body = ;
- $(output-target).text-suffix = ;
-
- nl on $(output-target) = "
-" ;
- text-redirect on $(output-target) = ">>" ;
- if $(overwrite)
- {
- text-redirect on $(output-target) = ">" ;
- }
- text-content on $(output-target) = ;
-
- text-action $(output-target) ;
-
- if $(overwrite) && $(output-target) != console
- {
- check-for-update $(output-target) ;
- }
- }
- $(output-target).text-$(prefix-body-suffix) += $(strings) ;
- text-content on $(output-target) =
- $($(output-target).text-prefix)
- $($(output-target).text-body)
- $($(output-target).text-suffix) ;
-}
-
-
-# Outputs the text to the current targets, after word-wrapping it.
-#
-rule wrapped-text ( text + )
-{
- local lines = [ split-at-words $(text) ] ;
- text $(lines) ;
-}
-
-
-# Escapes text into html/xml printable equivalents. Does not know about tags and
-# therefore tags fed into this will also be escaped. Currently escapes space,
-# "<", ">", and "&".
-#
-rule escape-html (
- text + # The text to escape.
- : space ? # What to replace spaces with, defaults to " ".
-)
-{
- local html-text = ;
- while $(text)
- {
- local html = $(text[1]) ;
- text = $(text[2-]) ;
- html = [ regex.replace $(html) "&" "&amp;" ] ;
- html = [ regex.replace $(html) "<" "&lt;" ] ;
- html = [ regex.replace $(html) ">" "&gt;" ] ;
- if $(space)
- {
- html = [ regex.replace $(html) " " "$(space)" ] ;
- }
- html-text += $(html) ;
- }
- return $(html-text) ;
-}
-
-
-# Outputs the text strings collected by the text rule to the output file.
-#
-actions quietly text-action
-{
- @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)"
-}
-
-
-rule get-scanner ( )
-{
- if ! $(.scanner)
- {
- .scanner = [ class.new print-scanner ] ;
- }
- return $(.scanner) ;
-}
-
-
-# The following code to update print targets when their contents
-# change is a horrible hack. It basically creates a target which
-# binds to this file (print.jam) and installs a scanner on it
-# which reads the target and compares its contents to the new
-# contents that we're writing.
-#
-rule check-for-update ( target )
-{
- local scanner = [ get-scanner ] ;
- local file = [ path.native [ modules.binding $(__name__) ] ] ;
- local g = [ MATCH <(.*)> : $(target:G) ] ;
- local dependency-target = $(__file__:G=$(g:E=)-$(target:G=)-$(scanner)) ;
- DEPENDS $(target) : $(dependency-target) ;
- SEARCH on $(dependency-target) = $(file:D) ;
- ISFILE $(dependency-target) ;
- NOUPDATE $(dependency-target) ;
- base on $(dependency-target) = $(target) ;
- scanner.install $(scanner) : $(dependency-target) none ;
- return $(dependency-target) ;
-}
-
-
-class print-scanner : scanner
-{
- import path ;
- import os ;
-
- rule pattern ( )
- {
- return "(One match...)" ;
- }
-
- rule process ( target : matches * : binding )
- {
- local base = [ on $(target) return $(base) ] ;
- local nl = [ on $(base) return $(nl) ] ;
- local text-content = [ on $(base) return $(text-content) ] ;
- local dir = [ on $(base) return $(LOCATE) ] ;
- if $(dir)
- {
- dir = [ path.make $(dir) ] ;
- }
- local file = [ path.native [ path.join $(dir) $(base:G=) ] ] ;
- local actual-content ;
- if [ os.name ] = NT
- {
- actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ;
- }
- else
- {
- actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ;
- }
- if $(text-content:J=$(nl)) != $(actual-content)
- {
- ALWAYS $(base) ;
- }
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- assert.result one two three : split-at-words one two three : 5 ;
- assert.result "one two" three : split-at-words one two three : 8 ;
- assert.result "one two" three : split-at-words one two three : 9 ;
- assert.result "one two three" : split-at-words one two three ;
-
- # VP, 2004-12-03 The following test fails for some reason, so commenting it
- # out.
- #assert.result "one&nbsp;two&nbsp;three" "&amp;&lt;&gt;" :
- # escape-html "one two three" "&<>" ;
-}
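
The split-at-words rule above implements a simple greedy word wrap: take up to 'margin' characters, then back up to the last space inside that window. A rough Python sketch of the same idea (the function name is illustrative and edge cases, such as a single word longer than the margin, are handled loosely rather than exactly as the jam rule does):

    import re

    def split_at_words(text, margin=78):
        # Collapse runs of whitespace first, as string.words does in the rule.
        words = " ".join(text.split())
        lines = []
        while words:
            head, rest = words[:margin], words[margin:]
            carry = ""
            if rest:
                # The window is full: back up to the last space inside it.
                m = re.match(r"^(.*)[ ]([^ ]*)$", head)
                if m:
                    head, carry = m.group(1), m.group(2)
            lines.append(head)
            words = carry + rest
        return lines

    # split_at_words("one two three", 5)  -> ["one", "two", "three"]
    # split_at_words("one two three", 8)  -> ["one two", "three"]
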
diff --git a/tools/build/v2/util/regex.jam b/tools/build/v2/util/regex.jam
deleted file mode 100644
index 234c36f62d..0000000000
--- a/tools/build/v2/util/regex.jam
+++ /dev/null
@@ -1,193 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2003 Douglas Gregor
-# Copyright 2003 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-#
-# Returns a list of the following substrings:
-# 1) from beginning till the first occurrence of 'separator' or till the end,
-# 2) between each occurrence of 'separator' and the next occurrence,
-# 3) from the last occurrence of 'separator' till the end.
-# If no separator is present, the result will contain only one element.
-#
-
-rule split ( string separator )
-{
- local result ;
- local s = $(string) ;
-
-    # Break pieces off 's' until it has no separators left.
- local match = 1 ;
- while $(match)
- {
- match = [ MATCH ^(.*)($(separator))(.*) : $(s) ] ;
- if $(match)
- {
- match += "" ; # in case 3rd item was empty - works around MATCH bug
- result = $(match[3]) $(result) ;
- s = $(match[1]) ;
- }
- }
- # Combine the remaining part at the beginning, which does not have
- # separators, with the pieces broken off. Note that the rule's signature
- # does not allow the initial s to be empty.
- return $(s) $(result) ;
-}
-
-
-# Returns the concatenated results of applying regex.split to every element of
-# the list using the separator pattern.
-#
-rule split-list ( list * : separator )
-{
- local result ;
- for s in $(list)
- {
- result += [ split $(s) $(separator) ] ;
- }
- return $(result) ;
-}
-
-
-# Match string against pattern, and return the elements indicated by indices.
-#
-rule match ( pattern : string : indices * )
-{
- indices ?= 1 2 3 4 5 6 7 8 9 ;
- local x = [ MATCH $(pattern) : $(string) ] ;
- return $(x[$(indices)]) ;
-}
-
-
-# Matches all elements of 'list' against the 'pattern' and returns a list of
-# the elements indicated by indices of all successful matches. If 'indices' is
-# omitted, returns a list of the first parenthesised groups of all successful
-# matches.
-#
-rule transform ( list * : pattern : indices * )
-{
- indices ?= 1 ;
- local result ;
- for local e in $(list)
- {
- local m = [ MATCH $(pattern) : $(e) ] ;
- if $(m)
- {
- result += $(m[$(indices)]) ;
- }
- }
- return $(result) ;
-}
-
-NATIVE_RULE regex : transform ;
-
-
-# Escapes all of the characters in symbols using the escape symbol escape-symbol
-# for the given string, and returns the escaped string.
-#
-rule escape ( string : symbols : escape-symbol )
-{
- local result = "" ;
- local m = 1 ;
- while $(m)
- {
- m = [ MATCH ^([^$(symbols)]*)([$(symbols)])(.*) : $(string) ] ;
- if $(m)
- {
- m += "" ; # Supposedly a bug fix; borrowed from regex.split
- result = "$(result)$(m[1])$(escape-symbol)$(m[2])" ;
- string = $(m[3]) ;
- }
- }
- string ?= "" ;
- result = "$(result)$(string)" ;
- return $(result) ;
-}
-
-
-# Replaces occurrences of a match string in a given string and returns the new
-# string. The match string can be a regular expression.
-#
-rule replace (
- string # The string to modify.
- match # The characters to replace.
- replacement # The string to replace with.
- )
-{
- local result = "" ;
- local parts = 1 ;
- while $(parts)
- {
- parts = [ MATCH ^(.*)($(match))(.*) : $(string) ] ;
- if $(parts)
- {
- parts += "" ;
- result = "$(replacement)$(parts[3])$(result)" ;
- string = $(parts[1]) ;
- }
- }
- string ?= "" ;
- result = "$(string)$(result)" ;
- return $(result) ;
-}
-
-
-# Replaces occurrences of a match string in a given list of strings and returns
-# a list of new strings. The match string can be a regular expression.
-#
-# list - the list of strings to modify.
-# match - the search expression.
-# replacement - the string to replace with.
-#
-rule replace-list ( list * : match : replacement )
-{
- local result ;
- for local e in $(list)
- {
- result += [ replace $(e) $(match) $(replacement) ] ;
- }
- return $(result) ;
-}
-
-
-rule __test__ ( )
-{
- import assert ;
-
- assert.result a b c : split "a/b/c" / ;
- assert.result "" a b c : split "/a/b/c" / ;
- assert.result "" "" a b c : split "//a/b/c" / ;
- assert.result "" a "" b c : split "/a//b/c" / ;
- assert.result "" a "" b c "" : split "/a//b/c/" / ;
- assert.result "" a "" b c "" "" : split "/a//b/c//" / ;
-
- assert.result a c b d
- : match (.)(.)(.)(.) : abcd : 1 3 2 4 ;
-
- assert.result a b c d
- : match (.)(.)(.)(.) : abcd ;
-
- assert.result ababab cddc
- : match ((ab)*)([cd]+) : abababcddc : 1 3 ;
-
- assert.result a.h c.h
- : transform <a.h> \"b.h\" <c.h> : <(.*)> ;
-
- assert.result a.h b.h c.h
- : transform <a.h> \"b.h\" <c.h> : <([^>]*)>|\"([^\"]*)\" : 1 2 ;
-
- assert.result "^<?xml version=\"1.0\"^>"
- : escape "<?xml version=\"1.0\">" : "&|()<>^" : "^" ;
-
- assert.result "<?xml version=\\\"1.0\\\">"
- : escape "<?xml version=\"1.0\">" : "\\\"" : "\\" ;
-
- assert.result "string&nbsp;string&nbsp;" : replace "string string " " " "&nbsp;" ;
- assert.result "&nbsp;string&nbsp;string" : replace " string string" " " "&nbsp;" ;
- assert.result "string&nbsp;&nbsp;string" : replace "string string" " " "&nbsp;" ;
- assert.result "-" : replace "&" "&" "-" ;
-
- assert.result "-" "a-b" : replace-list "&" "a&b" : "&" : "-" ;
-}
diff --git a/tools/build/v2/util/regex.py b/tools/build/v2/util/regex.py
deleted file mode 100644
index 29e26ecf43..0000000000
--- a/tools/build/v2/util/regex.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
-# distribute this software is granted provided this copyright notice appears in
-# all copies. This software is provided "as is" without express or implied
-# warranty, and with no claim as to its suitability for any purpose.
-
-import re
-
-def transform (list, pattern, indices = [1]):
-    """ Matches all elements of 'list' against the 'pattern'
-        and returns a list of the elements indicated by indices of
-        all successful matches. If 'indices' is omitted, returns
-        a list of the first parenthesised groups of all successful
-        matches.
-    """
- result = []
-
- for e in list:
- m = re.match (pattern, e)
-
- if m:
- for i in indices:
- result.append (m.group (i))
-
- return result
-
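
A short usage example for the transform helper above, mirroring the single-group case from the corresponding regex.jam test (the input values are illustrative):

    # Assuming the transform() defined above: keep group 1 of every element
    # that matches the anchored pattern; non-matching elements are dropped.
    transform(["<a.h>", '"b.h"', "<c.h>"], "<(.*)>")   # -> ["a.h", "c.h"]
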
diff --git a/tools/build/v2/util/sequence.jam b/tools/build/v2/util/sequence.jam
deleted file mode 100644
index 73919a65dd..0000000000
--- a/tools/build/v2/util/sequence.jam
+++ /dev/null
@@ -1,335 +0,0 @@
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import assert ;
-import numbers ;
-import modules ;
-
-
-# Note that algorithms in this module execute largely in the caller's module
-# namespace, so that local rules can be used as function objects. Also note that
-# most predicates can be multi-element lists. In that case, all but the first
-# element are prepended to the first argument which is passed to the rule named
-# by the first element.
-
-
-# Return the elements e of $(sequence) for which [ $(predicate) e ] has a
-# non-null value.
-#
-rule filter ( predicate + : sequence * )
-{
- local caller = [ CALLER_MODULE ] ;
- local result ;
-
- for local e in $(sequence)
- {
- if [ modules.call-in $(caller) : $(predicate) $(e) ]
- {
- result += $(e) ;
- }
- }
- return $(result) ;
-}
-
-
-# Return a new sequence consisting of [ $(function) $(e) ] for each element e of
-# $(sequence).
-#
-rule transform ( function + : sequence * )
-{
- local caller = [ CALLER_MODULE ] ;
- local result ;
-
- for local e in $(sequence)
- {
- result += [ modules.call-in $(caller) : $(function) $(e) ] ;
- }
- return $(result) ;
-}
-
-
-rule reverse ( s * )
-{
- local r ;
- for local x in $(s)
- {
- r = $(x) $(r) ;
- }
- return $(r) ;
-}
-
-
-rule less ( a b )
-{
- if $(a) < $(b)
- {
- return true ;
- }
-}
-
-
-# Insertion-sort s using the BinaryPredicate ordered.
-#
-rule insertion-sort ( s * : ordered * )
-{
- if ! $(ordered)
- {
- return [ SORT $(s) ] ;
- }
- else
- {
- local caller = [ CALLER_MODULE ] ;
- ordered ?= sequence.less ;
- local result = $(s[1]) ;
- if $(ordered) = sequence.less
- {
- local head tail ;
- for local x in $(s[2-])
- {
- head = ;
- tail = $(result) ;
- while $(tail) && ( $(tail[1]) < $(x) )
- {
- head += $(tail[1]) ;
- tail = $(tail[2-]) ;
- }
- result = $(head) $(x) $(tail) ;
- }
- }
- else
- {
- for local x in $(s[2-])
- {
- local head tail ;
- tail = $(result) ;
- while $(tail) && [ modules.call-in $(caller) : $(ordered) $(tail[1]) $(x) ]
- {
- head += $(tail[1]) ;
- tail = $(tail[2-]) ;
- }
- result = $(head) $(x) $(tail) ;
- }
- }
-
- return $(result) ;
- }
-}
-
-
-# Merge two ordered sequences using the BinaryPredicate ordered.
-#
-rule merge ( s1 * : s2 * : ordered * )
-{
- ordered ?= sequence.less ;
- local result__ ;
- local caller = [ CALLER_MODULE ] ;
-
- while $(s1) && $(s2)
- {
- if [ modules.call-in $(caller) : $(ordered) $(s1[1]) $(s2[1]) ]
- {
- result__ += $(s1[1]) ;
- s1 = $(s1[2-]) ;
- }
- else if [ modules.call-in $(caller) : $(ordered) $(s2[1]) $(s1[1]) ]
- {
- result__ += $(s2[1]) ;
- s2 = $(s2[2-]) ;
- }
- else
- {
- s2 = $(s2[2-]) ;
- }
-
- }
- result__ += $(s1) ;
- result__ += $(s2) ;
-
- return $(result__) ;
-}
-
-
-# Join the elements of s into one long string. If joint is supplied, it is used
-# as a separator.
-#
-rule join ( s * : joint ? )
-{
- joint ?= "" ;
- return $(s:J=$(joint)) ;
-}
-
-
-# Find the length of any sequence.
-#
-rule length ( s * )
-{
- local result = 0 ;
- for local i in $(s)
- {
- result = [ CALC $(result) + 1 ] ;
- }
- return $(result) ;
-}
-
-
-rule unique ( list * : stable ? )
-{
- local result ;
- local prev ;
- if $(stable)
- {
- for local f in $(list)
- {
- if ! $(f) in $(result)
- {
- result += $(f) ;
- }
- }
- }
- else
- {
- for local i in [ SORT $(list) ]
- {
- if $(i) != $(prev)
- {
- result += $(i) ;
- }
- prev = $(i) ;
- }
- }
- return $(result) ;
-}
-
-
-# Returns the maximum number in 'elements'. Uses 'ordered' for comparisons or
-# 'numbers.less' if none is provided.
-#
-rule max-element ( elements + : ordered ? )
-{
- ordered ?= numbers.less ;
-
- local max = $(elements[1]) ;
- for local e in $(elements[2-])
- {
- if [ $(ordered) $(max) $(e) ]
- {
- max = $(e) ;
- }
- }
- return $(max) ;
-}
-
-
-# Returns all of 'elements' for which the corresponding element in the parallel
-# list 'ranks' is equal to the maximum value in 'ranks'.
-#
-rule select-highest-ranked ( elements * : ranks * )
-{
- if $(elements)
- {
- local max-rank = [ max-element $(ranks) ] ;
- local result ;
- while $(elements)
- {
- if $(ranks[1]) = $(max-rank)
- {
- result += $(elements[1]) ;
- }
- elements = $(elements[2-]) ;
- ranks = $(ranks[2-]) ;
- }
- return $(result) ;
- }
-}
-NATIVE_RULE sequence : select-highest-ranked ;
-
-
-rule __test__ ( )
-{
- # Use a unique module so we can test the use of local rules.
- module sequence.__test__
- {
- import assert ;
- import sequence ;
-
- local rule is-even ( n )
- {
- if $(n) in 0 2 4 6 8
- {
- return true ;
- }
- }
-
- assert.result 4 6 4 2 8 : sequence.filter is-even : 1 4 6 3 4 7 2 3 8 ;
-
- # Test that argument binding works.
- local rule is-equal-test ( x y )
- {
- if $(x) = $(y)
- {
- return true ;
- }
- }
-
- assert.result 3 3 3 : sequence.filter is-equal-test 3 : 1 2 3 4 3 5 3 5 7 ;
-
- local rule append-x ( n )
- {
- return $(n)x ;
- }
-
- assert.result 1x 2x 3x : sequence.transform append-x : 1 2 3 ;
-
- local rule repeat2 ( x )
- {
- return $(x) $(x) ;
- }
-
- assert.result 1 1 2 2 3 3 : sequence.transform repeat2 : 1 2 3 ;
-
- local rule test-greater ( a b )
- {
- if $(a) > $(b)
- {
- return true ;
- }
- }
- assert.result 1 2 3 4 5 6 7 8 9 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 ;
- assert.result 9 8 7 6 5 4 3 2 1 : sequence.insertion-sort 9 6 5 3 8 7 1 2 4 : test-greater ;
- assert.result 1 2 3 4 5 6 : sequence.merge 1 3 5 : 2 4 6 ;
- assert.result 6 5 4 3 2 1 : sequence.merge 5 3 1 : 6 4 2 : test-greater ;
- assert.result 1 2 3 : sequence.merge 1 2 3 : ;
- assert.result 1 : sequence.merge 1 : 1 ;
-
- assert.result foo-bar-baz : sequence.join foo bar baz : - ;
- assert.result substandard : sequence.join sub stan dard ;
- assert.result 3.0.1 : sequence.join 3.0.1 : - ;
-
- assert.result 0 : sequence.length ;
- assert.result 3 : sequence.length a b c ;
- assert.result 17 : sequence.length 17 16 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 ;
-
- assert.result 1 : sequence.length a ;
- assert.result 10 : sequence.length a b c d e f g h i j ;
- assert.result 11 : sequence.length a b c d e f g h i j k ;
- assert.result 12 : sequence.length a b c d e f g h i j k l ;
-
- local p2 = x ;
- for local i in 1 2 3 4 5 6 7 8
- {
- p2 = $(p2) $(p2) ;
- }
- assert.result 256 : sequence.length $(p2) ;
-
- assert.result 1 2 3 4 5 : sequence.unique 1 2 3 2 4 3 3 5 5 5 ;
-
- assert.result 5 : sequence.max-element 1 3 5 0 4 ;
-
- assert.result e-3 h-3 : sequence.select-highest-ranked e-1 e-3 h-3 m-2 : 1 3 3 2 ;
-
- assert.result 7 6 5 4 3 2 1 : sequence.reverse 1 2 3 4 5 6 7 ;
- }
-}
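
Two of the sequence.jam rules deleted above translate naturally into Python; this is an illustrative sketch (names mirror the jam rules but are not part of any library), with results matching the __test__ cases shown:

    def insertion_sort(seq, ordered=lambda a, b: a < b):
        # Stable insertion sort using a caller-supplied binary predicate.
        result = []
        for x in seq:
            i = 0
            while i < len(result) and ordered(result[i], x):
                i += 1
            result.insert(i, x)
        return result

    def select_highest_ranked(elements, ranks):
        # Keep every element whose parallel rank equals the maximum rank.
        if not elements:
            return []
        top = max(ranks)
        return [e for e, r in zip(elements, ranks) if r == top]

    # insertion_sort([9, 6, 5, 3, 8, 7, 1, 2, 4])                       -> [1, 2, 3, 4, 5, 6, 7, 8, 9]
    # select_highest_ranked(["e-1", "e-3", "h-3", "m-2"], [1, 3, 3, 2]) -> ["e-3", "h-3"]
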
diff --git a/tools/build/v2/util/utility.jam b/tools/build/v2/util/utility.jam
deleted file mode 100644
index c46747f586..0000000000
--- a/tools/build/v2/util/utility.jam
+++ /dev/null
@@ -1,235 +0,0 @@
-# Copyright 2001, 2002 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Copyright 2008 Jurko Gospodnetic
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
-
-import "class" : is-instance ;
-import errors ;
-
-
-# For all elements of 'list' which do not already have 'suffix', add 'suffix'.
-#
-rule apply-default-suffix ( suffix : list * )
-{
- local result ;
- for local i in $(list)
- {
- if $(i:S) = $(suffix)
- {
- result += $(i) ;
- }
- else
- {
- result += $(i)$(suffix) ;
- }
- }
- return $(result) ;
-}
-
-
-# If 'name' contains a dot, returns the part before the last dot. If 'name'
-# contains no dot, returns it unmodified.
-#
-rule basename ( name )
-{
- if $(name:S)
- {
- name = $(name:B) ;
- }
- return $(name) ;
-}
-
-
-# Return the file of the caller of the rule that called caller-file.
-#
-rule caller-file ( )
-{
- local bt = [ BACKTRACE ] ;
- return $(bt[9]) ;
-}
-
-
-# Tests if 'a' is equal to 'b'. If 'a' is a class instance, calls its 'equal'
-# method. Otherwise uses ordinary Jam comparison.
-#
-rule equal ( a b )
-{
- if [ is-instance $(a) ]
- {
- return [ $(a).equal $(b) ] ;
- }
- else
- {
- if $(a) = $(b)
- {
- return true ;
- }
- }
-}
-
-
-# Tests if 'a' is less than 'b'. If 'a' is a class instance, calls its 'less'
-# method. Otherwise uses ordinary Jam comparison.
-#
-rule less ( a b )
-{
- if [ is-instance $(a) ]
- {
- return [ $(a).less $(b) ] ;
- }
- else
- {
- if $(a) < $(b)
- {
- return true ;
- }
- }
-}
-
-
-# Returns the textual representation of the argument. If it is a class
-# instance, calls its 'str' method. Otherwise, returns the argument.
-#
-rule str ( value )
-{
- if [ is-instance $(value) ]
- {
- return [ $(value).str ] ;
- }
- else
- {
- return $(value) ;
- }
-}
-
-
-# Accepts a list of gristed values and returns them ungristed. Reports an error
-# if any of the passed parameters is not gristed, i.e. surrounded by angle
-# brackets < and >.
-#
-rule ungrist ( names * )
-{
- local result ;
- for local name in $(names)
- {
- local stripped = [ MATCH ^<(.*)>$ : $(name) ] ;
- if ! $(stripped)
- {
- errors.error "in ungrist $(names) : $(name) is not of the form <.*>" ;
- }
- result += $(stripped) ;
- }
- return $(result) ;
-}
-
-
-# If the passed value is quoted, unquotes it. Otherwise returns the value
-# unchanged.
-#
-rule unquote ( value ? )
-{
- local match-result = [ MATCH ^(\")(.*)(\")$ : $(value) ] ;
- if $(match-result)
- {
- return $(match-result[2]) ;
- }
- else
- {
- return $(value) ;
- }
-}
-
-
-rule __test__ ( )
-{
- import assert ;
- import "class" : new ;
- import errors : try catch ;
-
- assert.result 123 : str 123 ;
-
- class test-class__
- {
- rule __init__ ( ) { }
- rule str ( ) { return "str-test-class" ; }
- rule less ( a ) { return "yes, of course!" ; }
- rule equal ( a ) { return "not sure" ; }
- }
-
- assert.result "str-test-class" : str [ new test-class__ ] ;
- assert.true less 1 2 ;
- assert.false less 2 1 ;
- assert.result "yes, of course!" : less [ new test-class__ ] 1 ;
- assert.true equal 1 1 ;
- assert.false equal 1 2 ;
- assert.result "not sure" : equal [ new test-class__ ] 1 ;
-
- assert.result foo.lib foo.lib : apply-default-suffix .lib : foo.lib foo.lib
- ;
-
- assert.result foo : basename foo ;
- assert.result foo : basename foo.so ;
- assert.result foo.so : basename foo.so.1 ;
-
- assert.result : unquote ;
- assert.result "" : unquote "" ;
- assert.result foo : unquote foo ;
- assert.result \"foo : unquote \"foo ;
- assert.result foo\" : unquote foo\" ;
- assert.result foo : unquote \"foo\" ;
- assert.result \"foo\" : unquote \"\"foo\"\" ;
-
- assert.result : ungrist ;
- assert.result foo : ungrist <foo> ;
- assert.result <foo> : ungrist <<foo>> ;
- assert.result foo bar : ungrist <foo> <bar> ;
-
- try ;
- {
- ungrist "" ;
- }
- catch "in ungrist : is not of the form <.*>" ;
-
- try ;
- {
- ungrist <> ;
- }
- catch "in ungrist <> : <> is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo ;
- }
- catch "in ungrist foo : foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist <foo ;
- }
- catch "in ungrist <foo : <foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo> ;
- }
- catch "in ungrist foo> : foo> is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo bar ;
- }
- catch "in ungrist foo : foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist foo <bar> ;
- }
- catch "in ungrist foo : foo is not of the form <.*>" ;
-
- try ;
- {
- ungrist <foo> bar ;
- }
- catch "in ungrist bar : bar is not of the form <.*>" ;
-}
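
"Grist" is Boost.Build's angle-bracket decoration on values, e.g. "<foo>". A quick Python sketch of the ungrist/unquote behaviour deleted above (illustrative only; the error message echoes the one raised by the jam rule):

    import re

    def ungrist(name):
        m = re.match(r"^<(.*)>$", name)
        if not m:
            raise ValueError("%s is not of the form <.*>" % name)
        return m.group(1)

    def unquote(value):
        m = re.match(r'^"(.*)"$', value)
        return m.group(1) if m else value

    # ungrist("<foo>")  -> "foo"      ungrist("<<foo>>") -> "<foo>"
    # unquote('"foo"')  -> "foo"      unquote("foo")     -> "foo"
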
diff --git a/tools/build/v2/boost.css b/tools/build/website/boost.css
index 8401b29c33..8401b29c33 100644
--- a/tools/build/v2/boost.css
+++ b/tools/build/website/boost.css
diff --git a/tools/build/v2/boost_build.png b/tools/build/website/boost_build.png
index 3a9e64df2a..3a9e64df2a 100644
--- a/tools/build/v2/boost_build.png
+++ b/tools/build/website/boost_build.png
Binary files differ
diff --git a/tools/build/v2/boost_build.svg b/tools/build/website/boost_build.svg
index ba4ccc77b5..ba4ccc77b5 100644
--- a/tools/build/v2/boost_build.svg
+++ b/tools/build/website/boost_build.svg
diff --git a/tools/inspect/apple_macro_check.cpp b/tools/inspect/apple_macro_check.cpp
index ce917e0d8a..7afb8f9fde 100644
--- a/tools/inspect/apple_macro_check.cpp
+++ b/tools/inspect/apple_macro_check.cpp
@@ -64,7 +64,7 @@ namespace boost
// Only check files in the boost directory, as we can avoid including the
// apple test headers elsewhere.
- path relative( relative_to( full_path, fs::initial_path() ) );
+ path relative( relative_to( full_path, search_root_path() ) );
if ( relative.empty() || *relative.begin() != "boost") return;
boost::sregex_iterator cur(contents.begin(), contents.end(), apple_macro_regex), end;
diff --git a/tools/inspect/ascii_check.hpp b/tools/inspect/ascii_check.hpp
index 878909e31c..6094ce8566 100644
--- a/tools/inspect/ascii_check.hpp
+++ b/tools/inspect/ascii_check.hpp
@@ -35,4 +35,4 @@ namespace boost
}
}
-#endif // BOOST_TAB_CHECK_HPP
+#endif // BOOST_ASCII_CHECK_HPP
diff --git a/tools/inspect/assert_macro_check.cpp b/tools/inspect/assert_macro_check.cpp
index a91cb8a9f8..340a9f5baa 100644
--- a/tools/inspect/assert_macro_check.cpp
+++ b/tools/inspect/assert_macro_check.cpp
@@ -47,8 +47,8 @@ namespace boost
assert_macro_check::assert_macro_check()
: m_files_with_errors(0)
, m_from_boost_root(
- fs::exists(fs::initial_path() / "boost") &&
- fs::exists(fs::initial_path() / "libs"))
+ fs::exists(search_root_path() / "boost") &&
+ fs::exists(search_root_path() / "libs"))
{
register_signature( ".c" );
register_signature( ".cpp" );
@@ -70,7 +70,7 @@ namespace boost
// Check files iff (a) they are in the boost directory, or (b) they
// are in the src directory under libs.
if (m_from_boost_root) {
- path relative( relative_to( full_path, fs::initial_path() ) );
+ path relative( relative_to( full_path, search_root_path() ) );
path::const_iterator pbeg = relative.begin(), pend = relative.end();
if (pbeg != std::find(pbeg, pend, "boost") &&
!(pbeg == std::find(pbeg, pend, "libs") && pend != std::find(pbeg, pend, "src")))
diff --git a/tools/inspect/build/Jamfile.v2 b/tools/inspect/build/Jamfile.v2
index f9355baa32..e3a53a03dd 100644
--- a/tools/inspect/build/Jamfile.v2
+++ b/tools/inspect/build/Jamfile.v2
@@ -19,6 +19,7 @@ exe inspect
assert_macro_check.cpp
copyright_check.cpp
crlf_check.cpp
+ deprecated_macro_check.cpp
end_check.cpp
inspect.cpp
license_check.cpp
diff --git a/tools/inspect/build/msvc/boost_inspect.sln b/tools/inspect/build/msvc/boost_inspect.sln
index c01f5e330c..a4a1a40aba 100644
--- a/tools/inspect/build/msvc/boost_inspect.sln
+++ b/tools/inspect/build/msvc/boost_inspect.sln
@@ -1,7 +1,7 @@

-Microsoft Visual Studio Solution File, Format Version 10.00
-# Visual C++ Express 2008
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "boost_inspect", "boost_inspect.vcproj", "{0EC8AC1C-6D1F-47FC-A06A-9CC3F924BD82}"
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual C++ Express 2010
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "inspect", "boost_inspect.vcxproj", "{0EC8AC1C-6D1F-47FC-A06A-9CC3F924BD82}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
diff --git a/tools/inspect/build/msvc/boost_inspect.vcproj b/tools/inspect/build/msvc/boost_inspect.vcproj
deleted file mode 100644
index 372c14b06f..0000000000
--- a/tools/inspect/build/msvc/boost_inspect.vcproj
+++ /dev/null
@@ -1,247 +0,0 @@
-<?xml version="1.0" encoding="Windows-1252"?>
-<VisualStudioProject
- ProjectType="Visual C++"
- Version="9.00"
- Name="inspect"
- ProjectGUID="{0EC8AC1C-6D1F-47FC-A06A-9CC3F924BD82}"
- RootNamespace="boost_inspect"
- Keyword="Win32Proj"
- TargetFrameworkVersion="196613"
- >
- <Platforms>
- <Platform
- Name="Win32"
- />
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- <Configuration
- Name="Debug|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="0"
- AdditionalIncludeDirectories="..\..\..\.."
- PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
- MinimalRebuild="true"
- ExceptionHandling="2"
- BasicRuntimeChecks="3"
- RuntimeLibrary="3"
- UsePrecompiledHeader="0"
- WarningLevel="3"
- DebugInformationFormat="4"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- LinkIncremental="2"
- AdditionalLibraryDirectories="..\..\..\..\stage\lib"
- GenerateDebugInformation="true"
- SubSystem="1"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- <Configuration
- Name="Release|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- WholeProgramOptimization="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="2"
- EnableIntrinsicFunctions="true"
- AdditionalIncludeDirectories="..\..\..\.."
- PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
- ExceptionHandling="2"
- RuntimeLibrary="2"
- EnableFunctionLevelLinking="true"
- UsePrecompiledHeader="0"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- LinkIncremental="1"
- AdditionalLibraryDirectories="..\..\..\..\stage\lib"
- GenerateDebugInformation="true"
- SubSystem="1"
- OptimizeReferences="2"
- EnableCOMDATFolding="2"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- </Configurations>
- <References>
- </References>
- <Files>
- <Filter
- Name="Source Files"
- Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
- UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
- >
- <File
- RelativePath="..\..\apple_macro_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\ascii_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\assert_macro_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\copyright_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\crlf_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\end_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\inspect.cpp"
- >
- </File>
- <File
- RelativePath="..\..\license_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\link_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\minmax_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\path_name_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\tab_check.cpp"
- >
- </File>
- <File
- RelativePath="..\..\unnamed_namespace_check.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="Header Files"
- Filter="h;hpp;hxx;hm;inl;inc;xsd"
- UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
- >
- </Filter>
- <Filter
- Name="Resource Files"
- Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
- UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
- >
- </Filter>
- </Files>
- <Globals>
- </Globals>
-</VisualStudioProject>
diff --git a/tools/inspect/build/msvc/boost_inspect.vcxproj b/tools/inspect/build/msvc/boost_inspect.vcxproj
new file mode 100644
index 0000000000..3c35c22b84
--- /dev/null
+++ b/tools/inspect/build/msvc/boost_inspect.vcxproj
@@ -0,0 +1,117 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+ <ProjectConfiguration Include="Debug|Win32">
+ <Configuration>Debug</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|Win32">
+ <Configuration>Release</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectName>inspect</ProjectName>
+ <ProjectGuid>{0EC8AC1C-6D1F-47FC-A06A-9CC3F924BD82}</ProjectGuid>
+ <RootNamespace>boost_inspect</RootNamespace>
+ <Keyword>Win32Proj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <CharacterSet>Unicode</CharacterSet>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+ <_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
+ <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(Configuration)\</IntDir>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
+ <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(Configuration)\</IntDir>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
+ </PropertyGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <ClCompile>
+ <Optimization>Disabled</Optimization>
+ <AdditionalIncludeDirectories>..\..\..\..;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <MinimalRebuild>true</MinimalRebuild>
+ <ExceptionHandling>Async</ExceptionHandling>
+ <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
+ <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+ <PrecompiledHeader>
+ </PrecompiledHeader>
+ <WarningLevel>Level3</WarningLevel>
+ <DebugInformationFormat>EditAndContinue</DebugInformationFormat>
+ </ClCompile>
+ <Link>
+ <AdditionalLibraryDirectories>..\..\..\..\stage\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ <TargetMachine>MachineX86</TargetMachine>
+ </Link>
+ <PostBuildEvent>
+ <Command>"$(TargetDir)\$(TargetName).exe" ../.. -text -brief</Command>
+ </PostBuildEvent>
+ <PostBuildEvent>
+ <Message>Executing test $(TargetName).exe...</Message>
+ </PostBuildEvent>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <ClCompile>
+ <Optimization>MaxSpeed</Optimization>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <AdditionalIncludeDirectories>..\..\..\..;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <ExceptionHandling>Async</ExceptionHandling>
+ <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <PrecompiledHeader>
+ </PrecompiledHeader>
+ <WarningLevel>Level3</WarningLevel>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ </ClCompile>
+ <Link>
+ <AdditionalLibraryDirectories>..\..\..\..\stage\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ <OptimizeReferences>true</OptimizeReferences>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <TargetMachine>MachineX86</TargetMachine>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+ <ClCompile Include="..\..\apple_macro_check.cpp" />
+ <ClCompile Include="..\..\ascii_check.cpp" />
+ <ClCompile Include="..\..\assert_macro_check.cpp" />
+ <ClCompile Include="..\..\copyright_check.cpp" />
+ <ClCompile Include="..\..\crlf_check.cpp" />
+ <ClCompile Include="..\..\deprecated_macro_check.cpp" />
+ <ClCompile Include="..\..\end_check.cpp" />
+ <ClCompile Include="..\..\inspect.cpp" />
+ <ClCompile Include="..\..\license_check.cpp" />
+ <ClCompile Include="..\..\link_check.cpp" />
+ <ClCompile Include="..\..\minmax_check.cpp" />
+ <ClCompile Include="..\..\path_name_check.cpp" />
+ <ClCompile Include="..\..\tab_check.cpp" />
+ <ClCompile Include="..\..\unnamed_namespace_check.cpp" />
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project> \ No newline at end of file
diff --git a/tools/inspect/build/msvc/readme.txt b/tools/inspect/build/msvc/readme.txt
index 1f8de0cb28..a549811adf 100644
--- a/tools/inspect/build/msvc/readme.txt
+++ b/tools/inspect/build/msvc/readme.txt
@@ -1,4 +1,8 @@
-The provided Microsoft VC++ solution assumes the following commands have been run in the root directory:
+The provided Microsoft VC++ 10 solution assumes the following commands have been run
+in the root directory:
- bjam --toolset=msvc-9.0express --build-type=complete --with-filesystem stage
- bjam --toolset=msvc-9.0express --build-type=complete --with-regex stage
+ b2 --toolset=msvc-10.0express --build-type=complete --with-filesystem stage
+ b2 --toolset=msvc-10.0express --build-type=complete --with-regex stage
+
+boost-no-inspect
+
diff --git a/tools/inspect/deprecated_macro_check.cpp b/tools/inspect/deprecated_macro_check.cpp
new file mode 100644
index 0000000000..f985982333
--- /dev/null
+++ b/tools/inspect/deprecated_macro_check.cpp
@@ -0,0 +1,146 @@
+// deprecated macro check implementation ---------------------------------------------//
+// Protect against ourself: boostinspect:ndprecated_macros
+
+// Copyright Eric Niebler 2010.
+// Based on the assert_macro_check checker by Marshall Clow
+//
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#include "deprecated_macro_check.hpp"
+#include <functional>
+#include "boost/regex.hpp"
+#include "boost/lexical_cast.hpp"
+#include "boost/filesystem/operations.hpp"
+
+namespace fs = boost::filesystem;
+
+namespace
+{
+ const char * boost150macros [] = {
+ "BOOST_NO_0X_HDR_ARRAY",
+ "BOOST_NO_0X_HDR_CHRONO",
+ "BOOST_NO_0X_HDR_CODECVT",
+ "BOOST_NO_0X_HDR_CONDITION_VARIABLE",
+ "BOOST_NO_0X_HDR_FORWARD_LIST",
+ "BOOST_NO_0X_HDR_FUTURE",
+ "BOOST_NO_0X_HDR_INITIALIZER_LIST",
+ "BOOST_NO_INITIALIZER_LISTS",
+ "BOOST_NO_0X_HDR_MUTEX",
+ "BOOST_NO_0X_HDR_RANDOM",
+ "BOOST_NO_0X_HDR_RATIO",
+ "BOOST_NO_0X_HDR_REGEX",
+ "BOOST_NO_0X_HDR_SYSTEM_ERROR",
+ "BOOST_NO_0X_HDR_THREAD",
+ "BOOST_NO_0X_HDR_TUPLE",
+ "BOOST_NO_0X_HDR_TYPE_TRAITS",
+ "BOOST_NO_0X_HDR_TYPEINDEX",
+ "BOOST_NO_0X_HDR_UNORDERED_SET",
+ "BOOST_NO_0X_HDR_UNORDERED_MAP",
+ "BOOST_NO_STD_UNORDERED",
+ NULL
+ };
+
+ const char * boost151macros [] = {
+ "BOOST_NO_AUTO_DECLARATIONS",
+ "BOOST_NO_AUTO_MULTIDECLARATIONS",
+ "BOOST_NO_CHAR16_T",
+ "BOOST_NO_CHAR32_T",
+ "BOOST_NO_TEMPLATE_ALIASES",
+ "BOOST_NO_CONSTEXPR",
+ "BOOST_NO_DECLTYPE",
+ "BOOST_NO_DECLTYPE_N3276",
+ "BOOST_NO_DEFAULTED_FUNCTIONS",
+ "BOOST_NO_DELETED_FUNCTIONS",
+ "BOOST_NO_EXPLICIT_CONVERSION_OPERATORS",
+ "BOOST_NO_EXTERN_TEMPLATE",
+ "BOOST_NO_FUNCTION_TEMPLATE_DEFAULT_ARGS",
+ "BOOST_NO_LAMBDAS",
+ "BOOST_NO_LOCAL_CLASS_TEMPLATE_PARAMETERS",
+ "BOOST_NO_NOEXCEPT",
+ "BOOST_NO_NULLPTR",
+ "BOOST_NO_RAW_LITERALS",
+ "BOOST_NO_RVALUE_REFERENCES",
+ "BOOST_NO_SCOPED_ENUMS",
+ "BOOST_NO_STATIC_ASSERT",
+ "BOOST_NO_STD_UNORDERED",
+ "BOOST_NO_UNICODE_LITERALS",
+ "BOOST_NO_UNIFIED_INITIALIZATION_SYNTAX",
+ "BOOST_NO_VARIADIC_TEMPLATES",
+ "BOOST_NO_VARIADIC_MACROS",
+ "BOOST_NO_NUMERIC_LIMITS_LOWEST",
+ NULL
+ };
+
+ const char * boost153macros [] = {
+ "BOOST_HAS_STATIC_ASSERT",
+ "BOOST_HAS_RVALUE_REFS",
+ "BOOST_HAS_VARIADIC_TMPL",
+ "BOOST_HAS_CHAR16_T",
+ "BOOST_HAS_CHAR32_T",
+ NULL
+ };
+} // unnamed namespace
+
+
+namespace boost
+{
+ namespace inspect
+ {
+ deprecated_macro_check::deprecated_macro_check()
+ : m_files_with_errors(0)
+ , m_from_boost_root(
+ fs::exists(search_root_path() / "boost") &&
+ fs::exists(search_root_path() / "libs"))
+ {
+ register_signature( ".c" );
+ register_signature( ".cpp" );
+ register_signature( ".cxx" );
+ register_signature( ".h" );
+ register_signature( ".hpp" );
+ register_signature( ".hxx" );
+ register_signature( ".ipp" );
+ }
+
+ void deprecated_macro_check::inspect(
+ const string & library_name,
+ const path & full_path, // example: c:/foo/boost/filesystem/path.hpp
+ const string & contents ) // contents of file to be inspected
+ {
+ if (contents.find( "boostinspect:" "ndprecated_macros" ) != string::npos)
+ return;
+
+ const char **ptr;
+ long errors = 0;
+ for ( ptr = boost150macros; *ptr != NULL; ++ptr )
+ {
+ if ( contents.find( *ptr ) != string::npos ) {
+ ++errors;
+ error( library_name, full_path, string ( "Boost macro deprecated in 1.50: " ) + *ptr );
+ }
+ }
+
+ for ( ptr = boost151macros; *ptr != NULL; ++ptr )
+ {
+ if ( contents.find( *ptr ) != string::npos ) {
+ ++errors;
+ error( library_name, full_path, string ( "Boost macro deprecated in 1.51: " ) + *ptr );
+ }
+ }
+
+ for ( ptr = boost153macros; *ptr != NULL; ++ptr )
+ {
+ if ( contents.find( *ptr ) != string::npos ) {
+ ++errors;
+ error( library_name, full_path, string ( "Boost macro deprecated in 1.53: " ) + *ptr );
+ }
+ }
+
+ if(errors > 0)
+ ++m_files_with_errors;
+ }
+ } // namespace inspect
+} // namespace boost
+
+
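
The new checker above is essentially a substring scan over each source file. A compact Python sketch of the same idea, with the macro lists abbreviated and the function name purely illustrative:

    # In the C++ checker the lists cover Boost 1.50, 1.51 and 1.53 deprecations.
    DEPRECATED = {
        "BOOST_NO_0X_HDR_ARRAY":   "1.50",
        "BOOST_NO_CONSTEXPR":      "1.51",
        "BOOST_HAS_STATIC_ASSERT": "1.53",
    }

    def inspect_file(path, contents):
        # Same opt-out marker the C++ checker looks for (kept split so the
        # scanner does not flag this file itself).
        if "boostinspect:" "ndprecated_macros" in contents:
            return []
        return ["%s: Boost macro deprecated in %s: %s" % (path, since, macro)
                for macro, since in DEPRECATED.items() if macro in contents]
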
diff --git a/tools/inspect/deprecated_macro_check.hpp b/tools/inspect/deprecated_macro_check.hpp
new file mode 100644
index 0000000000..c9093907df
--- /dev/null
+++ b/tools/inspect/deprecated_macro_check.hpp
@@ -0,0 +1,40 @@
+// deprecated_macro_check header --------------------------------------------------------//
+
+// Copyright Eric Niebler 2010.
+// Based on the apple_macro_check checker by Marshall Clow
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+#ifndef BOOST_DEPRECATED_MACRO_CHECK_HPP
+#define BOOST_DEPRECATED_MACRO_CHECK_HPP
+
+#include "inspector.hpp"
+
+
+namespace boost
+{
+ namespace inspect
+ {
+ class deprecated_macro_check : public inspector
+ {
+ long m_files_with_errors;
+ bool m_from_boost_root;
+ public:
+
+ deprecated_macro_check();
+ virtual const char * name() const { return "*DEPRECATED-MACROS*"; }
+ virtual const char * desc() const { return "presence of deprecated BOOST macro in file (see docs for replacements)"; }
+
+ virtual void inspect(
+ const std::string & library_name,
+ const path & full_path,
+ const std::string & contents );
+
+ virtual ~deprecated_macro_check()
+ { std::cout << " " << m_files_with_errors << " files with a deprecated BOOST macro" << line_break(); }
+ };
+ }
+}
+
+#endif // BOOST_DEPRECATED_MACRO_CHECK_HPP
diff --git a/tools/inspect/doc/inspect.qbk b/tools/inspect/doc/inspect.qbk
index 87f95e69ac..a16968f377 100644
--- a/tools/inspect/doc/inspect.qbk
+++ b/tools/inspect/doc/inspect.qbk
@@ -48,8 +48,8 @@ The program is run in the directory to be scanned for errors. Sub-directories ar
If the first program argument is -help, a usage message is displayed, showing all available program options:
[pre
-Usage: inspect \[-cvs\] \[-text\] \[-brief\] \[options...\]
-
+Usage: inspect \[search-root\] \[-cvs\] \[-text\] \[-brief\] \[options...\]
+ search-root default is the current directory
Options:
-license
-copyright
@@ -61,6 +61,7 @@ Usage: inspect \[-cvs\] \[-text\] \[-brief\] \[options...\]
-ascii
-apple_macro
-assert_macro
+ -deprecated_macro
-minmax
-unnamed
default is all checks on; otherwise options specify desired checks
diff --git a/tools/inspect/inspect.cpp b/tools/inspect/inspect.cpp
index 00e72f9935..9aa93809b8 100644
--- a/tools/inspect/inspect.cpp
+++ b/tools/inspect/inspect.cpp
@@ -1,4 +1,4 @@
-// inspect program ---------------------------------------------------------//
+// inspect program -------------------------------------------------------------------//
// Copyright Beman Dawes 2002.
// Copyright Rene Rivera 2004-2006.
@@ -15,6 +15,11 @@
// See http://www.boost.org/tools/inspect/ for more information.
+const char* boost_no_inspect = "boost-" "no-inspect";
+
+// Directories containing a file whose name is the boost_no_inspect value are not inspected.
+// Files whose contents contain the boost_no_inspect value are not inspected.
+
#include <vector>
#include <list>
@@ -26,6 +31,15 @@
#include "boost/filesystem/operations.hpp"
#include "boost/filesystem/fstream.hpp"
+#include <stdio.h> // for popen, pclose
+#if defined(_MSC_VER)
+# define POPEN _popen
+# define PCLOSE _pclose
+#else
+# define POPEN popen
+# define PCLOSE pclose
+#endif
+
#include "time_string.hpp"
#include "inspector.hpp"
@@ -41,12 +55,19 @@
#include "ascii_check.hpp"
#include "apple_macro_check.hpp"
#include "assert_macro_check.hpp"
+#include "deprecated_macro_check.hpp"
#include "minmax_check.hpp"
#include "unnamed_namespace_check.hpp"
-#include "cvs_iterator.hpp"
+//#include "cvs_iterator.hpp"
+#if !defined(INSPECT_USE_BOOST_TEST)
+#define INSPECT_USE_BOOST_TEST 0
+#endif
+
+#if INSPECT_USE_BOOST_TEST
#include "boost/test/included/prg_exec_monitor.hpp"
+#endif
namespace fs = boost::filesystem;
@@ -54,6 +75,8 @@ using namespace boost::inspect;
namespace
{
+ fs::path search_root = fs::initial_path();
+
class inspector_element
{
typedef boost::shared_ptr< boost::inspect::inspector > inspector_ptr;
@@ -109,33 +132,89 @@ namespace
typedef std::vector< lib_error_count > lib_error_count_vector;
lib_error_count_vector libs;
-// get info (as a string) if inspect_root is svn working copy --------------//
+// run subversion to get revision info --------------------------------------//
+//
+// implemented as a function object that can be passed to boost::execution_monitor
+// in order to swallow any errors from 'svn info'.
- void extract_info( fs::ifstream & entries_file, string & rev, string & repos )
- {
- std::getline( entries_file, rev );
- std::getline( entries_file, rev );
- std::getline( entries_file, rev );
- std::getline( entries_file, rev ); // revision number as a string
- std::getline( entries_file, repos ); // repository as a string
+ struct svn_check
+ {
+ explicit svn_check(const fs::path & inspect_root) :
+ inspect_root(inspect_root), fp(0) {}
+
+ int operator()() {
+ string rev("unknown");
+ string repos("unknown");
+ string command("cd ");
+ command += inspect_root.string() + " && svn info";
+
+ fp = (POPEN(command.c_str(), "r"));
+ if (fp)
+ {
+ static const int line_max = 128;
+ char line[line_max];
+ while (fgets(line, line_max, fp) != NULL)
+ {
+ string ln(line);
+ string::size_type pos;
+ if ((pos = ln.find("Revision: ")) != string::npos)
+ rev = ln.substr(pos + 10);
+ else if ((pos = ln.find("URL: ")) != string::npos)
+ repos = ln.substr(pos + 5);
+ }
+ }
+
+ result = repos + " at revision " + rev;
+ return 0;
}
+ ~svn_check() { if (fp) PCLOSE(fp); }
+
+ const fs::path & inspect_root;
+ std::string result;
+ FILE* fp;
+ private:
+ svn_check(svn_check const&);
+ svn_check const& operator=(svn_check const&);
+ };
+
+ // Small helper class because svn_check can't be passed by copy.
+ template <typename F, typename R>
+ struct nullary_function_ref
+ {
+ explicit nullary_function_ref(F& f) : f(f) {}
+ R operator()() const { return f(); }
+ F& f;
+ };
+
+// get info (as a string) if inspect_root is svn working copy --------------//
+
string info( const fs::path & inspect_root )
{
- string rev( "?" );
- string repos( "unknown" );
- fs::path entries( inspect_root / ".svn" / "entries" );
- fs::ifstream entries_file( entries );
- if ( entries_file )
- extract_info( entries_file, rev, repos );
- else
- {
- entries = inspect_root / ".." / "svn_info" / ".svn" / "entries";
- fs::ifstream entries_file( entries );
- if ( entries_file )
- extract_info( entries_file, rev, repos );
+ svn_check check(inspect_root);
+
+#if !INSPECT_USE_BOOST_TEST
+ check();
+#else
+
+ try {
+ boost::execution_monitor e;
+ e.execute(nullary_function_ref<svn_check, int>(check));
}
- return repos + " at revision " + rev;
+ catch(boost::execution_exception const& e) {
+ if (e.code() == boost::execution_exception::system_error) {
+ // There was an error running 'svn info' - it probably
+ // wasn't run in a subversion repo.
+ return string("unknown");
+ }
+ else {
+ throw;
+ }
+ }
+
+#endif
+
+ return check.result;
}
// visit_predicate (determines which directories are visited) --------------//
@@ -144,10 +223,13 @@ namespace
bool visit_predicate( const path & pth )
{
- string local( boost::inspect::relative_to( pth, fs::initial_path() ) );
+ string local( boost::inspect::relative_to( pth, search_root_path() ) );
string leaf( pth.leaf().string() );
+ if (leaf[0] == '.') // ignore hidden by convention directories such as
+ return false; // .htaccess, .git, .svn, .bzr, .DS_Store, etc.
+
return
- // so we can inspect a checkout
+ // so we can inspect a CVS checkout
leaf != "CVS"
// don't look at binaries
&& leaf != "bin"
@@ -155,17 +237,8 @@ namespace
// no point in checking doxygen xml output
&& local.find("doc/xml") != 0
&& local.find("doc\\xml") != 0
- // ignore some web files
- && leaf != ".htaccess"
- // ignore svn files:
- && leaf != ".svn"
- // ignore other version control files
- && leaf != ".git"
- && leaf != ".bzr"
- // ignore OS X directory info files:
- && leaf != ".DS_Store"
// ignore if tag file present
- && !boost::filesystem::exists(pth / "boost-no-inspect")
+ && !boost::filesystem::exists(pth / boost_no_inspect)
;
}
@@ -253,7 +326,6 @@ namespace
for ( DirectoryIterator itr( dir_path ); itr != end_itr; ++itr )
{
-
if ( fs::is_directory( *itr ) )
{
if ( visit_predicate( *itr ) )
@@ -263,14 +335,14 @@ namespace
visit_all<DirectoryIterator>( cur_lib, *itr, insps );
}
}
- else
+ else if (itr->path().leaf().string()[0] != '.') // ignore if hidden
{
++file_count;
string content;
load_content( *itr, content );
- check( lib.empty()
- ? library_from_content( content ) : lib
- , *itr, content, insps );
+ if (content.find(boost_no_inspect) == string::npos)
+ check( lib.empty() ? library_from_content( content ) : lib,
+ *itr, content, insps );
}
}
}
@@ -293,7 +365,7 @@ namespace
void display_summary_helper( const string & current_library, int err_count )
{
- if (display_text == display_format)
+ if (display_format == display_text)
{
std::cout << " " << current_library << " (" << err_count << ")\n";
}
@@ -312,7 +384,7 @@ namespace
void display_summary()
{
- if (display_text == display_format)
+ if (display_format == display_text)
{
std::cout << "Summary:\n";
}
@@ -339,7 +411,7 @@ namespace
}
display_summary_helper( current_library, err_count );
- if (display_text == display_format)
+ if (display_format == display_text)
std::cout << "\n";
else
std::cout << "</blockquote>\n";
@@ -376,9 +448,7 @@ namespace
void display_details()
{
- // gps - review this
-
- if (display_text == display_format)
+ if (display_format == display_text)
{
// display error messages with group indication
error_msg current;
@@ -517,7 +587,9 @@ namespace
void display_worst_offenders()
{
- if (display_text == display_format)
+ if (display_mode == display_brief)
+ return;
+ if (display_format == display_text)
{
std::cout << "Worst Offenders:\n";
}
@@ -537,7 +609,7 @@ namespace
|| itr->error_count == last_error_count);
++itr, ++display_count )
{
- if (display_text == display_format)
+ if (display_format == display_text)
{
std::cout << itr->library << " " << itr->error_count << "\n";
}
@@ -553,7 +625,7 @@ namespace
last_error_count = itr->error_count;
}
- if (display_text == display_format)
+ if (display_format == display_text)
std::cout << "\n";
else
std::cout << "</blockquote>\n";
@@ -573,6 +645,7 @@ namespace
" -ascii\n"
" -apple_macro\n"
" -assert_macro\n"
+ " -deprecated_macro\n"
" -minmax\n"
" -unnamed\n"
" default is all checks on; otherwise options specify desired checks"
@@ -611,6 +684,12 @@ namespace boost
return display_format ? "\n" : "<br>\n";
}
+// search_root_path --------------------------------------------------------//
+
+ path search_root_path()
+ {
+ return search_root;
+ }
// register_signature ------------------------------------------------------//
@@ -628,7 +707,7 @@ namespace boost
++error_count;
error_msg err_msg;
err_msg.library = library_name;
- err_msg.rel_path = relative_to( full_path, fs::initial_path() );
+ err_msg.rel_path = relative_to( full_path, search_root_path() );
err_msg.msg = msg;
err_msg.line_number = line_number;
msgs.push_back( err_msg );
@@ -699,7 +778,7 @@ namespace boost
// may return an empty string [gps]
string impute_library( const path & full_dir_path )
{
- path relative( relative_to( full_dir_path, fs::initial_path() ) );
+ path relative( relative_to( full_dir_path, search_root_path() ) );
if ( relative.empty() ) return "boost-root";
string first( (*relative.begin()).string() );
string second = // borland 5.61 requires op=
@@ -718,7 +797,11 @@ namespace boost
// cpp_main() --------------------------------------------------------------//
+#if !INSPECT_USE_BOOST_TEST
+int main( int argc_param, char * argv_param[] )
+#else
int cpp_main( int argc_param, char * argv_param[] )
+#endif
{
// <hack> for the moment, let's be on the safe side
// and ensure we don't modify anything being pointed to;
@@ -729,7 +812,9 @@ int cpp_main( int argc_param, char * argv_param[] )
if ( argc > 1 && (std::strcmp( argv[1], "-help" ) == 0
|| std::strcmp( argv[1], "--help" ) == 0 ) )
{
- std::clog << "Usage: inspect [-cvs] [-text] [-brief] [options...]\n\n"
+ //std::clog << "Usage: inspect [search-root] [-cvs] [-text] [-brief] [options...]\n\n"
+ std::clog << "Usage: inspect [search-root] [-text] [-brief] [options...]\n\n"
+ " search-root default is the current directory (i.e. '.')\n\n"
" Options:\n"
<< options() << '\n';
return 0;
@@ -743,18 +828,25 @@ int cpp_main( int argc_param, char * argv_param[] )
bool path_name_ck = true;
bool tab_ck = true;
bool ascii_ck = true;
- bool apple_ok = true;
- bool assert_ok = true;
+ bool apple_ck = true;
+ bool assert_ck = true;
+ bool deprecated_ck = true;
bool minmax_ck = true;
bool unnamed_ck = true;
- bool cvs = false;
+ //bool cvs = false;
- if ( argc > 1 && std::strcmp( argv[1], "-cvs" ) == 0 )
+ if ( argc > 1 && *argv[1] != '-' )
{
- cvs = true;
+ search_root = fs::canonical(fs::absolute(argv[1], fs::initial_path()));
--argc; ++argv;
}
+ //if ( argc > 1 && std::strcmp( argv[1], "-cvs" ) == 0 )
+ //{
+ // cvs = true;
+ // --argc; ++argv;
+ //}
+
if ( argc > 1 && std::strcmp( argv[1], "-text" ) == 0 )
{
display_format = display_text;
@@ -777,8 +869,9 @@ int cpp_main( int argc_param, char * argv_param[] )
path_name_ck = false;
tab_ck = false;
ascii_ck = false;
- apple_ok = false;
- assert_ok = false;
+ apple_ck = false;
+ assert_ck = false;
+ deprecated_ck = false;
minmax_ck = false;
unnamed_ck = false;
}
@@ -803,9 +896,11 @@ int cpp_main( int argc_param, char * argv_param[] )
else if ( std::strcmp( argv[1], "-ascii" ) == 0 )
ascii_ck = true;
else if ( std::strcmp( argv[1], "-apple_macro" ) == 0 )
- apple_ok = true;
+ apple_ck = true;
else if ( std::strcmp( argv[1], "-assert_macro" ) == 0 )
- assert_ok = true;
+ assert_ck = true;
+ else if ( std::strcmp( argv[1], "-deprecated_macro" ) == 0 )
+ deprecated_ck = true;
else if ( std::strcmp( argv[1], "-minmax" ) == 0 )
minmax_ck = true;
else if ( std::strcmp( argv[1], "-unnamed" ) == 0 )
@@ -823,8 +918,6 @@ int cpp_main( int argc_param, char * argv_param[] )
}
string inspector_keys;
- fs::initial_path();
-
{ // begin reporting block
@@ -849,22 +942,24 @@ int cpp_main( int argc_param, char * argv_param[] )
inspectors.push_back( inspector_element( new boost::inspect::tab_check ) );
if ( ascii_ck )
inspectors.push_back( inspector_element( new boost::inspect::ascii_check ) );
- if ( apple_ok )
+ if ( apple_ck )
inspectors.push_back( inspector_element( new boost::inspect::apple_macro_check ) );
- if ( assert_ok )
+ if ( assert_ck )
inspectors.push_back( inspector_element( new boost::inspect::assert_macro_check ) );
+ if ( deprecated_ck )
+ inspectors.push_back( inspector_element( new boost::inspect::deprecated_macro_check ) );
if ( minmax_ck )
inspectors.push_back( inspector_element( new boost::inspect::minmax_check ) );
if ( unnamed_ck )
inspectors.push_back( inspector_element( new boost::inspect::unnamed_namespace_check ) );
- // perform the actual inspection, using the requested type of iteration
- if ( cvs )
- visit_all<hack::cvs_iterator>( "boost-root",
- fs::initial_path(), inspectors );
- else
- visit_all<fs::directory_iterator>( "boost-root",
- fs::initial_path(), inspectors );
+ //// perform the actual inspection, using the requested type of iteration
+ //if ( cvs )
+ // visit_all<hack::cvs_iterator>( search_root.leaf().string(),
+ // search_root, inspectors );
+ //else
+ visit_all<fs::directory_iterator>( search_root.leaf().string(),
+ search_root, inspectors );
// close
for ( inspector_list::iterator itr = inspectors.begin();
@@ -876,17 +971,13 @@ int cpp_main( int argc_param, char * argv_param[] )
string run_date ( "n/a" );
boost::time_string( run_date );
- if (display_text == display_format)
+ if (display_format == display_text)
{
std::cout
<<
"Boost Inspection Report\n"
"Run Date: " << run_date << "\n"
"\n"
- "An inspection program <http://www.boost.org/tools/inspect/index.html>\n"
- "checks each file in the current Boost CVS for various problems,\n"
- "generating an HTML page as output.\n"
- "\n"
;
std::cout
@@ -928,7 +1019,7 @@ int cpp_main( int argc_param, char * argv_param[] )
;
std::cout
<< "<p>The files checked were from "
- << info( fs::initial_path() )
+ << info( search_root_path() )
<< ".</p>\n";
@@ -950,14 +1041,14 @@ int cpp_main( int argc_param, char * argv_param[] )
;
}
- if (display_text == display_format)
+ if (display_format == display_text)
std::cout << "\nProblem counts:\n";
else
std::cout << "\n<h2>Problem counts</h2>\n<blockquote><p>\n" ;
} // end of block: starts reporting
- if (display_text == display_format)
+ if (display_format == display_text)
std::cout << "\n" ;
else
std::cout << "</blockquote>\n";
@@ -967,25 +1058,29 @@ int cpp_main( int argc_param, char * argv_param[] )
worst_offenders_count();
std::stable_sort( libs.begin(), libs.end() );
- if ( !libs.empty() )
+ if ( !libs.empty() && display_mode != display_brief)
display_worst_offenders();
if ( !msgs.empty() )
{
display_summary();
- if (display_text == display_format)
+ if (display_format == display_text)
{
std::cout << "Details:\n" << inspector_keys;
- }
+ std::cout << "\nDirectories with a file named \"" << boost_no_inspect << "\" will not be inspected.\n"
+ "Files containing \"" << boost_no_inspect << "\" will not be inspected.\n";
+ }
else
{
std::cout << "<h2>Details</h2>\n" << inspector_keys;
+ std::cout << "\n<p>Directories with a file named \"" << boost_no_inspect << "\" will not be inspected.<br>\n"
+ "Files containing \"" << boost_no_inspect << "\" will not be inspected.</p>\n";
}
display_details();
}
- if (display_text == display_format)
+ if (display_format == display_text)
{
std::cout << "\n\n" ;
}
@@ -996,5 +1091,5 @@ int cpp_main( int argc_param, char * argv_param[] )
"</html>\n";
}
- return 0;
+ return error_count ? 1 : 0;
}
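
For reference, the popen-based 'svn info' query that the new svn_check function object performs reduces to roughly the following standalone sketch (error handling and the execution_monitor wrapper are omitted; like the original, it does not trim the trailing newlines from the parsed fields):

    #include <cstdio>
    #include <iostream>
    #include <string>

    #if defined(_MSC_VER)
    # define POPEN  _popen
    # define PCLOSE _pclose
    #else
    # define POPEN  popen
    # define PCLOSE pclose
    #endif

    // Run "svn info" in the given directory and report "<URL> at revision <rev>".
    std::string svn_info_string(const std::string& root)
    {
        std::string rev("unknown"), repos("unknown");
        std::string command("cd " + root + " && svn info");

        if (std::FILE* fp = POPEN(command.c_str(), "r"))
        {
            char line[128];
            while (std::fgets(line, sizeof line, fp) != NULL)
            {
                std::string ln(line);
                std::string::size_type pos;
                if ((pos = ln.find("Revision: ")) != std::string::npos)
                    rev = ln.substr(pos + 10);
                else if ((pos = ln.find("URL: ")) != std::string::npos)
                    repos = ln.substr(pos + 5);
            }
            PCLOSE(fp);
        }
        return repos + " at revision " + rev;
    }

    int main()
    {
        std::cout << svn_info_string(".") << std::endl;
    }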
diff --git a/tools/inspect/inspector.hpp b/tools/inspect/inspector.hpp
index ad9fdd01f9..408a4f868f 100644
--- a/tools/inspect/inspector.hpp
+++ b/tools/inspect/inspector.hpp
@@ -28,6 +28,8 @@ namespace boost
const char * line_break();
+ path search_root_path();
+
class inspector
{
protected:
diff --git a/tools/inspect/link_check.cpp b/tools/inspect/link_check.cpp
index ca692bdfb2..182af3bf78 100644
--- a/tools/inspect/link_check.cpp
+++ b/tools/inspect/link_check.cpp
@@ -123,7 +123,7 @@ namespace boost
{
// keep track of paths already encountered to reduce disk activity
if ( !fs::is_directory( full_path ) )
- m_paths[ relative_to( full_path, fs::initial_path() ) ] |= m_present;
+ m_paths[ relative_to( full_path, search_root_path() ) ] |= m_present;
}
// inspect ( .htm, .html, .shtml, .css ) -----------------------------------//
@@ -134,7 +134,7 @@ namespace boost
const string & contents ) // contents of file to be inspected
{
if (contents.find( "boostinspect:" "nounlinked" ) != string::npos)
- m_paths[ relative_to( full_path, fs::initial_path() ) ] |= m_nounlinked_errors;
+ m_paths[ relative_to( full_path, search_root_path() ) ] |= m_nounlinked_errors;
bool no_link_errors =
(contents.find( "boostinspect:" "nolink" ) != string::npos);
@@ -438,7 +438,7 @@ namespace boost
// create a m_paths entry if necessary
std::pair< const string, int > entry(
- relative_to( target_path, fs::initial_path() ), 0 );
+ relative_to( target_path, search_root_path() ), 0 );
m_path_map::iterator itr( m_paths.find( entry.first ) );
if ( itr == m_paths.end() )
{
@@ -476,7 +476,7 @@ namespace boost
&& itr->first.rfind( "index.htm" ) == string::npos )
{
++m_unlinked_errors;
- path full_path( fs::initial_path() / path(itr->first) );
+ path full_path( search_root_path() / path(itr->first) );
error( impute_library( full_path ), full_path, "Unlinked file" );
}
}
diff --git a/tools/inspect/link_check.hpp b/tools/inspect/link_check.hpp
index 2878c4db02..726cd91f48 100644
--- a/tools/inspect/link_check.hpp
+++ b/tools/inspect/link_check.hpp
@@ -30,7 +30,7 @@ namespace boost
long m_duplicate_bookmark_errors;
typedef std::map< string, int > m_path_map;
- m_path_map m_paths; // first() is relative initial_path()
+ m_path_map m_paths; // first() is relative to search_root_path()
void do_url( const string & url, const string & library_name,
const path & full_source_path, bool no_link_errors,
diff --git a/tools/inspect/path_name_check.cpp b/tools/inspect/path_name_check.cpp
index 87bc6842c9..0714e20c05 100644
--- a/tools/inspect/path_name_check.cpp
+++ b/tools/inspect/path_name_check.cpp
@@ -82,7 +82,7 @@ namespace boost
//}
// the path, including a presumed root, does not exceed the maximum size
- path const relative_path( relative_to( full_path, filesystem::initial_path() ) );
+ path const relative_path( relative_to( full_path, search_root_path() ) );
const unsigned max_relative_path = 207; // ISO 9660:1999 sets this limit
const string generic_root( "boost_X_XX_X/" );
if ( relative_path.string().size() >
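
The constants above give the flavour of the check; the exact comparison is cut off by the hunk context, so the following is an assumption about how the limit is applied rather than a copy of the real code:

    #include <iostream>
    #include <string>

    int main()
    {
        const unsigned    max_relative_path = 207;          // ISO 9660:1999 limit
        const std::string generic_root("boost_X_XX_X/");    // presumed release root

        // A path relative to the inspected tree (hypothetical example).
        std::string relative_path =
            "libs/some_lib/doc/html/a_rather_long_generated_file_name.html";

        // Assumption: flag paths whose length, together with the presumed root,
        // would exceed the ISO 9660 limit.
        if (generic_root.size() + relative_path.size() > max_relative_path)
            std::cout << "path would exceed the ISO 9660:1999 limit\n";
        else
            std::cout << "path is within the limit\n";
    }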
diff --git a/tools/inspect/tab_check.hpp b/tools/inspect/tab_check.hpp
index 9cc4cc3f9e..fb7f617ba1 100644
--- a/tools/inspect/tab_check.hpp
+++ b/tools/inspect/tab_check.hpp
@@ -20,7 +20,7 @@ namespace boost
public:
tab_check();
- virtual const char * name() const { return "*Tab*"; }
+ virtual const char * name() const { return "*Tabs*"; }
virtual const char * desc() const { return "tabs in file"; }
virtual void inspect(
diff --git a/tools/inspect/time_string.hpp b/tools/inspect/time_string.hpp
index fc005dddde..72ca439681 100644
--- a/tools/inspect/time_string.hpp
+++ b/tools/inspect/time_string.hpp
@@ -8,7 +8,7 @@
//
// ------------------------------------------------------------------
//
-// $Id: time_string.hpp 47222 2008-07-08 14:29:03Z bemandawes $
+// $Id$
#ifndef BOOST_TIME_STRING_HPP_GP_20060731
#define BOOST_TIME_STRING_HPP_GP_20060731
diff --git a/tools/inspect/unnamed_namespace_check.cpp b/tools/inspect/unnamed_namespace_check.cpp
index 3850e3af6c..92431a171c 100644
--- a/tools/inspect/unnamed_namespace_check.cpp
+++ b/tools/inspect/unnamed_namespace_check.cpp
@@ -15,7 +15,7 @@ namespace
{
boost::regex unnamed_namespace_regex(
- "\\<namespace\\s(\\?\\?<|\\{)" // trigraph ??< or {
+ "\\<namespace\\s*(\\?\\?<|\\{)" // trigraph ??< or {
);
} // unnamed namespace (ironical? :-)
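
A quick way to see what the change from `\s` to `\s*` buys: the old pattern required at least one whitespace character after the `namespace` keyword, so an unnamed namespace written without a space before its brace went undetected. A minimal check, using Boost.Regex as the inspector itself does:

    #include <boost/regex.hpp>
    #include <iostream>
    #include <string>

    int main()
    {
        boost::regex old_rx("\\<namespace\\s(\\?\\?<|\\{)");    // required one whitespace character
        boost::regex new_rx("\\<namespace\\s*(\\?\\?<|\\{)");   // whitespace is now optional

        std::string snippet = "namespace{ int helper(); }";     // no space before '{'

        std::cout << boost::regex_search(snippet, old_rx) << "\n";  // prints 0
        std::cout << boost::regex_search(snippet, new_rx) << "\n";  // prints 1
    }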
diff --git a/tools/quickbook/doc/1_6.qbk b/tools/quickbook/doc/1_6.qbk
index 89c41f2ac0..7a10cb1298 100644
--- a/tools/quickbook/doc/1_6.qbk
+++ b/tools/quickbook/doc/1_6.qbk
@@ -1,50 +1,54 @@
[/
Copyright 2002,2004,2006 Joel de Guzman, Eric Niebler
- Copyright 2010-2011 Daniel James
+ Copyright 2010-2013 Daniel James
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
]
-[chapter Language Versions
- [quickbook 1.6]
- [compatibility-mode 1.5]
- [id quickbook.versions]
- [source-mode teletype]
-]
-
-[section:stable Stable Versions]
-
-Since quickbook 1.3 the `quickbook` attribute in the document block selects
-which version of the language to use. Not all changes to quickbook are
-implemented using a version switch, it's mainly just the changes that change
-the way a document is interpreted or would break existing documentation.
-
-[heading Quickbook 1.3 and later]
-
-* Introduced quickbook language versioning.
-* In the documentation info, allow phrase markup in license and purpose
- attributes.
-* Fully qualified section and headers. Subsection names are concatenated to the
- ID to avoid clashing. Example: `doc_name.sect_name.sub_sect_name.sub_sub_sect_name`.
+[section:1_6 Quickbook 1.6]
-[heading Quickbook 1.5 and later]
+Upgrading a document from an earlier version of quickbook shouldn't be too
+hard. The first thing to do is update the version in the docinfo block.
+For example, if you were updating the Xpressive documentation, the existing
+docinfo block looks like:
-* Ignore template argument separators inside square brackets.
-* Don't separate the final template argument if the `..` separator was
- used. i.e. never mix `..` and whitespace separators.
-* Statically scope templates and their arguments rather than dynamically
- scope them.
-* Give table ids, and let you set them.
-* Allow spaces between the `:` character and ids in elements which can
- have ids.
+ [library Boost.Xpressive
+ [quickbook 1.3]
+ ...
+ ]
-[endsect]
+Change this to:
-[section:1_6 Quickbook 1.6]
+ [library Boost.Xpressive
+ [quickbook 1.6]
+ [compatibility-mode 1.3]
+ ...
+ ]
-Everything described in here may change depending on the feedback received.
+The =compatibility-mode= tag ensures that automatically generated links
+won't change. It might turn out that it isn't required, but in Xpressive's
+case leaving it out would break a lot of links.
+
+Then try building it. You might need to fix some stray square brackets. The
+new version has a stricter parser which reports an error for brackets
+that don't pair up. They might be there by mistake, in which case they
+should probably be deleted, or escaped if they're intentional. For example,
+to write out the half-open range \[a,b), use: `\[a,b)`.
+
+Next, you might need to reconsider how templates and macros are defined.
+If you `include` a file to use its templates, you'll now need to `import` it
+instead, as templates are now scoped by included files. Also, if you define
+templates and macros in your main quickbook file, you might want to put them
+into a separate file and `import` that, which allows the main documentation
+files to concentrate on the structure and contents of the document, making them
+easier to read.
+
+Now that headings can have ids, it can be a good idea to add ids to existing
+headings. This means that the headings will have more predictable ids which
+don't change when the text of the heading changes. In order to preserve
+links, you can use the existing generated id as the heading's id.
[section:docinfo Includes with docinfo]
@@ -70,6 +74,19 @@ before.
[endsect] [/docinfo]
+[section:doc_info_macros Macros in docinfo block]
+
+You can now expand macros in text fields in the docinfo block. In the top
+docinfo block only the predefined macros are available, but in nested documents
+macros defined in the parent document are also available.
+
+There's a small bug here - this leaks into older versions for the `license`
+and `purpose` fields, but since only the predefined macros are available, it's
+unlikely to break any existing documents. So I'd rather not complicate the code
+further by fixing that.
+
+[endsect]
+
[section:scope Scoping templates and macros]
A long standing quickbook bug is that macros are scoped by file, but templates
@@ -233,131 +250,38 @@ if you're totally sure that you will need it.
[endsect] [/xmlbase]
-[section:elements New Elements]
-
-New elements in 1.6 (to be documented later):
-
-* `block`
-* `ordered_list`
-* `itemized_list`
-* `role`
-
-[endsect]
-
-[section:listparagraphs Pargraphs in lists]
-
-I'm still refining this, but paragraphs and block elements can now be used
-in lists:
-
-[pre
-* Para 1
-
- Para 2
- * Nested Para 1
-
- Nested Para 2
-
- Code block
- Para 3
-]
-
-generates:
-
-* Para 1
+[section:template_parser Improved template parser]
- Para 2
- * Nested Para 1
+There's a new parser for template declarations and parameters which does
+a better job of understanding escaped and bracketed text. Unfortunately
+it does not understand element names so there are some cases where it
+could go wrong. For example:
- Nested Para 2
-
- Code block
- Para 3
-
-The docbook markup that this generates is pretty bad, but seems to create okay
-html.
-
-[endsect]
-
-[endsect] [/ Quickbok 1.6]
-
-[section:1_7 Quickbook 1.7]
-
-[section:source_mode Source mode for single entities]
-
-1.7 introduces a new `!` element type for setting the source mode of a single
-entity without changing the source mode otherwise. This can be used for
-code blocks and other elements. For example:
-
-```
-[!c++]
- void foo() {};
-
-[!python]``\`\`\`\ ``def foo():``\`\`\`\ ``
-```
-
-It can also be used to set the source mode for elements:
-
-```
-[!teletype][table
- [[code][meaning]]
- [[`+`][addition]]
-]
-```
-
-When used a section, it's only set for the section element, not the
-whole section.
-
-Currently it does support other syntactic entities such as paragraphs
-and lists. I'm not sure if it would be a good idea.
-
-[endsect]
+ [template doesnt_work[]
+ [ordered_list
+ [`code phrase`]
+ ]
+ ]
-[section:callouts Callouts in code block]
+In this case it will think the `[\`` is a template call and give a parse
+error. To work around this, put an escaped space before the code phrase:
-Currently callouts can only be used in code snippets. 1.7 add
-support in normal code blocks. The same syntax is used as in
-code snippets, the callout descriptions appear immediately
-after the code block.
+ [template works[]
+ [ordered_list
+ [\ `code phrase`]
+ ]
+ ]
[endsect]
-[section:escaped_docinfo_attributes Escaped docbook in docinfo blocks]
-
-Quickbook docinfo attributes will probably never be as rich as docbook
-attributes so to allow more flexible markup, not supported by quickbook
-escaped docbook can be included in the docinfo block:
-
-```
-[article Some article
-[quickbook 1.7]
-'''<author>
- <firstname>John</firstname>
- <surname>Doe</surname>
- <email>john.doe@example.com</email>
-</author>'''
-]
-```
-
-The escaped docbook is always placed at the end of the docinfo block,
-so it shouldn't be assumed that it will interleave the markup. A mixture
-of quickbook and docbook attributes for the same information will not work
-well.
-
-[endsect] [/escaped_docinfo_attributes]
-
-[section:templates_in_link_values Templates in link values]
+[section:elements New Elements]
-There's very premilinary support for calling templates in link values. A lot
-more work needs to be done, including:
+New elements added in quickbook 1.6:
-* Considering other places where templates could be called (e.g. images are
- quite tricky, as templates could get confused with attributes, should
- templates be callable from something like an element's id?).
-* Trimming spaces from the body of the template (which can cause surprising
- results).
-* Checking that the contents of the template are appropriate for the context.
- Possibly even using a different grammar.
+* [link quickbook.ref.block `block`]
+* [link quickbook.ref.list_tags `ordered_list` and `itemized_list`]
+* [link quickbook.ref.role `role`]
-[endsect] [/templates_in_link_values]
+[endsect] [/ elements]
-[endsect] [/ Quickbok 1.7]
+[endsect] [/ Quickbook 1.6]
diff --git a/tools/quickbook/doc/1_7.qbk b/tools/quickbook/doc/1_7.qbk
new file mode 100644
index 0000000000..745247fe45
--- /dev/null
+++ b/tools/quickbook/doc/1_7.qbk
@@ -0,0 +1,191 @@
+[/
+ Copyright 2002,2004,2006 Joel de Guzman, Eric Niebler
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
+[section:1_7 Quickbook 1.7]
+
+[section:context_error Error for elements used in incorrect context]
+
+Previously, if you used an element in the wrong context it was just left
+unprocessed, which was surprising. People often didn't realise that their
+element hadn't been processed. So now it's an error.
+
+[endsect]
+
+[section:phrase_parse_error Error for invalid phrase elements]
+
+If the body of a phrase element didn't parse, it was just used
+unprocessed. This is now a hard error.
+
+[endsect]
+
+[section:source_mode Source mode for single entities]
+
+1.7 introduces a new `!` element type for setting the source mode of a single
+entity without changing the source mode otherwise. This can be used for
+code blocks and other elements. For example:
+
+```
+[!c++]
+ void foo() {};
+
+[!python]``\`\`\`\ ``def foo():``\`\`\`\ ``
+```
+
+It can also be used to set the source mode for elements:
+
+```
+[!teletype][table
+ [[code][meaning]]
+ [[`+`][addition]]
+]
+```
+
+When used before a section, it sets the source mode for the whole section.
+
+If it appears at the beginning of a paragraph, it will be used for the
+whole paragraph only if there's a newline, e.g.
+
+```
+[!c++]
+A declaration `void foo();` and a definition `void foo() {}`.
+```
+
+[endsect]
+
+[section:callouts Callouts in code blocks]
+
+Currently callouts can only be used in code snippets. 1.7 adds
+support in normal code blocks. The same syntax is used as in
+code snippets; the callout descriptions appear immediately
+after the code block.
+
+[endsect]
+
+[section:escaped_docinfo_attributes Escaped docbook in docinfo blocks]
+
+Quickbook docinfo attributes will probably never be as rich as docbook
+attributes. To allow more flexible markup that is not supported by quickbook,
+escaped docbook can be included in the docinfo block:
+
+```
+[article Some article
+[quickbook 1.7]
+'''<author>
+ <firstname>John</firstname>
+ <surname>Doe</surname>
+ <email>john.doe@example.com</email>
+</author>'''
+]
+```
+
+The escaped docbook is always placed at the end of the docinfo block,
+so it shouldn't be assumed that it will interleave with markup generated from
+quickbook. A mixture
+of quickbook and docbook attributes for the same information will not work
+well.
+
+[endsect] [/escaped_docinfo_attributes]
+
+[section:listparagraphs Paragraphs in lists]
+
+Paragraphs and block elements can now be used in lists:
+
+[pre
+* Para 1
+
+ Para 2
+ * Nested Para 1
+
+ Nested Para 2
+
+ Code block
+ Para 3
+]
+
+generates:
+
+* Para 1
+
+ Para 2
+ * Nested Para 1
+
+ Nested Para 2
+
+ Code block
+ Para 3
+
+[endsect]
+
+[section:templates_in_attributes Templates in some attributes]
+
+There's support for calling templates in link values, anchors, roles and
+includes. This is sometimes a bit of a change, especially in places where
+spaces are currently allowed, so I might try using a slightly different
+grammar where required. I think I also need to add some validation, since
+the new parser accepts more symbols than some of the old ones did.
+
+[endsect] [/templates_in_attributes]
+
+[section:list_markup_in_tables List Markup in Nested Blocks]
+
+List markup can now be placed in nested blocks, e.g. in tables, variable lists, etc.
+Unfortunately indented code blocks are more tricky, because the contents of
+these blocks are often indented already. It seemed easier to just not support
+indented code blocks in this context than to try to work out sensible actions
+for the edge cases. If you want to use code blocks in this context, you should
+still be able to use explicit markup.
+
+[endsect]
+
+[section:phrase_block_templates Allow block elements in phrase templates]
+
+Block elements can now be used in phrase templates, but paragraph breaks aren't
+allowed, so this is an error:
+
+ [template paras[] Something or other.
+
+ Second paragraph.]
+
+If a phrase template only contains block elements, then it's practically
+indistinguishable from a block template. So you'll get the same output from:
+
+ [template foo[] [blurb Blah, blah, blah]]
+
+as:
+
+ [template foo[]
+ [blurb Blah, blah, blah]
+ ]
+
+If a phrase template has phrase content mixed with block elements, it'll generate
+output as if it was expanded inline.
+
+[endsect]
+
+[section:glob Including multiple files with Globs]
+
+One can now include multiple files at once using a glob pattern for the
+file reference:
+
+ [include sub/*/*.qbk]
+ [include include/*.h]
+
+All the matching files, and intermediate directories, will be
+included. The glob pattern can contain "\*" to match zero or more characters,
+"?" to match a single character, "\[<char>-<char>\]" to match a character class,
+"\[\^<char>-<char>\]" to match the complement of a character class, "\\\\" to escape
+a glob special character (which is then matched literally), and anything else
+matches the corresponding character literally.
+
+[note Because of the escaping in file references, the "\\\\" glob escape is
+written as a double "\\", i.e. an escaped back-slash.]
+
+[endsect]
+
+[endsect] [/ Quickbook 1.7]
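
The glob rules described in the [glob] section above map onto a small recursive matcher. The sketch below is illustrative only; it is not quickbook's implementation, and character classes are omitted for brevity:

    #include <iostream>

    // Illustrative only: '*', '?', '\' escapes and literal characters,
    // following the rules described in the [glob] section.
    bool glob_match(const char* pat, const char* str)
    {
        if (*pat == '\0') return *str == '\0';
        if (*pat == '*') {
            // '*' matches zero or more characters: try every possible split point.
            for (const char* s = str; ; ++s) {
                if (glob_match(pat + 1, s)) return true;
                if (*s == '\0') return false;
            }
        }
        if (*str == '\0') return false;
        if (*pat == '?')                        // '?' matches any single character
            return glob_match(pat + 1, str + 1);
        if (*pat == '\\' && pat[1] != '\0')     // '\' escapes the next character
            return pat[1] == *str && glob_match(pat + 2, str + 1);
        return *pat == *str && glob_match(pat + 1, str + 1);    // literal character
    }

    int main()
    {
        std::cout << glob_match("include/*.h", "include/config.h") << "\n";     // 1
        std::cout << glob_match("sub/*/*.qbk", "sub/part1/intro.qbk") << "\n";  // 1
        std::cout << glob_match("*.qbk", "notes.txt") << "\n";                  // 0
    }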
diff --git a/tools/quickbook/doc/Jamfile.v2 b/tools/quickbook/doc/Jamfile.v2
index f2f11f09eb..bfa7591578 100644
--- a/tools/quickbook/doc/Jamfile.v2
+++ b/tools/quickbook/doc/Jamfile.v2
@@ -8,7 +8,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#==============================================================================
-project boost/quickbook/doc ;
+project quickbook/doc ;
using boostbook ;
using quickbook ;
@@ -21,7 +21,11 @@ boostbook standalone
:
quickbook
:
- <xsl:param>boost.root=../../../..
+ <format>html:<xsl:param>boost.root=../../../..
+ <format>html:<xsl:param>img.src.path=../../../../doc/html/
+ <format>xhtml:<xsl:param>boost.root=../../../..
+ <format>xhtml:<xsl:param>img.src.path=../../../../doc/html/
+
#<xsl:param>callout.graphics.path=../../images/callouts//
<format>pdf:<xsl:param>img.src.path=$(images)/
<format>pdf:<xsl:param>boost.url.prefix=http://www.boost.org/doc/libs/release/doc/html
diff --git a/tools/quickbook/doc/block.qbk b/tools/quickbook/doc/block.qbk
index c6765131f7..1be2041b37 100644
--- a/tools/quickbook/doc/block.qbk
+++ b/tools/quickbook/doc/block.qbk
@@ -26,6 +26,38 @@ You can include another XML file with:
This is useful when file.xml has been generated by Doxygen and contains your
reference section.
+=xinclude= paths are normally used unchanged in the generated documentation,
+which will not work if you wish them to be relative to the current quickbook
+file. Quickbook can add a =xml:base= attribute to the boostbook documentation
+to specify where =xinclude= files should be found. For example, if you wish
+them to be relative to the current quickbook file:
+
+ [article Article with xincludes
+ [quickbook 1.6]
+ [xmlbase .]
+ ]
+
+ [xinclude file.xml]
+
+Now the xinclude should work if =file.xml= is in the same directory as the
+quickbook file, although it might not work if you distribute the generated
+files (as their relative directories can change).
+
+Say the article is generated in a sub-directory, by running something like:
+
+ quickbook article.qbk --output-file=output/article.xml
+
+This will generate a boostbook root tag:
+
+ <article id="article_with_xincludes"
+ last-revision="$Date: 2013/08/20 08:26:48 $"
+ xml:base=".."
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+
+Because =xml:base= is set to =..=, the xml processor will know to look in
+the parent directory to find =file.xml= when it comes across the
+=xi:include= tag.
+
[endsect] [/xinclude]
[#quickbook.ref.paragraphs]
@@ -190,6 +222,31 @@ will generate:
* 2.b.2.b
[endsect] [/mixed_lists]
+
+[#quickbook.ref.list_tags]
+[section:list_tags Explicit list tags]
+
+Sometimes the wiki-style list markup can be tricky to use, especially
+if you wish to include more complicated markup with the list. So in
+quickbook 1.6, an alternative way to mark up lists was introduced:
+
+ [ordered_list [item1][item2]]
+
+is equivalent to:
+
+ # item1
+ # item2
+
+And:
+
+ [itemized_list [item1][item2]]
+
+is equivalent to:
+
+ * item1
+ * item2
+
+[endsect] [/list_tags]
[endsect] [/lists]
[#quickbook.ref.code]
@@ -356,8 +413,23 @@ to produce the desired effect.
[h5 Heading 5]
[h6 Heading 6]
-Headings 1-3 \[h1 h2 and h3\] will automatically have anchors with
-normalized names with
+You can specify an id for a heading:
+
+```
+[h1:heading_id A heading to link to]
+```
+
+To link to it, you'll need to include the enclosing section's id:
+
+```
+[link document_id.section_id.heading_id The link text]
+```
+
+Alternatively, you can precede a heading with an [link quickbook.ref.anchors anchor]
+if you wish to use a location-independent link.
+
+If a heading doesn't have an id, one will be automatically generated
+with a normalized name with
[^name="document_id.section_id.normalized_header_text"] (i.e. valid
characters are =a-z=, =A-Z=, =0-9= and =_=. All non-valid characters are
converted to underscore and all upper-case are converted to lower-case.
@@ -370,6 +442,9 @@ For example: Heading 1 in section Section 2 will be normalized to
to link to them. See __anchor_links__ and __section__ for more info.
+[note Specifying heading ids is a quickbook 1.6 feature; earlier
+ versions don't support them.]
+
[endsect] [/headings]
[#quickbook.ref.generic_heading]
@@ -393,7 +468,7 @@ however, headings in a particular section is just flat. Example:
```
[section A]
[h2 X]
-[h2 Y]
+[h2:link_id Y]
[h2 Z]
[endsect]
```
@@ -965,17 +1040,25 @@ You can include one QuickBook file from another. The syntax is simply:
[include someother.qbk]
```
-The included file will be processed as if it had been cut and pasted
+In quickbook 1.6 and later, if the included file has a
+[link quickbook.ref.docinfo docinfo block] then it will create a nested
+document. This will be processed as a standalone document, although any macros
+or templates from the enclosing file will still be defined.
+
+Otherwise the included file will be processed as if it had been cut and pasted
into the current document, with the following exceptions:
* The '''__FILENAME__''' predefined macro will reflect the name of the
file currently being processed.
-* Any macros defined in the included file are scoped to that file.
+* Any macros or templates defined in the included file are scoped to that file,
+ i.e. they are not added to the enclosing file.
+
+[note In quickbook 1.5 and earlier templates weren't scoped in included files.
+If you want to use templates or macros from a file in quickbook 1.6,
+use [link quickbook.ref.import import] instead.]
The [^\[include\]] directive lets you specify a document id to use for the
-included file. When this id is not explicitly specified, the id defaults to
-the filename ("someother", in the example above). You can specify the id
-like this:
+included file. You can specify the id like this:
```
[include:someid someother.qbk]
@@ -986,11 +1069,70 @@ for instance, if there is a top section in someother.qbk named "Intro", the
named anchor for that section will be "someid.intro", and you can link to
it with [^\[link someid.intro The Intro\]].
+If the included file has a docinfo block, an id specified in an [^\[include\]]
+directive will overwrite it.
+
+You can also include C, C++ and python source files. This will include any
+quickbook blocks in the file that aren't inside named code snippets. See
+the [link quickbook.ref.import Import section] for syntax details. For example,
+say you included this file:
+
+ /**
+ * Hello world example
+ */
+
+ // In this comment, the backtick indicates that this is a
+ // quickbook source block that will be included.
+
+ /*`
+ First include the appropriate header: [hello_includes]
+ Then write your main function: [hello_main]
+ */
+
+ // This defines a code snippet, the syntax is
+ // described in the import section. It's available
+ // in the whole of this source file, not just after
+ // its definition.
+
+ //[hello_includes
+ #include <iostream>
+ //]
+
+ //[hello_main
+ int main() {
+ std::cout << "Hello, trivial example" << std::endl;
+ }
+ //]
+
+It will generate:
+
+ First include the appropriate header:
+
+ #include <iostream>
+
+ Then write your main function:
+
+ int main() {
+ std::cout << "Hello, trivial example" << std::endl;
+ }
+
[endsect] [/include]
[#quickbook.ref.import]
[section:import Import]
+In quickbook 1.6 and later if you wish to use a template, macro or code
+snippet from a file, you need to import it. This will not include any
+of the content from that file, but will pull templates, macros and code
+snippets into the current file's scope.
+
+With quickbook files, this allows you to create template and macro
+libraries. For python (indicated by the `.py` extension), C or
+C++ files this allows you to include code snippets from source files,
+so that your code examples can be kept up to date and fully tested.
+
+[/ Old justification text, might move this into a new section:
+
When documenting code, you'd surely need to present code from actual source
files. While it is possible to copy some code and paste them in your QuickBook
file, doing so is error prone and the extracted code in the documentation tends
@@ -999,6 +1141,7 @@ always, is that once documentation is written, the tendency is for the docs to
languish in the archives without maintenance.
QuickBook's import facility provides a nice solution.
+]
[heading Example]
@@ -1132,6 +1275,36 @@ Example:
[class_]
-See the actual code here: [@boost:/tools/quickbook/test/stub.cpp]
+See the actual code here:
+[@boost:/tools/quickbook/test/stub.cpp tools/quickbook/test/stub.cpp]
[endsect] [/import]
+
+[#quickbook.ref.block]
+[section:block Plain blocks]
+
+`block` is a plain block element that doesn't wrap its contents
+in any docbook or boostbook tags. This can be useful when using
+escaped docbook block tags, such as:
+
+ [template chapter[title]
+ [block'''<chapter><title>'''[title]'''</title>''']
+ ]
+
+ [template chapterend
+ [block'''</chapter>''']
+ ]
+
+ [chapter An example chapter]
+
+ Content
+
+ [chapterend]
+
+Without the `block` element, the `chapter` and `chapterend` templates
+would be wrapped in paragraph tags.
+
+[note In this example, the template body has to start with a newline so that
+the template will be interpreted in block mode.]
+
+[endsect] [/block]
diff --git a/tools/quickbook/doc/change_log.qbk b/tools/quickbook/doc/change_log.qbk
index 96fa45dec3..a4942c2632 100644
--- a/tools/quickbook/doc/change_log.qbk
+++ b/tools/quickbook/doc/change_log.qbk
@@ -248,3 +248,86 @@ Boost 1.46.1:
block (`quickbook`, `compatibility-mode`, `source-mode`).
* Only add explicit alt text to images.
* Don't put 'inline' code blocks inside paragraphs.
+
+[heading Version 1.5.8 - Boost 1.50]
+
+* Write dependencies to a file, using `--output-deps`
+ ([@https://svn.boost.org/trac/boost/ticket/6691 #6691]).
+* Fix handling of section tags in lists.
+* Fix indented code blocks in lists.
+* Fix handling UTF-8 code points in the syntax highlighter.
+ Was treating each individual byte as a character.
+ Still doesn't deal with combining code points.
+* Internal changes:
+ * A lot of restructuring.
+ * Stop using 'v3' filesystem paths and namespaces, it's now
+ the default version.
+ * Remove awkward intrusive reference counting implementation,
+ avoids a gcc internal compiler error
+ ([@http://svn.boost.org/trac/boost/ticket/6794 #6794]),
+ but is also a cleaner implementation.
+* 1.6 changes:
+ * Better handling of brackets in link values.
+ * Improved handling of escaped characters in include paths.
+* Starting to develop 1.7:
+ * Source mode for single entities.
+ * Callouts in code blocks.
+ * Escaped docbook in docinfo blocks.
+ * Starting to implement calling templates from link values.
+
+[heading Version 1.5.9 - Boost 1.54]
+
+* When code blocks are indented using a mixture of tabs and spaces,
+ convert indentation to spaces.
+* In the C++ syntax highlighter, fix syntax highlighting for `#`, so that it's
+ used for preprocessor statements at the start of a line, and as a 'special'
+ character elsewhere
+ ([@https://svn.boost.org/trac/boost/ticket/8510 #8510],
+ [@https://svn.boost.org/trac/boost/ticket/8511 #8511]).
+* Add C++11 keywords to syntax highlighter
+ ([@https://svn.boost.org/trac/boost/ticket/8541 #8541]).
+* Hidden options for formatting of `--output-deps`. Not really for public use
+  yet.
+* 1.6 changes:
+ * Better template argument parsing, so that it understands things
+ like escaped markup.
+ * Support for using macros in the doc info block.
+* Internal changes:
+ * Convert to use `boost::string_ref`.
+ * Clean up the source map implementation (used to get the correct
+ location for error messages in things like templates and snippets).
+
+[heading Version 1.6.0 - Boost 1.55]
+
+* Remove nested blocks in lists from 1.6, move to 1.7.
+ (Can still nest block elements in lists though).
+* Don't break out of lists after a nested block element.
+* Check for errors when writing dependency files.
+* Improved markup for lists.
+* Make escaping templates with a punctuation identifier illegal.
+ Escaping templates with an alphanumeric identifier is still fine.
+* Fix detection of code blocks at the start of a file.
+* XML encode the contents of the `__FILENAME__` macro.
+* 1.7 changes:
+ * Make it an error to use an element in the wrong context.
+ * Error if the body of a phrase element doesn't parse.
+ * List markup in nested blocks.
+ * Allow block elements in phrase templates.
+ * Make it an error to put a paragraph break (i.e. a blank line)
+ in a phrase template.
+* Internal changes:
+ * Clean up the id manager implementation.
+
+[heading Version 1.6.1]
+
+* Better URI encoding of links.
+* Extra validation of attribute values.
+* 1.7 changes:
+ * Improved source mode tagging:
+ * Works for lists and paragraphs.
+ * If the source mode is changed inside a tagged element, that change
+ will now persist after the element.
+ * Tagged sections will now use the source mode for the whole section.
+ * Template calls from anchor, role and include elements.
+ * Stricter handling of templates called in attribute values.
+ * Glob support.
diff --git a/tools/quickbook/doc/language_versions.qbk b/tools/quickbook/doc/language_versions.qbk
new file mode 100644
index 0000000000..df9089dc53
--- /dev/null
+++ b/tools/quickbook/doc/language_versions.qbk
@@ -0,0 +1,46 @@
+[/
+ Copyright 2002,2004,2006 Joel de Guzman, Eric Niebler
+ Copyright 2010-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
+[chapter Language Versions
+ [quickbook 1.7]
+ [compatibility-mode 1.5]
+ [id quickbook.versions]
+ [source-mode teletype]
+]
+
+[section:stable Stable Versions]
+
+Since quickbook 1.3 the `quickbook` attribute in the document block selects
+which version of the language to use. Not all changes to quickbook are
+implemented using a version switch; it's mainly just the changes that alter
+the way a document is interpreted or would break existing documentation.
+
+[heading Quickbook 1.3 and later]
+
+* Introduced quickbook language versioning.
+* In the documentation info, allow phrase markup in license and purpose
+ attributes.
+* Fully qualified section and headers. Subsection names are concatenated to the
+ ID to avoid clashing. Example: `doc_name.sect_name.sub_sect_name.sub_sub_sect_name`.
+
+[heading Quickbook 1.5 and later]
+
+* Ignore template argument separators inside square brackets.
+* Don't separate the final template argument if the `..` separator was
+ used. i.e. never mix `..` and whitespace separators.
+* Statically scope templates and their arguments rather than dynamically
+ scope them.
+* Give table ids, and let you set them.
+* Allow spaces between the `:` character and ids in elements which can
+ have ids.
+
+[endsect]
+
+[include 1_6.qbk]
+[include 1_7.qbk]
diff --git a/tools/quickbook/doc/phrase.qbk b/tools/quickbook/doc/phrase.qbk
index f6f8859798..2cdf57b18b 100644
--- a/tools/quickbook/doc/phrase.qbk
+++ b/tools/quickbook/doc/phrase.qbk
@@ -165,6 +165,42 @@ And one for the little boy who lives down the lane.
[endsect] [/simple_formatting]
+[#quickbook.ref.role]
+[section:role Role]
+
+This generates a docbook phrase with a `role` attribute, which can be used
+to classify the phrase. This can be used to mark text for a use that isn't
+covered elsewhere. The docbook `role` will generate a html class, which can
+be used to style text. And the xsl stylesheets can be customized to treat
+certain roles specially when generating pdfs.
+
+The boostbook css stylesheets, and xsl stylesheets
+contain support for a limited number of colours that can be used with
+`role`. For example if you write:
+
+ [role red Text content]
+
+You'll get red text if you're using the boostbook css (for html) or
+the boostbook xsl for generating pdfs.
+
+The full list of colours that will be available is:
+
+* [role red red]
+* [role green green]
+* [role lime lime]
+* [role blue blue]
+* [role navy navy]
+* [role yellow yellow]
+* [role magenta magenta]
+* [role indigo indigo]
+* [role cyan cyan]
+* [role purple purple]
+* [role gold gold]
+* [role silver silver]
+* [role gray gray]
+
+[endsect] [/role]
+
[#quickbook.ref.inline_code]
[section:inline_code Inline code]
diff --git a/tools/quickbook/doc/quickbook.qbk b/tools/quickbook/doc/quickbook.qbk
index ea988f9e2f..2c8de9fb92 100644
--- a/tools/quickbook/doc/quickbook.qbk
+++ b/tools/quickbook/doc/quickbook.qbk
@@ -11,7 +11,7 @@
[quickbook 1.6]
[compatibility-mode 1.5]
[id quickbook]
- [version 1.5]
+ [version 1.6]
[authors [de Guzman, Joel], [Niebler, Eric]]
[copyright 2002 2004 2006 Joel de Guzman, Eric Niebler]
[copyright 2010-2011 Daniel James]
@@ -87,8 +87,8 @@
[include structure.qbk]
[include phrase.qbk]
[include block.qbk]
-[include 1_6.qbk]
+[include language_versions.qbk]
[include install.qbk]
[include editors.qbk]
[include faq.qbk]
-[include ref.qbk] \ No newline at end of file
+[include ref.qbk]
diff --git a/tools/quickbook/doc/structure.qbk b/tools/quickbook/doc/structure.qbk
index c9a4be9c33..4e9bfb4f0b 100644
--- a/tools/quickbook/doc/structure.qbk
+++ b/tools/quickbook/doc/structure.qbk
@@ -82,13 +82,37 @@ They are all optional.
[heading Quickbook specific meta data]
```
- [quickbook 1.5]
+ [quickbook 1.6]
```
The `quickbook` attribute declares the version of quickbook
the document is written for.
In its absence, version 1.1 is assumed. It's recommended that
-you use `[quickbook 1.5]` which is the version described here.
+you use `[quickbook 1.6]` which is the version described here.
+
+[note
+
+The quickbook version also makes some changes to the markup
+that's generated. Most notably, the ids that are automatically generated
+for headers and sections are different in later versions. To
+minimise disruption, you can use the =compatibility-mode=
+attribute to generate similar markup to the old version:
+
+```
+[article Article that was originally
+ written in quickbook 1.3
+[quickbook 1.6]
+[compatibility-mode 1.3]
+]
+```
+
+This feature shouldn't be used for new documents, just for
+porting old documents to the new version.
+]
+
+Both the =quickbook= and =compatibility-mode= tags can be used
+at the start of the file, before the document info block, and
+also in files that don't have a document info block.
```
[source-mode teletype]
@@ -144,6 +168,43 @@ that's just ignored by the style sheets.
[endsect] [/attributes]
+[section:nesting Nesting quickbook documents]
+
+Docinfo blocks can only appear at the beginning of a quickbook file, so to
+create a more complicated document you need to use several quickbook files and
+use the [link quickbook.ref.include include tag] to nest them. For example, say
+you wish to create a book with an introduction and a chapter, you first create
+a file for the book:
+
+ [book Simple example
+ [quickbook 1.6]
+ ]
+
+ [include introduction.qbk]
+ [include chapter.qbk]
+
+[note Structuring a document like this was introduced in quickbook 1.6, so the
+`[quickbook 1.6]` docinfo field is required.]
+
+The appropriate document type for an introduction is `preface`, so
+the contents of `introduction.qbk` should be something like:
+
+ [preface Introduction
+ [quickbook 1.6]
+ ]
+
+ Write the introduction to the book here....
+
+And `chapter.qbk`:
+
+ [chapter A chapter
+ [quickbook 1.6]
+ ]
+
+ Chapter contents....
+
+[endsect] [/nesting]
+
[endsect] [/docinfo]
[#quickbook.ref.section]
diff --git a/tools/quickbook/src/Jamfile.v2 b/tools/quickbook/src/Jamfile.v2
index 0c55c7ae8b..8d847123e8 100644
--- a/tools/quickbook/src/Jamfile.v2
+++ b/tools/quickbook/src/Jamfile.v2
@@ -15,8 +15,6 @@ project quickbook
<toolset>gcc:<cflags>-g0
<toolset>darwin:<cflags>-g0
<toolset>msvc:<cflags>/wd4709
- <toolset>gcc:<define>BOOST_DETAIL_CONTAINER_FWD
- <toolset>darwin:<define>BOOST_DETAIL_CONTAINER_FWD
;
lib shell32 ;
@@ -27,12 +25,16 @@ exe quickbook
actions.cpp
doc_info_actions.cpp
state.cpp
+ dependency_tracker.cpp
utils.cpp
files.cpp
- string_ref.cpp
- input_path.cpp
+ native_text.cpp
+ glob.cpp
+ include_paths.cpp
values.cpp
- id_manager.cpp
+ document_state.cpp
+ id_generation.cpp
+ id_xml.cpp
post_process.cpp
collector.cpp
template_stack.cpp
diff --git a/tools/quickbook/src/actions.cpp b/tools/quickbook/src/actions.cpp
index 304663ffe1..1ada175942 100644
--- a/tools/quickbook/src/actions.cpp
+++ b/tools/quickbook/src/actions.cpp
@@ -23,16 +23,18 @@
#include <boost/foreach.hpp>
#include "quickbook.hpp"
#include "actions.hpp"
+#include "syntax_highlight.hpp"
#include "utils.hpp"
#include "files.hpp"
#include "markups.hpp"
#include "state.hpp"
#include "state_save.hpp"
#include "grammar.hpp"
-#include "input_path.hpp"
+#include "native_text.hpp"
#include "block_tags.hpp"
#include "phrase_tags.hpp"
-#include "id_manager.hpp"
+#include "document_state.hpp"
+#include "include_paths.hpp"
namespace quickbook
{
@@ -41,12 +43,12 @@ namespace quickbook
{
// TODO: This works but is a bit of an odd place to put it.
// Might need to redefine the purpose of this function.
- if (!state.source_mode_next.empty()) {
- detail::outwarn(state.source_mode_next.get_file(),
- state.source_mode_next.get_position())
+ if (state.source_mode_next) {
+ detail::outwarn(state.source_mode_next_pos.get_file(),
+ state.source_mode_next_pos.get_position())
<< "Temporary source mode unsupported here."
<< std::endl;
- state.source_mode_next = value();
+ state.source_mode_next = 0;
}
for(quickbook::state::string_list::iterator
@@ -63,14 +65,62 @@ namespace quickbook
}
std::string add_anchor(quickbook::state& state,
- std::string const& id,
+ boost::string_ref id,
id_category::categories category =
id_category::explicit_anchor_id)
{
- std::string placeholder = state.ids.add_anchor(id, category);
+ std::string placeholder = state.document.add_anchor(id, category);
state.anchors.push_back(placeholder);
return placeholder;
}
+
+ std::string get_attribute_value(quickbook::state& state,
+ quickbook::value const& value)
+ {
+ std::string x = value.is_encoded() ?
+ value.get_encoded() : detail::to_s(value.get_quickbook());
+
+ if (x.empty()) {
+ detail::outerr(value.get_file(), value.get_position())
+ << "Empty attribute value."
+ << std::endl;
+ ++state.error_count;
+ x = "xxx";
+ }
+
+ return x;
+ }
+
+ std::string validate_id(quickbook::state& state,
+ quickbook::value const& id_value)
+ {
+ bool valid = true;
+ std::string id = get_attribute_value(state, id_value);
+
+ // Special case since I use dollar ids for id placeholders.
+ if (id[0] == '$') { valid = false; id[0] = '_'; }
+
+ if (qbk_version_n >= 107u) {
+ char const* allowed_punctuation = "_.-";
+
+ BOOST_FOREACH(char c, id) {
+ if (!std::isalnum(c) &&
+ !std::strchr(allowed_punctuation, c))
+ valid = false;
+ }
+ }
+
+ if (!valid) {
+ detail::outerr(id_value.get_file(), id_value.get_position())
+ << "Invalid id: "
+ << (id_value.is_encoded() ? id_value.get_encoded() :
+ detail::to_s(id_value.get_quickbook()))
+ << std::endl;
+ ++state.error_count;
+ }
+
+ return id;
+ }
}
bool quickbook_range::in_range() const {
@@ -189,6 +239,7 @@ namespace quickbook
case code_tags::inline_code_block:
case code_tags::inline_code:
return code_action(state, v);
+ case template_tags::attribute_template:
case template_tags::template_:
return do_template_action(state, v, first.base());
default:
@@ -198,7 +249,7 @@ namespace quickbook
void break_action::operator()(parse_iterator first, parse_iterator) const
{
- write_anchors(state, phrase);
+ write_anchors(state, state.phrase);
if(*first == '\\')
{
@@ -216,7 +267,7 @@ namespace quickbook
state.warned_about_breaks = true;
}
- phrase << detail::get_markup(phrase_tags::break_mark).pre;
+ state.phrase << detail::get_markup(phrase_tags::break_mark).pre;
}
void error_message_action::operator()(parse_iterator first, parse_iterator last) const
@@ -273,19 +324,23 @@ namespace quickbook
values.finish();
}
- void role_action(quickbook::state& state, value role)
+ void role_action(quickbook::state& state, value role_list)
{
write_anchors(state, state.phrase);
- value_consumer values = role;
+ value_consumer values = role_list;
+ value role = values.consume();
+ value phrase = values.consume();
+ values.finish();
+
state.phrase
<< "<phrase role=\"";
- detail::print_string(values.consume().get_quickbook(), state.phrase.get());
+ detail::print_string(get_attribute_value(state, role),
+ state.phrase.get());
state.phrase
<< "\">"
- << values.consume().get_encoded()
+ << phrase.get_encoded()
<< "</phrase>";
- values.finish();
}
void footnote_action(quickbook::state& state, value phrase)
@@ -295,7 +350,7 @@ namespace quickbook
value_consumer values = phrase;
state.phrase
<< "<footnote id=\""
- << state.ids.add_id("f", id_category::numbered)
+ << state.document.add_id("f", id_category::numbered)
<< "\"><para>"
<< values.consume().get_encoded()
<< "</para></footnote>";
@@ -322,27 +377,18 @@ namespace quickbook
while(pos != end && cl::space_p.test(*pos)) ++pos;
if(pos != end) {
- detail::markup markup = detail::get_markup(block_tags::paragraph);
+ detail::markup markup = state.in_list ?
+ detail::get_markup(block_tags::paragraph_in_list) :
+ detail::get_markup(block_tags::paragraph);
state.out << markup.pre << str;
write_anchors(state, state.out);
state.out << markup.post;
}
}
- void list_item_action::operator()() const
+ void explicit_list_action::operator()() const
{
- // Be careful as this is sometimes called in the wrong place
- // for markup such as:
- //
- // * A
- // [endsect]
- //
- // This action is called before [endsect] (to end the list item)
- // and then also after it due to the way the parser works.
- std::string str;
- state.phrase.swap(str);
- state.out << str;
- write_anchors(state, state.out);
+ state.explicit_list = true;
}
void phrase_end_action::operator()() const
@@ -358,7 +404,7 @@ namespace quickbook
{
state.out << "<bridgehead renderas=\"sect" << level << "\"";
state.out << " id=\"";
- state.out << state.ids.add_id("h", id_category::numbered);
+ state.out << state.document.add_id("h", id_category::numbered);
state.out << "\">";
state.out << "<phrase id=\"" << id << "\"/>";
state.out << "<link linkend=\"" << id << "\">";
@@ -390,7 +436,7 @@ namespace quickbook
if (generic)
{
- level = state.ids.section_level() + 1;
+ level = state.document.section_level() + 1;
// We need to use a heading which is one greater
// than the current.
if (level > 6 ) // The max is h6, clip it if it goes
@@ -405,47 +451,63 @@ namespace quickbook
if (!element_id.empty())
{
- std::string anchor = state.ids.add_id(
- element_id.get_quickbook(),
+ // Use an explicit id.
+
+ std::string anchor = state.document.add_id(
+ validate_id(state, element_id),
id_category::explicit_id);
write_bridgehead(state, level,
content.get_encoded(), anchor, self_linked_headers);
}
- else if (!generic && state.ids.compatibility_version() < 103) // version 1.2 and below
+ else if (state.document.compatibility_version() >= 106u)
{
- // This generates the old id style if both the interpreting
- // version and the generation version are less then 103u.
+ // Generate ids for 1.6+
- std::string anchor = state.ids.old_style_id(
- detail::make_identifier(
- state.ids.replace_placeholders_with_unresolved_ids(
- content.get_encoded())),
+ std::string anchor = state.document.add_id(
+ detail::make_identifier(content.get_quickbook()),
id_category::generated_heading);
write_bridgehead(state, level,
- content.get_encoded(), anchor, false);
-
+ content.get_encoded(), anchor, self_linked_headers);
}
else
{
- std::string anchor = state.ids.add_id(
- detail::make_identifier(
- state.ids.compatibility_version() >= 106 ?
- content.get_quickbook() :
- state.ids.replace_placeholders_with_unresolved_ids(
- content.get_encoded())
- ),
- id_category::generated_heading);
+ // Generate ids that are compatible with older versions of quickbook.
- write_bridgehead(state, level,
- content.get_encoded(), anchor, self_linked_headers);
+ // Older versions of quickbook used the generated boostbook, but
+ // we only have an intermediate version which can contain id
+ // placeholders. So to generate the ids they must be replaced
+ // by the ids that the older versions would have used - i.e. the
+ // unresolved ids.
+ //
+ // Note that this doesn't affect the actual boostbook generated for
+ // the content, it's just used to generate this id.
+
+ std::string id = detail::make_identifier(
+ state.document.replace_placeholders_with_unresolved_ids(
+ content.get_encoded()));
+
+ if (generic || state.document.compatibility_version() >= 103) {
+ std::string anchor =
+ state.document.add_id(id, id_category::generated_heading);
+
+ write_bridgehead(state, level,
+ content.get_encoded(), anchor, self_linked_headers);
+ }
+ else {
+ std::string anchor =
+ state.document.old_style_id(id, id_category::generated_heading);
+
+ write_bridgehead(state, level,
+ content.get_encoded(), anchor, false);
+ }
}
}
void simple_phrase_action::operator()(char mark) const
{
- write_anchors(state, out);
+ write_anchors(state, state.phrase);
int tag =
mark == '*' ? phrase_tags::bold :
@@ -461,9 +523,9 @@ namespace quickbook
value content = values.consume();
values.finish();
- out << markup.pre;
- out << content.get_encoded();
- out << markup.post;
+ state.phrase << markup.pre;
+ state.phrase << content.get_encoded();
+ state.phrase << markup.post;
}
bool cond_phrase_push::start()
@@ -474,14 +536,13 @@ namespace quickbook
if (saved_conditional)
{
- string_ref macro1 = values.consume().get_quickbook();
+ boost::string_ref macro1 = values.consume().get_quickbook();
std::string macro(macro1.begin(), macro1.end());
state.conditional = find(state.macro, macro.c_str());
if (!state.conditional) {
- state.phrase.push();
- state.out.push();
+ state.push_output();
state.anchors.swap(anchors);
}
}
@@ -493,38 +554,23 @@ namespace quickbook
{
if (saved_conditional && !state.conditional)
{
- state.phrase.pop();
- state.out.pop();
+ state.pop_output();
state.anchors.swap(anchors);
}
state.conditional = saved_conditional;
}
- namespace {
- int indent_length(std::string const& indent)
- {
- int length = 0;
- for(std::string::const_iterator
- first = indent.begin(), end = indent.end(); first != end; ++first)
- {
- switch(*first) {
- case ' ': ++length; break;
- // hardcoded tab to 4 for now
- case '\t': length = ((length + 4) / 4) * 4; break;
- default: BOOST_ASSERT(false);
- }
- }
-
- return length;
- }
- }
-
void state::start_list(char mark)
{
- write_anchors(*this, out);
+ push_tagged_source_mode(source_mode_next);
+ source_mode_next = 0;
+
+ write_anchors(*this, (in_list ? phrase : out));
assert(mark == '*' || mark == '#');
+ push_output();
out << ((mark == '#') ? "<orderedlist>\n" : "<itemizedlist>\n");
+ in_list = true;
}
void state::end_list(char mark)
@@ -532,23 +578,35 @@ namespace quickbook
write_anchors(*this, out);
assert(mark == '*' || mark == '#');
out << ((mark == '#') ? "\n</orderedlist>" : "\n</itemizedlist>");
+
+ std::string list_output;
+ out.swap(list_output);
+
+ pop_output();
+
+ (in_list ? phrase : out) << list_output;
+
+ pop_tagged_source_mode();
}
void state::start_list_item()
{
- out << "<listitem><simpara>";
- write_anchors(*this, out);
+ out << "<listitem>";
+ write_anchors(*this, phrase);
}
void state::end_list_item()
{
- write_anchors(*this, out);
- out << "</simpara></listitem>";
+ write_anchors(*this, phrase);
+ paragraph_action para(*this);
+ para();
+ out << "</listitem>";
}
namespace
{
- bool parse_template(value const&, quickbook::state& state);
+ bool parse_template(value const&, quickbook::state& state,
+ bool is_attribute_template = false);
}
void state::start_callouts()
@@ -558,8 +616,8 @@ namespace quickbook
std::string state::add_callout(value v)
{
- std::string callout_id1 = ids.add_id("c", id_category::numbered);
- std::string callout_id2 = ids.add_id("c", id_category::numbered);
+ std::string callout_id1 = document.add_id("c", id_category::numbered);
+ std::string callout_id2 = document.add_id("c", id_category::numbered);
callouts.insert(encoded_value(callout_id1));
callouts.insert(encoded_value(callout_id2));
@@ -593,7 +651,7 @@ namespace quickbook
std::string callout_value;
{
- template_state state(*this);
+ state_save save(*this, state_save::scope_all);
++template_depth;
bool r = parse_template(callout_body, *this);
@@ -647,53 +705,53 @@ namespace quickbook
value anchor_id = values.consume();
// Note: anchor_id is never encoded as boostbook. If it
// is encoded, it's just things like escapes.
- add_anchor(state, anchor_id.is_encoded() ?
- anchor_id.get_encoded() : anchor_id.get_quickbook());
+ add_anchor(state, validate_id(state, anchor_id));
values.finish();
}
void do_macro_action::operator()(std::string const& str) const
{
- write_anchors(state, phrase);
+ write_anchors(state, state.phrase);
if (str == quickbook_get_date)
{
char strdate[64];
strftime(strdate, sizeof(strdate), "%Y-%b-%d", current_time);
- phrase << strdate;
+ state.phrase << strdate;
}
else if (str == quickbook_get_time)
{
char strdate[64];
strftime(strdate, sizeof(strdate), "%I:%M:%S %p", current_time);
- phrase << strdate;
+ state.phrase << strdate;
}
else
{
- phrase << str;
+ state.phrase << str;
}
}
void raw_char_action::operator()(char ch) const
{
- out << ch;
+ state.phrase << ch;
}
void raw_char_action::operator()(parse_iterator first, parse_iterator last) const
{
while (first != last)
- out << *first++;
+ state.phrase << *first++;
}
void source_mode_action(quickbook::state& state, value source_mode)
{
- state.source_mode = source_mode_tags::name(source_mode.get_tag());
+ state.change_source_mode(source_mode.get_tag());
}
void next_source_mode_action(quickbook::state& state, value source_mode)
{
value_consumer values = source_mode;
- state.source_mode_next = values.consume();
+ state.source_mode_next_pos = values.consume();
+ state.source_mode_next = values.consume().get_int();
values.finish();
}
@@ -702,16 +760,16 @@ namespace quickbook
int code_tag = code_block.get_tag();
value_consumer values = code_block;
- string_ref code_value = values.consume().get_quickbook();
+ boost::string_ref code_value = values.consume().get_quickbook();
values.finish();
bool inline_code = code_tag == code_tags::inline_code ||
(code_tag == code_tags::inline_code_block && qbk_version_n < 106u);
bool block = code_tag != code_tags::inline_code;
- std::string source_mode = state.source_mode_next.empty() ?
- state.source_mode : state.source_mode_next.get_quickbook();
- state.source_mode_next = value();
+ source_mode_type source_mode = state.source_mode_next ?
+ state.source_mode_next : state.current_source_mode().source_mode;
+ state.source_mode_next = 0;
if (inline_code) {
write_anchors(state, state.phrase);
@@ -726,68 +784,66 @@ namespace quickbook
// preprocess the code section to remove the initial indentation
mapped_file_builder mapped;
mapped.start(state.current_file);
- mapped.unindent_and_add(code_value.begin(), code_value.end());
+ mapped.unindent_and_add(code_value);
file_ptr f = mapped.release();
- if (f->source.empty())
+ if (f->source().empty())
return; // Nothing left to do here. The program is empty.
if (qbk_version_n >= 107u) state.start_callouts();
- parse_iterator first_(f->source.begin());
- parse_iterator last_(f->source.end());
+ parse_iterator first_(f->source().begin());
+ parse_iterator last_(f->source().end());
file_ptr saved_file = f;
boost::swap(state.current_file, saved_file);
// print the code with syntax coloring
- std::string str = syntax_highlight(first_, last_, state,
- source_mode, block);
+ //
+ // We must not place a \n after the <programlisting> tag
+ // otherwise PDF output starts code blocks with a blank line:
+ state.phrase << "<programlisting>";
+ syntax_highlight(first_, last_, state, source_mode, block);
+ state.phrase << "</programlisting>\n";
boost::swap(state.current_file, saved_file);
- collector& output = inline_code ? state.phrase : state.out;
+ if (qbk_version_n >= 107u) state.phrase << state.end_callouts();
- // We must not place a \n after the <programlisting> tag
- // otherwise PDF output starts code blocks with a blank line:
- //
- output << "<programlisting>";
- output << str;
- output << "</programlisting>\n";
-
- if (qbk_version_n >= 107u) output << state.end_callouts();
+ if (!inline_code) {
+ state.out << state.phrase.str();
+ state.phrase.clear();
+ }
}
else {
parse_iterator first_(code_value.begin());
parse_iterator last_(code_value.end());
- std::string str = syntax_highlight(first_, last_, state,
- source_mode, block);
state.phrase << "<code>";
- state.phrase << str;
+ syntax_highlight(first_, last_, state, source_mode, block);
state.phrase << "</code>";
}
}
void plain_char_action::operator()(char ch) const
{
- write_anchors(state, phrase);
+ write_anchors(state, state.phrase);
- detail::print_char(ch, phrase.get());
+ detail::print_char(ch, state.phrase.get());
}
void plain_char_action::operator()(parse_iterator first, parse_iterator last) const
{
- write_anchors(state, phrase);
+ write_anchors(state, state.phrase);
while (first != last)
- detail::print_char(*first++, phrase.get());
+ detail::print_char(*first++, state.phrase.get());
}
void escape_unicode_action::operator()(parse_iterator first, parse_iterator last) const
{
- write_anchors(state, phrase);
+ write_anchors(state, state.phrase);
while(first != last && *first == '0') ++first;
@@ -799,10 +855,11 @@ namespace quickbook
if(hex_digits.size() == 2 && *first > '0' && *first <= '7') {
using namespace std;
- detail::print_char(strtol(hex_digits.c_str(), 0, 16), phrase.get());
+ detail::print_char(strtol(hex_digits.c_str(), 0, 16),
+ state.phrase.get());
}
else {
- phrase << "&#x" << hex_digits << ";";
+ state.phrase << "&#x" << hex_digits << ";";
}
}
@@ -813,8 +870,8 @@ namespace quickbook
detail::print_string(v.get_encoded(), out);
}
else {
- std::string value = v.get_quickbook();
- for(std::string::const_iterator
+ boost::string_ref value = v.get_quickbook();
+ for(boost::string_ref::const_iterator
first = value.begin(), last = value.end();
first != last; ++first)
{
@@ -841,8 +898,10 @@ namespace quickbook
value_consumer pair = pair_;
value name = pair.consume();
value value = pair.consume();
+ std::string name_str(name.get_quickbook().begin(),
+ name.get_quickbook().end());
pair.finish();
- if(!attributes.insert(std::make_pair(name.get_quickbook(), value)).second)
+ if(!attributes.insert(std::make_pair(name_str, value)).second)
{
detail::outwarn(name.get_file(), name.get_position())
<< "Duplicate image attribute: "
@@ -860,7 +919,7 @@ namespace quickbook
std::string fileref = attributes["fileref"].is_encoded() ?
attributes["fileref"].get_encoded() :
- attributes["fileref"].get_quickbook();
+ detail::to_s(attributes["fileref"].get_quickbook());
// Check for windows paths, then convert.
// A bit crude, but there you go.
@@ -937,7 +996,7 @@ namespace quickbook
// Now load the SVG file:
//
std::string svg_text;
- if (state.add_dependency(img)) {
+ if (state.dependencies.add_dependency(img)) {
fs::ifstream fs(img);
std::stringstream buffer;
buffer << fs.rdbuf();
@@ -1006,7 +1065,7 @@ namespace quickbook
void macro_definition_action(quickbook::state& state, quickbook::value macro_definition)
{
value_consumer values = macro_definition;
- std::string macro_id = values.consume().get_quickbook();
+ std::string macro_id = detail::to_s(values.consume().get_quickbook());
value phrase_value = values.optional_consume();
std::string phrase;
if (phrase_value.check()) phrase = phrase_value.get_encoded();
@@ -1035,11 +1094,11 @@ namespace quickbook
void template_body_action(quickbook::state& state, quickbook::value template_definition)
{
value_consumer values = template_definition;
- std::string identifier = values.consume().get_quickbook();
+ std::string identifier = detail::to_s(values.consume().get_quickbook());
std::vector<std::string> template_values;
BOOST_FOREACH(value const& p, values.consume()) {
- template_values.push_back(p.get_quickbook());
+ template_values.push_back(detail::to_s(p.get_quickbook()));
}
BOOST_ASSERT(values.check(template_tags::block) || values.check(template_tags::phrase));
@@ -1202,20 +1261,23 @@ namespace quickbook
bool parse_template(
value const& content
, quickbook::state& state
+ , bool is_attribute_template
)
{
file_ptr saved_current_file = state.current_file;
state.current_file = content.get_file();
- string_ref source = content.get_quickbook();
+ boost::string_ref source = content.get_quickbook();
parse_iterator first(source.begin());
parse_iterator last(source.end());
bool r = cl::parse(first, last,
+ is_attribute_template ?
+ state.grammar().attribute_template_body :
content.get_tag() == template_tags::phrase ?
state.grammar().inline_phrase :
- state.grammar().block
+ state.grammar().block_start
).full;
boost::swap(state.current_file, saved_current_file);
@@ -1227,26 +1289,23 @@ namespace quickbook
void call_template(quickbook::state& state,
template_symbol const* symbol,
std::vector<value> const& args,
- string_iterator first)
+ string_iterator first,
+ bool is_attribute_template = false)
{
bool is_block = symbol->content.get_tag() != template_tags::phrase;
+ assert(!(is_attribute_template && is_block));
+
+ quickbook::paragraph_action paragraph_action(state);
+
+ // Finish off any existing paragraphs.
+ if (is_block) paragraph_action();
// If this template contains already encoded text, then just
// write it out, without going through any of the rigamarole.
if (symbol->content.is_encoded())
{
- if (is_block)
- {
- paragraph_action para(state);
- para();
- state.out << symbol->content.get_encoded();
- }
- else
- {
- state.phrase << symbol->content.get_encoded();
- }
-
+ (is_block ? state.out : state.phrase) << symbol->content.get_encoded();
return;
}
@@ -1257,11 +1316,11 @@ namespace quickbook
// arguments are expanded.
template_scope const& call_scope = state.templates.top_scope();
- std::string block;
- std::string phrase;
-
{
- template_state save(state);
+ state_save save(state, state_save::scope_callables);
+ std::string save_block;
+ std::string save_phrase;
+
state.templates.start_template(symbol);
qbk_version_n = symbol->content.get_file()->version();
@@ -1277,7 +1336,7 @@ namespace quickbook
// Store the current section level so that we can ensure that
// [section] and [endsect] tags in the template are balanced.
- state.min_section_level = state.ids.section_level();
+ state.min_section_level = state.document.section_level();
///////////////////////////////////
// Prepare the arguments as local templates
@@ -1294,22 +1353,26 @@ namespace quickbook
///////////////////////////////////
// parse the template body:
- if (!parse_template(symbol->content, state))
+ if (symbol->content.get_file()->version() < 107u) {
+ state.out.swap(save_block);
+ state.phrase.swap(save_phrase);
+ }
+
+ if (!parse_template(symbol->content, state, is_attribute_template))
{
detail::outerr(state.current_file, first)
<< "Expanding "
<< (is_block ? "block" : "phrase")
- << " template: " << symbol->identifier << std::endl
- << std::endl
- << "------------------begin------------------" << std::endl
+ << " template: " << symbol->identifier << "\n\n"
+ << "------------------begin------------------\n"
<< symbol->content.get_quickbook()
- << "------------------end--------------------" << std::endl
+ << "------------------end--------------------\n"
<< std::endl;
++state.error_count;
return;
}
- if (state.ids.section_level() != state.min_section_level)
+ if (state.document.section_level() != state.min_section_level)
{
detail::outerr(state.current_file, first)
<< "Mismatched sections in template "
@@ -1319,19 +1382,24 @@ namespace quickbook
return;
}
- state.out.swap(block);
- state.phrase.swap(phrase);
- }
+ if (symbol->content.get_file()->version() < 107u) {
+ state.out.swap(save_block);
+ state.phrase.swap(save_phrase);
- if(is_block || !block.empty()) {
- paragraph_action para(state);
- para(); // For paragraphs before the template call.
- state.out << block;
- state.phrase << phrase;
- para();
- }
- else {
- state.phrase << phrase;
+ if(is_block || !save_block.empty()) {
+ paragraph_action();
+ state.out << save_block;
+ state.phrase << save_phrase;
+ paragraph_action();
+ }
+ else {
+ state.phrase << save_phrase;
+ }
+ }
+ else
+ {
+ if (is_block) paragraph_action();
+ }
}
}
@@ -1357,13 +1425,16 @@ namespace quickbook
void do_template_action(quickbook::state& state, value template_list,
string_iterator first)
{
+ bool const is_attribute_template =
+ template_list.get_tag() == template_tags::attribute_template;
+
// Get the arguments
value_consumer values = template_list;
bool template_escape = values.check(template_tags::escape);
if(template_escape) values.consume();
- std::string identifier = values.consume(template_tags::identifier).get_quickbook();
+ std::string identifier = detail::to_s(values.consume(template_tags::identifier).get_quickbook());
std::vector<value> args;
@@ -1419,6 +1490,20 @@ namespace quickbook
}
///////////////////////////////////
+ // Check that attribute templates are phrase templates
+
+ if (is_attribute_template &&
+ symbol->content.get_tag() != template_tags::phrase)
+ {
+ detail::outerr(state.current_file, first)
+ << "Only phrase templates can be used in attribute values."
+ << std::endl;
+
+ ++state.error_count;
+ return;
+ }
+
+ ///////////////////////////////////
// Initialise the arguments
switch(symbol->content.get_tag())
@@ -1443,7 +1528,7 @@ namespace quickbook
return;
}
- call_template(state, symbol, args, first);
+ call_template(state, symbol, args, first, is_attribute_template);
break;
case template_tags::snippet:
@@ -1477,10 +1562,19 @@ namespace quickbook
value content = values.consume();
values.finish();
- // Note: dst is never actually encoded as boostbook, which
- // is why the result is called with 'print_string' later.
- std::string dst = dst_value.is_encoded() ?
- dst_value.get_encoded() : dst_value.get_quickbook();
+ std::string dst;
+
+ if (link.get_tag() == phrase_tags::link) {
+ dst = validate_id(state, dst_value);
+ }
+ else {
+ dst = get_attribute_value(state, dst_value);
+
+ // TODO: Might be better to have an error for some invalid urls.
+ if (link.get_tag() == phrase_tags::url) {
+ dst = detail::partially_escape_uri(dst);
+ }
+ }
state.phrase << markup.pre;
detail::print_string(dst, state.phrase.get());
@@ -1499,7 +1593,7 @@ namespace quickbook
write_anchors(state, state.out);
value_consumer values = variable_list;
- std::string title = values.consume(table_tags::title).get_quickbook();
+ std::string title = detail::to_s(values.consume(table_tags::title).get_quickbook());
state.out << "<variablelist>\n";
@@ -1537,8 +1631,9 @@ namespace quickbook
value_consumer values = table;
std::string element_id;
- if(values.check(general_tags::element_id))
- element_id = values.consume().get_quickbook();
+ if(values.check(general_tags::element_id)) {
+ element_id = validate_id(state, values.consume());
+ }
value title = values.consume(table_tags::title);
bool has_title = !title.empty();
@@ -1546,14 +1641,14 @@ namespace quickbook
std::string table_id;
if (!element_id.empty()) {
- table_id = state.ids.add_id(element_id, id_category::explicit_id);
+ table_id = state.document.add_id(element_id, id_category::explicit_id);
}
else if (has_title) {
- if (state.ids.compatibility_version() >= 105) {
- table_id = state.ids.add_id(detail::make_identifier(title.get_quickbook()), id_category::generated);
+ if (state.document.compatibility_version() >= 105) {
+ table_id = state.document.add_id(detail::make_identifier(title.get_quickbook()), id_category::generated);
}
else {
- table_id = state.ids.add_id("t", id_category::numbered);
+ table_id = state.document.add_id("t", id_category::numbered);
}
}
@@ -1636,20 +1731,21 @@ namespace quickbook
value content = values.consume();
values.finish();
- std::string full_id = state.ids.begin_section(
- !element_id.empty() ?
- element_id.get_quickbook() :
- detail::make_identifier(content.get_quickbook()),
- !element_id.empty() ?
- id_category::explicit_section_id :
- id_category::generated_section);
+ std::string full_id = state.document.begin_section(
+ element_id.empty() ?
+ detail::make_identifier(content.get_quickbook()) :
+ validate_id(state, element_id),
+ element_id.empty() ?
+ id_category::generated_section :
+ id_category::explicit_section_id,
+ state.current_source_mode());
state.out << "\n<section id=\"" << full_id << "\">\n";
state.out << "<title>";
write_anchors(state, state.out);
- if (self_linked_headers && state.ids.compatibility_version() >= 103)
+ if (self_linked_headers && state.document.compatibility_version() >= 103)
{
state.out << "<link linkend=\"" << full_id << "\">"
<< content.get_encoded()
@@ -1668,7 +1764,7 @@ namespace quickbook
{
write_anchors(state, state.out);
- if (state.ids.section_level() <= state.min_section_level)
+ if (state.document.section_level() <= state.min_section_level)
{
file_position const pos = state.current_file->position_of(first);
@@ -1680,7 +1776,7 @@ namespace quickbook
}
state.out << "</section>";
- state.ids.end_section();
+ state.document.end_section();
}
void element_id_warning_action::operator()(parse_iterator first, parse_iterator) const
@@ -1763,71 +1859,49 @@ namespace quickbook
return result;
}
- struct path_details {
- // Will possibly add 'url' and 'glob' to this list later:
- enum path_type { path };
-
- std::string value;
- path_type type;
-
- path_details(std::string const& value, path_type type) :
- value(value), type(type)
- {
- }
- };
-
- path_details check_path(value const& path, quickbook::state& state)
+ xinclude_path calculate_xinclude_path(value const& p, quickbook::state& state)
{
- // Paths are encoded for quickbook 1.6+ and also xmlbase
- // values (technically xmlbase is a 1.6 feature, but that
- // isn't enforced as it's backwards compatible).
- //
- // Counter-intuitively: encoded == plain text here.
-
- std::string path_text = qbk_version_n >= 106u || path.is_encoded() ?
- path.get_encoded() : path.get_quickbook();
-
- if(path_text.find('\\') != std::string::npos)
- {
- quickbook::detail::ostream* err;
-
- if (qbk_version_n >= 106u) {
- err = &detail::outerr(path.get_file(), path.get_position());
+ path_parameter parameter = check_path(p, state);
+
+ switch (parameter.type) {
+ case path_parameter::glob:
+ // TODO: Should know if this is an xinclude or an xmlbase.
+ // Would also help with implementation of 'check_path'.
+ detail::outerr(p.get_file(), p.get_position())
+ << "Glob used in xinclude/xmlbase."
+ << std::endl;
++state.error_count;
- }
- else {
- err = &detail::outwarn(path.get_file(), path.get_position());
- }
-
- *err << "Path isn't portable: '"
- << path_text
- << "'"
- << std::endl;
+ break;
- boost::replace(path_text, '\\', '/');
- }
+ case path_parameter::invalid:
+ // There should have already been an error message in this case.
+ break;
- return path_details(path_text, path_details::path);
- }
+ case path_parameter::path:
+ {
+ fs::path path = detail::generic_to_path(parameter.value);
+ fs::path full_path = path;
- xinclude_path calculate_xinclude_path(value const& p, quickbook::state& state)
- {
- path_details details = check_path(p, state);
+ // If the path is relative
+ if (!path.has_root_directory())
+ {
+ // Resolve the path from the current file
+ full_path = state.current_file->path.parent_path() / path;
- fs::path path = detail::generic_to_path(details.value);
- fs::path full_path = path;
+ // Then calculate relative to the current xinclude_base.
+ path = path_difference(state.xinclude_base, full_path);
+ }
- // If the path is relative
- if (!path.has_root_directory())
- {
- // Resolve the path from the current file
- full_path = state.current_file->path.parent_path() / path;
+ return xinclude_path(full_path,
+ detail::escape_uri(detail::path_to_generic(path)));
+ }
- // Then calculate relative to the current xinclude_base.
- path = path_difference(state.xinclude_base, full_path);
+ default:
+ assert(false);
}
- return xinclude_path(full_path, detail::escape_uri(detail::path_to_generic(path)));
+ // If we didn't find a path, just use this:
+ return xinclude_path(state.current_file->path.parent_path(), "");
}
void xinclude_action(quickbook::state& state, value xinclude)
@@ -1843,77 +1917,8 @@ namespace quickbook
state.out << "\" />\n";
}
- namespace
- {
- struct include_search_return
- {
- include_search_return(fs::path const& x, fs::path const& y)
- : filename(x), filename_relative(y) {}
-
- fs::path filename;
- fs::path filename_relative;
-
- bool operator < (include_search_return const & other) const
- {
- if (filename_relative < other.filename_relative) return true;
- else if (other.filename_relative < filename_relative) return false;
- else return filename < other.filename;
- }
- };
-
- std::set<include_search_return> include_search(path_details const& details,
- quickbook::state& state, string_iterator pos)
- {
- std::set<include_search_return> result;
-
- fs::path path = detail::generic_to_path(details.value);
-
- // If the path is relative, try and resolve it.
- if (!path.has_root_directory() && !path.has_root_name())
- {
- fs::path local_path =
- state.current_file->path.parent_path() / path;
-
- // See if it can be found locally first.
- if (state.add_dependency(local_path))
- {
- result.insert(include_search_return(
- local_path,
- state.filename_relative.parent_path() / path));
- return result;
- }
-
- BOOST_FOREACH(fs::path full, include_path)
- {
- full /= path;
-
- if (state.add_dependency(full))
- {
- result.insert(include_search_return(full, path));
- return result;
- }
- }
- }
- else
- {
- if (state.add_dependency(path)) {
- result.insert(include_search_return(path, path));
- return result;
- }
- }
-
- detail::outerr(state.current_file, pos)
- << "Unable to find file: "
- << details.value
- << std::endl;
- ++state.error_count;
-
- return result;
- }
- }
-
void load_quickbook(quickbook::state& state,
- include_search_return const& paths,
+ quickbook_path const& path,
value::tag_type load_type,
value const& include_doc_id = value())
{
@@ -1931,18 +1936,17 @@ namespace quickbook
//
// For old versions of quickbook, templates aren't scoped by the
// file.
- file_state save(state,
- load_type == block_tags::import ? file_state::scope_output :
- qbk_version_n >= 106u ? file_state::scope_callables :
- file_state::scope_macros);
+ state_save save(state,
+ load_type == block_tags::import ? state_save::scope_output :
+ qbk_version_n >= 106u ? state_save::scope_callables :
+ state_save::scope_macros);
- state.current_file = load(paths.filename); // Throws load_error
- state.filename_relative = paths.filename_relative;
+ state.current_file = load(path.file_path); // Throws load_error
+ state.current_path = path;
state.imported = (load_type == block_tags::import);
// update the __FILENAME__ macro
- *boost::spirit::classic::find(state.macro, "__FILENAME__")
- = detail::path_to_generic(state.filename_relative);
+ state.update_filename_macro();
// parse the file
quickbook::parse_file(state, include_doc_id, true);
@@ -1952,12 +1956,11 @@ namespace quickbook
}
// restore the __FILENAME__ macro
- *boost::spirit::classic::find(state.macro, "__FILENAME__")
- = detail::path_to_generic(state.filename_relative);
+ state.update_filename_macro();
}
void load_source_file(quickbook::state& state,
- include_search_return const& paths,
+ quickbook_path const& path,
value::tag_type load_type,
string_iterator first,
value const& include_doc_id = value())
@@ -1965,11 +1968,11 @@ namespace quickbook
assert(load_type == block_tags::include ||
load_type == block_tags::import);
- std::string ext = paths.filename.extension().generic_string();
+ std::string ext = path.file_path.extension().generic_string();
std::vector<template_symbol> storage;
// Throws load_error
state.error_count +=
- load_snippets(paths.filename, storage, ext, load_type);
+ load_snippets(path.file_path, storage, ext, load_type);
if (load_type == block_tags::include)
{
@@ -2014,41 +2017,42 @@ namespace quickbook
value_consumer values = include;
value include_doc_id = values.optional_consume(general_tags::include_id);
- path_details details = check_path(values.consume(), state);
+ path_parameter parameter = check_path(values.consume(), state);
values.finish();
- std::set<include_search_return> search = include_search(details, state, first);
- std::set<include_search_return>::iterator i = search.begin();
- std::set<include_search_return>::iterator e = search.end();
+ std::set<quickbook_path> search =
+ include_search(parameter, state, first);
+ std::set<quickbook_path>::iterator i = search.begin();
+ std::set<quickbook_path>::iterator e = search.end();
for (; i != e; ++i)
{
- include_search_return const & paths = *i;
+ quickbook_path const & path = *i;
try {
if (qbk_version_n >= 106)
{
if (state.imported && include.get_tag() == block_tags::include)
return;
- std::string ext = paths.filename.extension().generic_string();
+ std::string ext = path.file_path.extension().generic_string();
if (ext == ".qbk" || ext == ".quickbook")
{
- load_quickbook(state, paths, include.get_tag(), include_doc_id);
+ load_quickbook(state, path, include.get_tag(), include_doc_id);
}
else
{
- load_source_file(state, paths, include.get_tag(), first, include_doc_id);
+ load_source_file(state, path, include.get_tag(), first, include_doc_id);
}
}
else
{
if (include.get_tag() == block_tags::include)
{
- load_quickbook(state, paths, include.get_tag(), include_doc_id);
+ load_quickbook(state, path, include.get_tag(), include_doc_id);
}
else
{
- load_source_file(state, paths, include.get_tag(), first, include_doc_id);
+ load_source_file(state, path, include.get_tag(), first, include_doc_id);
}
}
}
@@ -2057,7 +2061,7 @@ namespace quickbook
detail::outerr(state.current_file, first)
<< "Loading file "
- << paths.filename
+ << path.file_path
<< ": "
<< e.what()
<< std::endl;
@@ -2067,8 +2071,7 @@ namespace quickbook
bool to_value_scoped_action::start(value::tag_type t)
{
- state.out.push();
- state.phrase.push();
+ state.push_output();
state.anchors.swap(saved_anchors);
tag = t;
@@ -2099,8 +2102,7 @@ namespace quickbook
void to_value_scoped_action::cleanup()
{
- state.phrase.pop();
- state.out.pop();
+ state.pop_output();
state.anchors.swap(saved_anchors);
}
}
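The validate_id/get_attribute_value changes at the start of this file's diff tighten what counts as a legal id in quickbook 1.7: only alphanumerics plus the punctuation "_.-" are accepted, and '$'-prefixed placeholder ids are flagged as invalid. A small standalone restatement of that check, with made-up sample ids (not taken from the patch):

    #include <cctype>
    #include <cstring>
    #include <iostream>
    #include <string>

    // Mirrors the 1.7 rule in validate_id above: ids may contain only
    // alphanumerics and the characters '_', '.' and '-'; '$'-prefixed
    // placeholder ids are treated as invalid.
    bool valid_id_1_7(std::string const& id)
    {
        if (id.empty() || id[0] == '$') return false;
        for (std::string::size_type i = 0; i < id.size(); ++i) {
            unsigned char c = static_cast<unsigned char>(id[i]);
            if (!std::isalnum(c) && !std::strchr("_.-", id[i])) return false;
        }
        return true;
    }

    int main()
    {
        std::cout << valid_id_1_7("overview") << "\n";        // 1
        std::cout << valid_id_1_7("intro.section-1") << "\n"; // 1
        std::cout << valid_id_1_7("bad id!") << "\n";         // 0
        std::cout << valid_id_1_7("$placeholder") << "\n";    // 0
    }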
diff --git a/tools/quickbook/src/actions.hpp b/tools/quickbook/src/actions.hpp
index 5a93cf949a..44b5a27704 100644
--- a/tools/quickbook/src/actions.hpp
+++ b/tools/quickbook/src/actions.hpp
@@ -46,12 +46,6 @@ namespace quickbook
int load_snippets(fs::path const& file, std::vector<template_symbol>& storage,
std::string const& extension, value::tag_type load_type);
- std::string syntax_highlight(
- parse_iterator first, parse_iterator last,
- quickbook::state& state,
- std::string const& source_mode,
- bool is_block);
-
struct xinclude_path {
xinclude_path(fs::path const& path, std::string const& uri) :
path(path), uri(uri) {}
@@ -119,12 +113,12 @@ namespace quickbook
quickbook::state& state;
};
- struct list_item_action
+ struct explicit_list_action
{
// implicit paragraphs
// doesn't output the paragraph if it's only whitespace.
- list_item_action(
+ explicit_list_action(
quickbook::state& state)
: state(state) {}
@@ -149,15 +143,11 @@ namespace quickbook
{
// Handles simple text formats
- simple_phrase_action(
- collector& out
- , quickbook::state& state)
- : out(out)
- , state(state) {}
+ simple_phrase_action(quickbook::state& state)
+ : state(state) {}
void operator()(char) const;
- collector& out;
quickbook::state& state;
};
@@ -178,12 +168,9 @@ namespace quickbook
{
// Handles macro substitutions
- do_macro_action(collector& phrase, quickbook::state& state)
- : phrase(phrase)
- , state(state) {}
+ do_macro_action(quickbook::state& state) : state(state) {}
void operator()(std::string const& str) const;
- collector& phrase;
quickbook::state& state;
};
@@ -191,13 +178,12 @@ namespace quickbook
{
// Prints a space
- raw_char_action(collector& out)
- : out(out) {}
+ raw_char_action(quickbook::state& state) : state(state) {}
void operator()(char ch) const;
void operator()(parse_iterator first, parse_iterator last) const;
- collector& out;
+ quickbook::state& state;
};
struct plain_char_action
@@ -205,36 +191,29 @@ namespace quickbook
// Prints a single plain char.
// Converts '<' to "&lt;"... etc See utils.hpp
- plain_char_action(collector& phrase, quickbook::state& state)
- : phrase(phrase)
- , state(state) {}
+ plain_char_action(quickbook::state& state) : state(state) {}
void operator()(char ch) const;
void operator()(parse_iterator first, parse_iterator last) const;
- collector& phrase;
quickbook::state& state;
};
struct escape_unicode_action
{
- escape_unicode_action(collector& phrase, quickbook::state& state)
- : phrase(phrase)
- , state(state) {}
+ escape_unicode_action(quickbook::state& state) : state(state) {}
+
void operator()(parse_iterator first, parse_iterator last) const;
- collector& phrase;
quickbook::state& state;
};
struct break_action
{
- break_action(collector& phrase, quickbook::state& state)
- : phrase(phrase), state(state) {}
+ break_action(quickbook::state& state) : state(state) {}
void operator()(parse_iterator f, parse_iterator) const;
- collector& phrase;
quickbook::state& state;
};
@@ -265,6 +244,101 @@ namespace quickbook
std::vector<std::string> saved_anchors;
value::tag_type tag;
};
+
+ // member_action
+ //
+ // Action for calling a member function taking two parse iterators.
+
+ template <typename T>
+ struct member_action
+ {
+ typedef void(T::*member_function)(parse_iterator, parse_iterator);
+
+ T& l;
+ member_function mf;
+
+ member_action(T& l, member_function mf) : l(l), mf(mf) {}
+
+ void operator()(parse_iterator first, parse_iterator last) const {
+ (l.*mf)(first, last);
+ }
+ };
+
+ // member_action1
+ //
+ // Action for calling a member function taking two parse iterators and a value.
+
+ template <typename T, typename Arg1>
+ struct member_action1
+ {
+ typedef void(T::*member_function)(parse_iterator, parse_iterator, Arg1);
+
+ T& l;
+ member_function mf;
+
+ member_action1(T& l, member_function mf) : l(l), mf(mf) {}
+
+ struct impl
+ {
+ member_action1 a;
+ Arg1 value;
+
+ impl(member_action1& a, Arg1 value) :
+ a(a), value(value)
+ {}
+
+ void operator()(parse_iterator first, parse_iterator last) const {
+ (a.l.*a.mf)(first, last, value);
+ }
+ };
+
+ impl operator()(Arg1 a1) {
+ return impl(*this, a1);
+ }
+ };
+
+ // member_action_value
+ //
+ // Action for calling a unary member function.
+
+ template <typename T, typename Value>
+ struct member_action_value
+ {
+ typedef void(T::*member_function)(Value);
+
+ T& l;
+ member_function mf;
+
+ member_action_value(T& l, member_function mf) : l(l), mf(mf) {}
+
+ void operator()(Value v) const {
+ (l.*mf)(v);
+ }
+ };
+
+ // member_action_fixed_value
+ //
+ // Action for calling a unary member function with a fixed value.
+
+ template <typename T, typename Value>
+ struct member_action_fixed_value
+ {
+ typedef void(T::*member_function)(Value);
+
+ T& l;
+ member_function mf;
+ Value v;
+
+ member_action_fixed_value(T& l, member_function mf, Value v) : l(l), mf(mf), v(v) {}
+
+ void operator()() const {
+ (l.*mf)(v);
+ }
+
+ void operator()(parse_iterator first, parse_iterator last) const {
+ (l.*mf)(v);
+ }
+ };
}
#endif // BOOST_SPIRIT_QUICKBOOK_ACTIONS_HPP
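The member_action family added above is a set of small function-object adaptors around pointer-to-member functions; later hunks in this patch bind them to quickbook::state (for example, member_action_value<quickbook::state, source_mode_type> in doc_info_grammar.cpp). A minimal, self-contained sketch of the same pattern, using a hypothetical Counter type rather than quickbook's own classes:

    #include <iostream>

    struct Counter
    {
        int total;
        Counter() : total(0) {}
        void add(int n) { total += n; }
    };

    // Same shape as member_action_value<T, Value> above: hold a reference to
    // an object and a pointer-to-member, and forward the argument on call.
    template <typename T, typename Value>
    struct member_action_value
    {
        typedef void (T::*member_function)(Value);
        T& l;
        member_function mf;
        member_action_value(T& l_, member_function mf_) : l(l_), mf(mf_) {}
        void operator()(Value v) const { (l.*mf)(v); }
    };

    int main()
    {
        Counter counter;
        member_action_value<Counter, int> add_action(counter, &Counter::add);
        add_action(3);  // calls counter.add(3)
        add_action(4);
        std::cout << counter.total << "\n";  // prints 7
    }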
diff --git a/tools/quickbook/src/block_element_grammar.cpp b/tools/quickbook/src/block_element_grammar.cpp
index 1f5d5daec4..04a320e9e3 100644
--- a/tools/quickbook/src/block_element_grammar.cpp
+++ b/tools/quickbook/src/block_element_grammar.cpp
@@ -35,7 +35,7 @@ namespace quickbook
xinclude, include, include_filename,
template_, template_id, template_formal_arg,
template_body, identifier, import,
- element_id, element_id_1_5, element_id_1_6,
+ element_id,
same_line;
};
@@ -47,27 +47,25 @@ namespace quickbook
// Actions
error_action error(state);
element_id_warning_action element_id_warning(state);
- raw_char_action raw_char(state.phrase);
+ raw_char_action raw_char(state);
+ explicit_list_action explicit_list(state);
scoped_parser<to_value_scoped_action> to_value(state);
local.element_id =
!( ':'
- >> ( !(qbk_ver(105u) >> space)
+ >> ( qbk_ver(107u)
+ >> to_value(general_tags::element_id) [attribute_value_1_7]
+ | qbk_ver(0, 107u)
+ >> !(qbk_ver(105u) >> space)
>> (+(cl::alnum_p | '_')) [state.values.entry(ph::arg1, ph::arg2, general_tags::element_id)]
| cl::eps_p [element_id_warning]
)
)
;
- local.element_id_1_5 =
- !(qbk_ver(105u) >> local.element_id);
-
- local.element_id_1_6 =
- !(qbk_ver(106u) >> local.element_id);
-
elements.add
- ("section", element_info(element_info::block, &local.begin_section, block_tags::begin_section))
- ("endsect", element_info(element_info::block, &local.end_section, block_tags::end_section))
+ ("section", element_info(element_info::section_block, &local.begin_section, block_tags::begin_section))
+ ("endsect", element_info(element_info::section_block, &local.end_section, block_tags::end_section))
;
local.begin_section =
@@ -83,7 +81,7 @@ namespace quickbook
local.heading
= space
- >> local.element_id_1_6
+ >> !(qbk_ver(106u) >> local.element_id)
>> space
>> local.inner_phrase
;
@@ -169,7 +167,10 @@ namespace quickbook
;
local.template_body =
- *(('[' >> local.template_body >> ']') | (cl::anychar_p - ']'))
+ qbk_ver(106u)
+ >> *(~cl::eps_p(']') >> skip_entity)
+ | qbk_ver(0,106u)
+ >> *(('[' >> local.template_body >> ']') | (cl::anychar_p - ']'))
>> cl::eps_p(space >> ']')
>> space
;
@@ -219,7 +220,7 @@ namespace quickbook
local.table =
local.same_line
- >> local.element_id_1_5
+ >> !(qbk_ver(105u) >> local.element_id)
>> local.same_line
>> local.table_title
>> *local.table_row
@@ -258,7 +259,11 @@ namespace quickbook
("itemized_list", element_info(element_info::nested_block, &local.list, block_tags::itemized_list, 106))
;
- local.list = *local.cell;
+ local.list =
+ *( cl::eps_p [explicit_list]
+ >> local.cell
+ )
+ ;
local.cell =
space
@@ -301,13 +306,15 @@ namespace quickbook
local.include_filename =
qbk_ver(0, 106u)
>> (*(cl::anychar_p - phrase_end)) [state.values.entry(ph::arg1, ph::arg2)]
- | qbk_ver(106u)
+ | qbk_ver(106u, 107u)
>> to_value()
[ *( raw_escape
| (cl::anychar_p - phrase_end)
[raw_char]
)
]
+ | qbk_ver(107u)
+ >> to_value() [ attribute_value_1_7 ]
;
local.inner_block =
diff --git a/tools/quickbook/src/block_tags.hpp b/tools/quickbook/src/block_tags.hpp
index b01271fc0b..3049b57c51 100644
--- a/tools/quickbook/src/block_tags.hpp
+++ b/tools/quickbook/src/block_tags.hpp
@@ -22,7 +22,7 @@ namespace quickbook
(macro_definition)(template_definition)
(variable_list)(table)
(xinclude)(import)(include)
- (paragraph)
+ (paragraph)(paragraph_in_list)
(ordered_list)(itemized_list)
(hr)
)
diff --git a/tools/quickbook/src/code_snippet.cpp b/tools/quickbook/src/code_snippet.cpp
index 4c63a3ba0e..fe54f46ce1 100644
--- a/tools/quickbook/src/code_snippet.cpp
+++ b/tools/quickbook/src/code_snippet.cpp
@@ -12,14 +12,13 @@
#include <boost/spirit/include/classic_confix.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/bind.hpp>
-#include <boost/lexical_cast.hpp>
#include "block_tags.hpp"
#include "template_stack.hpp"
#include "actions.hpp"
#include "state.hpp"
#include "values.hpp"
#include "files.hpp"
-#include "input_path.hpp"
+#include "native_text.hpp"
namespace quickbook
{
@@ -30,7 +29,7 @@ namespace quickbook
code_snippet_actions(std::vector<template_symbol>& storage,
file_ptr source_file,
char const* source_type)
- : last_code_pos(source_file->source.begin())
+ : last_code_pos(source_file->source().begin())
, in_code(false)
, snippet_stack()
, storage(storage)
@@ -63,13 +62,13 @@ namespace quickbook
std::string id;
bool start_code;
- std::string::const_iterator source_pos;
+ string_iterator source_pos;
mapped_file_builder::pos start_pos;
boost::shared_ptr<snippet_data> next;
};
void push_snippet_data(std::string const& id,
- std::string::const_iterator pos)
+ string_iterator pos)
{
boost::shared_ptr<snippet_data> new_snippet(new snippet_data(id));
new_snippet->next = snippet_stack;
@@ -88,8 +87,8 @@ namespace quickbook
}
mapped_file_builder content;
- std::string::const_iterator mark_begin, mark_end;
- std::string::const_iterator last_code_pos;
+ boost::string_ref::const_iterator mark_begin, mark_end;
+ boost::string_ref::const_iterator last_code_pos;
bool in_code;
boost::shared_ptr<snippet_data> snippet_stack;
std::vector<template_symbol>& storage;
@@ -352,8 +351,8 @@ namespace quickbook
bool is_python = extension == ".py";
code_snippet_actions a(storage, load(filename, qbk_version_n), is_python ? "[python]" : "[c++]");
- string_iterator first(a.source_file->source.begin());
- string_iterator last(a.source_file->source.end());
+ string_iterator first(a.source_file->source().begin());
+ string_iterator last(a.source_file->source().end());
cl::parse_info<string_iterator> info;
@@ -376,14 +375,14 @@ namespace quickbook
if (last_code_pos != first) {
if (!in_code)
{
- content.add("\n\n", last_code_pos);
- content.add(source_type, last_code_pos);
- content.add("```\n", last_code_pos);
+ content.add_at_pos("\n\n", last_code_pos);
+ content.add_at_pos(source_type, last_code_pos);
+ content.add_at_pos("```\n", last_code_pos);
in_code = true;
}
- content.add(last_code_pos, first);
+ content.add(boost::string_ref(last_code_pos, first - last_code_pos));
}
}
@@ -396,7 +395,7 @@ namespace quickbook
if (in_code)
{
- content.add("\n```\n\n", last_code_pos);
+ content.add_at_pos("\n```\n\n", last_code_pos);
in_code = false;
}
}
@@ -414,13 +413,13 @@ namespace quickbook
if (!in_code)
{
- content.add("\n\n", first);
- content.add(source_type, first);
- content.add("```\n", first);
+ content.add_at_pos("\n\n", first);
+ content.add_at_pos(source_type, first);
+ content.add_at_pos("```\n", first);
in_code = true;
}
- content.add(mark_begin, mark_end);
+ content.add(boost::string_ref(mark_begin, mark_end - mark_begin));
}
void code_snippet_actions::escaped_comment(string_iterator first, string_iterator last)
@@ -437,8 +436,8 @@ namespace quickbook
snippet_data& snippet = *snippet_stack;
- content.add("\n", mark_begin);
- content.unindent_and_add(mark_begin, mark_end);
+ content.add_at_pos("\n", mark_begin);
+ content.unindent_and_add(boost::string_ref(mark_begin, mark_end - mark_begin));
if (snippet.id == "!")
{
@@ -516,13 +515,13 @@ namespace quickbook
mapped_file_builder f;
f.start(source_file);
if (snippet->start_code) {
- f.add("\n\n", snippet->source_pos);
- f.add(source_type, snippet->source_pos);
- f.add("```\n", snippet->source_pos);
+ f.add_at_pos("\n\n", snippet->source_pos);
+ f.add_at_pos(source_type, snippet->source_pos);
+ f.add_at_pos("```\n", snippet->source_pos);
}
f.add(content, snippet->start_pos, content.get_pos());
if (in_code) {
- f.add("\n```\n\n", position);
+ f.add_at_pos("\n```\n\n", position);
}
std::vector<std::string> params;
@@ -530,7 +529,7 @@ namespace quickbook
file_ptr body = f.release();
storage.push_back(template_symbol(snippet->id, params,
- qbk_value(body, body->source.begin(), body->source.end(),
+ qbk_value(body, body->source().begin(), body->source().end(),
template_tags::snippet)));
}
}
diff --git a/tools/quickbook/src/dependency_tracker.cpp b/tools/quickbook/src/dependency_tracker.cpp
new file mode 100644
index 0000000000..12b2a757a9
--- /dev/null
+++ b/tools/quickbook/src/dependency_tracker.cpp
@@ -0,0 +1,184 @@
+/*=============================================================================
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "dependency_tracker.hpp"
+#include "native_text.hpp"
+#include <boost/filesystem/operations.hpp>
+#include <boost/filesystem/fstream.hpp>
+#include <boost/foreach.hpp>
+
+namespace quickbook
+{
+ // Convert the path to its canonical representation if it exists.
+ // Or something close if it doesn't.
+ static fs::path normalize_path(fs::path const& path)
+ {
+ fs::path p = fs::absolute(path); // The base of the path.
+ fs::path extra; // The non-existent part of the path.
+ int parent_count = 0; // Number of active '..' sections
+
+ // Invariant: path is equivalent to: p / ('..' * parent_count) / extra
+ // i.e. if parent_count == 0: p/extra
+ // if parent_count == 2: p/../../extra
+
+ // Pop path sections from path until we find an existing
+ // path, adjusting for any dot path sections.
+ while (!fs::exists(fs::status(p))) {
+ fs::path name = p.filename();
+ p = p.parent_path();
+ if (name == "..") {
+ ++parent_count;
+ }
+ else if (name == ".") {
+ }
+ else if (parent_count) {
+ --parent_count;
+ }
+ else {
+ extra = name / extra;
+ }
+ }
+
+ // If there are any left over ".." sections, then add them
+ // on to the end of the real path, and trust Boost.Filesystem
+ // to sort them out.
+ while (parent_count) {
+ p = p / "..";
+ --parent_count;
+ }
+
+ // Canonicalize the existing part of the path, and add 'extra' back to
+ // the end.
+ return fs::canonical(p) / extra;
+ }
+
+ static char const* control_escapes[16] = {
+ "\\000", "\\001", "\\002", "\\003",
+ "\\004", "\\005", "\\006", "\\a",
+ "\\b", "\\t", "\\n", "\\v",
+ "\\f", "\\r", "\\016", "\\017"
+ };
+
+ static std::string escaped_path(std::string const& generic)
+ {
+ std::string result;
+ result.reserve(generic.size());
+
+ BOOST_FOREACH(char c, generic)
+ {
+ if (c >= 0 && c < 16) {
+ result += control_escapes[(unsigned int) c];
+ }
+ else if (c == '\\') {
+ result += "\\\\";
+ }
+ else if (c == 127) {
+ result += "\\177";
+ }
+ else {
+ result += c;
+ }
+ }
+
+ return result;
+ }
+
+ static std::string get_path(fs::path const& path,
+ dependency_tracker::flags f)
+ {
+ std::string generic = quickbook::detail::path_to_generic(path);
+
+ if (f & dependency_tracker::escaped) {
+ generic = escaped_path(generic);
+ }
+
+ return generic;
+ }
+
+ dependency_tracker::dependency_tracker() :
+ dependencies(), glob_dependencies(),
+ last_glob(glob_dependencies.end()) {}
+
+ bool dependency_tracker::add_dependency(fs::path const& f) {
+ bool found = fs::exists(fs::status(f));
+ dependencies[normalize_path(f)] |= found;
+ return found;
+ }
+
+ void dependency_tracker::add_glob(fs::path const& f) {
+ std::pair<glob_list::iterator, bool> r = glob_dependencies.insert(
+ std::make_pair(normalize_path(f), glob_list::mapped_type()));
+ last_glob = r.first;
+ }
+
+ void dependency_tracker::add_glob_match(fs::path const& f) {
+ assert(last_glob != glob_dependencies.end());
+ last_glob->second.insert(normalize_path(f));
+ }
+
+ void dependency_tracker::write_dependencies(fs::path const& file_out,
+ flags f)
+ {
+ fs::ofstream out(file_out);
+
+ if (out.fail()) {
+ throw std::runtime_error(
+ "Error opening dependency file " +
+ quickbook::detail::path_to_generic(file_out));
+ }
+
+ out.exceptions(std::ios::badbit);
+ write_dependencies(out, f);
+ }
+
+ void dependency_tracker::write_dependencies(std::ostream& out,
+ flags f)
+ {
+ if (f & checked) {
+ BOOST_FOREACH(dependency_list::value_type const& d, dependencies)
+ {
+ out << (d.second ? "+ " : "- ")
+ << get_path(d.first, f) << std::endl;
+ }
+
+ BOOST_FOREACH(glob_list::value_type const& g, glob_dependencies)
+ {
+ out << "g "
+ << get_path(g.first, f) << std::endl;
+
+ BOOST_FOREACH(fs::path const& p, g.second)
+ {
+ out << "+ " << get_path(p, f) << std::endl;
+ }
+ }
+ }
+ else {
+ std::set<std::string> paths;
+
+ BOOST_FOREACH(dependency_list::value_type const& d, dependencies)
+ {
+ if (d.second) {
+ paths.insert(get_path(d.first, f));
+ }
+ }
+
+ BOOST_FOREACH(glob_list::value_type const& g, glob_dependencies)
+ {
+ BOOST_FOREACH(fs::path const& p, g.second)
+ {
+ paths.insert(get_path(p, f));
+ }
+ }
+
+ BOOST_FOREACH(std::string const& p, paths)
+ {
+ out << p << std::endl;
+ }
+ }
+ }
+}
diff --git a/tools/quickbook/src/dependency_tracker.hpp b/tools/quickbook/src/dependency_tracker.hpp
new file mode 100644
index 0000000000..2da44f5c03
--- /dev/null
+++ b/tools/quickbook/src/dependency_tracker.hpp
@@ -0,0 +1,53 @@
+/*=============================================================================
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#if !defined(QUICKBOOK_DEPENDENCY_TRACKER_HPP)
+#define QUICKBOOK_DEPENDENCY_TRACKER_HPP
+
+#include <map>
+#include <set>
+#include <iosfwd>
+#include <boost/filesystem/path.hpp>
+
+namespace quickbook
+{
+ namespace fs = boost::filesystem;
+
+ struct dependency_tracker {
+ private:
+
+ typedef std::map<fs::path, bool> dependency_list;
+ typedef std::map<fs::path, std::set<fs::path> > glob_list;
+
+ dependency_list dependencies;
+ glob_list glob_dependencies;
+ glob_list::iterator last_glob;
+
+ public:
+
+ enum flags {
+ default_ = 0,
+ checked = 1,
+ escaped = 2
+ };
+
+ dependency_tracker();
+
+ // Call this before loading any file so that it will be included in the
+ // list of dependencies. Returns true if file exists.
+ bool add_dependency(fs::path const&);
+
+ void add_glob(fs::path const&);
+ void add_glob_match(fs::path const&);
+
+ void write_dependencies(fs::path const&, flags = default_);
+ void write_dependencies(std::ostream&, flags = default_);
+ };
+}
+
+#endif
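Taken together, the new dependency_tracker (implementation above, interface here) records every file quickbook tries to load and writes the result either as a plain list of found paths or, with the checked flag, as an annotated list in which found files are prefixed with "+ ", missing files with "- ", and glob patterns with "g " (the escaped flag additionally backslash-escapes control characters and backslashes in paths). A minimal usage sketch, assuming the quickbook build environment; the file names are invented for illustration:

    #include "dependency_tracker.hpp"
    #include <iostream>

    int main()
    {
        quickbook::dependency_tracker deps;

        // Record a file lookup; returns true only if the file exists on disk.
        deps.add_dependency("doc/intro.qbk");

        // Record a glob that was searched and one of its matches.
        deps.add_glob("doc/*.qbk");
        deps.add_glob_match("doc/intro.qbk");

        // Plain format: one existing path per line.
        deps.write_dependencies(std::cout);

        // Annotated format: '+ ' found, '- ' missing, 'g ' glob patterns.
        deps.write_dependencies(std::cout, quickbook::dependency_tracker::checked);
    }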
diff --git a/tools/quickbook/src/doc_info_actions.cpp b/tools/quickbook/src/doc_info_actions.cpp
index 42ee961832..4e60f5168f 100644
--- a/tools/quickbook/src/doc_info_actions.cpp
+++ b/tools/quickbook/src/doc_info_actions.cpp
@@ -16,11 +16,11 @@
#include "quickbook.hpp"
#include "utils.hpp"
#include "files.hpp"
-#include "input_path.hpp"
+#include "native_text.hpp"
#include "state.hpp"
#include "actions.hpp"
#include "doc_info_tags.hpp"
-#include "id_manager.hpp"
+#include "document_state.hpp"
namespace quickbook
{
@@ -29,7 +29,7 @@ namespace quickbook
static std::string doc_info_output(value const& p, unsigned version)
{
if (qbk_version_n < version) {
- std::string value = p.get_quickbook();
+ std::string value = detail::to_s(p.get_quickbook());
value.erase(value.find_last_not_of(" \t") + 1);
return value;
}
@@ -141,7 +141,7 @@ namespace quickbook
if (values.check(doc_info_tags::type))
{
- doc_type = values.consume(doc_info_tags::type).get_quickbook();
+ doc_type = detail::to_s(values.consume(doc_info_tags::type).get_quickbook());
doc_title = values.consume(doc_info_tags::title);
use_doc_info = !nested_file || qbk_version_n >= 106u;
}
@@ -200,20 +200,23 @@ namespace quickbook
std::string include_doc_id_, id_;
if (!include_doc_id.empty())
- include_doc_id_ = include_doc_id.get_quickbook();
+ include_doc_id_ = detail::to_s(include_doc_id.get_quickbook());
if (!id.empty())
- id_ = id.get_quickbook();
+ id_ = detail::to_s(id.get_quickbook());
// Quickbook version
unsigned new_version = get_version(state, use_doc_info, qbk_version);
- if (new_version != qbk_version_n && new_version >= 106)
+ if (new_version != qbk_version_n)
{
- detail::outwarn(state.current_file->path)
- << "Quickbook " << (new_version / 100) << "." << (new_version % 100)
- << " is still under development and is "
- "likely to change in the future." << std::endl;
+ if (new_version >= 107u)
+ {
+ detail::outwarn(state.current_file->path)
+ << "Quickbook " << (new_version / 100) << "." << (new_version % 100)
+ << " is still under development and is "
+ "likely to change in the future." << std::endl;
+ }
}
if (new_version) {
@@ -236,20 +239,20 @@ namespace quickbook
if (!compatibility_version) {
compatibility_version = use_doc_info ?
- qbk_version_n : state.ids.compatibility_version();
+ qbk_version_n : state.document.compatibility_version();
}
// Start file, finish here if not generating document info.
if (!use_doc_info)
{
- state.ids.start_file(compatibility_version, include_doc_id_, id_,
+ state.document.start_file(compatibility_version, include_doc_id_, id_,
doc_title);
return "";
}
std::string id_placeholder =
- state.ids.start_file_with_docinfo(
+ state.document.start_file_with_docinfo(
compatibility_version, include_doc_id_, id_, doc_title);
// Make sure we really did have a document info block.
@@ -460,7 +463,7 @@ namespace quickbook
if (!license.empty())
{
tmp << " <legalnotice id=\""
- << state.ids.add_id("legal", id_category::generated)
+ << state.document.add_id("legal", id_category::generated)
<< "\">\n"
<< " <para>\n"
<< " " << doc_info_output(license, 103) << "\n"
@@ -541,18 +544,18 @@ namespace quickbook
// *after* everything else.
// Close any open sections.
- if (!doc_type.empty() && state.ids.section_level() > 1) {
+ if (!doc_type.empty() && state.document.section_level() > 1) {
detail::outwarn(state.current_file->path)
<< "Missing [endsect] detected at end of file."
<< std::endl;
- while(state.ids.section_level() > 1) {
+ while(state.document.section_level() > 1) {
state.out << "</section>";
- state.ids.end_section();
+ state.document.end_section();
}
}
- state.ids.end_file();
+ state.document.end_file();
if (!doc_type.empty()) state.out << "\n</" << doc_type << ">\n\n";
}
diff --git a/tools/quickbook/src/doc_info_grammar.cpp b/tools/quickbook/src/doc_info_grammar.cpp
index 862d0ce57a..1d71e3b3eb 100644
--- a/tools/quickbook/src/doc_info_grammar.cpp
+++ b/tools/quickbook/src/doc_info_grammar.cpp
@@ -11,7 +11,6 @@
#include <map>
#include <boost/foreach.hpp>
#include <boost/spirit/include/classic_core.hpp>
-#include <boost/spirit/include/classic_actor.hpp>
#include <boost/spirit/include/classic_loops.hpp>
#include <boost/spirit/include/classic_symbols.hpp>
#include <boost/spirit/include/classic_chset.hpp>
@@ -75,7 +74,7 @@ namespace quickbook
doc_authors, doc_author,
doc_copyright, doc_copyright_holder,
doc_source_mode, doc_biblioid, doc_compatibility_mode,
- quickbook_version, char_;
+ quickbook_version, macro, char_;
cl::uint_parser<int, 10, 4, 4> doc_copyright_year;
cl::symbols<> doc_types;
cl::symbols<value::tag_type> doc_info_attributes;
@@ -118,15 +117,23 @@ namespace quickbook
// Actions
error_action error(state);
- plain_char_action plain_char(state.phrase, state);
+ plain_char_action plain_char(state);
+ do_macro_action do_macro(state);
scoped_parser<to_value_scoped_action> to_value(state);
+ member_action_value<quickbook::state, source_mode_type> change_source_mode(
+ state, &state::change_source_mode);
+ member_action_fixed_value<quickbook::state, source_mode_type> default_source_mode(
+ state, &state::change_source_mode, source_mode_tags::cpp);
doc_info_details =
- space [ph::var(local.source_mode_unset) = true]
- >> *( local.doc_attribute
- >> space
+ cl::eps_p [ph::var(local.source_mode_unset) = true]
+ >> *( space
+ >> local.doc_attribute
+ )
+ >> !( space
+ >> local.doc_info_block
)
- >> !local.doc_info_block
+ >> *eol
;
local.doc_info_block =
@@ -145,7 +152,7 @@ namespace quickbook
]
>> space
>> !(qbk_ver(106u) >> cl::eps_p(ph::var(local.source_mode_unset))
- [cl::assign_a(state.source_mode, "c++")]
+ [default_source_mode]
)
>> ( *( ( local.doc_info_attribute
| local.doc_info_escaped_attributes
@@ -154,7 +161,7 @@ namespace quickbook
)
) [state.values.sort()]
>> ( ']'
- >> (+eol | cl::end_p)
+ >> (eol | cl::end_p)
| cl::eps_p [error]
)
;
@@ -218,12 +225,8 @@ namespace quickbook
local.attribute_rules[doc_attributes::compatibility_mode] = &local.doc_compatibility_mode;
- local.doc_source_mode =
- (
- cl::str_p("c++")
- | "python"
- | "teletype"
- ) [cl::assign_a(state.source_mode)]
+ local.doc_source_mode = source_modes
+ [change_source_mode]
[ph::var(local.source_mode_unset) = false]
;
@@ -302,6 +305,21 @@ namespace quickbook
local.attribute_rules[doc_info_attributes::biblioid] = &local.doc_biblioid;
- local.char_ = escape | cl::anychar_p[plain_char];
+ local.char_ =
+ escape
+ | local.macro
+ | cl::anychar_p[plain_char];
+ ;
+
+ local.macro =
+ cl::eps_p
+ ( ( state.macro
+ >> ~cl::eps_p(cl::alpha_p | '_')
+ // must not be followed by alpha or underscore
+ )
+ & macro_identifier // must be a valid macro for the current version
+ )
+ >> state.macro [do_macro]
+ ;
}
}
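
A minimal sketch (helper name invented, not the Spirit rule above) of the boundary condition the new `macro` production encodes: a macro reference in the doc info block only matches when the next character is neither a letter nor an underscore, and only when the name is a valid macro identifier for the current version.

#include <cctype>
#include <string>

// Mirrors ~cl::eps_p(cl::alpha_p | '_'): reject a match that is immediately
// followed by a letter or an underscore.
bool macro_matches_here(std::string const& text, std::size_t pos,
                        std::string const& name)
{
    if (text.compare(pos, name.size(), name) != 0) return false;
    std::size_t end = pos + name.size();
    if (end < text.size()) {
        unsigned char next = static_cast<unsigned char>(text[end]);
        if (std::isalpha(next) || next == '_') return false;
    }
    return true;
}
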
diff --git a/tools/quickbook/src/document_state.cpp b/tools/quickbook/src/document_state.cpp
new file mode 100644
index 0000000000..2841389cab
--- /dev/null
+++ b/tools/quickbook/src/document_state.cpp
@@ -0,0 +1,472 @@
+/*=============================================================================
+ Copyright (c) 2011, 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "document_state_impl.hpp"
+#include "utils.hpp"
+#include <boost/make_shared.hpp>
+#include <boost/lexical_cast.hpp>
+#include <boost/range/algorithm.hpp>
+#include <cctype>
+
+namespace quickbook
+{
+ struct file_info
+ {
+ boost::shared_ptr<file_info> const parent;
+ boost::shared_ptr<doc_info> const document;
+
+ unsigned const compatibility_version;
+ unsigned const depth;
+ unsigned const override_depth;
+ id_placeholder const* const override_id;
+
+ // The 1.1-1.5 document id would actually change per file due to
+ // explicit ids in includes and a bug which would sometimes use the
+ // document title instead of the id.
+ std::string const doc_id_1_1;
+
+ // Constructor for files that aren't the root of a document.
+ file_info(boost::shared_ptr<file_info> const& parent,
+ unsigned compatibility_version,
+ boost::string_ref doc_id_1_1,
+ id_placeholder const* override_id) :
+ parent(parent), document(parent->document),
+ compatibility_version(compatibility_version),
+ depth(parent->depth + 1),
+ override_depth(override_id ? depth : parent->override_depth),
+ override_id(override_id ? override_id : parent->override_id),
+ doc_id_1_1(detail::to_s(doc_id_1_1))
+ {}
+
+ // Constructor for files that are the root of a document.
+ file_info(boost::shared_ptr<file_info> const& parent,
+ boost::shared_ptr<doc_info> const& document,
+ unsigned compatibility_version,
+ boost::string_ref doc_id_1_1) :
+ parent(parent), document(document),
+ compatibility_version(compatibility_version),
+ depth(0), override_depth(0), override_id(0),
+ doc_id_1_1(detail::to_s(doc_id_1_1))
+ {}
+ };
+
+ struct doc_info
+ {
+ boost::shared_ptr<section_info> current_section;
+
+ // Note: these are mutable to remain bug compatible with old versions
+ // of quickbook. They would set these values at the start of new files
+ // and sections and then not restore them at the end.
+ std::string last_title_1_1;
+ std::string section_id_1_1;
+ };
+
+ struct section_info
+ {
+ boost::shared_ptr<section_info> const parent;
+ unsigned const compatibility_version;
+ unsigned const file_depth;
+ unsigned const level;
+ std::string const id_1_1;
+ id_placeholder const* const placeholder_1_6;
+ source_mode_info const source_mode;
+
+ section_info(boost::shared_ptr<section_info> const& parent,
+ file_info const* current_file, boost::string_ref id,
+ boost::string_ref id_1_1, id_placeholder const* placeholder_1_6,
+ source_mode_info const& source_mode) :
+ parent(parent),
+ compatibility_version(current_file->compatibility_version),
+ file_depth(current_file->depth),
+ level(parent ? parent->level + 1 : 1),
+ id_1_1(detail::to_s(id_1_1)),
+ placeholder_1_6(placeholder_1_6),
+ source_mode(source_mode) {}
+ };
+
+ //
+ // document_state
+ //
+
+ document_state::document_state()
+ : state(new document_state_impl)
+ {
+ }
+
+ document_state::~document_state() {}
+
+ void document_state::start_file(
+ unsigned compatibility_version,
+ boost::string_ref include_doc_id,
+ boost::string_ref id,
+ value const& title)
+ {
+ state->start_file(compatibility_version, false, include_doc_id, id, title);
+ }
+
+ std::string document_state::start_file_with_docinfo(
+ unsigned compatibility_version,
+ boost::string_ref include_doc_id,
+ boost::string_ref id,
+ value const& title)
+ {
+ return state->start_file(compatibility_version, true, include_doc_id,
+ id, title)->to_string();
+ }
+
+ void document_state::end_file()
+ {
+ state->end_file();
+ }
+
+ std::string document_state::begin_section(boost::string_ref id,
+ id_category category, source_mode_info const& source_mode)
+ {
+ return state->begin_section(id, category, source_mode)->to_string();
+ }
+
+ void document_state::end_section()
+ {
+ return state->end_section();
+ }
+
+ int document_state::section_level() const
+ {
+ return state->current_file->document->current_section->level;
+ }
+
+ source_mode_info document_state::section_source_mode() const
+ {
+ return state->current_file ?
+ state->current_file->document->current_section->source_mode :
+ source_mode_info();
+ }
+
+ std::string document_state::old_style_id(boost::string_ref id, id_category category)
+ {
+ return state->old_style_id(id, category)->to_string();
+ }
+
+ std::string document_state::add_id(boost::string_ref id, id_category category)
+ {
+ return state->add_id(id, category)->to_string();
+ }
+
+ std::string document_state::add_anchor(boost::string_ref id, id_category category)
+ {
+ return state->add_placeholder(id, category)->to_string();
+ }
+
+ std::string document_state::replace_placeholders_with_unresolved_ids(
+ boost::string_ref xml) const
+ {
+ return replace_ids(*state, xml);
+ }
+
+ std::string document_state::replace_placeholders(boost::string_ref xml) const
+ {
+ assert(!state->current_file);
+ std::vector<std::string> ids = generate_ids(*state, xml);
+ return replace_ids(*state, xml, &ids);
+ }
+
+ unsigned document_state::compatibility_version() const
+ {
+ return state->current_file->compatibility_version;
+ }
+
+ //
+ // id_placeholder
+ //
+
+ id_placeholder::id_placeholder(
+ unsigned index,
+ boost::string_ref id,
+ id_category category,
+ id_placeholder const* parent_)
+ : index(index),
+ unresolved_id(parent_ ?
+ parent_->unresolved_id + '.' + detail::to_s(id) :
+ detail::to_s(id)),
+ id(id.begin(), id.end()),
+ parent(parent_),
+ category(category),
+ num_dots(boost::range::count(id, '.') +
+ (parent_ ? parent_->num_dots + 1 : 0))
+ {
+ }
+
+ std::string id_placeholder::to_string() const
+ {
+ return '$' + boost::lexical_cast<std::string>(index);
+ }
+
+ //
+ // document_state_impl
+ //
+
+ id_placeholder const* document_state_impl::add_placeholder(
+ boost::string_ref id, id_category category,
+ id_placeholder const* parent)
+ {
+ placeholders.push_back(id_placeholder(
+ placeholders.size(), id, category, parent));
+ return &placeholders.back();
+ }
+
+ id_placeholder const* document_state_impl::get_placeholder(boost::string_ref value) const
+ {
+ // If this isn't a placeholder id.
+ if (value.size() <= 1 || *value.begin() != '$')
+ return 0;
+
+ unsigned index = boost::lexical_cast<int>(std::string(
+ value.begin() + 1, value.end()));
+
+ return &placeholders.at(index);
+ }
+
+ id_placeholder const* document_state_impl::get_id_placeholder(
+ boost::shared_ptr<section_info> const& section) const
+ {
+ return !section ? 0 :
+ section->file_depth < current_file->override_depth ?
+ current_file->override_id : section->placeholder_1_6;
+ }
+
+ id_placeholder const* document_state_impl::start_file(
+ unsigned compatibility_version,
+ bool document_root,
+ boost::string_ref include_doc_id,
+ boost::string_ref id,
+ value const& title)
+ {
+ boost::shared_ptr<file_info> parent = current_file;
+ assert(parent || document_root);
+
+ boost::shared_ptr<doc_info> document =
+ document_root ? boost::make_shared<doc_info>() : parent->document;
+
+ // Choose specified id to use. Prefer 'include_doc_id' (the id
+ // specified in an 'include' element) unless backwards compatibility
+ // is required.
+
+ boost::string_ref initial_doc_id;
+
+ if (document_root ||
+ compatibility_version >= 106u ||
+ parent->compatibility_version >= 106u)
+ {
+ initial_doc_id = !include_doc_id.empty() ? include_doc_id : id;
+ }
+ else {
+ initial_doc_id = !id.empty() ? id : include_doc_id;
+ }
+
+ // Work out this file's doc_id for older versions of quickbook.
+ // A bug meant that this needs to be done per file, not per
+ // document.
+
+ std::string doc_id_1_1;
+
+ if (document_root || compatibility_version < 106u) {
+ if (title.check())
+ document->last_title_1_1 = detail::to_s(title.get_quickbook());
+
+ doc_id_1_1 = !initial_doc_id.empty() ? detail::to_s(initial_doc_id) :
+ detail::make_identifier(document->last_title_1_1);
+ }
+ else if (parent) {
+ doc_id_1_1 = parent->doc_id_1_1;
+ }
+
+ if (document_root) {
+ // Create new file
+
+ current_file = boost::make_shared<file_info>(parent,
+ document, compatibility_version, doc_id_1_1);
+
+ // Create a section for the new document.
+
+ source_mode_info default_source_mode;
+
+ if (!initial_doc_id.empty()) {
+ return create_new_section(id, id_category::explicit_section_id,
+ default_source_mode);
+ }
+ else if (!title.empty()) {
+ return create_new_section(
+ detail::make_identifier(title.get_quickbook()),
+ id_category::generated_doc,
+ default_source_mode);
+ }
+ else if (compatibility_version >= 106u) {
+ return create_new_section("doc", id_category::numbered, default_source_mode);
+ }
+ else {
+ return create_new_section("", id_category::generated_doc, default_source_mode);
+ }
+ }
+ else {
+ // If an id was set for the file, then the file overrides the
+ // current section's id with this id.
+ //
+ // Don't do this for document_root as it will create a section
+ // for the document.
+ //
+ // Don't do this for older versions, as they use a different
+ // backwards compatible mechanism to handle file ids.
+
+ id_placeholder const* override_id = 0;
+
+ if (!initial_doc_id.empty() && compatibility_version >= 106u)
+ {
+ boost::shared_ptr<section_info> null_section;
+
+ override_id = add_id_to_section(initial_doc_id,
+ id_category::explicit_section_id, null_section);
+ }
+
+ // Create new file
+
+ current_file =
+ boost::make_shared<file_info>(parent, compatibility_version,
+ doc_id_1_1, override_id);
+
+ return 0;
+ }
+ }
+
+ void document_state_impl::end_file()
+ {
+ current_file = current_file->parent;
+ }
+
+ id_placeholder const* document_state_impl::add_id(
+ boost::string_ref id,
+ id_category category)
+ {
+ return add_id_to_section(id, category,
+ current_file->document->current_section);
+ }
+
+ id_placeholder const* document_state_impl::add_id_to_section(
+ boost::string_ref id,
+ id_category category,
+ boost::shared_ptr<section_info> const& section)
+ {
+ std::string id_part(id.begin(), id.end());
+
+ // Note: Normalizing id according to file compatibility version, but
+ // adding to section according to section compatibility version.
+
+ if (current_file->compatibility_version >= 106u &&
+ category.c < id_category::explicit_id) {
+ id_part = normalize_id(id);
+ }
+
+ id_placeholder const* placeholder_1_6 = get_id_placeholder(section);
+
+ if(!section || section->compatibility_version >= 106u) {
+ return add_placeholder(id_part, category, placeholder_1_6);
+ }
+ else {
+ std::string const& qualified_id = section->id_1_1;
+
+ std::string new_id;
+ if (!placeholder_1_6)
+ new_id = current_file->doc_id_1_1;
+ if (!new_id.empty() && !qualified_id.empty()) new_id += '.';
+ new_id += qualified_id;
+ if (!new_id.empty() && !id_part.empty()) new_id += '.';
+ new_id += id_part;
+
+ return add_placeholder(new_id, category, placeholder_1_6);
+ }
+ }
+
+ id_placeholder const* document_state_impl::old_style_id(
+ boost::string_ref id,
+ id_category category)
+ {
+ return current_file->compatibility_version < 103u ?
+ add_placeholder(
+ current_file->document->section_id_1_1 + "." + detail::to_s(id), category) :
+ add_id(id, category);
+ }
+
+ id_placeholder const* document_state_impl::begin_section(
+ boost::string_ref id,
+ id_category category,
+ source_mode_info const& source_mode)
+ {
+ current_file->document->section_id_1_1 = detail::to_s(id);
+ return create_new_section(id, category, source_mode);
+ }
+
+ id_placeholder const* document_state_impl::create_new_section(
+ boost::string_ref id,
+ id_category category,
+ source_mode_info const& source_mode)
+ {
+ boost::shared_ptr<section_info> parent =
+ current_file->document->current_section;
+
+ id_placeholder const* p = 0;
+ id_placeholder const* placeholder_1_6 = 0;
+
+ std::string id_1_1;
+
+ if (parent && current_file->compatibility_version < 106u) {
+ id_1_1 = parent->id_1_1;
+ if (!id_1_1.empty() && !id.empty())
+ id_1_1 += ".";
+ id_1_1.append(id.begin(), id.end());
+ }
+
+ if (current_file->compatibility_version >= 106u) {
+ p = placeholder_1_6 = add_id_to_section(id, category, parent);
+ }
+ else if (current_file->compatibility_version >= 103u) {
+ placeholder_1_6 = get_id_placeholder(parent);
+
+ std::string new_id;
+ if (!placeholder_1_6) {
+ new_id = current_file->doc_id_1_1;
+ if (!id_1_1.empty()) new_id += '.';
+ }
+ new_id += id_1_1;
+
+ p = add_placeholder(new_id, category, placeholder_1_6);
+ }
+ else {
+ placeholder_1_6 = get_id_placeholder(parent);
+
+ std::string new_id;
+ if (parent && !placeholder_1_6)
+ new_id = current_file->doc_id_1_1 + '.';
+
+ new_id += detail::to_s(id);
+
+ p = add_placeholder(new_id, category, placeholder_1_6);
+ }
+
+ current_file->document->current_section =
+ boost::make_shared<section_info>(parent,
+ current_file.get(), id, id_1_1, placeholder_1_6,
+ source_mode);
+
+ return p;
+ }
+
+ void document_state_impl::end_section()
+ {
+ current_file->document->current_section =
+ current_file->document->current_section->parent;
+ }
+}
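
A hedged summary (function name invented) of the pre-1.6 id scheme that add_id_to_section and create_new_section fall back to above: qualified ids are composed textually from the file's 1.1 doc id, the section's 1.1 id and the new part, skipping empty components; 1.6 and later instead chain placeholder parents and normalize each part.

#include <string>

std::string old_style_qualified_id(std::string const& doc_id_1_1,
                                   std::string const& section_id_1_1,
                                   std::string const& id_part)
{
    std::string result = doc_id_1_1;           // e.g. "boost_foo"
    if (!result.empty() && !section_id_1_1.empty()) result += '.';
    result += section_id_1_1;                  // e.g. "intro"
    if (!result.empty() && !id_part.empty()) result += '.';
    result += id_part;                         // e.g. "overview"
    return result;                             // "boost_foo.intro.overview"
}
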
diff --git a/tools/quickbook/src/document_state.hpp b/tools/quickbook/src/document_state.hpp
new file mode 100644
index 0000000000..2210c41731
--- /dev/null
+++ b/tools/quickbook/src/document_state.hpp
@@ -0,0 +1,89 @@
+/*=============================================================================
+ Copyright (c) 2011,2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#if !defined(BOOST_QUICKBOOK_DOCUMENT_STATE_HPP)
+#define BOOST_QUICKBOOK_DOCUMENT_STATE_HPP
+
+#include <boost/scoped_ptr.hpp>
+#include <boost/utility/string_ref.hpp>
+#include <string>
+#include "values.hpp"
+#include "syntax_highlight.hpp"
+
+namespace quickbook
+{
+ // id_category
+ //
+ // Higher categories get priority over lower ones.
+
+ struct id_category
+ {
+ enum categories
+ {
+ default_category = 0,
+ numbered, // Just used to avoid random docbook ids
+ generated, // Generated ids for other elements.
+ generated_heading, // Generated ids for headings.
+ generated_section, // Generated ids for sections.
+ generated_doc, // Generated ids for document.
+ explicit_id, // Explicitly given by user
+ explicit_section_id,
+ explicit_anchor_id
+ };
+
+ id_category() : c(default_category) {}
+ id_category(categories c) : c(c) {}
+ explicit id_category(int c) : c(categories(c)) {}
+
+ bool operator==(id_category rhs) const { return c == rhs.c; }
+
+ categories c;
+ };
+
+ struct document_state_impl;
+
+ struct document_state
+ {
+ document_state();
+ ~document_state();
+
+ std::string start_file_with_docinfo(
+ unsigned compatibility_version,
+ boost::string_ref include_doc_id,
+ boost::string_ref id,
+ value const& title);
+
+ void start_file(
+ unsigned compatibility_version,
+ boost::string_ref include_doc_id,
+ boost::string_ref id,
+ value const& title);
+
+ void end_file();
+
+ std::string begin_section(boost::string_ref, id_category,
+ source_mode_info const&);
+ void end_section();
+ int section_level() const;
+ source_mode_info section_source_mode() const;
+
+ std::string old_style_id(boost::string_ref, id_category);
+ std::string add_id(boost::string_ref, id_category);
+ std::string add_anchor(boost::string_ref, id_category);
+
+ std::string replace_placeholders_with_unresolved_ids(
+ boost::string_ref) const;
+ std::string replace_placeholders(boost::string_ref) const;
+
+ unsigned compatibility_version() const;
+ private:
+ boost::scoped_ptr<document_state_impl> state;
+ };
+}
+
+#endif
diff --git a/tools/quickbook/src/document_state_impl.hpp b/tools/quickbook/src/document_state_impl.hpp
new file mode 100644
index 0000000000..e9258e3b11
--- /dev/null
+++ b/tools/quickbook/src/document_state_impl.hpp
@@ -0,0 +1,147 @@
+/*=============================================================================
+ Copyright (c) 2011-2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#if !defined(BOOST_QUICKBOOK_DOCUMENT_STATE_IMPL_HPP)
+#define BOOST_QUICKBOOK_DOCUMENT_STATE_IMPL_HPP
+
+#include "document_state.hpp"
+#include "phrase_tags.hpp"
+#include "utils.hpp"
+#include <boost/utility/string_ref.hpp>
+#include <boost/shared_ptr.hpp>
+#include <deque>
+#include <string>
+#include <vector>
+
+namespace quickbook
+{
+ //
+ // id_placeholder
+ //
+ // When generating the xml, quickbook can't allocate the identifiers until
+ // the end, so it stores in the intermediate xml a placeholder string,
+ // e.g. id="$1". This represents one of these placeholders.
+ //
+
+ struct id_placeholder
+ {
+ unsigned index; // The index in document_state_impl::placeholders.
+ // Used for the dollar identifiers in
+ // intermediate xml.
+ std::string unresolved_id;
+ // The id that would be generated
+ // without any duplicate handling.
+ // Used for generating old style header anchors.
+ std::string id; // The node id.
+ id_placeholder const* parent;
+ // Placeholder of the parent id.
+ id_category category;
+ unsigned num_dots; // Number of dots in the id.
+ // Normally equal to the section level
+ // but not when an explicit id contains
+ // dots.
+
+ id_placeholder(unsigned index, boost::string_ref id,
+ id_category category, id_placeholder const* parent_);
+
+ std::string to_string() const;
+ };
+
+ //
+ // document_state_impl
+ //
+ // Contains all the data tracked by document_state.
+ //
+
+ struct file_info;
+ struct doc_info;
+ struct section_info;
+
+ struct document_state_impl
+ {
+ boost::shared_ptr<file_info> current_file;
+ std::deque<id_placeholder> placeholders;
+
+ // Placeholder methods
+
+ id_placeholder const* add_placeholder(boost::string_ref, id_category,
+ id_placeholder const* parent = 0);
+
+ id_placeholder const* get_placeholder(boost::string_ref) const;
+
+ id_placeholder const* get_id_placeholder(
+ boost::shared_ptr<section_info> const& section) const;
+
+ // Events
+
+ id_placeholder const* start_file(
+ unsigned compatibility_version,
+ bool document_root,
+ boost::string_ref include_doc_id,
+ boost::string_ref id,
+ value const& title);
+
+ void end_file();
+
+ id_placeholder const* add_id(
+ boost::string_ref id,
+ id_category category);
+ id_placeholder const* old_style_id(
+ boost::string_ref id,
+ id_category category);
+ id_placeholder const* begin_section(
+ boost::string_ref id,
+ id_category category,
+ source_mode_info const&);
+ void end_section();
+
+ private:
+ id_placeholder const* add_id_to_section(
+ boost::string_ref id,
+ id_category category,
+ boost::shared_ptr<section_info> const& section);
+ id_placeholder const* create_new_section(
+ boost::string_ref id,
+ id_category category,
+ source_mode_info const&);
+ };
+
+ std::string replace_ids(document_state_impl const& state, boost::string_ref xml,
+ std::vector<std::string> const* = 0);
+ std::vector<std::string> generate_ids(document_state_impl const&, boost::string_ref);
+
+ std::string normalize_id(boost::string_ref src_id);
+ std::string normalize_id(boost::string_ref src_id, std::size_t);
+
+ //
+ // Xml subset parser used for finding id values.
+ //
+ // I originally tried to integrate this into the post processor
+ // but that proved tricky. Alternatively it could use a proper
+ // xml parser, but I want this to be able to survive badly
+ // marked up escapes.
+ //
+
+ struct xml_processor
+ {
+ xml_processor();
+
+ std::vector<std::string> id_attributes;
+
+ struct callback {
+ virtual void start(boost::string_ref) {}
+ virtual void id_value(boost::string_ref) {}
+ virtual void finish(boost::string_ref) {}
+ virtual ~callback() {}
+ };
+
+ void parse(boost::string_ref, callback&);
+ };
+}
+
+#endif
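
A hypothetical illustration (helper name invented) of the placeholder scheme described in the comments above: the intermediate xml carries attributes such as id="$3", where the number indexes document_state_impl::placeholders, and a final pass swaps each placeholder for its generated id.

#include <boost/lexical_cast.hpp>
#include <string>
#include <vector>

std::string resolve_attribute(std::vector<std::string> const& generated_ids,
                              std::string const& value)
{
    // "$3" refers to placeholder 3; anything else is already a literal id.
    if (value.size() > 1 && value[0] == '$')
        return generated_ids[boost::lexical_cast<unsigned>(value.substr(1))];
    return value;
}
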
diff --git a/tools/quickbook/src/files.cpp b/tools/quickbook/src/files.cpp
index f85b611623..9999b0f220 100644
--- a/tools/quickbook/src/files.cpp
+++ b/tools/quickbook/src/files.cpp
@@ -15,6 +15,7 @@
#include <boost/foreach.hpp>
#include <fstream>
#include <iterator>
+#include <vector>
namespace quickbook
{
@@ -135,12 +136,17 @@ namespace quickbook
return pos->second;
}
+ std::ostream& operator<<(std::ostream& out, file_position const& x)
+ {
+ return out << "line: " << x.line << ", column: " << x.column;
+ }
+
file_position relative_position(
- std::string::const_iterator begin,
- std::string::const_iterator iterator)
+ boost::string_ref::const_iterator begin,
+ boost::string_ref::const_iterator iterator)
{
file_position pos;
- std::string::const_iterator line_begin = begin;
+ boost::string_ref::const_iterator line_begin = begin;
while (begin != iterator)
{
@@ -172,9 +178,9 @@ namespace quickbook
return pos;
}
- file_position file::position_of(std::string::const_iterator iterator) const
+ file_position file::position_of(boost::string_ref::const_iterator iterator) const
{
- return relative_position(source.begin(), iterator);
+ return relative_position(source().begin(), iterator);
}
// Mapped files.
@@ -195,50 +201,8 @@ namespace quickbook
std::string::size_type original_pos,
std::string::size_type our_pos,
section_types section_type = normal) :
- original_pos(original_pos), our_pos(our_pos), section_type(section_type) {}
-
- std::string::size_type to_original_pos(std::string::size_type pos)
- {
- switch (section_type) {
- case normal:
- return pos - our_pos + original_pos;
- case empty:
- return original_pos;
- case indented:
- // Indented doesn't really work, but that's okay because we
- // currently don't break up indented code.
- assert(pos == our_pos);
- return pos - our_pos + original_pos;
- default:
- assert(false);
- return original_pos;
- }
- }
-
- // If 'to_original_pos' worked for indented blocks, this wouldn't
- // be necessary.
- file_position calculate_position(
- file_position const& original,
- file_position const& relative) const
- {
- switch (section_type) {
- case normal:
- return file_position(
- original.line + relative.line - 1,
- relative.line == 1 ?
- original.column + relative.column - 1 :
- relative.column);
- case empty:
- return original;
- case indented:
- return file_position(
- original.line + relative.line - 1,
- original.column + relative.column - 1);
- default:
- assert(false);
- return file_position();
- }
- }
+ original_pos(original_pos), our_pos(our_pos),
+ section_type(section_type) {}
};
struct mapped_section_original_cmp
@@ -293,9 +257,9 @@ namespace quickbook
file_ptr original;
std::vector<mapped_file_section> mapped_sections;
- void add_empty_mapped_file_section(std::string::const_iterator pos) {
+ void add_empty_mapped_file_section(boost::string_ref::const_iterator pos) {
std::string::size_type original_pos =
- pos - original->source.begin();
+ pos - original->source().begin();
if (mapped_sections.empty() ||
mapped_sections.back().section_type !=
@@ -303,23 +267,109 @@ namespace quickbook
mapped_sections.back().original_pos != original_pos)
{
mapped_sections.push_back(mapped_file_section(
- original_pos, source.size(),
+ original_pos, source().size(),
mapped_file_section::empty));
}
}
- void add_mapped_file_section(std::string::const_iterator pos) {
+ void add_mapped_file_section(boost::string_ref::const_iterator pos) {
mapped_sections.push_back(mapped_file_section(
- pos - original->source.begin(), source.size()));
+ pos - original->source().begin(), source().size()));
}
- void add_indented_mapped_file_section(std::string::const_iterator pos) {
+ void add_indented_mapped_file_section(boost::string_ref::const_iterator pos)
+ {
mapped_sections.push_back(mapped_file_section(
- pos - original->source.begin(), source.size(),
+ pos - original->source().begin(), source().size(),
mapped_file_section::indented));
}
- virtual file_position position_of(std::string::const_iterator) const;
+ std::string::size_type to_original_pos(
+ std::vector<mapped_file_section>::const_iterator section,
+ std::string::size_type pos) const
+ {
+ switch (section->section_type) {
+ case mapped_file_section::normal:
+ return pos - section->our_pos + section->original_pos;
+
+ case mapped_file_section::empty:
+ return section->original_pos;
+
+ case mapped_file_section::indented: {
+ // Will contain the start of the current line.
+ boost::string_ref::size_type our_line = section->our_pos;
+
+ // Will contain the number of lines in the block before
+ // the current line.
+ unsigned newline_count = 0;
+
+ for(boost::string_ref::size_type i = section->our_pos;
+ i != pos; ++i)
+ {
+ if (source()[i] == '\n') {
+ our_line = i + 1;
+ ++newline_count;
+ }
+ }
+
+ // The start of the line in the original source.
+ boost::string_ref::size_type original_line =
+ section->original_pos;
+
+ while(newline_count > 0) {
+ if (original->source()[original_line] == '\n')
+ --newline_count;
+ ++original_line;
+ }
+
+ // The start of line content (i.e. after indentation).
+ our_line = skip_indentation(source(), our_line);
+
+ // The position is in the middle of indentation, so
+ // just return the start of the whitespace, which should
+ // be good enough.
+ if (our_line > pos) return original_line;
+
+ original_line =
+ skip_indentation(original->source(), original_line);
+
+ // Confirm that we are actually in the same position.
+ assert(original->source()[original_line] ==
+ source()[our_line]);
+
+ // Calculate the position
+ return original_line + (pos - our_line);
+ }
+ default:
+ assert(false);
+ return section->original_pos;
+ }
+ }
+
+ std::vector<mapped_file_section>::const_iterator find_section(
+ boost::string_ref::const_iterator pos) const
+ {
+ std::vector<mapped_file_section>::const_iterator section =
+ boost::upper_bound(mapped_sections,
+ std::string::size_type(pos - source().begin()),
+ mapped_section_pos_cmp());
+ assert(section != mapped_sections.begin());
+ --section;
+
+ return section;
+ }
+
+ virtual file_position position_of(boost::string_ref::const_iterator) const;
+
+ private:
+
+ static std::string::size_type skip_indentation(
+ boost::string_ref src, std::string::size_type i)
+ {
+ while (i != src.size() && (src[i] == ' ' || src[i] == '\t')) ++i;
+ return i;
+ }
+
};
namespace {
@@ -361,57 +411,49 @@ namespace quickbook
bool mapped_file_builder::empty() const
{
- return data->new_file->source.empty();
+ return data->new_file->source().empty();
}
mapped_file_builder::pos mapped_file_builder::get_pos() const
{
- return data->new_file->source.size();
+ return data->new_file->source().size();
}
- void mapped_file_builder::add(char const* x, iterator pos)
- {
- data->new_file->add_empty_mapped_file_section(pos);
- data->new_file->source.append(x);
- }
-
- void mapped_file_builder::add(std::string const& x, iterator pos)
+ void mapped_file_builder::add_at_pos(boost::string_ref x, iterator pos)
{
data->new_file->add_empty_mapped_file_section(pos);
- data->new_file->source.append(x);
+ data->new_file->source_.append(x.begin(), x.end());
}
- void mapped_file_builder::add(iterator begin, iterator end)
+ void mapped_file_builder::add(boost::string_ref x)
{
- data->new_file->add_mapped_file_section(begin);
- data->new_file->source.append(begin, end);
+ data->new_file->add_mapped_file_section(x.begin());
+ data->new_file->source_.append(x.begin(), x.end());
}
void mapped_file_builder::add(mapped_file_builder const& x)
{
- add(x, 0, x.data->new_file->source.size());
+ add(x, 0, x.data->new_file->source_.size());
}
void mapped_file_builder::add(mapped_file_builder const& x,
pos begin, pos end)
{
assert(data->new_file->original == x.data->new_file->original);
- assert(begin <= x.data->new_file->source.size());
- assert(end <= x.data->new_file->source.size());
+ assert(begin <= x.data->new_file->source_.size());
+ assert(end <= x.data->new_file->source_.size());
if (begin != end)
{
- std::vector<mapped_file_section>::iterator start =
- boost::upper_bound(x.data->new_file->mapped_sections,
- begin, mapped_section_pos_cmp());
- assert(start != x.data->new_file->mapped_sections.begin());
- --start;
+ std::vector<mapped_file_section>::const_iterator start =
+ x.data->new_file->find_section(
+ x.data->new_file->source().begin() + begin);
- std::string::size_type size = data->new_file->source.size();
+ std::string::size_type size = data->new_file->source_.size();
data->new_file->mapped_sections.push_back(mapped_file_section(
- start->to_original_pos(begin), size,
- start->section_type));
+ x.data->new_file->to_original_pos(start, begin),
+ size, start->section_type));
for (++start; start != x.data->new_file->mapped_sections.end() &&
start->our_pos < end; ++start)
@@ -421,89 +463,141 @@ namespace quickbook
start->section_type));
}
- data->new_file->source.append(
- x.data->new_file->source.begin() + begin,
- x.data->new_file->source.begin() + end);
+ data->new_file->source_.append(
+ x.data->new_file->source_.begin() + begin,
+ x.data->new_file->source_.begin() + end);
}
}
- void mapped_file_builder::unindent_and_add(iterator begin, iterator end)
+ boost::string_ref::size_type indentation_count(boost::string_ref x)
{
- std::string program(begin, end);
+ unsigned count = 0;
- // Erase leading blank lines and newlines:
- std::string::size_type start = program.find_first_not_of(" \t");
- if (start != std::string::npos &&
- (program[start] == '\r' || program[start] == '\n'))
+ for(boost::string_ref::const_iterator begin = x.begin(), end = x.end();
+ begin != end; ++begin)
{
- program.erase(0, start);
+ switch(*begin)
+ {
+ case ' ':
+ ++count;
+ break;
+ case '\t':
+ // hardcoded tab to 4 for now
+ count = count - (count % 4) + 4;
+ break;
+ default:
+ assert(false);
+ }
}
- start = program.find_first_not_of("\r\n");
- program.erase(0, start);
- if (program.size() == 0)
- return; // nothing left to do
+ return count;
+ }
+
+ void mapped_file_builder::unindent_and_add(boost::string_ref x)
+ {
+ // I wanted to do everything using a string_ref, but unfortunately
+ // it doesn't have all the overloads used here. So...
+ std::string const program(x.begin(), x.end());
+
+ // Erase leading blank lines and newlines:
+ std::string::size_type start = program.find_first_not_of(" \t\r\n");
+ if (start == std::string::npos) return;
+
+ start = program.find_last_of("\r\n", start);
+ start = start == std::string::npos ? 0 : start + 1;
+
+ assert(start < program.size());
+
+ // Get the first line indentation
+ std::string::size_type indent = program.find_first_not_of(" \t", start) - start;
+ boost::string_ref::size_type full_indent = indentation_count(
+ boost::string_ref(&program[start], indent));
+
+ std::string::size_type pos = start;
- // Get the first line indent
- std::string::size_type indent = program.find_first_not_of(" \t");
- std::string::size_type pos = 0;
- if (std::string::npos == indent)
+ // Calculate the minimum indent from the rest of the lines,
+ // detecting any mix of spaces and tabs along the way.
+ while (std::string::npos != (pos = program.find_first_of("\r\n", pos)))
{
- // Nothing left to do here. The code is empty (just spaces).
- // We clear the program to signal the caller that it is empty
- // and return early.
- program.clear();
- return;
+ pos = program.find_first_not_of("\r\n", pos);
+ if (std::string::npos == pos) break;
+
+ std::string::size_type n = program.find_first_not_of(" \t", pos);
+ if (n == std::string::npos) break;
+
+ char ch = program[n];
+ if (ch == '\r' || ch == '\n') continue; // ignore empty lines
+
+ indent = (std::min)(indent, n-pos);
+ full_indent = (std::min)(full_indent, indentation_count(
+ boost::string_ref(&program[pos], n-pos)));
}
- // Calculate the minimum indent from the rest of the lines
- do
+ // Detect if indentation is mixed.
+ bool mixed_indentation = false;
+ boost::string_ref first_indent(&program[start], indent);
+ pos = start;
+
+ while (std::string::npos != (pos = program.find_first_of("\r\n", pos)))
{
pos = program.find_first_not_of("\r\n", pos);
- if (std::string::npos == pos)
- break;
+ if (std::string::npos == pos) break;
std::string::size_type n = program.find_first_not_of(" \t", pos);
- if (n != std::string::npos)
- {
- char ch = program[n];
- if (ch != '\r' && ch != '\n') // ignore empty lines
- indent = (std::min)(indent, n-pos);
+ if (n == std::string::npos || n-pos < indent) continue;
+
+ if (boost::string_ref(&program[pos], indent) != first_indent) {
+ mixed_indentation = true;
+ break;
}
}
- while (std::string::npos != (pos = program.find_first_of("\r\n", pos)));
// Trim white spaces from column 0..indent
- pos = 0;
- program.erase(0, indent);
- while (std::string::npos != (pos = program.find_first_of("\r\n", pos)))
- {
+ std::string unindented_program;
+ std::string::size_type copy_start = start;
+ pos = start;
+
+ do {
if (std::string::npos == (pos = program.find_first_not_of("\r\n", pos)))
- {
break;
+
+ unindented_program.append(program.begin() + copy_start, program.begin() + pos);
+ copy_start = pos;
+
+ // Find the end of the indentation.
+ std::string::size_type next = program.find_first_not_of(" \t", pos);
+ if (next == std::string::npos) next = program.size();
+
+ if (mixed_indentation)
+ {
+ unsigned length = indentation_count(boost::string_ref(
+ &program[pos], next - pos));
+
+ if (length > full_indent) {
+ std::string new_indentation(length - full_indent, ' ');
+ unindented_program.append(new_indentation);
+ }
+
+ copy_start = next;
+ }
+ else
+ {
+ copy_start = (std::min)(pos + indent, next);
}
- std::string::size_type next = program.find_first_of("\r\n", pos);
- program.erase(pos, (std::min)(indent, next-pos));
- }
+ pos = next;
+ } while (std::string::npos !=
+ (pos = program.find_first_of("\r\n", pos)));
+
+ unindented_program.append(program.begin() + copy_start, program.end());
- data->new_file->add_indented_mapped_file_section(begin + indent);
- data->new_file->source.append(program);
+ data->new_file->add_indented_mapped_file_section(x.begin());
+ data->new_file->source_.append(unindented_program);
}
- file_position mapped_file::position_of(std::string::const_iterator pos) const
+ file_position mapped_file::position_of(boost::string_ref::const_iterator pos) const
{
- std::vector<mapped_file_section>::const_iterator section =
- boost::upper_bound(mapped_sections,
- std::string::size_type(pos - source.begin()),
- mapped_section_pos_cmp());
- assert(section != mapped_sections.begin());
- --section;
-
- return section->calculate_position(
- original->position_of(
- original->source.begin() + section->original_pos),
- relative_position(source.begin() + section->our_pos, pos)
- );
+ return original->position_of(original->source().begin() +
+ to_original_pos(find_section(pos), pos - source().begin()));
}
}
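
A rough sketch of the tab handling in indentation_count above: a tab advances the count to the next multiple of four (hard-coded for now), so both "\t" and " \t" measure as four columns. Unlike the original, which asserts on anything other than a space or tab, this sketch simply counts other characters as one column.

#include <cstddef>

unsigned indent_width(char const* s, std::size_t n)
{
    unsigned count = 0;
    for (std::size_t i = 0; i < n; ++i) {
        if (s[i] == '\t')
            count = count - (count % 4) + 4;  // jump to the next tab stop
        else
            ++count;                          // spaces (and anything else) count as one
    }
    return count;
}
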
diff --git a/tools/quickbook/src/files.hpp b/tools/quickbook/src/files.hpp
index 4f217e70cd..0a2e5309c5 100644
--- a/tools/quickbook/src/files.hpp
+++ b/tools/quickbook/src/files.hpp
@@ -14,8 +14,10 @@
#include <string>
#include <boost/filesystem/path.hpp>
#include <boost/intrusive_ptr.hpp>
+#include <boost/utility/string_ref.hpp>
#include <stdexcept>
#include <cassert>
+#include <iosfwd>
namespace quickbook {
@@ -31,6 +33,13 @@ namespace quickbook {
int line;
int column;
+
+ bool operator==(file_position const& other) const
+ {
+ return line == other.line && column == other.column;
+ }
+
+ friend std::ostream& operator<<(std::ostream&, file_position const&);
};
struct file
@@ -41,21 +50,23 @@ namespace quickbook {
file(file const&);
public:
fs::path const path;
- std::string source;
+ std::string source_;
bool is_code_snippets;
private:
unsigned qbk_version;
unsigned ref_count;
public:
+ boost::string_ref source() const { return source_; }
- file(fs::path const& path, std::string const& source,
+ file(fs::path const& path, boost::string_ref source,
unsigned qbk_version) :
- path(path), source(source), is_code_snippets(false),
+ path(path), source_(source.begin(), source.end()), is_code_snippets(false),
qbk_version(qbk_version), ref_count(0)
{}
- file(file const& f, std::string const& source) :
- path(f.path), source(source), is_code_snippets(f.is_code_snippets),
+ file(file const& f, boost::string_ref source) :
+ path(f.path), source_(source.begin(), source.end()),
+ is_code_snippets(f.is_code_snippets),
qbk_version(f.qbk_version), ref_count(0)
{}
@@ -76,7 +87,7 @@ namespace quickbook {
qbk_version = v;
}
- virtual file_position position_of(std::string::const_iterator) const;
+ virtual file_position position_of(boost::string_ref::const_iterator) const;
friend void intrusive_ptr_add_ref(file* ptr) { ++ptr->ref_count; }
@@ -101,8 +112,8 @@ namespace quickbook {
struct mapped_file_builder
{
- typedef std::string::const_iterator iterator;
- typedef std::string::size_type pos;
+ typedef boost::string_ref::const_iterator iterator;
+ typedef boost::string_ref::size_type pos;
mapped_file_builder();
~mapped_file_builder();
@@ -114,12 +125,11 @@ namespace quickbook {
bool empty() const;
pos get_pos() const;
- void add(char const*, iterator);
- void add(std::string const&, iterator);
- void add(iterator, iterator);
+ void add_at_pos(boost::string_ref, iterator);
+ void add(boost::string_ref);
void add(mapped_file_builder const&);
void add(mapped_file_builder const&, pos, pos);
- void unindent_and_add(iterator, iterator);
+ void unindent_and_add(boost::string_ref);
private:
mapped_file_builder_data* data;
diff --git a/tools/quickbook/src/fwd.hpp b/tools/quickbook/src/fwd.hpp
index 7cccde8735..5dec2d68a2 100644
--- a/tools/quickbook/src/fwd.hpp
+++ b/tools/quickbook/src/fwd.hpp
@@ -13,19 +13,21 @@
#include "iterator.hpp"
#include <boost/intrusive_ptr.hpp>
+#include <boost/utility/string_ref.hpp>
namespace quickbook
{
struct state;
struct quickbook_grammar;
struct collector;
- struct id_manager;
+ struct document_state;
struct section_info;
struct file;
struct template_symbol;
typedef boost::intrusive_ptr<file> file_ptr;
+ typedef unsigned source_mode_type;
- typedef std::string::const_iterator string_iterator;
+ typedef boost::string_ref::const_iterator string_iterator;
typedef lookback_iterator<string_iterator> parse_iterator;
inline void ignore_variable(void const*) {}
diff --git a/tools/quickbook/src/glob.cpp b/tools/quickbook/src/glob.cpp
new file mode 100644
index 0000000000..474bdeb077
--- /dev/null
+++ b/tools/quickbook/src/glob.cpp
@@ -0,0 +1,301 @@
+/*=============================================================================
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "glob.hpp"
+#include <cassert>
+
+namespace quickbook
+{
+ typedef boost::string_ref::const_iterator glob_iterator;
+
+ void check_glob_range(glob_iterator&, glob_iterator);
+ void check_glob_escape(glob_iterator&, glob_iterator);
+
+ bool match_section(glob_iterator& pattern_begin, glob_iterator pattern_end,
+ glob_iterator& filename_begin, glob_iterator& filename_end);
+ bool match_range(glob_iterator& pattern_begin, glob_iterator pattern_end,
+ unsigned char x);
+
+ bool check_glob(boost::string_ref pattern)
+ {
+ bool is_glob = false;
+ bool is_ascii = true;
+
+ glob_iterator begin = pattern.begin();
+ glob_iterator end = pattern.end();
+
+ while (begin != end) {
+ if (*begin < 32 || *begin > 127)
+ is_ascii = false;
+
+ switch(*begin) {
+ case '\\':
+ check_glob_escape(begin, end);
+ break;
+
+ case '[':
+ check_glob_range(begin, end);
+ is_glob = true;
+ break;
+
+ case ']':
+ throw glob_error("uneven square brackets");
+
+ case '?':
+ is_glob = true;
+ ++begin;
+ break;
+
+ case '*':
+ is_glob = true;
+ ++begin;
+
+ if (begin != end && *begin == '*') {
+ throw glob_error("'**' not supported");
+ }
+ break;
+
+ default:
+ ++begin;
+ }
+ }
+
+ if (is_glob && !is_ascii)
+ throw glob_error("invalid character, globs are ascii only");
+
+ return is_glob;
+ }
+
+ void check_glob_range(glob_iterator& begin, glob_iterator end)
+ {
+ assert(begin != end && *begin == '[');
+ ++begin;
+
+ if (*begin == ']')
+ throw glob_error("empty range");
+
+ while (begin != end) {
+ switch (*begin) {
+ case '\\':
+ ++begin;
+
+ if (begin == end) {
+ throw glob_error("trailing escape");
+ }
+ else if (*begin == '\\' || *begin == '/') {
+ throw glob_error("contains escaped slash");
+ }
+
+ ++begin;
+ break;
+ case '[':
+ // TODO: Allow?
+ throw glob_error("nested square brackets");
+ case ']':
+ ++begin;
+ return;
+ case '/':
+ throw glob_error("slash in square brackets");
+ default:
+ ++begin;
+ }
+ }
+
+ throw glob_error("uneven square brackets");
+ }
+
+ void check_glob_escape(glob_iterator& begin, glob_iterator end)
+ {
+ assert(begin != end && *begin == '\\');
+
+ ++begin;
+
+ if (begin == end) {
+ throw glob_error("trailing escape");
+ }
+ else if (*begin == '\\' || *begin == '/') {
+ throw glob_error("contains escaped slash");
+ }
+
+ ++begin;
+ }
+
+ bool glob(boost::string_ref const& pattern,
+ boost::string_ref const& filename)
+ {
+ // If there wasn't this special case then '*' would match an
+ // empty string.
+ if (filename.empty()) return pattern.empty();
+
+ glob_iterator pattern_it = pattern.begin();
+ glob_iterator pattern_end = pattern.end();
+
+ glob_iterator filename_it = filename.begin();
+ glob_iterator filename_end = filename.end();
+
+ if (!match_section(pattern_it, pattern_end, filename_it, filename_end))
+ return false;
+
+ while (pattern_it != pattern_end) {
+ assert(*pattern_it == '*');
+ ++pattern_it;
+
+ if (pattern_it == pattern_end) return true;
+
+ // TODO: Error?
+ if (*pattern_it == '*') return false;
+
+ while (true) {
+ if (filename_it == filename_end) return false;
+ if (match_section(pattern_it, pattern_end, filename_it, filename_end))
+ break;
+ ++filename_it;
+ }
+ }
+
+ return filename_it == filename_end;
+ }
+
+ bool match_section(glob_iterator& pattern_begin, glob_iterator pattern_end,
+ glob_iterator& filename_begin, glob_iterator& filename_end)
+ {
+ glob_iterator pattern_it = pattern_begin;
+ glob_iterator filename_it = filename_begin;
+
+ while (pattern_it != pattern_end && *pattern_it != '*') {
+ if (filename_it == filename_end) return false;
+
+ switch(*pattern_it) {
+ case '*':
+ assert(false);
+ return false;
+ case '[':
+ if (!match_range(pattern_it, pattern_end, *filename_it))
+ return false;
+ ++filename_it;
+ break;
+ case '?':
+ ++pattern_it;
+ ++filename_it;
+ break;
+ case '\\':
+ ++pattern_it;
+ if (pattern_it == pattern_end) return false;
+ BOOST_FALLTHROUGH;
+ default:
+ if (*pattern_it != *filename_it) return false;
+ ++pattern_it;
+ ++filename_it;
+ }
+ }
+
+ if (pattern_it == pattern_end && filename_it != filename_end)
+ return false;
+
+ pattern_begin = pattern_it;
+ filename_begin = filename_it;
+ return true;
+ }
+
+ bool match_range(glob_iterator& pattern_begin, glob_iterator pattern_end,
+ unsigned char x)
+ {
+ assert(pattern_begin != pattern_end && *pattern_begin == '[');
+ ++pattern_begin;
+ if (pattern_begin == pattern_end) return false;
+
+ bool invert_match = false;
+ bool matched = false;
+
+ if (*pattern_begin == '^') {
+ invert_match = true;
+ ++pattern_begin;
+ if (pattern_begin == pattern_end) return false;
+ }
+
+ // Search for a match
+ while (true) {
+ unsigned char first = *pattern_begin;
+ ++pattern_begin;
+ if (first == ']') break;
+ if (pattern_begin == pattern_end) return false;
+
+ if (first == '\\') {
+ first = *pattern_begin;
+ ++pattern_begin;
+ if (pattern_begin == pattern_end) return false;
+ }
+
+ if (*pattern_begin != '-') {
+ matched = matched || (first == x);
+ }
+ else {
+ ++pattern_begin;
+ if (pattern_begin == pattern_end) return false;
+
+ unsigned char second = *pattern_begin;
+ ++pattern_begin;
+ if (second == ']') {
+ matched = matched || (first == x) || (x == '-');
+ break;
+ }
+ if (pattern_begin == pattern_end) return false;
+
+ if (second == '\\') {
+ second = *pattern_begin;
+ ++pattern_begin;
+ if (pattern_begin == pattern_end) return false;
+ }
+
+ // TODO: What if second < first?
+ matched = matched || (first <= x && x <= second);
+ }
+ }
+
+ return invert_match != matched;
+ }
+
+ std::size_t find_glob_char(boost::string_ref pattern,
+ std::size_t pos)
+ {
+ // Weird style is because boost::string_ref's find_first_of
+ // doesn't take a position argument.
+ std::size_t removed = 0;
+
+ while (true) {
+ pos = pattern.find_first_of("[]?*\\");
+ if (pos == boost::string_ref::npos) return pos;
+ if (pattern[pos] != '\\') return pos + removed;
+ pattern.remove_prefix(pos + 2);
+ removed += pos + 2;
+ }
+ }
+
+ std::string glob_unescape(boost::string_ref pattern)
+ {
+ std::string result;
+
+ while (true) {
+ std::size_t pos = pattern.find("\\");
+ if (pos == boost::string_ref::npos) {
+ result.append(pattern.data(), pattern.size());
+ break;
+ }
+
+ result.append(pattern.data(), pos);
+ ++pos;
+ if (pos < pattern.size()) {
+ result += pattern[pos];
+ ++pos;
+ }
+ pattern.remove_prefix(pos);
+ }
+
+ return result;
+ }
+}
diff --git a/tools/quickbook/src/glob.hpp b/tools/quickbook/src/glob.hpp
new file mode 100644
index 0000000000..8e8458961a
--- /dev/null
+++ b/tools/quickbook/src/glob.hpp
@@ -0,0 +1,30 @@
+/*=============================================================================
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include <boost/utility/string_ref.hpp>
+#include <stdexcept>
+
+namespace quickbook
+{
+ struct glob_error : std::runtime_error
+ {
+ explicit glob_error(char const* error) :
+ std::runtime_error(error) {}
+ };
+
+ // Is this path a glob? Throws glob_error if glob is invalid.
+ bool check_glob(boost::string_ref);
+
+ // pre: glob is valid (call check_glob first on user data).
+ bool glob(boost::string_ref const& pattern,
+ boost::string_ref const& filename);
+
+ std::size_t find_glob_char(boost::string_ref,
+ std::size_t start = 0);
+ std::string glob_unescape(boost::string_ref);
+}
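
A small usage sketch of the interface declared above: check_glob validates a pattern (throwing glob_error on malformed input) and reports whether it contains glob metacharacters at all, after which matching candidates can be tested with glob().

#include "glob.hpp"
#include <iostream>

void glob_example()
{
    boost::string_ref pattern("examples/*.cpp");
    try {
        if (quickbook::check_glob(pattern)) {
            std::cout << quickbook::glob(pattern, "examples/test.cpp") << "\n"; // 1
            std::cout << quickbook::glob(pattern, "docs/readme.txt") << "\n";   // 0
        }
    }
    catch (quickbook::glob_error const& e) {
        std::cout << "invalid glob: " << e.what() << "\n";
    }
}
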
diff --git a/tools/quickbook/src/grammar.cpp b/tools/quickbook/src/grammar.cpp
index 8f244f18ac..54ed64aa06 100644
--- a/tools/quickbook/src/grammar.cpp
+++ b/tools/quickbook/src/grammar.cpp
@@ -17,8 +17,10 @@ namespace quickbook
: impl_(new impl(s))
, command_line_macro(impl_->command_line, "command_line_macro")
, inline_phrase(impl_->inline_phrase, "inline_phrase")
- , phrase(impl_->phrase_start, "phrase")
- , block(impl_->block_start, "block")
+ , phrase_start(impl_->phrase_start, "phrase")
+ , block_start(impl_->block_start, "block")
+ , attribute_template_body(impl_->attribute_template_body,
+ "attribute_template_body")
, doc_info(impl_->doc_info_details, "doc_info")
{
}
diff --git a/tools/quickbook/src/grammar.hpp b/tools/quickbook/src/grammar.hpp
index 54aaf2b3ea..01d634c90f 100644
--- a/tools/quickbook/src/grammar.hpp
+++ b/tools/quickbook/src/grammar.hpp
@@ -58,8 +58,9 @@ namespace quickbook
public:
grammar command_line_macro;
grammar inline_phrase;
- grammar phrase;
- grammar block;
+ grammar phrase_start;
+ grammar block_start;
+ grammar attribute_template_body;
grammar doc_info;
quickbook_grammar(quickbook::state&);
diff --git a/tools/quickbook/src/grammar_impl.hpp b/tools/quickbook/src/grammar_impl.hpp
index 090b399048..8d37d351f1 100644
--- a/tools/quickbook/src/grammar_impl.hpp
+++ b/tools/quickbook/src/grammar_impl.hpp
@@ -20,26 +20,81 @@ namespace quickbook
{
namespace cl = boost::spirit::classic;
+ // Information about a square bracket element (e.g. [* word]).
+ //
+ // TODO: The naming is a bit confusing as element is also sometimes used for
+ // syntactic/implicit elements (such as lists and horizontal rules). Maybe
+ // should use entity as a more general name instead of element. Or it might
+ // be better to use 'tag' for square bracket elements, although that is
+ // currently used for the type of entities.
struct element_info
{
+ // Types of elements.
+ //
+ // Used to determine:
+ //
+ // - where they can be used.
+ // - whether they end a paragraph
+ // - how following newlines are interpreted by the grammar.
+ // - and possibly other things.....
enum type_enum {
+ // Used when there's no element.
nothing = 0,
- block = 1,
+
+ // A section tag. These can't be nested.
+ section_block = 1,
+
+ // Block elements that can be used in conditional phrases and lists,
+ // but not nested. (TODO: not a good name).
conditional_or_block = 2,
+
+ // Block elements that can be nested in other elements.
nested_block = 4,
+
+ // Phrase elements.
phrase = 8,
+
+ // Depending on the context this can be a block or phrase.
+ //
+ // Currently this is only used for elements that don't actually
+ // generate output (e.g. anchors, source mode tags). The main
+ // reason is so that lists can be preceded by the element, e.g.
+ //
+ // [#anchor]
+ // * list item.
+ //
+ // If the anchor was considered to be a phrase element, then the
+ // list wouldn't be recognised.
maybe_block = 16
};
+ // Masks to determine which context elements can be used in (in_*), and
+ // whether they are considered to be a block element (is_*).
enum context {
- in_phrase = phrase | maybe_block,
+ // At the top level we allow everything.
+ in_top_level = phrase | maybe_block | nested_block |
+ conditional_or_block | section_block,
+
+ // In conditional phrases and list blocks we allow everything but section
+ // elements.
+ in_conditional = phrase | maybe_block | nested_block |
+ conditional_or_block,
+ in_list_block = phrase | maybe_block | nested_block |
+ conditional_or_block,
+
+ // In nested blocks we allow a more limited range of elements.
in_nested_block = phrase | maybe_block | nested_block,
- in_conditional = phrase | maybe_block | nested_block | conditional_or_block,
- in_block = phrase | maybe_block | nested_block | conditional_or_block | block,
- only_nested_block = nested_block,
- only_block = nested_block | conditional_or_block | block,
- only_list_block = nested_block | conditional_or_block,
- only_contextual_block = maybe_block | nested_block | conditional_or_block | block
+
+ // In a phrase we only allow phrase elements, ('maybe_block'
+ // elements are treated as phrase elements in this context)
+ in_phrase = phrase | maybe_block,
+
+ // At the start of a block these are all block elements.
+ is_contextual_block = maybe_block | nested_block |
+ conditional_or_block | section_block,
+
+ // These are all block elements in all other contexts.
+ is_block = nested_block | conditional_or_block | section_block,
};
element_info()
@@ -74,9 +129,11 @@ namespace quickbook
cl::rule<scanner> inside_preformatted;
cl::rule<scanner> inside_paragraph;
cl::rule<scanner> command_line;
+ cl::rule<scanner> attribute_template_body;
cl::rule<scanner> attribute_value_1_7;
cl::rule<scanner> escape;
cl::rule<scanner> raw_escape;
+ cl::rule<scanner> skip_entity;
// Miscellaneous stuff
cl::rule<scanner> hard_space;
@@ -90,6 +147,9 @@ namespace quickbook
// Element Symbols
cl::symbols<element_info> elements;
+
+ // Source mode
+ cl::symbols<source_mode_type> source_modes;
// Doc Info
cl::rule<scanner> doc_info_details;
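
A hedged illustration of how the element_info masks above are meant to compose (the actual checks live in the grammar, outside this hunk): an element type is usable in a given context when its bit is set in that context's mask.

#include "grammar_impl.hpp"

bool usable_in(quickbook::element_info::type_enum type, int context_mask)
{
    return (type & context_mask) != 0;
}

// For example, usable_in(quickbook::element_info::phrase,
//                        quickbook::element_info::in_phrase) is true, while
// usable_in(quickbook::element_info::section_block,
//           quickbook::element_info::in_phrase) is false.
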
diff --git a/tools/quickbook/src/id_generation.cpp b/tools/quickbook/src/id_generation.cpp
new file mode 100644
index 0000000000..b6445a5816
--- /dev/null
+++ b/tools/quickbook/src/id_generation.cpp
@@ -0,0 +1,379 @@
+/*=============================================================================
+ Copyright (c) 2011, 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include <cctype>
+#include "document_state_impl.hpp"
+#include <boost/make_shared.hpp>
+#include <boost/unordered_map.hpp>
+#include <boost/lexical_cast.hpp>
+#include <boost/foreach.hpp>
+#include <boost/range/algorithm.hpp>
+
+// TODO: This should possibly try to always generate valid XML ids:
+// http://www.w3.org/TR/REC-xml/#NT-NameStartChar
+
+namespace quickbook {
+ //
+ // The maximum size of a generated part of an id.
+ //
+ // Not a strict maximum, sometimes broken because the user
+ // explicitly uses a longer id, or for backwards compatibility.
+
+ static const std::size_t max_size = 32;
+
+ typedef std::vector<id_placeholder const*> placeholder_index;
+ placeholder_index index_placeholders(document_state_impl const&, boost::string_ref);
+
+ void generate_id_block(
+ placeholder_index::iterator, placeholder_index::iterator,
+ std::vector<std::string>& generated_ids);
+
+ std::vector<std::string> generate_ids(document_state_impl const& state, boost::string_ref xml)
+ {
+ std::vector<std::string> generated_ids(state.placeholders.size());
+
+ // Get a list of the placeholders in the order that we wish to
+ // process them.
+ placeholder_index placeholders = index_placeholders(state, xml);
+
+ typedef std::vector<id_placeholder const*>::iterator iterator;
+ iterator it = placeholders.begin(), end = placeholders.end();
+
+ while (it != end) {
+ // We process all the ids that have the same number of dots
+ // together. Note that ids with different parents can clash, e.g.
+ // because of old fashioned id generation or anchors containing
+ // multiple dots.
+ //
+ // So find the group of placeholders with the same number of dots.
+ iterator group_begin = it, group_end = it;
+ while (group_end != end && (*group_end)->num_dots == (*it)->num_dots)
+ ++group_end;
+
+ generate_id_block(group_begin, group_end, generated_ids);
+ it = group_end;
+ }
+
+ return generated_ids;
+ }
+
+ //
+ // index_placeholders
+ //
+ // Create a sorted index of the placeholders, in order
+ // to make numbering duplicates easy. A total order.
+ //
+
+ struct placeholder_compare
+ {
+ std::vector<unsigned>& order;
+
+ placeholder_compare(std::vector<unsigned>& order) : order(order) {}
+
+ bool operator()(id_placeholder const* x, id_placeholder const* y) const
+ {
+ bool x_explicit = x->category.c >= id_category::explicit_id;
+ bool y_explicit = y->category.c >= id_category::explicit_id;
+
+ return
+ x->num_dots < y->num_dots ? true :
+ x->num_dots > y->num_dots ? false :
+ x_explicit > y_explicit ? true :
+ x_explicit < y_explicit ? false :
+ order[x->index] < order[y->index];
+ }
+ };
+
+ struct get_placeholder_order_callback : xml_processor::callback
+ {
+ document_state_impl const& state;
+ std::vector<unsigned>& order;
+ unsigned count;
+
+ get_placeholder_order_callback(document_state_impl const& state,
+ std::vector<unsigned>& order)
+ : state(state),
+ order(order),
+ count(0)
+ {}
+
+ void id_value(boost::string_ref value)
+ {
+ set_placeholder_order(state.get_placeholder(value));
+ }
+
+ void set_placeholder_order(id_placeholder const* p)
+ {
+ if (p && !order[p->index]) {
+ set_placeholder_order(p->parent);
+ order[p->index] = ++count;
+ }
+ }
+ };
+
+ placeholder_index index_placeholders(
+ document_state_impl const& state,
+ boost::string_ref xml)
+ {
+ // The order in which the placeholders appear in the xml source.
+ std::vector<unsigned> order(state.placeholders.size());
+
+ xml_processor processor;
+ get_placeholder_order_callback callback(state, order);
+ processor.parse(xml, callback);
+
+ placeholder_index sorted_placeholders;
+ sorted_placeholders.reserve(state.placeholders.size());
+ BOOST_FOREACH(id_placeholder const& p, state.placeholders)
+ if (order[p.index]) sorted_placeholders.push_back(&p);
+ boost::sort(sorted_placeholders, placeholder_compare(order));
+
+ return sorted_placeholders;
+ }
+
+ // Resolve and generate ids.
+
+ struct generate_id_block_type
+ {
+ // The ids which won't require duplicate handling.
+ typedef boost::unordered_map<std::string, id_placeholder const*>
+ chosen_id_map;
+ chosen_id_map chosen_ids;
+ std::vector<std::string>& generated_ids;
+
+ generate_id_block_type(std::vector<std::string>& generated_ids) :
+ generated_ids(generated_ids) {}
+
+ void generate(placeholder_index::iterator begin,
+ placeholder_index::iterator end);
+
+ std::string resolve_id(id_placeholder const*);
+ std::string generate_id(id_placeholder const*, std::string const&);
+ };
+
+ void generate_id_block(placeholder_index::iterator begin,
+ placeholder_index::iterator end,
+ std::vector<std::string>& generated_ids)
+ {
+ generate_id_block_type impl(generated_ids);
+ impl.generate(begin, end);
+ }
+
+ void generate_id_block_type::generate(placeholder_index::iterator begin,
+ placeholder_index::iterator end)
+ {
+ std::vector<std::string> resolved_ids;
+
+ for (placeholder_index::iterator i = begin; i != end; ++i)
+ resolved_ids.push_back(resolve_id(*i));
+
+ unsigned index = 0;
+ for (placeholder_index::iterator i = begin; i != end; ++i, ++index)
+ {
+ generated_ids[(**i).index] =
+ generate_id(*i, resolved_ids[index]);
+ }
+ }
+
+ std::string generate_id_block_type::resolve_id(id_placeholder const* p)
+ {
+ std::string id = p->parent ?
+ generated_ids[p->parent->index] + "." + p->id :
+ p->id;
+
+ if (p->category.c > id_category::numbered) {
+ // Reserve the id if it isn't already reserved.
+ chosen_id_map::iterator pos = chosen_ids.emplace(id, p).first;
+
+ // If it was reserved by a placeholder with a lower category,
+ // then overwrite it.
+ if (p->category.c > pos->second->category.c)
+ pos->second = p;
+ }
+
+ return id;
+ }
+
+ std::string generate_id_block_type::generate_id(id_placeholder const* p,
+ std::string const& resolved_id)
+ {
+ if (p->category.c > id_category::numbered &&
+ chosen_ids.at(resolved_id) == p)
+ {
+ return resolved_id;
+ }
+
+ // Split the id into its parent part and child part.
+ //
+ // Note: can't just use the placeholder's parent, as the
+ // placeholder id might contain dots.
+ std::size_t child_start = resolved_id.rfind('.');
+ std::string parent_id, base_id;
+
+ if (child_start == std::string::npos) {
+ base_id = normalize_id(resolved_id, max_size - 1);
+ }
+ else {
+ parent_id = resolved_id.substr(0, child_start + 1);
+ base_id = normalize_id(resolved_id.substr(child_start + 1),
+ max_size - 1);
+ }
+
+ // Since we're adding digits, we don't want an id that ends in
+ // a digit.
+
+ unsigned int length = base_id.size();
+
+ if (length > 0 && std::isdigit(base_id[length - 1])) {
+ if (length < max_size - 1) {
+ base_id += '_';
+ ++length;
+ }
+ else {
+ while (length > 0 && std::isdigit(base_id[length -1]))
+ --length;
+ base_id.erase(length);
+ }
+ }
+
+ unsigned count = 0;
+
+ while (true)
+ {
+ std::string postfix =
+ boost::lexical_cast<std::string>(count++);
+
+ if ((base_id.size() + postfix.size()) > max_size) {
+ // The id is now too long, so reduce the length and
+ // start again.
+
+ // Would need a lot of ids to get this far....
+ if (length == 0) throw std::runtime_error("Too many ids");
+
+ // Trim a character.
+ --length;
+
+ // Trim any trailing digits.
+ while (length > 0 && std::isdigit(base_id[length -1]))
+ --length;
+
+ base_id.erase(length);
+ count = 0;
+ }
+ else {
+ // Try to reserve this id.
+ std::string generated_id = parent_id + base_id + postfix;
+
+ if (chosen_ids.emplace(generated_id, p).second) {
+ return generated_id;
+ }
+ }
+ }
+ }
+
+ //
+ // replace_ids
+ //
+ // Return a copy of the xml with all the placeholders replaced by
+ // generated_ids.
+ //
+
+ struct replace_ids_callback : xml_processor::callback
+ {
+ document_state_impl const& state;
+ std::vector<std::string> const* ids;
+ boost::string_ref::const_iterator source_pos;
+ std::string result;
+
+ replace_ids_callback(document_state_impl const& state,
+ std::vector<std::string> const* ids)
+ : state(state),
+ ids(ids),
+ source_pos(),
+ result()
+ {}
+
+ void start(boost::string_ref xml)
+ {
+ source_pos = xml.begin();
+ }
+
+ void id_value(boost::string_ref value)
+ {
+ if (id_placeholder const* p = state.get_placeholder(value))
+ {
+ boost::string_ref id = ids ?
+ (*ids)[p->index] : p->unresolved_id;
+
+ result.append(source_pos, value.begin());
+ result.append(id.begin(), id.end());
+ source_pos = value.end();
+ }
+ }
+
+ void finish(boost::string_ref xml)
+ {
+ result.append(source_pos, xml.end());
+ source_pos = xml.end();
+ }
+ };
+
+ std::string replace_ids(document_state_impl const& state, boost::string_ref xml,
+ std::vector<std::string> const* ids)
+ {
+ xml_processor processor;
+ replace_ids_callback callback(state, ids);
+ processor.parse(xml, callback);
+ return callback.result;
+ }
+
+ //
+ // normalize_id
+ //
+ // Normalizes generated ids.
+ //
+
+ std::string normalize_id(boost::string_ref src_id)
+ {
+ return normalize_id(src_id, max_size);
+ }
+
+ std::string normalize_id(boost::string_ref src_id, std::size_t size)
+ {
+ std::string id(src_id.begin(), src_id.end());
+
+ std::size_t src = 0;
+ std::size_t dst = 0;
+
+ while (src < id.length() && id[src] == '_') {
+ ++src;
+ }
+
+ if (src == id.length()) {
+ id = "_";
+ }
+ else {
+ while (src < id.length() && dst < size) {
+ if (id[src] == '_') {
+ do {
+ ++src;
+ } while(src < id.length() && id[src] == '_');
+
+ if (src < id.length()) id[dst++] = '_';
+ }
+ else {
+ id[dst++] = id[src++];
+ }
+ }
+
+ id.erase(dst);
+ }
+
+ return id;
+ }
+}
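Editorial note: the generation scheme added above has three moving parts: normalize the candidate id (collapse runs of underscores, truncate to max_size), avoid a trailing digit so an appended counter stays readable, then append 0, 1, 2, ... until the id is unused. The following standalone sketch (not part of this commit; the sketch:: names are mine and the re-trimming the real code performs when the counter overflows max_size is omitted) shows the same idea in miniature:

    #include <cctype>
    #include <cstddef>
    #include <iostream>
    #include <set>
    #include <string>

    namespace sketch
    {
        std::size_t const max_size = 32;

        // Collapse runs of '_', drop leading/trailing runs, and truncate,
        // roughly mirroring normalize_id above.
        std::string normalize(std::string const& src, std::size_t size)
        {
            std::string out;
            bool pending = false;
            for (std::size_t i = 0; i < src.size() && out.size() < size; ++i) {
                if (src[i] == '_') {
                    if (!out.empty()) pending = true;
                }
                else {
                    if (pending) { out += '_'; pending = false; }
                    if (out.size() < size) out += src[i];
                }
            }
            return out.empty() ? std::string("_") : out;
        }

        // Hand out `wanted` if it is free, otherwise append a counter.
        std::string make_unique(std::string const& wanted, std::set<std::string>& used)
        {
            if (used.insert(wanted).second) return wanted;

            std::string base = normalize(wanted, max_size - 1);
            if (std::isdigit(static_cast<unsigned char>(base[base.size() - 1])))
                base += '_';   // keep "intro1" and "intro" + counter distinguishable

            for (unsigned count = 0; ; ++count) {
                std::string candidate = base + std::to_string(count);
                if (used.insert(candidate).second) return candidate;
            }
        }
    }

    int main()
    {
        std::set<std::string> used;
        std::cout << sketch::make_unique("intro", used) << "\n";   // intro
        std::cout << sketch::make_unique("intro", used) << "\n";   // intro0
        std::cout << sketch::make_unique("intro", used) << "\n";   // intro1
        std::cout << sketch::normalize("__very___noisy__id", sketch::max_size)
                  << "\n";                                         // very_noisy_id
    }
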
diff --git a/tools/quickbook/src/id_manager.cpp b/tools/quickbook/src/id_manager.cpp
deleted file mode 100644
index 3b2f601a11..0000000000
--- a/tools/quickbook/src/id_manager.cpp
+++ /dev/null
@@ -1,1143 +0,0 @@
-/*=============================================================================
- Copyright (c) 2011 Daniel James
-
- Use, modification and distribution is subject to the Boost Software
- License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#include "id_manager.hpp"
-#include "utils.hpp"
-#include "string_ref.hpp"
-#include <boost/make_shared.hpp>
-#include <boost/unordered_map.hpp>
-#include <boost/lexical_cast.hpp>
-#include <boost/range/algorithm.hpp>
-#include <boost/foreach.hpp>
-#include <deque>
-#include <vector>
-#include <cctype>
-
-namespace quickbook
-{
- // TODO: This should possibly try to make sure generated ids are valid:
- // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
-
- //
- // Forward declarations
- //
-
- struct id_placeholder;
- struct id_data;
- std::string replace_ids(id_state& state, std::string const& xml,
- bool use_resolved_ids = true);
- std::string process_ids(id_state&, std::string const&);
-
- static const std::size_t max_size = 32;
-
- //
- // id_placeholder
- //
-
- struct id_placeholder
- {
- enum state_enum { child, unresolved, resolved, generated };
-
- unsigned index; // The position in the placeholder deque.
- state_enum generation_state;
- // Placeholder's position in generation
- // process.
- std::string unresolved_id;
- // The id that would be generated without any
- // duplicate handling.
- std::string id; // The id so far.
- id_placeholder* parent; // Placeholder of the parent id.
- // Only when generation_state == child
- id_category category;
- unsigned num_dots; // Number of dots in the id.
- // Normally equal to the section level.
- unsigned order; // Order of the placeholders in the generated
- // xml. Stored because it can be slightly
- // different to the order they're generated
- // in. e.g. for nested tables the cells
- // are processed before the title id.
- // Only set when processing ids.
- id_data* data; // Assigned data shared by duplicate ids
- // used to detect them. Only when
- // generation_state == resolved
-
- id_placeholder(
- unsigned index,
- std::string const& id,
- id_category category,
- id_placeholder* parent_ = 0)
- : index(index),
- generation_state(parent_ ? child : unresolved),
- unresolved_id(parent_ ? parent_->unresolved_id + '.' + id : id),
- id(id),
- parent(parent_),
- category(category),
- num_dots(boost::range::count(id, '.') +
- (parent_ ? parent_->num_dots + 1 : 0)),
- order(0),
- data(0)
- {
- }
-
- std::string to_string()
- {
- return '$' + boost::lexical_cast<std::string>(index);
- }
-
- bool check_state() const
- {
- return (
- (generation_state == child) == (bool) parent &&
- (generation_state == resolved) == (bool) data);
- }
-
- bool check_state(state_enum s) const
- {
- return s == generation_state && check_state();
- }
- };
-
- //
- // id_state
- //
-
- struct file_info;
- struct doc_info;
- struct section_info;
-
- struct id_state
- {
- boost::shared_ptr<file_info> current_file;
- std::deque<id_placeholder> placeholders;
-
- // Placeholder methods
-
- id_placeholder* add_placeholder(
- std::string const&, id_category, id_placeholder* parent = 0);
-
- id_placeholder* get_placeholder(string_ref);
-
- // Events
-
- id_placeholder* start_file(
- unsigned compatibility_version,
- bool document_root,
- std::string const& include_doc_id,
- std::string const& id,
- value const& title);
-
- void end_file();
-
- id_placeholder* add_id(
- std::string const& id,
- id_category category);
- id_placeholder* old_style_id(
- std::string const& id,
- id_category category);
- id_placeholder* begin_section(
- std::string const& id,
- id_category category);
- void end_section();
-
-private:
- id_placeholder* add_id_to_section(
- std::string const& id,
- id_category category,
- boost::shared_ptr<section_info> const& section);
- id_placeholder* create_new_section(
- std::string const& id,
- id_category category);
-
- void switch_section(id_placeholder*);
- void reswitch_sections(boost::shared_ptr<section_info> const&,
- boost::shared_ptr<section_info> const&);
- void restore_section();
- };
-
- struct file_info
- {
- boost::shared_ptr<file_info> parent;
- boost::shared_ptr<doc_info> document;
-
- bool document_root; // !parent || document != parent->document
- unsigned compatibility_version;
- boost::shared_ptr<section_info> switched_section;
- id_placeholder* original_placeholder;
-
- // The 1.1-1.5 document id would actually change per file due to
- // explicit ids in includes and a bug which would sometimes use the
- // document title instead of the id.
- std::string doc_id_1_1;
-
- file_info(boost::shared_ptr<file_info> const& parent,
- unsigned compatibility_version) :
- parent(parent), document(parent->document), document_root(false),
- compatibility_version(compatibility_version),
- switched_section(), original_placeholder()
- {}
-
- file_info(boost::shared_ptr<file_info> const& parent,
- boost::shared_ptr<doc_info> const& document,
- unsigned compatibility_version) :
- parent(parent), document(document), document_root(true),
- compatibility_version(compatibility_version),
- switched_section(), original_placeholder()
- {}
- };
-
- struct doc_info
- {
- boost::shared_ptr<section_info> current_section;
- std::string last_title_1_1;
- std::string section_id_1_1;
-
- doc_info() :
- current_section(), last_title_1_1(), section_id_1_1()
- {}
- };
-
- struct section_info
- {
- boost::shared_ptr<section_info> parent;
- unsigned compatibility_version;
- unsigned level;
- std::string id_1_1;
- id_placeholder* placeholder_1_6;
-
- section_info(boost::shared_ptr<section_info> const& parent,
- unsigned compatibility_version, std::string const& id) :
- parent(parent), compatibility_version(compatibility_version),
- level(parent ? parent->level + 1 : 1),
- id_1_1(), placeholder_1_6(0)
- {
- if (parent && compatibility_version < 106u) {
- id_1_1 = parent->id_1_1;
- if (!id_1_1.empty() && !id.empty())
- id_1_1 += ".";
- id_1_1 += id;
- }
- }
- };
-
- //
- // id_manager
- //
-
- id_manager::id_manager()
- : state(new id_state)
- {
- }
-
- id_manager::~id_manager() {}
-
- void id_manager::start_file(
- unsigned compatibility_version,
- std::string const& include_doc_id,
- std::string const& id,
- value const& title)
- {
- state->start_file(compatibility_version, false, include_doc_id, id, title);
- }
-
- std::string id_manager::start_file_with_docinfo(
- unsigned compatibility_version,
- std::string const& include_doc_id,
- std::string const& id,
- value const& title)
- {
- return state->start_file(compatibility_version, true, include_doc_id,
- id, title)->to_string();
- }
-
- void id_manager::end_file()
- {
- state->end_file();
- }
-
- std::string id_manager::begin_section(std::string const& id,
- id_category category)
- {
- return state->begin_section(id, category)->to_string();
- }
-
- void id_manager::end_section()
- {
- return state->end_section();
- }
-
- int id_manager::section_level() const
- {
- return state->current_file->document->current_section->level;
- }
-
- std::string id_manager::old_style_id(std::string const& id, id_category category)
- {
- return state->old_style_id(id, category)->to_string();
- }
-
- std::string id_manager::add_id(std::string const& id, id_category category)
- {
- return state->add_id(id, category)->to_string();
- }
-
- std::string id_manager::add_anchor(std::string const& id, id_category category)
- {
- return state->add_placeholder(id, category)->to_string();
- }
-
- std::string id_manager::replace_placeholders_with_unresolved_ids(
- std::string const& xml) const
- {
- return replace_ids(*state, xml, false);
- }
-
- std::string id_manager::replace_placeholders(std::string const& xml) const
- {
- assert(!state->current_file);
- return process_ids(*state, xml);
- }
-
- unsigned id_manager::compatibility_version() const
- {
- return state->current_file->compatibility_version;
- }
-
- //
- // normalize_id
- //
- // Normalizes generated ids.
- //
-
- namespace
- {
- std::string normalize_id(
- std::string src_id,
- std::size_t prefix = 0,
- std::size_t size = max_size)
- {
- std::string id;
- id.swap(src_id);
-
- std::size_t src = prefix;
- std::size_t dst = prefix;
- size += prefix;
-
- if (src >= id.length()) {
- return id;
- }
-
- while (src < id.length() && id[src] == '_') {
- ++src;
- }
-
- if (src >= id.length()) {
- id += '_';
- return id;
- }
-
- while (src < id.length() && dst < size) {
- if (id[src] == '_') {
- do {
- ++src;
- } while(src < id.length() && id[src] == '_');
-
- if (src < id.length()) id[dst++] = '_';
- }
- else {
- id[dst++] = id[src++];
- }
- }
-
- id.erase(dst);
-
- return id;
- }
- }
-
- //
- // id_state
- //
-
- id_placeholder* id_state::add_placeholder(
- std::string const& id, id_category category,
- id_placeholder* parent)
- {
- placeholders.push_back(id_placeholder(
- placeholders.size(), id, category, parent));
- return &placeholders.back();
- }
-
- id_placeholder* id_state::get_placeholder(string_ref value)
- {
- // If this isn't a placeholder id.
- if (value.size() <= 1 || *value.begin() != '$')
- return 0;
-
- unsigned index = boost::lexical_cast<int>(std::string(
- value.begin() + 1, value.end()));
-
- return &placeholders.at(index);
- }
-
- void id_state::switch_section(id_placeholder* p)
- {
- assert(!current_file->original_placeholder);
- current_file->switched_section = current_file->document->current_section;
- current_file->original_placeholder = current_file->switched_section->placeholder_1_6;
- current_file->switched_section->placeholder_1_6 = p;
- }
-
- void id_state::reswitch_sections(
- boost::shared_ptr<section_info> const& popped_section,
- boost::shared_ptr<section_info> const& parent_section)
- {
- boost::shared_ptr<file_info> file = current_file;
- boost::shared_ptr<file_info> first_switched_file;
-
- for (;;) {
- if (file->switched_section == popped_section)
- {
- first_switched_file = file;
- file->switched_section = parent_section;
- }
-
- if (file->document_root) break;
- file = file->parent;
- }
-
- if (first_switched_file) {
- first_switched_file->original_placeholder =
- parent_section->placeholder_1_6;
- parent_section->placeholder_1_6 =
- popped_section->placeholder_1_6;
- }
- }
-
- void id_state::restore_section()
- {
- if (current_file->original_placeholder) {
- current_file->switched_section->placeholder_1_6 =
- current_file->original_placeholder;
- }
- }
-
- id_placeholder* id_state::start_file(
- unsigned compatibility_version,
- bool document_root,
- std::string const& include_doc_id,
- std::string const& id,
- value const& title)
- {
- // Create new file
-
- boost::shared_ptr<file_info> parent = current_file;
-
- if (document_root) {
- current_file = boost::make_shared<file_info>(parent,
- boost::make_shared<doc_info>(),
- compatibility_version);
- }
- else {
- current_file =
- boost::make_shared<file_info>(parent, compatibility_version);
- }
-
- // Choose specified id to use. Prefer 'include_doc_id' (the id
- // specified in an 'include' element) unless backwards compatibility
- // is required.
-
- std::string initial_doc_id;
-
- if (document_root ||
- compatibility_version >= 106u ||
- (parent && parent->compatibility_version >= 106u)) {
- initial_doc_id = !include_doc_id.empty() ? include_doc_id : id;
- }
- else {
- initial_doc_id = !id.empty() ? id : include_doc_id;
- }
-
- // Set variables used for backwards compatible id generation.
- // They're a bit odd because of old bugs.
-
- if (document_root || compatibility_version < 106u) {
- // Note: this is done for older versions even if docinfo is
- // otherwise ignored.
-
- if (title.check())
- current_file->document->last_title_1_1 =
- title.get_quickbook();
-
- current_file->doc_id_1_1 = !initial_doc_id.empty() ? initial_doc_id :
- detail::make_identifier(current_file->document->last_title_1_1);
- }
- else if (parent) {
- current_file->doc_id_1_1 = parent->doc_id_1_1;
- }
-
- if (document_root) {
- if (!initial_doc_id.empty()) {
- return create_new_section(id, id_category::explicit_section_id);
- }
- else if (!title.empty()) {
- return create_new_section(
- detail::make_identifier(title.get_quickbook()),
- id_category::generated_doc);
- }
- else if (compatibility_version >= 106u) {
- return create_new_section("doc", id_category::numbered);
- }
- else {
- return create_new_section("", id_category::generated_doc);
- }
- }
- else {
- // If an id was set for the file, then switch the current section
- // with a new section with this id. This will be maintained in
- // 'end_section' if the current section ends, and then the original
- // section restored in 'end_file'
-
- if (compatibility_version >= 106u && !initial_doc_id.empty()) {
- switch_section(add_id_to_section(initial_doc_id,
- id_category::explicit_section_id,
- boost::shared_ptr<section_info>()));
- }
-
- return 0;
- }
- }
-
- void id_state::end_file()
- {
- restore_section();
- current_file = current_file->parent;
- }
-
- id_placeholder* id_state::add_id(
- std::string const& id,
- id_category category)
- {
- return add_id_to_section(id, category,
- current_file->document->current_section);
- }
-
- id_placeholder* id_state::add_id_to_section(
- std::string const& id,
- id_category category,
- boost::shared_ptr<section_info> const& section)
- {
- std::string id_part = id;
-
- // Note: Normalizing id according to file compatibility version, but
- // adding to section according to section compatibility version.
-
- if (current_file->compatibility_version >= 106u &&
- category.c < id_category::explicit_id) {
- id_part = normalize_id(id);
- }
-
- if(!section || section->compatibility_version >= 106u) {
- return add_placeholder(id_part, category,
- section ? section->placeholder_1_6 : 0);
- }
- else {
- std::string const& qualified_id = section->id_1_1;
-
- std::string new_id;
- if (!section->placeholder_1_6)
- new_id = current_file->doc_id_1_1;
- if (!new_id.empty() && !qualified_id.empty()) new_id += '.';
- new_id += qualified_id;
- if (!new_id.empty() && !id_part.empty()) new_id += '.';
- new_id += id_part;
-
- return add_placeholder(new_id, category,
- section->placeholder_1_6);
- }
- }
-
- id_placeholder* id_state::old_style_id(
- std::string const& id,
- id_category category)
- {
- return current_file->compatibility_version < 103u ?
- add_placeholder(
- current_file->document->section_id_1_1 + "." + id, category) :
- add_id(id, category);
- }
-
- id_placeholder* id_state::begin_section(
- std::string const& id,
- id_category category)
- {
- current_file->document->section_id_1_1 = id;
- return create_new_section(id, category);
- }
-
- id_placeholder* id_state::create_new_section(
- std::string const& id,
- id_category category)
- {
- boost::shared_ptr<section_info> parent =
- current_file->document->current_section;
-
- boost::shared_ptr<section_info> new_section =
- boost::make_shared<section_info>(parent,
- current_file->compatibility_version, id);
-
- id_placeholder* p;
-
- if (new_section->compatibility_version >= 106u) {
- p = add_id_to_section(id, category, parent);
- new_section->placeholder_1_6 = p;
- }
- else if (new_section->compatibility_version >= 103u) {
- if (parent)
- new_section->placeholder_1_6 = parent->placeholder_1_6;
-
- std::string new_id;
- if (!new_section->placeholder_1_6) {
- new_id = current_file->doc_id_1_1;
- if (!new_section->id_1_1.empty()) new_id += '.';
- }
- new_id += new_section->id_1_1;
-
- p = add_placeholder(new_id, category,
- new_section->placeholder_1_6);
- }
- else {
- if (parent)
- new_section->placeholder_1_6 = parent->placeholder_1_6;
-
- std::string new_id;
- if (parent && !new_section->placeholder_1_6)
- new_id = current_file->doc_id_1_1 + '.';
-
- new_id += id;
-
- p = add_placeholder(new_id, category,
- new_section->placeholder_1_6);
- }
-
- current_file->document->current_section = new_section;
-
- return p;
- }
-
- void id_state::end_section()
- {
- boost::shared_ptr<section_info> popped_section =
- current_file->document->current_section;
- current_file->document->current_section = popped_section->parent;
-
- reswitch_sections(popped_section, popped_section->parent);
- }
-
- //
- // Xml subset parser used for finding id values.
- //
- // I originally tried to integrate this into the post processor
- // but that proved tricky. Alternatively it could use a proper
- // xml parser, but I want this to be able to survive badly
- // marked up escapes.
- //
-
- struct xml_processor
- {
- xml_processor();
-
- std::vector<std::string> id_attributes;
-
- struct callback {
- virtual void start(string_ref) {}
- virtual void id_value(string_ref) {}
- virtual void finish(string_ref) {}
- virtual ~callback() {}
- };
-
- void parse(std::string const&, callback&);
- };
-
- namespace
- {
- char const* id_attributes_[] =
- {
- "id",
- "linkend",
- "linkends",
- "arearefs"
- };
- }
-
- xml_processor::xml_processor()
- {
- static int const n_id_attributes = sizeof(id_attributes_)/sizeof(char const*);
- for (int i = 0; i != n_id_attributes; ++i)
- {
- id_attributes.push_back(id_attributes_[i]);
- }
-
- boost::sort(id_attributes);
- }
-
- template <typename Iterator>
- bool read(Iterator& it, Iterator end, char const* text)
- {
- for(Iterator it2 = it;; ++it2, ++text) {
- if (!*text) {
- it = it2;
- return true;
- }
-
- if (it2 == end || *it2 != *text)
- return false;
- }
- }
-
- template <typename Iterator>
- void read_past(Iterator& it, Iterator end, char const* text)
- {
- while (it != end && !read(it, end, text)) ++it;
- }
-
- bool find_char(char const* text, char c)
- {
- for(;*text; ++text)
- if (c == *text) return true;
- return false;
- }
-
- template <typename Iterator>
- void read_some_of(Iterator& it, Iterator end, char const* text)
- {
- while(it != end && find_char(text, *it)) ++it;
- }
-
- template <typename Iterator>
- void read_to_one_of(Iterator& it, Iterator end, char const* text)
- {
- while(it != end && !find_char(text, *it)) ++it;
- }
-
- void xml_processor::parse(std::string const& source, callback& c)
- {
- typedef std::string::const_iterator iterator;
-
- string_ref source_ref(source.begin(), source.end());
- c.start(source_ref);
-
- iterator it = source_ref.begin(), end = source_ref.end();
-
- for(;;)
- {
- read_past(it, end, "<");
- if (it == end) break;
-
- if (read(it, end, "!--quickbook-escape-prefix-->"))
- {
- read_past(it, end, "<!--quickbook-escape-postfix-->");
- continue;
- }
-
- switch(*it)
- {
- case '?':
- ++it;
- read_past(it, end, "?>");
- break;
-
- case '!':
- if (read(it, end, "!--"))
- read_past(it, end, "-->");
- else
- read_past(it, end, ">");
- break;
-
- default:
- if ((*it >= 'a' && *it <= 'z') ||
- (*it >= 'A' && *it <= 'Z') ||
- *it == '_' || *it == ':')
- {
- read_to_one_of(it, end, " \t\n\r>");
-
- for (;;) {
- read_some_of(it, end, " \t\n\r");
- iterator name_start = it;
- read_to_one_of(it, end, "= \t\n\r>");
- if (it == end || *it == '>') break;
- string_ref name(name_start, it);
- ++it;
-
- read_some_of(it, end, "= \t\n\r");
- if (it == end || (*it != '"' && *it != '\'')) break;
-
- char delim = *it;
- ++it;
-
- iterator value_start = it;
-
- it = std::find(it, end, delim);
- if (it == end) break;
- string_ref value(value_start, it);
- ++it;
-
- if (boost::find(id_attributes, name)
- != id_attributes.end())
- {
- c.id_value(value);
- }
- }
- }
- else
- {
- read_past(it, end, ">");
- }
- }
- }
-
- c.finish(source_ref);
- }
-
- //
- // process_ids
- //
-
- //
- // Data used for generating placeholders that have duplicates.
- //
-
- struct id_generation_data
- {
- id_generation_data(std::string const& src_id)
- : child_start(src_id.rfind('.') + 1),
- id(normalize_id(src_id, child_start, max_size - 1)),
- // 'max_size - 1' leaves a character to append
- // a number.
- count(0)
- {
- if (std::isdigit(id[id.length() - 1]))
- {
- if (child_length() < max_size - 1)
- id += '_';
- else
- reduce_id();
- }
- }
-
- void reduce_id()
- {
- assert(id.length() > child_start);
- std::size_t length = id.length() - 1;
- while(length > child_start && std::isdigit(id[length - 1])) --length;
- id.erase(length);
- count = 0;
- }
-
- std::size_t child_length() const
- {
- return id.length() - child_start;
- }
-
- std::size_t child_start;
- std::string id;
- int count;
- };
-
- // Created for all desired ids, either when resolving an id or due to
- // generating a new id to avoid duplicates.
- struct id_data
- {
- id_data()
- : category(id_category::numbered),
- used(false),
- generation_data()
- {}
-
- void update_category(id_category c)
- {
- if (c.c > category.c) category = c;
- }
-
- id_category category; // The highest priority category of the
- // placeholders that want to use this id.
- bool used; // Whether this id has been used.
- boost::shared_ptr<id_generation_data> generation_data;
- // If duplicates are found, this is
- // created to generate new ids.
- //
- // Many to one relationship, because truncation
- // can lead to different ids contending for the
- // same id prefix.
- };
-
- typedef boost::unordered_map<std::string, id_data> allocated_ids;
- typedef std::vector<id_placeholder*> placeholder_index;
-
- placeholder_index index_placeholders(id_state&, std::string const& xml);
- void resolve_id(id_placeholder&, allocated_ids&);
- void generate_id(id_placeholder&, allocated_ids&);
-
- std::string process_ids(id_state& state, std::string const& xml)
- {
- placeholder_index placeholders = index_placeholders(state, xml);
-
- typedef std::vector<id_placeholder*>::iterator iterator;
-
- iterator it = placeholders.begin(),
- end = placeholders.end();
-
- // Placeholder ids are processed in blocks of ids with
- // an equal number of dots.
- while (it != end) {
- unsigned num_dots = (*it)->num_dots;
-
- // ids can't clash with ids at a different num_dots, so
- // this only needs to track the id generation data
- // for a single num_dots at a time.
- allocated_ids ids;
-
- iterator it2 = it;
- do {
- resolve_id(**it2++, ids);
- } while(it2 != end && (*it2)->num_dots == num_dots);
-
- do {
- generate_id(**it++, ids);
- } while(it != it2);
- }
-
- return replace_ids(state, xml);
- }
-
- //
- // index_placeholders
- //
- // Create a sorted index of the placeholders, in order
- // to make numbering duplicates easy. A total order.
- //
-
- struct placeholder_compare
- {
- bool operator()(id_placeholder* x, id_placeholder* y) const
- {
- bool x_explicit = x->category.c >= id_category::explicit_id;
- bool y_explicit = y->category.c >= id_category::explicit_id;
-
- return
- x->num_dots < y->num_dots ? true :
- x->num_dots > y->num_dots ? false :
- x_explicit > y_explicit ? true :
- x_explicit < y_explicit ? false :
- x->order < y->order;
- }
- };
-
- struct number_placeholders_callback : xml_processor::callback
- {
- id_state& state;
- unsigned count;
-
- number_placeholders_callback(id_state& state)
- : state(state),
- count(0)
- {}
-
- void id_value(string_ref value)
- {
- id_placeholder* p = state.get_placeholder(value);
- number(p);
- }
-
- void number(id_placeholder* p)
- {
- if (p && !p->order) {
- number(p->parent);
- p->order = ++count;
- }
- }
- };
-
- placeholder_index index_placeholders(
- id_state& state,
- std::string const& xml)
- {
- xml_processor processor;
- number_placeholders_callback callback(state);
- processor.parse(xml, callback);
-
- placeholder_index sorted_placeholders;
- sorted_placeholders.reserve(state.placeholders.size());
- BOOST_FOREACH(id_placeholder& p, state.placeholders)
- if (p.order) sorted_placeholders.push_back(&p);
- boost::sort(sorted_placeholders, placeholder_compare());
-
- return sorted_placeholders;
- }
-
- //
- // resolve_id
- //
- // Convert child ids to full ids, and add to the
- // allocated ids (although not yet set in stone because
- // there might be duplicates).
- //
- // Note that the parent ids have to be generated before resolving
- // the child id.
- //
-
- void resolve_id(id_placeholder& p, allocated_ids& ids)
- {
- if (p.generation_state == id_placeholder::child)
- {
- assert(p.check_state());
-
- assert(p.parent->check_state(id_placeholder::generated));
-
- p.id = p.parent->id + "." + p.id;
- p.generation_state = id_placeholder::unresolved;
- p.parent = 0;
- }
-
- assert(p.check_state(id_placeholder::unresolved));
-
- id_data& data = ids.emplace(p.id, id_data()).first->second;
- data.update_category(p.category);
-
- p.data = &data;
- p.generation_state = id_placeholder::resolved;
- }
-
- //
- // generate_id
- //
- // Finally generate the final id.
- //
-
- void register_generation_data(id_placeholder&, allocated_ids&);
-
- void generate_id(id_placeholder& p, allocated_ids& ids)
- {
- assert(p.check_state(id_placeholder::resolved));
-
- // If the placeholder id is available, then update data
- // and return.
- if (p.category == p.data->category && !p.data->used &&
- p.category.c != id_category::numbered)
- {
- p.data->used = true;
- p.generation_state = id_placeholder::generated;
- p.data = 0;
- return;
- }
-
- if (!p.data->generation_data)
- {
- p.data->generation_data =
- boost::make_shared<id_generation_data>(p.id);
- register_generation_data(p, ids);
- }
-
- // Loop until an available id is found.
- for(;;)
- {
- id_generation_data& generation_data = *p.data->generation_data;
-
- std::string postfix =
- boost::lexical_cast<std::string>(generation_data.count++);
-
- if (generation_data.child_length() + postfix.length() > max_size) {
- // The resulting id is too long, so move to a shorter id.
- generation_data.reduce_id();
- register_generation_data(p, ids);
- }
- else {
- std::string id = generation_data.id + postfix;
-
- if (ids.find(id) == ids.end()) {
- p.id.swap(id);
- p.generation_state = id_placeholder::generated;
- p.data = 0;
- return;
- }
- }
- }
- }
-
- // Every time the generation id is changed, this is called to
- // check if that id is already in use.
- void register_generation_data(id_placeholder& p, allocated_ids& ids)
- {
- std::string const& id = p.data->generation_data->id;
-
- id_data& new_data = ids.emplace(id, id_data()).first->second;
-
- // If there is already generation_data for the new id then use that.
- // Otherwise use the placeholder's existing generation_data.
- if (new_data.generation_data)
- p.data->generation_data = new_data.generation_data;
- else
- new_data.generation_data = p.data->generation_data;
- }
-
- //
- // replace_ids
- //
- // Return a copy of the xml with all the placeholders replaced by
- // generated_ids.
- //
-
- struct replace_ids_callback : xml_processor::callback
- {
- id_state& state;
- bool use_resolved_ids;
- std::string::const_iterator source_pos;
- std::string result;
-
- replace_ids_callback(id_state& state, bool resolved)
- : state(state),
- use_resolved_ids(resolved),
- source_pos(),
- result()
- {}
-
- void start(string_ref xml)
- {
- source_pos = xml.begin();
- }
-
- void id_value(string_ref value)
- {
- if (id_placeholder* p = state.get_placeholder(value))
- {
- assert(!use_resolved_ids ||
- p->check_state(id_placeholder::generated));
- std::string const& id = use_resolved_ids ?
- p->id : p->unresolved_id;
-
- result.append(source_pos, value.begin());
- result.append(id.begin(), id.end());
- source_pos = value.end();
- }
- }
-
- void finish(string_ref xml)
- {
- result.append(source_pos, xml.end());
- source_pos = xml.end();
- }
- };
-
- std::string replace_ids(id_state& state, std::string const& xml,
- bool use_unresolved_ids)
- {
- xml_processor processor;
- replace_ids_callback callback(state, use_unresolved_ids);
- processor.parse(xml, callback);
- return callback.result;
- }
-}
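Editorial note: both the removed id_manager and its document_state replacement rely on the same placeholder convention: every id is first emitted into the BoostBook output as the literal string "$<index>" (see to_string and get_placeholder above), and a final pass swaps each placeholder for the generated id. A standalone sketch of that final pass, ignoring the attribute-aware XML scan the real code performs (the function name is illustrative):

    #include <cctype>
    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <vector>

    // Replace every "$<digits>" token in `xml` with generated_ids[<digits>].
    // Illustrative only: the real pass only rewrites id-like attribute values.
    std::string replace_placeholders(std::string const& xml,
                                     std::vector<std::string> const& generated_ids)
    {
        std::string result;
        std::size_t i = 0;
        while (i < xml.size()) {
            if (xml[i] == '$') {
                std::size_t j = i + 1;
                while (j < xml.size() &&
                       std::isdigit(static_cast<unsigned char>(xml[j]))) ++j;
                if (j > i + 1) {
                    std::size_t index = std::stoul(xml.substr(i + 1, j - i - 1));
                    result += generated_ids.at(index);
                    i = j;
                    continue;
                }
            }
            result += xml[i++];
        }
        return result;
    }

    int main()
    {
        std::vector<std::string> ids;
        ids.push_back("doc");
        ids.push_back("doc.intro");

        std::cout << replace_placeholders(
            "<section id=\"$0\"><title id=\"$1\">Intro</title></section>", ids)
            << "\n";
    }
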
diff --git a/tools/quickbook/src/id_manager.hpp b/tools/quickbook/src/id_manager.hpp
deleted file mode 100644
index e071241773..0000000000
--- a/tools/quickbook/src/id_manager.hpp
+++ /dev/null
@@ -1,86 +0,0 @@
-/*=============================================================================
- Copyright (c) 2011 Daniel James
-
- Use, modification and distribution is subject to the Boost Software
- License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(BOOST_QUICKBOOK_ID_MANAGER_HPP)
-#define BOOST_QUICKBOOK_ID_MANAGER_HPP
-
-#include <boost/scoped_ptr.hpp>
-#include <string>
-#include "values.hpp"
-
-namespace quickbook
-{
- // id_category
- //
- // Higher categories get priority over lower ones.
-
- struct id_category
- {
- enum categories
- {
- default_category = 0,
- numbered, // Just used to avoid random docbook ids
- generated, // Generated ids for other elements.
- generated_heading, // Generated ids for headings.
- generated_section, // Generated ids for sections.
- generated_doc, // Generated ids for document.
- explicit_id, // Explicitly given by user
- explicit_section_id,
- explicit_anchor_id
- };
-
- id_category() : c(default_category) {}
- id_category(categories c) : c(c) {}
- explicit id_category(int c) : c(categories(c)) {}
-
- bool operator==(id_category rhs) const { return c == rhs.c; }
-
- categories c;
- };
-
- struct id_state;
- struct section_manager;
-
- struct id_manager
- {
- id_manager();
- ~id_manager();
-
- std::string start_file_with_docinfo(
- unsigned compatibility_version,
- std::string const& include_doc_id,
- std::string const& id,
- value const& title);
-
- void start_file(
- unsigned compatibility_version,
- std::string const& include_doc_id,
- std::string const& id,
- value const& title);
-
- void end_file();
-
- std::string begin_section(std::string const&, id_category);
- void end_section();
- int section_level() const;
-
- std::string old_style_id(std::string const&, id_category);
- std::string add_id(std::string const&, id_category);
- std::string add_anchor(std::string const&, id_category);
-
- std::string replace_placeholders_with_unresolved_ids(
- std::string const&) const;
- std::string replace_placeholders(std::string const&) const;
-
- unsigned compatibility_version() const;
- private:
- boost::scoped_ptr<id_state> state;
- };
-}
-
-#endif
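Editorial note: the category enumeration above is ordered by priority. When two placeholders resolve to the same id, resolve_id/generate_id in the new code let the highest category keep the plain id and push the others onto numbered variants. A minimal illustration of that rule (the enumerator ordering is copied from the header; the helper name is mine):

    #include <iostream>

    // Same ordering as id_category::categories above.
    enum category
    {
        default_category = 0,
        numbered,
        generated,
        generated_heading,
        generated_section,
        generated_doc,
        explicit_id,
        explicit_section_id,
        explicit_anchor_id
    };

    // The current holder of an id only keeps it if no higher-priority
    // claimant turns up.
    bool keeps_plain_id(category holder, category challenger)
    {
        return holder >= challenger;
    }

    int main()
    {
        std::cout << std::boolalpha
                  << keeps_plain_id(explicit_id, generated_section) << "\n"  // true
                  << keeps_plain_id(generated_heading, explicit_id) << "\n"; // false
    }
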
diff --git a/tools/quickbook/src/id_xml.cpp b/tools/quickbook/src/id_xml.cpp
new file mode 100644
index 0000000000..d69336b784
--- /dev/null
+++ b/tools/quickbook/src/id_xml.cpp
@@ -0,0 +1,153 @@
+/*=============================================================================
+ Copyright (c) 2011-2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "document_state_impl.hpp"
+#include "utils.hpp"
+#include <boost/range/algorithm.hpp>
+
+namespace quickbook
+{
+ namespace
+ {
+ char const* id_attributes_[] =
+ {
+ "id",
+ "linkend",
+ "linkends",
+ "arearefs"
+ };
+ }
+
+ xml_processor::xml_processor()
+ {
+ static int const n_id_attributes = sizeof(id_attributes_)/sizeof(char const*);
+ for (int i = 0; i != n_id_attributes; ++i)
+ {
+ id_attributes.push_back(id_attributes_[i]);
+ }
+
+ boost::sort(id_attributes);
+ }
+
+ template <typename Iterator>
+ bool read(Iterator& it, Iterator end, char const* text)
+ {
+ for(Iterator it2 = it;; ++it2, ++text) {
+ if (!*text) {
+ it = it2;
+ return true;
+ }
+
+ if (it2 == end || *it2 != *text)
+ return false;
+ }
+ }
+
+ template <typename Iterator>
+ void read_past(Iterator& it, Iterator end, char const* text)
+ {
+ while (it != end && !read(it, end, text)) ++it;
+ }
+
+ bool find_char(char const* text, char c)
+ {
+ for(;*text; ++text)
+ if (c == *text) return true;
+ return false;
+ }
+
+ template <typename Iterator>
+ void read_some_of(Iterator& it, Iterator end, char const* text)
+ {
+ while(it != end && find_char(text, *it)) ++it;
+ }
+
+ template <typename Iterator>
+ void read_to_one_of(Iterator& it, Iterator end, char const* text)
+ {
+ while(it != end && !find_char(text, *it)) ++it;
+ }
+
+ void xml_processor::parse(boost::string_ref source, callback& c)
+ {
+ typedef boost::string_ref::const_iterator iterator;
+
+ c.start(source);
+
+ iterator it = source.begin(), end = source.end();
+
+ for(;;)
+ {
+ read_past(it, end, "<");
+ if (it == end) break;
+
+ if (read(it, end, "!--quickbook-escape-prefix-->"))
+ {
+ read_past(it, end, "<!--quickbook-escape-postfix-->");
+ continue;
+ }
+
+ switch(*it)
+ {
+ case '?':
+ ++it;
+ read_past(it, end, "?>");
+ break;
+
+ case '!':
+ if (read(it, end, "!--"))
+ read_past(it, end, "-->");
+ else
+ read_past(it, end, ">");
+ break;
+
+ default:
+ if ((*it >= 'a' && *it <= 'z') ||
+ (*it >= 'A' && *it <= 'Z') ||
+ *it == '_' || *it == ':')
+ {
+ read_to_one_of(it, end, " \t\n\r>");
+
+ for (;;) {
+ read_some_of(it, end, " \t\n\r");
+ iterator name_start = it;
+ read_to_one_of(it, end, "= \t\n\r>");
+ if (it == end || *it == '>') break;
+ boost::string_ref name(name_start, it - name_start);
+ ++it;
+
+ read_some_of(it, end, "= \t\n\r");
+ if (it == end || (*it != '"' && *it != '\'')) break;
+
+ char delim = *it;
+ ++it;
+
+ iterator value_start = it;
+
+ it = std::find(it, end, delim);
+ if (it == end) break;
+ boost::string_ref value(value_start, it - value_start);
+ ++it;
+
+ if (boost::find(id_attributes, detail::to_s(name))
+ != id_attributes.end())
+ {
+ c.id_value(value);
+ }
+ }
+ }
+ else
+ {
+ read_past(it, end, ">");
+ }
+ }
+ }
+
+ c.finish(source);
+ }
+}
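Editorial note: a usage sketch for the parser above. It assumes the xml_processor and callback declarations from document_state_impl.hpp (also added by this commit) are visible, so it only builds inside the quickbook source tree; the callback name is illustrative:

    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <vector>
    #include <boost/utility/string_ref.hpp>
    #include "document_state_impl.hpp"

    namespace quickbook
    {
        // Collect every id-like attribute value ("id", "linkend", ...) found
        // in a fragment of generated BoostBook.
        struct collect_ids_callback : xml_processor::callback
        {
            std::vector<std::string> found;

            void id_value(boost::string_ref value)
            {
                found.push_back(std::string(value.begin(), value.end()));
            }
        };
    }

    int main()
    {
        quickbook::xml_processor processor;
        quickbook::collect_ids_callback callback;

        processor.parse(
            "<section id=\"$0\"><link linkend=\"$1\">text</link></section>",
            callback);

        for (std::size_t i = 0; i < callback.found.size(); ++i)
            std::cout << callback.found[i] << "\n";   // prints $0 then $1
    }
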
diff --git a/tools/quickbook/src/include_paths.cpp b/tools/quickbook/src/include_paths.cpp
new file mode 100644
index 0000000000..de910dcbaf
--- /dev/null
+++ b/tools/quickbook/src/include_paths.cpp
@@ -0,0 +1,291 @@
+/*=============================================================================
+ Copyright (c) 2002 2004 2006 Joel de Guzman
+ Copyright (c) 2004 Eric Niebler
+ Copyright (c) 2005 Thomas Guest
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "native_text.hpp"
+#include "glob.hpp"
+#include "include_paths.hpp"
+#include "state.hpp"
+#include "utils.hpp"
+#include "quickbook.hpp" // For the include_path global (yuck)
+#include <boost/foreach.hpp>
+#include <boost/range/algorithm/replace.hpp>
+#include <boost/filesystem/operations.hpp>
+#include <cassert>
+
+namespace quickbook
+{
+ //
+ // check_path
+ //
+
+ path_parameter check_path(value const& path, quickbook::state& state)
+ {
+ if (qbk_version_n >= 107u) {
+ std::string path_text = path.get_encoded();
+
+ try {
+ if (check_glob(path_text)) {
+ return path_parameter(path_text, path_parameter::glob);
+ }
+ else {
+ return path_parameter(glob_unescape(path_text),
+ path_parameter::path);
+ }
+ } catch(glob_error& e) {
+ detail::outerr(path.get_file(), path.get_position())
+ << "Invalid path (" << e.what() << "): "
+ << path_text
+ << std::endl;
+ ++state.error_count;
+ return path_parameter(path_text, path_parameter::invalid);
+ }
+ }
+ else {
+ // Paths are encoded for quickbook 1.6+ and also xmlbase
+ // values (technically xmlbase is a 1.6 feature, but that
+ // isn't enforced as it's backwards compatible).
+ //
+ // Counter-intuitively: encoded == plain text here.
+
+ std::string path_text = qbk_version_n >= 106u || path.is_encoded() ?
+ path.get_encoded() : detail::to_s(path.get_quickbook());
+
+ if (path_text.find('\\') != std::string::npos)
+ {
+ quickbook::detail::ostream* err;
+
+ if (qbk_version_n >= 106u) {
+ err = &detail::outerr(path.get_file(), path.get_position());
+ ++state.error_count;
+ }
+ else {
+ err = &detail::outwarn(path.get_file(), path.get_position());
+ }
+
+ *err << "Path isn't portable: '"
+ << path_text
+ << "'"
+ << std::endl;
+
+ boost::replace(path_text, '\\', '/');
+ }
+
+ return path_parameter(path_text, path_parameter::path);
+ }
+ }
+
+ //
+ // Search include path
+ //
+
+ void include_search_glob(std::set<quickbook_path> & result,
+ quickbook_path const& location,
+ std::string path, quickbook::state& state)
+ {
+ std::size_t glob_pos = find_glob_char(path);
+
+ if (glob_pos == std::string::npos)
+ {
+ quickbook_path complete_path = location / glob_unescape(path);
+
+ if (fs::exists(complete_path.file_path))
+ {
+ state.dependencies.add_glob_match(complete_path.file_path);
+ result.insert(complete_path);
+ }
+ return;
+ }
+
+ std::size_t prev = path.rfind('/', glob_pos);
+ std::size_t next = path.find('/', glob_pos);
+
+ std::size_t glob_begin = prev == std::string::npos ? 0 : prev + 1;
+ std::size_t glob_end = next == std::string::npos ? path.size() : next;
+
+ quickbook_path new_location = location;
+
+ if (prev != std::string::npos) {
+ new_location /= glob_unescape(path.substr(0, prev));
+ }
+
+ if (next != std::string::npos) ++next;
+
+ boost::string_ref glob(
+ path.data() + glob_begin,
+ glob_end - glob_begin);
+
+ fs::path base_dir = new_location.file_path.empty() ?
+ fs::path(".") : new_location.file_path;
+ if (!fs::is_directory(base_dir)) return;
+
+ // Walk through the dir for matches.
+ for (fs::directory_iterator dir_i(base_dir), dir_e;
+ dir_i != dir_e; ++dir_i)
+ {
+ fs::path f = dir_i->path().filename();
+ std::string generic_path = detail::path_to_generic(f);
+
+ // Skip if the dir item doesn't match.
+ if (!quickbook::glob(glob, generic_path)) continue;
+
+ // If it's a file we add it to the results.
+ if (next == std::string::npos)
+ {
+ if (fs::is_regular_file(dir_i->status()))
+ {
+ quickbook_path r = new_location / generic_path;
+ state.dependencies.add_glob_match(r.file_path);
+ result.insert(r);
+ }
+ }
+ // If it's a matching dir, we recurse looking for more files.
+ else
+ {
+ if (!fs::is_regular_file(dir_i->status()))
+ {
+ include_search_glob(result, new_location / generic_path,
+ path.substr(next), state);
+ }
+ }
+ }
+ }
+
+ std::set<quickbook_path> include_search(path_parameter const& parameter,
+ quickbook::state& state, string_iterator pos)
+ {
+ std::set<quickbook_path> result;
+
+ switch (parameter.type) {
+ case path_parameter::glob:
+ // If the path has some glob match characters
+ // we discover all of the matches.
+ {
+ fs::path current = state.current_file->path.parent_path();
+
+ // Search for the current dir accumulating to the result.
+ state.dependencies.add_glob(current / parameter.value);
+ include_search_glob(result, state.current_path.parent_path(),
+ parameter.value, state);
+
+ // Search the include path dirs accumulating to the result.
+ unsigned count = 0;
+ BOOST_FOREACH(fs::path dir, include_path)
+ {
+ ++count;
+ state.dependencies.add_glob(dir / parameter.value);
+ include_search_glob(result,
+ quickbook_path(dir, count, fs::path()),
+ parameter.value, state);
+ }
+
+ // Done.
+ return result;
+ }
+
+ case path_parameter::path:
+ {
+ fs::path path = detail::generic_to_path(parameter.value);
+
+ // If the path is relative, try and resolve it.
+ if (!path.has_root_directory() && !path.has_root_name())
+ {
+ quickbook_path path2 =
+ state.current_path.parent_path() / parameter.value;
+
+ // See if it can be found locally first.
+ if (state.dependencies.add_dependency(path2.file_path))
+ {
+ result.insert(path2);
+ return result;
+ }
+
+ // Search in each of the include path locations.
+ unsigned count = 0;
+ BOOST_FOREACH(fs::path full, include_path)
+ {
+ ++count;
+ full /= path;
+
+ if (state.dependencies.add_dependency(full))
+ {
+ result.insert(quickbook_path(full, count, path));
+ return result;
+ }
+ }
+ }
+ else
+ {
+ if (state.dependencies.add_dependency(path)) {
+ result.insert(quickbook_path(path, 0, path));
+ return result;
+ }
+ }
+
+ detail::outerr(state.current_file, pos)
+ << "Unable to find file: "
+ << parameter.value
+ << std::endl;
+ ++state.error_count;
+
+ return result;
+ }
+
+ case path_parameter::invalid:
+ return result;
+
+ default:
+ assert(0);
+ return result;
+ }
+ }
+
+ //
+ // quickbook_path
+ //
+
+ void swap(quickbook_path& x, quickbook_path& y) {
+ boost::swap(x.file_path, y.file_path);
+ boost::swap(x.include_path_offset, y.include_path_offset);
+ boost::swap(x.abstract_file_path, y.abstract_file_path);
+ }
+
+ bool quickbook_path::operator<(quickbook_path const& other) const
+ {
+ // TODO: Is comparing file_path redundant? If include_path_offset and
+ // abstract_file_path are equal, surely file_path must be equal too
+ // (but not vice versa).
+ return
+ abstract_file_path != other.abstract_file_path ?
+ abstract_file_path < other.abstract_file_path :
+ include_path_offset != other.include_path_offset ?
+ include_path_offset < other.include_path_offset :
+ file_path < other.file_path;
+ }
+
+ quickbook_path quickbook_path::operator/(boost::string_ref x) const
+ {
+ return quickbook_path(*this) /= x;
+ }
+
+ quickbook_path& quickbook_path::operator/=(boost::string_ref x)
+ {
+ fs::path x2 = detail::generic_to_path(x);
+ file_path /= x2;
+ abstract_file_path /= x2;
+ return *this;
+ }
+
+ quickbook_path quickbook_path::parent_path() const
+ {
+ return quickbook_path(file_path.parent_path(), include_path_offset,
+ abstract_file_path.parent_path());
+ }
+}
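Editorial note: include_search_glob above splits the pattern at the first wildcard segment, scans that directory, and recurses with the remainder of the pattern for every matching subdirectory. The following standalone sketch shows the same walk using C++17 std::filesystem, a '*'-only matcher, and no dependency tracking — all simplifications relative to what the real code does:

    #include <filesystem>
    #include <iostream>
    #include <set>
    #include <string>

    namespace fs = std::filesystem;

    // True if `name` matches `pattern`, where '*' matches any run of characters.
    bool match(std::string const& pattern, std::string const& name,
               std::size_t p = 0, std::size_t n = 0)
    {
        if (p == pattern.size()) return n == name.size();
        if (pattern[p] == '*') {
            for (std::size_t k = n; k <= name.size(); ++k)
                if (match(pattern, name, p + 1, k)) return true;
            return false;
        }
        return n < name.size() && pattern[p] == name[n]
            && match(pattern, name, p + 1, n + 1);
    }

    void search(fs::path const& dir, std::string pattern, std::set<fs::path>& result)
    {
        std::size_t slash = pattern.find('/');
        std::string head = pattern.substr(0, slash);
        std::string tail = slash == std::string::npos ? "" : pattern.substr(slash + 1);

        if (head.find('*') == std::string::npos) {
            // No wildcard in this segment: descend (or test the file) directly.
            fs::path next = dir / head;
            if (tail.empty()) { if (fs::is_regular_file(next)) result.insert(next); }
            else if (fs::is_directory(next)) search(next, tail, result);
            return;
        }

        if (!fs::is_directory(dir)) return;
        for (fs::directory_entry const& entry : fs::directory_iterator(dir)) {
            std::string name = entry.path().filename().string();
            if (!match(head, name)) continue;
            if (tail.empty()) { if (entry.is_regular_file()) result.insert(entry.path()); }
            else if (entry.is_directory()) search(entry.path(), tail, result);
        }
    }

    int main()
    {
        std::set<fs::path> found;
        search(".", "*/*.qbk", found);                   // e.g. doc/quickbook.qbk
        for (fs::path const& p : found) std::cout << p << "\n";
    }
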
diff --git a/tools/quickbook/src/include_paths.hpp b/tools/quickbook/src/include_paths.hpp
new file mode 100644
index 0000000000..2139f9c99f
--- /dev/null
+++ b/tools/quickbook/src/include_paths.hpp
@@ -0,0 +1,68 @@
+/*=============================================================================
+ Copyright (c) 2002 2004 2006 Joel de Guzman
+ Copyright (c) 2004 Eric Niebler
+ Copyright (c) 2005 Thomas Guest
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#if !defined(BOOST_QUICKBOOK_INCLUDE_PATHS_HPP)
+#define BOOST_QUICKBOOK_INCLUDE_PATHS_HPP
+
+// Classes and functions for dealing with the values from include, import and
+// xinclude elements.
+
+#include "fwd.hpp"
+#include "values.hpp"
+#include <set>
+#include <string>
+#include <boost/filesystem/path.hpp>
+
+namespace quickbook
+{
+ struct path_parameter {
+ // Will possibly add 'url' to this list later:
+ enum path_type { invalid, path, glob };
+
+ std::string value;
+ path_type type;
+
+ path_parameter(std::string const& value, path_type type) :
+ value(value), type(type) {}
+ };
+
+ path_parameter check_path(value const& path, quickbook::state& state);
+
+ struct quickbook_path
+ {
+ quickbook_path(fs::path const& x, unsigned offset, fs::path const& y)
+ : file_path(x), include_path_offset(offset), abstract_file_path(y) {}
+
+ friend void swap(quickbook_path&, quickbook_path&);
+
+ quickbook_path parent_path() const;
+
+ bool operator<(quickbook_path const& other) const;
+ quickbook_path operator/(boost::string_ref) const;
+ quickbook_path& operator/=(boost::string_ref);
+
+ // The actual location of the file.
+ fs::path file_path;
+
+ // The member of the include path that this file is relative to.
+ // (1-indexed, 0 == original quickbook file)
+ unsigned include_path_offset;
+
+ // A machine independent representation of the file's
+ // path - not unique per-file
+ fs::path abstract_file_path;
+ };
+
+ std::set<quickbook_path> include_search(path_parameter const&,
+ quickbook::state& state, string_iterator pos);
+}
+
+#endif
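Editorial note: a small usage sketch for quickbook_path, relying on the declarations above and Boost.Filesystem, so again in-tree only. The point of include_path_offset in operator< is that the same relative file pulled in through two different include-path entries stays distinct, e.g. in the std::set returned by include_search; the paths below are made up:

    #include <iostream>
    #include <set>
    #include <boost/filesystem/path.hpp>
    #include "include_paths.hpp"

    int main()
    {
        using quickbook::quickbook_path;

        // Same abstract path, found through include path entries 1 and 2.
        quickbook_path a(boost::filesystem::path("/opt/doc1/common.qbk"), 1,
                         boost::filesystem::path("common.qbk"));
        quickbook_path b(boost::filesystem::path("/opt/doc2/common.qbk"), 2,
                         boost::filesystem::path("common.qbk"));

        std::set<quickbook_path> paths;
        paths.insert(a);
        paths.insert(b);

        std::cout << paths.size() << "\n";   // 2: the offset breaks the tie
    }
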
diff --git a/tools/quickbook/src/input_path.cpp b/tools/quickbook/src/input_path.cpp
deleted file mode 100644
index 9b6a87784d..0000000000
--- a/tools/quickbook/src/input_path.cpp
+++ /dev/null
@@ -1,318 +0,0 @@
-/*=============================================================================
- Copyright (c) 2009 Daniel James
-
- Use, modification and distribution is subject to the Boost Software
- License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#include <boost/program_options.hpp>
-#include <iostream>
-#include "input_path.hpp"
-#include "utils.hpp"
-#include "files.hpp"
-
-#if QUICKBOOK_WIDE_PATHS || QUICKBOOK_WIDE_STREAMS
-#include <boost/scoped_ptr.hpp>
-#include <windows.h>
-#include <io.h>
-#include <fcntl.h>
-#endif
-
-#if QUICKBOOK_CYGWIN_PATHS
-#include <boost/scoped_array.hpp>
-#include <boost/program_options/errors.hpp>
-#include <sys/cygwin.h>
-#endif
-
-namespace quickbook {
- extern bool ms_errors;
-}
-
-namespace quickbook {
-namespace detail {
-
-// This is used for converting paths to UTF-8 on cygwin.
-// Might be better not to use a windows
-#if QUICKBOOK_WIDE_PATHS || QUICKBOOK_WIDE_STREAMS
- namespace {
- std::string to_utf8(std::wstring const& x)
- {
- int buffer_count = WideCharToMultiByte(CP_UTF8, 0, x.c_str(), -1, 0, 0, 0, 0);
-
- if (!buffer_count)
- throw conversion_error("Error converting wide string to utf-8.");
-
- boost::scoped_ptr<char> buffer(new char[buffer_count]);
-
- if (!WideCharToMultiByte(CP_UTF8, 0, x.c_str(), -1, buffer.get(), buffer_count, 0, 0))
- throw conversion_error("Error converting wide string to utf-8.");
-
- return std::string(buffer.get());
- }
-
- std::wstring from_utf8(std::string const& x)
- {
- int buffer_count = MultiByteToWideChar(CP_UTF8, 0, x.c_str(), -1, 0, 0);
-
- if (!buffer_count)
- throw conversion_error("Error converting utf-8 to wide string.");
-
- boost::scoped_ptr<wchar_t> buffer(new wchar_t[buffer_count]);
-
- if (!MultiByteToWideChar(CP_UTF8, 0, x.c_str(), -1, buffer.get(), buffer_count))
- throw conversion_error("Error converting utf-8 to wide string.");
-
- return std::wstring(buffer.get());
- }
- }
-#endif
-
-#if QUICKBOOK_WIDE_PATHS
- std::string input_to_utf8(input_string const& x)
- {
- return to_utf8(x);
- }
-#else
- std::string input_to_utf8(input_string const& x)
- {
- return x;
- }
-#endif
-
-#if QUICKBOOK_WIDE_PATHS
- fs::path generic_to_path(std::string const& x)
- {
- return fs::path(from_utf8(x));
- }
-
- std::string path_to_generic(fs::path const& x)
- {
- return to_utf8(x.generic_wstring());
- }
-#else
- fs::path generic_to_path(std::string const& x)
- {
- return fs::path(x);
- }
-
- std::string path_to_generic(fs::path const& x)
- {
- return x.generic_string();
- }
-
-#endif
-
-#if QUICKBOOK_CYGWIN_PATHS
- fs::path input_to_path(input_string const& path)
- {
- cygwin_conv_path_t flags = CCP_POSIX_TO_WIN_W | CCP_RELATIVE;
-
- ssize_t size = cygwin_conv_path(flags, path.c_str(), NULL, 0);
-
- if (size < 0)
- throw conversion_error("Error converting cygwin path to windows.");
-
- boost::scoped_array<char> result(new char[size]);
- void* ptr = result.get();
-
- if(cygwin_conv_path(flags, path.c_str(), ptr, size))
- throw conversion_error("Error converting cygwin path to windows.");
-
- return fs::path(static_cast<wchar_t*>(ptr));
- }
-
- ostream::string path_to_stream(fs::path const& path)
- {
- cygwin_conv_path_t flags = CCP_WIN_W_TO_POSIX | CCP_RELATIVE;
-
- ssize_t size = cygwin_conv_path(flags, path.native().c_str(), NULL, 0);
-
- if (size < 0)
- throw conversion_error("Error converting windows path to cygwin.");
-
- boost::scoped_array<char> result(new char[size]);
-
- if(cygwin_conv_path(flags, path.native().c_str(), result.get(), size))
- throw conversion_error("Error converting windows path to cygwin.");
-
- return std::string(result.get());
- }
-#else
- fs::path input_to_path(input_string const& path)
- {
- return fs::path(path);
- }
-
-#if QUICKBOOK_WIDE_PATHS && !QUICKBOOK_WIDE_STREAMS
- ostream::string path_to_stream(fs::path const& path)
- {
- return path.string();
- }
-#else
- ostream::string path_to_stream(fs::path const& path)
- {
- return path.native();
- }
-#endif
-
-#endif // QUICKBOOK_CYGWIN_PATHS
-
-#if QUICKBOOK_WIDE_STREAMS
-
- void initialise_output()
- {
- if (_isatty(_fileno(stdout))) _setmode(_fileno(stdout), _O_U16TEXT);
- if (_isatty(_fileno(stderr))) _setmode(_fileno(stderr), _O_U16TEXT);
- }
-
- void write_utf8(ostream::base_ostream& out, std::string const& x)
- {
- out << from_utf8(x);
- }
-
- ostream& out()
- {
- static ostream x(std::wcout);
- return x;
- }
-
- namespace
- {
- inline ostream& error_stream()
- {
- static ostream x(std::wcerr);
- return x;
- }
- }
-
-#else
-
- void initialise_output()
- {
- }
-
- void write_utf8(ostream::base_ostream& out, std::string const& x)
- {
- out << x;
- }
-
- ostream& out()
- {
- static ostream x(std::cout);
- return x;
- }
-
- namespace
- {
- inline ostream& error_stream()
- {
- static ostream x(std::clog);
- return x;
- }
- }
-
-#endif
-
- ostream& outerr()
- {
- return error_stream() << "Error: ";
- }
-
- ostream& outerr(fs::path const& file, int line)
- {
- if (line >= 0)
- {
- if (ms_errors)
- return error_stream() << path_to_stream(file) << "(" << line << "): error: ";
- else
- return error_stream() << path_to_stream(file) << ":" << line << ": error: ";
- }
- else
- {
- return error_stream() << path_to_stream(file) << ": error: ";
- }
- }
-
- ostream& outerr(file_ptr const& f, string_iterator pos)
- {
- return outerr(f->path, f->position_of(pos).line);
- }
-
- ostream& outwarn(fs::path const& file, int line)
- {
- if (line >= 0)
- {
- if (ms_errors)
- return error_stream() << path_to_stream(file) << "(" << line << "): warning: ";
- else
- return error_stream() << path_to_stream(file) << ":" << line << ": warning: ";
- }
- else
- {
- return error_stream() << path_to_stream(file) << ": warning: ";
- }
- }
-
- ostream& outwarn(file_ptr const& f, string_iterator pos)
- {
- return outwarn(f->path, f->position_of(pos).line);
- }
-
- ostream& ostream::operator<<(char c) {
- assert(c > 0 && c <= 127);
- base << c;
- return *this;
- }
-
- inline bool check_ascii(char const* x) {
- for(;*x;++x) if(*x <= 0 || *x > 127) return false;
- return true;
- }
-
- ostream& ostream::operator<<(char const* x) {
- assert(check_ascii(x));
- base << x;
- return *this;
- }
-
- ostream& ostream::operator<<(std::string const& x) {
- write_utf8(base, x);
- return *this;
- }
-
- ostream& ostream::operator<<(int x) {
- base << x;
- return *this;
- }
-
- ostream& ostream::operator<<(unsigned int x) {
- base << x;
- return *this;
- }
-
- ostream& ostream::operator<<(long x) {
- base << x;
- return *this;
- }
-
- ostream& ostream::operator<<(unsigned long x) {
- base << x;
- return *this;
- }
-
- ostream& ostream::operator<<(fs::path const& x) {
- base << path_to_stream(x);
- return *this;
- }
-
- ostream& ostream::operator<<(base_ostream& (*x)(base_ostream&)) {
- base << x;
- return *this;
- }
-
- ostream& ostream::operator<<(base_ios& (*x)(base_ios&)) {
- base << x;
- return *this;
- }
-}}
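Editorial note: the outerr/outwarn helpers removed here pick between two diagnostic layouts based on the ms_errors global — Visual Studio's "file(line): error:" and GCC's "file:line: error:" — so that IDEs can parse the messages. A standalone, purely illustrative sketch of the two formats (names are mine):

    #include <iostream>
    #include <sstream>
    #include <string>

    // Format an error prefix the way the removed outerr() did, with or
    // without the Microsoft-style layout.
    std::string format_error(std::string const& file, int line,
                             std::string const& message, bool ms_errors)
    {
        std::ostringstream out;
        if (line >= 0) {
            if (ms_errors) out << file << "(" << line << "): error: ";
            else           out << file << ":" << line << ": error: ";
        }
        else {
            out << file << ": error: ";
        }
        out << message;
        return out.str();
    }

    int main()
    {
        std::cout << format_error("doc/quickbook.qbk", 12, "unknown element", true)  << "\n";
        std::cout << format_error("doc/quickbook.qbk", 12, "unknown element", false) << "\n";
    }
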
diff --git a/tools/quickbook/src/input_path.hpp b/tools/quickbook/src/input_path.hpp
deleted file mode 100644
index a9b55f6b2c..0000000000
--- a/tools/quickbook/src/input_path.hpp
+++ /dev/null
@@ -1,130 +0,0 @@
-/*=============================================================================
- Copyright (c) 2009 Daniel James
-
- Use, modification and distribution is subject to the Boost Software
- License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(BOOST_QUICKBOOK_DETAIL_INPUT_PATH_HPP)
-#define BOOST_QUICKBOOK_DETAIL_INPUT_PATH_HPP
-
-#include <boost/config.hpp>
-#include <boost/filesystem/path.hpp>
-#include <string>
-#include <stdexcept>
-#include <iostream>
-#include "fwd.hpp"
-
-#if defined(__cygwin__) || defined(__CYGWIN__)
-# define QUICKBOOK_CYGWIN_PATHS 1
-#elif defined(_WIN32)
-# define QUICKBOOK_WIDE_PATHS 1
-# if defined(BOOST_MSVC) && BOOST_MSVC >= 1400
-# define QUICKBOOK_WIDE_STREAMS 1
-# endif
-#endif
-
-#if !defined(QUICKBOOK_WIDE_PATHS)
-#define QUICKBOOK_WIDE_PATHS 0
-#endif
-
-#if !defined(QUICKBOOK_WIDE_STREAMS)
-#define QUICKBOOK_WIDE_STREAMS 0
-#endif
-
-#if !defined(QUICKBOOK_CYGWIN_PATHS)
-#define QUICKBOOK_CYGWIN_PATHS 0
-#endif
-
-namespace quickbook
-{
- namespace fs = boost::filesystem;
-
- namespace detail
- {
- struct conversion_error : std::runtime_error
- {
- conversion_error(char const* m) : std::runtime_error(m) {}
- };
-
- // 'generic': Paths in quickbook source and the generated boostbook.
- // Always UTF-8.
- // 'input': Paths (or other parameters) from the command line and
- // possibly other sources in the future. Wide strings on
- // normal windows, UTF-8 for cygwin and other platforms
- // (hopefully).
- // 'stream': Strings to be written to a stream.
- // 'path': Stored as a boost::filesystem::path. Since
- // Boost.Filesystem doesn't support cygwin, this
- // is always wide on windows. UTF-8 on other
- // platforms (again, hopefully).
-
-#if QUICKBOOK_WIDE_PATHS
- typedef std::wstring input_string;
-#else
- typedef std::string input_string;
-#endif
-
- // A light wrapper around C++'s streams that gets things right
- // in the quickbook context.
- //
- // This is far from perfect but it fixes some issues.
- struct ostream
- {
-#if QUICKBOOK_WIDE_STREAMS
- typedef std::wostream base_ostream;
- typedef std::wios base_ios;
- typedef std::wstring string;
-#else
- typedef std::ostream base_ostream;
- typedef std::ios base_ios;
- typedef std::string string;
-#endif
- base_ostream& base;
-
- explicit ostream(base_ostream& x) : base(x) {}
-
- // C strings should always be ascii.
- ostream& operator<<(char);
- ostream& operator<<(char const*);
-
- // std::string should be UTF-8 (what a mess!)
- ostream& operator<<(std::string const&);
-
- // Other value types.
- ostream& operator<<(int x);
- ostream& operator<<(unsigned int x);
- ostream& operator<<(long x);
- ostream& operator<<(unsigned long x);
- ostream& operator<<(fs::path const&);
-
- // Modifiers
- ostream& operator<<(base_ostream& (*)(base_ostream&));
- ostream& operator<<(base_ios& (*)(base_ios&));
- };
-
-
- std::string input_to_utf8(input_string const&);
- fs::path input_to_path(input_string const&);
-
- std::string path_to_generic(fs::path const&);
- fs::path generic_to_path(std::string const&);
-
- void initialise_output();
-
- ostream& out();
-
- // Preformats an error/warning message so that it can be parsed by
- // common IDEs. Uses the ms_errors global to determine if VS format
- // or GCC format. Returns the stream to continue ouput of the verbose
- // error message.
- ostream& outerr();
- ostream& outerr(fs::path const& file, int line = -1);
- ostream& outwarn(fs::path const& file, int line = -1);
- ostream& outerr(file_ptr const&, string_iterator);
- ostream& outwarn(file_ptr const&, string_iterator);
- }
-}
-
-#endif
diff --git a/tools/quickbook/src/main_grammar.cpp b/tools/quickbook/src/main_grammar.cpp
index 74dbecea76..b01f4a0b84 100644
--- a/tools/quickbook/src/main_grammar.cpp
+++ b/tools/quickbook/src/main_grammar.cpp
@@ -17,7 +17,7 @@
#include "phrase_tags.hpp"
#include "parsers.hpp"
#include "scoped.hpp"
-#include "input_path.hpp"
+#include "native_text.hpp"
#include <boost/spirit/include/classic_core.hpp>
#include <boost/spirit/include/classic_chset.hpp>
#include <boost/spirit/include/classic_if.hpp>
@@ -33,8 +33,15 @@ namespace quickbook
namespace cl = boost::spirit::classic;
struct list_stack_item {
- bool root; // Is this the root of the context
- // (e.g. top, template, table cell etc.)
+ // The kind of context this item is the root of
+ // (e.g. top, template, table cell etc.)
+ enum list_item_type {
+ syntactic_list, // In a list marked up '*' or '#'
+ top_level, // At the top level of a parse
+ // (might be a template body)
+ nested_block // Nested in a block element.
+ } type;
+
unsigned int indent; // Indent of list marker
// (or paragraph if not in a list)
unsigned int indent2; // Indent of paragraph
@@ -46,11 +53,11 @@ namespace quickbook
// * List item
// |indent2
- list_stack_item() :
- root(true), indent(0), indent2(0), mark('\0') {}
+ list_stack_item(list_item_type r) :
+ type(r), indent(0), indent2(0), mark('\0') {}
list_stack_item(char mark, unsigned int indent, unsigned int indent2) :
- root(false), indent(indent), indent2(indent2), mark(mark)
+ type(syntactic_list), indent(indent), indent2(indent2), mark(mark)
{}
};
@@ -61,27 +68,13 @@ namespace quickbook
};
};
- template <typename T>
- struct member_action
- {
- typedef void(T::*member_function)(parse_iterator, parse_iterator);
-
- T& l;
- member_function mf;
-
- member_action(T& l, member_function mf) : l(l), mf(mf) {}
-
- void operator()(parse_iterator first, parse_iterator last) const {
- (l.*mf)(first, last);
- }
- };
-
struct main_grammar_local
{
////////////////////////////////////////////////////////////////////////
// Local actions
void start_blocks_impl(parse_iterator first, parse_iterator last);
+ void start_nested_blocks_impl(parse_iterator first, parse_iterator last);
void end_blocks_impl(parse_iterator first, parse_iterator last);
void check_indentation_impl(parse_iterator first, parse_iterator last);
void check_code_block_impl(parse_iterator first, parse_iterator last);
@@ -90,87 +83,51 @@ namespace quickbook
string_iterator last);
void clear_stack();
- struct in_list_impl {
- main_grammar_local& l;
-
- in_list_impl(main_grammar_local& l) :
- l(l) {}
-
- bool operator()() const {
- return !l.list_stack.top().root;
- }
- };
-
- struct set_no_eols_scoped : scoped_action_base
- {
- set_no_eols_scoped(main_grammar_local& l)
- : l(l) {}
-
- bool start() {
- saved_no_eols = l.no_eols;
- l.no_eols = false;
-
- return true;
- }
-
- void cleanup() {
- l.no_eols = saved_no_eols;
- }
-
- main_grammar_local& l;
- bool saved_no_eols;
- };
-
////////////////////////////////////////////////////////////////////////
// Local members
cl::rule<scanner>
- top_level, indent_check,
- paragraph_separator,
+ template_phrase, top_level, indent_check,
+ paragraph_separator, inside_paragraph,
code, code_line, blank_line, hr,
- inline_code,
- template_,
- code_block, macro,
+ inline_code, skip_inline_code,
+ template_, attribute_template, template_body,
+ code_block, skip_code_block, macro,
template_args,
template_args_1_4, template_arg_1_4,
template_inner_arg_1_4, brackets_1_4,
template_args_1_5, template_arg_1_5, template_arg_1_5_content,
template_inner_arg_1_5, brackets_1_5,
+ template_args_1_6, template_arg_1_6, template_arg_1_6_content,
break_,
command_line_macro_identifier,
- dummy_block, line_dummy_block, square_brackets
+ dummy_block, line_dummy_block, square_brackets,
+ skip_escape
;
- struct simple_markup_closure
- : cl::closure<simple_markup_closure, char>
+ struct block_context_closure : cl::closure<block_context_closure,
+ element_info::context>
{
- member1 mark;
+ // Mask used to determine whether or not an element is a block
+ // element.
+ member1 is_block_mask;
};
- struct block_item_closure : cl::closure<block_item_closure, bool>
- {
- member1 still_in_block;
- };
-
- struct context_closure : cl::closure<context_closure, element_info::context>
- {
- member1 context;
- };
-
- cl::rule<scanner, simple_markup_closure::context_t> simple_markup;
- cl::rule<scanner> simple_markup_end;
+ cl::rule<scanner> simple_markup, simple_markup_end;
- cl::rule<scanner, block_item_closure::context_t> paragraph;
- cl::rule<scanner, context_closure::context_t> paragraph_item;
- cl::rule<scanner, block_item_closure::context_t> list;
- cl::rule<scanner, context_closure::context_t> list_item;
- cl::rule<scanner, context_closure::context_t> common;
- cl::rule<scanner, context_closure::context_t> element;
+ cl::rule<scanner> paragraph;
+ cl::rule<scanner> list;
+ cl::rule<scanner, block_context_closure::context_t> syntactic_block_item;
+ cl::rule<scanner> common;
+ cl::rule<scanner> element;
// state
std::stack<list_stack_item> list_stack;
unsigned int list_indent;
bool no_eols;
+ element_info::context context;
+ char mark; // Simple markup's delimiter
+ bool still_in_block; // Inside a syntactic block
// transitory state
block_types::values block_type;
@@ -187,31 +144,36 @@ namespace quickbook
: list_stack()
, list_indent(0)
, no_eols(true)
+ , context(element_info::in_top_level)
+ , mark('\0')
, state_(state)
{}
};
struct process_element_impl : scoped_action_base {
- process_element_impl(main_grammar_local& l)
- : l(l) {}
+ process_element_impl(main_grammar_local& l) :
+ l(l), pushed_source_mode_(false), element_context_error_(false) {}
bool start()
{
- if (!(l.info.type & l.element.context()) ||
- qbk_version_n < l.info.qbk_version)
+ // This element doesn't exist in the current language version.
+ if (qbk_version_n < l.info.qbk_version)
return false;
- info_ = l.info;
-
- if (!l.list_stack.empty() && !l.list_stack.top().root &&
- info_.type == element_info::block)
+ // The element is not allowed in this context.
+ if (!(l.info.type & l.context))
{
- // If in a list and the element is a block, end the list.
- list_item_action list_item(l.state_);
- list_item();
- l.clear_stack();
+ if (qbk_version_n < 107u) {
+ return false;
+ }
+ else {
+ element_context_error_ = true;
+ }
}
- else if (info_.type != element_info::phrase &&
+
+ info_ = l.info;
+
+ if (info_.type != element_info::phrase &&
info_.type != element_info::maybe_block)
{
paragraph_action para(l.state_);
@@ -220,12 +182,12 @@ namespace quickbook
assert(l.state_.values.builder.empty());
- if (!l.state_.source_mode_next.empty() &&
+ if (l.state_.source_mode_next &&
info_.type != element_info::maybe_block)
{
- l.state_.source_mode.swap(saved_source_mode_);
- l.state_.source_mode = l.state_.source_mode_next.get_quickbook();
- l.state_.source_mode_next = value();
+ l.state_.push_tagged_source_mode(l.state_.source_mode_next);
+ pushed_source_mode_ = true;
+ l.state_.source_mode_next = 0;
}
return true;
@@ -234,45 +196,61 @@ namespace quickbook
template <typename ResultT, typename ScannerT>
bool result(ResultT result, ScannerT const& scan)
{
- if (result || info_.type & element_info::in_phrase)
- return result;
-
- error_action error(l.state_);
- error(scan.first, scan.first);
- return true;
+ if (element_context_error_) {
+ error_message_action error(l.state_,
+ "Element not allowed in this context.");
+ error(scan.first, scan.first);
+ return true;
+ }
+ else if (result) {
+ return true;
+ }
+ else if (qbk_version_n < 107u &&
+ info_.type & element_info::in_phrase) {
+ // Old versions of quickbook had a soft fail
+ // for unparsed phrase elements.
+ return false;
+ }
+ else {
+ // Parse error in body.
+ error_action error(l.state_);
+ error(scan.first, scan.first);
+ return true;
+ }
}
void success(parse_iterator, parse_iterator) { l.element_type = info_.type; }
void failure() { l.element_type = element_info::nothing; }
void cleanup() {
- if (!saved_source_mode_.empty())
- l.state_.source_mode.swap(saved_source_mode_);
+ if (pushed_source_mode_)
+ l.state_.pop_tagged_source_mode();
}
main_grammar_local& l;
element_info info_;
- std::string saved_source_mode_;
+ bool pushed_source_mode_;
+ bool element_context_error_;
};
- struct set_no_eols_scoped : scoped_action_base
+ struct scoped_paragraph : scoped_action_base
{
- set_no_eols_scoped(main_grammar_local& l)
- : l(l) {}
+ scoped_paragraph(quickbook::state& state) :
+ state(state), pushed(false) {}
bool start() {
- saved_no_eols = l.no_eols;
- l.no_eols = false;
-
+ state.push_tagged_source_mode(state.source_mode_next);
+ pushed = true;
+ state.source_mode_next = 0;
return true;
}
void cleanup() {
- l.no_eols = saved_no_eols;
+ if (pushed) state.pop_tagged_source_mode();
}
- main_grammar_local& l;
- bool saved_no_eols;
+ quickbook::state& state;
+ bool pushed;
};
struct in_list_impl {
@@ -282,8 +260,42 @@ namespace quickbook
l(l) {}
bool operator()() const {
- return !l.list_stack.top().root;
+ return !l.list_stack.empty() &&
+ l.list_stack.top().type == list_stack_item::syntactic_list;
+ }
+ };
+
+ template <typename T, typename M>
+ struct set_scoped_value_impl : scoped_action_base
+ {
+ typedef M T::*member_ptr;
+
+ set_scoped_value_impl(T& l, member_ptr ptr)
+ : l(l), ptr(ptr), saved_value() {}
+
+ bool start(M const& value) {
+ saved_value = l.*ptr;
+ l.*ptr = value;
+
+ return true;
+ }
+
+ void cleanup() {
+ l.*ptr = saved_value;
}
+
+ T& l;
+ member_ptr ptr;
+ M saved_value;
+ };
+
+ template <typename T, typename M>
+ struct set_scoped_value : scoped_parser<set_scoped_value_impl<T, M> >
+ {
+ typedef set_scoped_value_impl<T, M> impl;
+
+ set_scoped_value(T& l, typename impl::member_ptr ptr) :
+ scoped_parser<impl>(impl(l, ptr)) {}
};
////////////////////////////////////////////////////////////////////////////
@@ -295,35 +307,44 @@ namespace quickbook
new main_grammar_local(state));
// Global Actions
- element_action element(state);
- paragraph_action paragraph(state);
- list_item_action list_item(state);
+ quickbook::element_action element_action(state);
+ quickbook::paragraph_action paragraph_action(state);
phrase_end_action end_phrase(state);
- raw_char_action raw_char(state.phrase);
- plain_char_action plain_char(state.phrase, state);
- escape_unicode_action escape_unicode(state.phrase, state);
+ raw_char_action raw_char(state);
+ plain_char_action plain_char(state);
+ escape_unicode_action escape_unicode(state);
- simple_phrase_action simple_markup(state.phrase, state);
+ simple_phrase_action simple_markup(state);
- break_action break_(state.phrase, state);
- do_macro_action do_macro(state.phrase, state);
+ break_action break_(state);
+ do_macro_action do_macro(state);
error_action error(state);
element_id_warning_action element_id_warning(state);
scoped_parser<to_value_scoped_action> to_value(state);
+ scoped_parser<scoped_paragraph> scope_paragraph(state);
// Local Actions
scoped_parser<process_element_impl> process_element(local);
- scoped_parser<set_no_eols_scoped> scoped_no_eols(local);
in_list_impl in_list(local);
+
+ set_scoped_value<main_grammar_local, bool> scoped_no_eols(
+ local, &main_grammar_local::no_eols);
+ set_scoped_value<main_grammar_local, element_info::context> scoped_context(
+ local, &main_grammar_local::context);
+ set_scoped_value<main_grammar_local, bool> scoped_still_in_block(
+ local, &main_grammar_local::still_in_block);
+
member_action<main_grammar_local> check_indentation(local,
&main_grammar_local::check_indentation_impl);
member_action<main_grammar_local> check_code_block(local,
&main_grammar_local::check_code_block_impl);
member_action<main_grammar_local> start_blocks(local,
&main_grammar_local::start_blocks_impl);
+ member_action<main_grammar_local> start_nested_blocks(local,
+ &main_grammar_local::start_nested_blocks_impl);
member_action<main_grammar_local> end_blocks(local,
&main_grammar_local::end_blocks_impl);
@@ -338,9 +359,9 @@ namespace quickbook
// brackets.
nested_phrase =
state.values.save()
- [ *( ~cl::eps_p(']')
- >> local.common(element_info::in_phrase)
- )
+ [
+ scoped_context(element_info::in_phrase)
+ [*(~cl::eps_p(']') >> local.common)]
]
;
@@ -348,9 +369,9 @@ namespace quickbook
// by a paragraph end.
paragraph_phrase =
state.values.save()
- [ *( ~cl::eps_p(phrase_end)
- >> local.common(element_info::in_phrase)
- )
+ [
+ scoped_context(element_info::in_phrase)
+ [*(~cl::eps_p(phrase_end) >> local.common)]
]
;
@@ -358,9 +379,9 @@ namespace quickbook
// elements.
extended_phrase =
state.values.save()
- [ *( ~cl::eps_p(phrase_end)
- >> local.common(element_info::in_conditional)
- )
+ [
+ scoped_context(element_info::in_conditional)
+ [*(~cl::eps_p(phrase_end) >> local.common)]
]
;
@@ -370,28 +391,65 @@ namespace quickbook
// is part of the paragraph that contains it.
inline_phrase =
state.values.save()
- [ *local.common(element_info::in_phrase)
+ [ qbk_ver(107u)
+ >> local.template_phrase
+ | qbk_ver(0, 107u)
+ >> scoped_context(element_info::in_phrase)
+ [*local.common]
]
;
table_title_phrase =
state.values.save()
- [ *( ~cl::eps_p(space >> (']' | '[' >> space >> '['))
- >> local.common(element_info::in_phrase)
- )
+ [
+ scoped_context(element_info::in_phrase)
+ [ *( ~cl::eps_p(space >> (']' | '[' >> space >> '['))
+ >> local.common
+ )
+ ]
]
;
inside_preformatted =
- scoped_no_eols()
+ scoped_no_eols(false)
[ paragraph_phrase
]
;
+ // Phrase templates can contain block tags, but can't contain
+ // syntactic blocks.
+ local.template_phrase =
+ scoped_context(element_info::in_top_level)
+ [ *( (local.paragraph_separator >> space >> cl::anychar_p)
+ [error("Paragraph in phrase template.")]
+ | local.common
+ )
+ ]
+ ;
+
// Top level blocks
block_start =
- (*eol) [start_blocks]
- >> (*local.top_level) [end_blocks]
+ (*eol) [start_blocks]
+ >> ( *( local.top_level
+ >> !( qbk_ver(106u)
+ >> cl::ch_p(']')
+ >> cl::eps_p [error("Mismatched close bracket")]
+ )
+ )
+ ) [end_blocks]
+ ;
+
+ // Blocks contained within an element, e.g. a table cell or a footnote.
+ inside_paragraph =
+ state.values.save()
+ [ cl::eps_p [start_nested_blocks]
+ >> ( qbk_ver(107u)
+ >> (*eol)
+ >> (*local.top_level)
+ | qbk_ver(0, 107u)
+ >> local.inside_paragraph
+ ) [end_blocks]
+ ]
;
local.top_level =
@@ -416,40 +474,57 @@ namespace quickbook
;
local.paragraph =
- cl::eps_p [local.paragraph.still_in_block = true]
- >> local.paragraph_item(element_info::only_contextual_block)
- >> *( cl::eps_p(local.paragraph.still_in_block)
- >> local.paragraph_item(element_info::only_block)
- )
- >> cl::eps_p [paragraph]
- ;
-
- local.paragraph_item =
- local.element(local.paragraph_item.context)
- >> !eol [local.paragraph.still_in_block = false]
- | local.paragraph_separator [local.paragraph.still_in_block = false]
- | local.common(element_info::in_phrase)
+ // Usually a superfluous call
+ // for paragraphs in lists.
+ cl::eps_p [paragraph_action]
+ >> scope_paragraph()
+ [
+ scoped_context(element_info::in_top_level)
+ [ scoped_still_in_block(true)
+ [ local.syntactic_block_item(element_info::is_contextual_block)
+ >> *( cl::eps_p(ph::var(local.still_in_block))
+ >> local.syntactic_block_item(element_info::is_block)
+ )
+ ]
+ ]
+ ] [paragraph_action]
;
local.list =
*cl::blank_p
>> (cl::ch_p('*') | '#')
- >> (*cl::blank_p) [local.list.still_in_block = true]
- >> *( cl::eps_p(local.list.still_in_block)
- >> ( qbk_ver(106u) >> local.list_item(element_info::only_block)
- | qbk_ver(0, 106u) >> local.list_item(element_info::only_list_block)
- )
- )
- // TODO: This is sometimes called in the wrong place. Currently
- // harmless.
- >> cl::eps_p [list_item]
+ >> (*cl::blank_p)
+ >> scoped_context(element_info::in_list_block)
+ [ scoped_still_in_block(true)
+ [ *( cl::eps_p(ph::var(local.still_in_block))
+ >> local.syntactic_block_item(element_info::is_block)
+ )
+ ]
+ ]
;
- local.list_item =
- local.element(local.list_item.context)
- >> !eol [local.list.still_in_block = false]
- | local.paragraph_separator [local.list.still_in_block = false]
- | local.common(element_info::in_phrase)
+ local.syntactic_block_item =
+ local.paragraph_separator [ph::var(local.still_in_block) = false]
+ | (cl::eps_p(~cl::ch_p(']')) | qbk_ver(0, 107u))
+ [ph::var(local.element_type) = element_info::nothing]
+ >> local.common
+
+ // If the element is a block, then a newline will end the
+ // current syntactic block.
+ //
+ // Note that we don't do this for lists in 1.6, as it causes
+ // the list block to end. The support for nested syntactic
+ // blocks in 1.7 will fix that. Although it does mean the
+ // following line will need to be indented. TODO: Flag that
+ // the indentation check shouldn't be made?
+ >> !( cl::eps_p(in_list) >> qbk_ver(106u, 107u)
+ | cl::eps_p
+ (
+ ph::static_cast_<int>(local.syntactic_block_item.is_block_mask) &
+ ph::static_cast_<int>(ph::var(local.element_type))
+ )
+ >> eol [ph::var(local.still_in_block) = false]
+ )
;
local.paragraph_separator =
@@ -465,14 +540,13 @@ namespace quickbook
;
// Blocks contains within an element, e.g. a table cell or a footnote.
- inside_paragraph =
- state.values.save()
- [ *( local.paragraph_separator [paragraph]
- >> *eol
+ local.inside_paragraph =
+ scoped_context(element_info::in_nested_block)
+ [ *( local.paragraph_separator [paragraph_action]
| ~cl::eps_p(']')
- >> local.common(element_info::in_nested_block)
+ >> local.common
)
- ] [paragraph]
+ ] [paragraph_action]
;
local.hr =
@@ -484,7 +558,7 @@ namespace quickbook
>> *(line_comment | (cl::anychar_p - (cl::eol_p | "[/")))
)
>> *eol
- ] [element]
+ ] [element_action]
;
local.element
@@ -499,7 +573,7 @@ namespace quickbook
[ cl::lazy_p(*ph::var(local.info.rule))
>> space
>> ']'
- ] [element]
+ ] [element_action]
]
;
@@ -508,7 +582,7 @@ namespace quickbook
[( local.code_line
>> *(*local.blank_line >> local.code_line)
) [state.values.entry(ph::arg1, ph::arg2)]
- ] [element]
+ ] [element_action]
>> *eol
;
@@ -527,7 +601,7 @@ namespace quickbook
local.common =
local.macro
- | local.element(local.common.context)
+ | local.element
| local.template_
| local.break_
| local.code_block
@@ -540,6 +614,19 @@ namespace quickbook
| cl::anychar_p [plain_char]
;
+ skip_entity =
+ '['
+ // For escaped templates:
+ >> !(space >> cl::ch_p('`') >> (cl::alpha_p | '_'))
+ >> *(~cl::eps_p(']') >> skip_entity)
+ >> !cl::ch_p(']')
+ | local.skip_code_block
+ | local.skip_inline_code
+ | local.skip_escape
+ | comment
+ | (cl::anychar_p - '[' - ']')
+ ;
+
local.square_brackets =
( cl::ch_p('[') [plain_char]
>> paragraph_phrase
@@ -566,21 +653,50 @@ namespace quickbook
( '['
>> space
>> state.values.list(template_tags::template_)
- [ !cl::str_p("`") [state.values.entry(ph::arg1, ph::arg2, template_tags::escape)]
- >> ( cl::eps_p(cl::punct_p)
- >> state.templates.scope [state.values.entry(ph::arg1, ph::arg2, template_tags::identifier)]
- | state.templates.scope [state.values.entry(ph::arg1, ph::arg2, template_tags::identifier)]
+ [ local.template_body
+ >> ']'
+ ]
+ ) [element_action]
+ ;
+
+ local.attribute_template =
+ ( '['
+ >> space
+ >> state.values.list(template_tags::attribute_template)
+ [ local.template_body
+ >> ']'
+ ]
+ ) [element_action]
+ ;
+
+ local.template_body =
+ ( cl::str_p('`')
+ >> cl::eps_p(cl::punct_p)
+ >> state.templates.scope
+ [state.values.entry(ph::arg1, ph::arg2, template_tags::escape)]
+ [state.values.entry(ph::arg1, ph::arg2, template_tags::identifier)]
+ >> !qbk_ver(106u)
+ [error("Templates with punctuation names can't be escaped in quickbook 1.6+")]
+ | cl::str_p('`')
+ >> state.templates.scope
+ [state.values.entry(ph::arg1, ph::arg2, template_tags::escape)]
+ [state.values.entry(ph::arg1, ph::arg2, template_tags::identifier)]
+
+ | cl::eps_p(cl::punct_p)
+ >> state.templates.scope
+ [state.values.entry(ph::arg1, ph::arg2, template_tags::identifier)]
+
+ | state.templates.scope
+ [state.values.entry(ph::arg1, ph::arg2, template_tags::identifier)]
>> cl::eps_p(hard_space)
)
>> space
>> !local.template_args
- >> ']'
- ]
- ) [element]
;
local.template_args =
- qbk_ver(105u) >> local.template_args_1_5
+ qbk_ver(106u) >> local.template_args_1_6
+ | qbk_ver(105u, 106u) >> local.template_args_1_5
| qbk_ver(0, 105u) >> local.template_args_1_4
;
@@ -622,6 +738,19 @@ namespace quickbook
'[' >> local.template_inner_arg_1_5 >> ']'
;
+ local.template_args_1_6 = local.template_arg_1_6 >> *(".." >> local.template_arg_1_6);
+
+ local.template_arg_1_6 =
+ ( cl::eps_p(*cl::blank_p >> cl::eol_p)
+ >> local.template_arg_1_6_content [state.values.entry(ph::arg1, ph::arg2, template_tags::block)]
+ | local.template_arg_1_6_content [state.values.entry(ph::arg1, ph::arg2, template_tags::phrase)]
+ )
+ ;
+
+ local.template_arg_1_6_content =
+ + ( ~cl::eps_p("..") >> skip_entity )
+ ;
+
local.break_
= ( '['
>> space
@@ -642,7 +771,49 @@ namespace quickbook
) >> cl::eps_p('`')
) [state.values.entry(ph::arg1, ph::arg2)]
>> '`'
- ] [element]
+ ] [element_action]
+ ;
+
+ local.skip_inline_code =
+ '`'
+ >> *(cl::anychar_p -
+ ( '`'
+ | (cl::eol_p >> *cl::blank_p >> cl::eol_p)
+ // Make sure that we don't go
+ ) // past a single block
+ )
+ >> !cl::ch_p('`')
+ ;
+
+ local.skip_code_block =
+ "```"
+ >> ~cl::eps_p("`")
+ >> ( !( *(*cl::blank_p >> cl::eol_p)
+ >> ( *( "````" >> *cl::ch_p('`')
+ | ( cl::anychar_p
+ - (*cl::space_p >> "```" >> ~cl::eps_p("`"))
+ )
+ )
+ >> !(*cl::blank_p >> cl::eol_p)
+ )
+ >> (*cl::space_p >> "```")
+ )
+ | *cl::anychar_p
+ )
+ | "``"
+ >> ~cl::eps_p("`")
+ >> ( ( *(*cl::blank_p >> cl::eol_p)
+ >> ( *( "```" >> *cl::ch_p('`')
+ | ( cl::anychar_p
+ - (*cl::space_p >> "``" >> ~cl::eps_p("`"))
+ )
+ )
+ >> !(*cl::blank_p >> cl::eol_p)
+ )
+ >> (*cl::space_p >> "``")
+ )
+ | *cl::anychar_p
+ )
;
local.code_block =
@@ -658,7 +829,7 @@ namespace quickbook
>> !(*cl::blank_p >> cl::eol_p)
) [state.values.entry(ph::arg1, ph::arg2)]
>> (*cl::space_p >> "```")
- ] [element]
+ ] [element_action]
| cl::eps_p [error("Unfinished code block")]
>> *cl::anychar_p
)
@@ -674,18 +845,18 @@ namespace quickbook
>> !(*cl::blank_p >> cl::eol_p)
) [state.values.entry(ph::arg1, ph::arg2)]
>> (*cl::space_p >> "``")
- ] [element]
+ ] [element_action]
| cl::eps_p [error("Unfinished code block")]
>> *cl::anychar_p
)
;
local.simple_markup =
- cl::chset<>("*/_=") [local.simple_markup.mark = ph::arg1]
+ cl::chset<>("*/_=") [ph::var(local.mark) = ph::arg1]
>> cl::eps_p(cl::graph_p) // graph_p must follow first mark
>> lookback
[ cl::anychar_p // skip back over the markup
- >> ~cl::eps_p(cl::f_ch_p(local.simple_markup.mark))
+ >> ~cl::eps_p(cl::ch_p(boost::ref(local.mark)))
// first mark not be preceeded by
// the same character.
>> (cl::space_p | cl::punct_p | cl::end_p)
@@ -699,15 +870,15 @@ namespace quickbook
[
cl::eps_p((state.macro & macro_identifier) >> local.simple_markup_end)
>> state.macro [do_macro]
- | ~cl::eps_p(cl::f_ch_p(local.simple_markup.mark))
+ | ~cl::eps_p(cl::ch_p(boost::ref(local.mark)))
>> +( ~cl::eps_p
- ( lookback [~cl::f_ch_p(local.simple_markup.mark)]
+ ( lookback [~cl::ch_p(boost::ref(local.mark))]
>> local.simple_markup_end
)
>> cl::anychar_p [plain_char]
)
]
- >> cl::f_ch_p(local.simple_markup.mark)
+ >> cl::ch_p(boost::ref(local.mark))
[simple_markup]
]
;
@@ -715,8 +886,8 @@ namespace quickbook
local.simple_markup_end
= ( lookback[cl::graph_p] // final mark must be preceeded by
// graph_p
- >> cl::f_ch_p(local.simple_markup.mark)
- >> ~cl::eps_p(cl::f_ch_p(local.simple_markup.mark))
+ >> cl::ch_p(boost::ref(local.mark))
+ >> ~cl::eps_p(cl::ch_p(boost::ref(local.mark)))
// final mark not be followed by
// the same character.
>> (cl::space_p | cl::punct_p | cl::end_p)
@@ -742,10 +913,23 @@ namespace quickbook
[ (*(cl::anychar_p - "'''")) [state.values.entry(ph::arg1, ph::arg2, phrase_tags::escape)]
>> ( cl::str_p("'''")
| cl::eps_p [error("Unclosed boostbook escape.")]
- ) [element]
+ ) [element_action]
]
;
+ local.skip_escape =
+ cl::str_p("\\n")
+ | cl::str_p("\\ ")
+ | '\\' >> cl::punct_p
+ | "\\u" >> cl::repeat_p(4) [cl::chset<>("0-9a-fA-F")]
+ | "\\U" >> cl::repeat_p(8) [cl::chset<>("0-9a-fA-F")]
+ | ("'''" >> !eol)
+ >> (*(cl::anychar_p - "'''"))
+ >> ( cl::str_p("'''")
+ | cl::eps_p
+ )
+ ;
+
raw_escape =
cl::str_p("\\n") [error("Newlines invalid here.")]
| cl::str_p("\\ ") // ignore an escaped space
@@ -760,11 +944,12 @@ namespace quickbook
>> (*(cl::anychar_p - "'''"))
>> ( cl::str_p("'''")
| cl::eps_p [error("Unclosed boostbook escape.")]
- ) [element]
+ )
;
- attribute_value_1_7 =
- *( ~cl::eps_p(']' | cl::space_p | comment)
+ attribute_template_body =
+ space
+ >> *( ~cl::eps_p(space >> cl::end_p | comment)
>> ( cl::eps_p
( cl::ch_p('[')
>> space
@@ -775,13 +960,38 @@ namespace quickbook
)
) [error("Elements not allowed in attribute values.")]
>> local.square_brackets
- | local.template_
+ | local.attribute_template
| cl::eps_p(cl::ch_p('[')) [error("Unmatched template in attribute value.")]
>> local.square_brackets
| raw_escape
| cl::anychar_p [raw_char]
)
)
+ >> space
+ ;
+
+
+ attribute_value_1_7 =
+ state.values.save() [
+ +( ~cl::eps_p(']' | cl::space_p | comment)
+ >> ( cl::eps_p
+ ( cl::ch_p('[')
+ >> space
+ >> ( cl::eps_p(cl::punct_p)
+ >> elements
+ | elements
+ >> (cl::eps_p - (cl::alnum_p | '_'))
+ )
+ ) [error("Elements not allowed in attribute values.")]
+ >> local.square_brackets
+ | local.attribute_template
+ | cl::eps_p(cl::ch_p('['))[error("Unmatched template in attribute value.")]
+ >> local.square_brackets
+ | raw_escape
+ | cl::anychar_p [raw_char]
+ )
+ )
+ ]
;
//
@@ -800,7 +1010,7 @@ namespace quickbook
>> *cl::space_p
)
>> cl::end_p
- ] [element]
+ ] [element_action]
;
local.command_line_macro_identifier =
@@ -883,7 +1093,21 @@ namespace quickbook
void main_grammar_local::start_blocks_impl(parse_iterator, parse_iterator)
{
- list_stack.push(list_stack_item());
+ list_stack.push(list_stack_item(list_stack_item::top_level));
+ }
+
+ void main_grammar_local::start_nested_blocks_impl(parse_iterator, parse_iterator)
+ {
+ // If this nested block is part of a list, then tell the
+ // output state.
+ //
+ // TODO: This is a bit dodgy; it would be better if this
+ // were handled when the output state is pushed (currently
+ // in to_value_scoped_action).
+ state_.in_list = state_.explicit_list;
+ state_.explicit_list = false;
+
+ list_stack.push(list_stack_item(list_stack_item::nested_block));
}
void main_grammar_local::end_blocks_impl(parse_iterator, parse_iterator)
@@ -922,10 +1146,16 @@ namespace quickbook
unsigned int new_indent = indent_length(first, last);
if (new_indent > list_stack.top().indent2) {
- block_type = block_types::code;
+ if (list_stack.top().type != list_stack_item::nested_block) {
+ block_type = block_types::code;
+ }
+ else {
+ block_type = block_types::paragraph;
+ }
}
else {
- while (!list_stack.top().root && new_indent < list_stack.top().indent)
+ while (list_stack.top().type == list_stack_item::syntactic_list
+ && new_indent < list_stack.top().indent)
{
state_.end_list_item();
state_.end_list(list_stack.top().mark);
@@ -933,7 +1163,8 @@ namespace quickbook
list_indent = list_stack.top().indent;
}
- if (!list_stack.top().root && new_indent == list_stack.top().indent)
+ if (list_stack.top().type == list_stack_item::syntactic_list
+ && new_indent == list_stack.top().indent)
{
// If the paragraph is aligned with the list item's marker,
// then end the current list item if that's aligned (or to
@@ -956,7 +1187,7 @@ namespace quickbook
list_stack_item save = list_stack.top();
list_stack.pop();
- assert(list_stack.top().root ?
+ assert(list_stack.top().type != list_stack_item::syntactic_list ?
new_indent >= list_stack.top().indent :
new_indent > list_stack.top().indent);
@@ -972,14 +1203,24 @@ namespace quickbook
block_type = block_types::paragraph;
}
+
+ if (qbk_version_n == 106u &&
+ list_stack.top().type == list_stack_item::syntactic_list) {
+ detail::outerr(state_.current_file, first)
+ << "Nested blocks in lists won't be supported in "
+ << "quickbook 1.6"
+ << std::endl;
+ ++state_.error_count;
+ }
}
else {
clear_stack();
- if (last == first)
- block_type = block_types::paragraph;
- else
+ if (list_stack.top().type != list_stack_item::nested_block &&
+ last != first)
block_type = block_types::code;
+ else
+ block_type = block_types::paragraph;
}
}
@@ -990,12 +1231,14 @@ namespace quickbook
unsigned int new_indent2 = indent_length(first, last);
char mark = *mark_pos;
- if (list_stack.top().root && new_indent > 0) {
+ if (list_stack.top().type == list_stack_item::top_level &&
+ new_indent > 0) {
block_type = block_types::code;
return;
}
- if (list_stack.top().root || new_indent > list_indent) {
+ if (list_stack.top().type != list_stack_item::syntactic_list ||
+ new_indent > list_indent) {
list_stack.push(list_stack_item(mark, new_indent, new_indent2));
state_.start_list(mark);
}
@@ -1005,7 +1248,8 @@ namespace quickbook
else {
// This should never reach root, since the first list
// has indentation 0.
- while(!list_stack.top().root && new_indent < list_stack.top().indent)
+ while(list_stack.top().type == list_stack_item::syntactic_list &&
+ new_indent < list_stack.top().indent)
{
state_.end_list_item();
state_.end_list(list_stack.top().mark);
@@ -1032,7 +1276,7 @@ namespace quickbook
void main_grammar_local::clear_stack()
{
- while (!list_stack.top().root) {
+ while (list_stack.top().type == list_stack_item::syntactic_list) {
state_.end_list_item();
state_.end_list(list_stack.top().mark);
list_stack.pop();
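The new set_scoped_value_impl above generalises the old set_no_eols_scoped action: save a member, overwrite it while a rule is being parsed, and restore it in cleanup(). As an illustration only (plain C++, no Spirit machinery, names invented), the same save/set/restore invariant written as a simple RAII guard:

    // Illustration only - not quickbook code. Mirrors set_scoped_value_impl:
    // remember the member's old value, install the new one, and put the old
    // value back when the scope ends, even if an exception unwinds it.
    template <typename T, typename M>
    class scoped_member_value
    {
    public:
        scoped_member_value(T& obj, M T::*ptr, M const& value)
            : obj_(obj), ptr_(ptr), saved_(obj.*ptr)
        {
            obj_.*ptr_ = value;
        }

        ~scoped_member_value()
        {
            obj_.*ptr_ = saved_;   // the equivalent of cleanup()
        }

    private:
        T& obj_;
        M T::*ptr_;
        M saved_;
    };

The Spirit version defers the assignment to start() and the restore to cleanup() so it only fires when the wrapped rule actually runs, but the guarantee is the same: the member always regains its previous value when parsing leaves the scope.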
diff --git a/tools/quickbook/src/markups.cpp b/tools/quickbook/src/markups.cpp
index 558848444b..f077943a0f 100644
--- a/tools/quickbook/src/markups.cpp
+++ b/tools/quickbook/src/markups.cpp
@@ -26,6 +26,7 @@ namespace quickbook
{
markup init_markups[] = {
{ block_tags::paragraph, "<para>\n", "</para>\n" },
+ { block_tags::paragraph_in_list, "<simpara>\n", "</simpara>\n" },
{ block_tags::blurb, "<sidebar role=\"blurb\">\n", "</sidebar>\n" },
{ block_tags::blockquote, "<blockquote>", "</blockquote>" },
{ block_tags::preformatted, "<programlisting>", "</programlisting>" },
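The new paragraph_in_list entry maps the new tag to DocBook's <simpara>, a paragraph that cannot contain nested block content. Judging by the tag name and the grammar changes above, this is used for paragraphs generated inside list items, so a two-paragraph item would now, roughly, be emitted as (illustrative output only):

    <listitem>
      <simpara>First paragraph of the item.</simpara>
      <simpara>Second paragraph of the item.</simpara>
    </listitem>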
diff --git a/tools/quickbook/src/native_text.cpp b/tools/quickbook/src/native_text.cpp
new file mode 100644
index 0000000000..3e4ce8749a
--- /dev/null
+++ b/tools/quickbook/src/native_text.cpp
@@ -0,0 +1,336 @@
+/*=============================================================================
+ Copyright (c) 2009 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include <boost/program_options.hpp>
+#include <iostream>
+#include "native_text.hpp"
+#include "utils.hpp"
+#include "files.hpp"
+
+#if QUICKBOOK_WIDE_PATHS || QUICKBOOK_WIDE_STREAMS
+#include <boost/scoped_array.hpp>
+#include <windows.h>
+#include <io.h>
+#include <fcntl.h>
+#endif
+
+#if QUICKBOOK_CYGWIN_PATHS
+#include <boost/scoped_array.hpp>
+#include <boost/program_options/errors.hpp>
+#include <sys/cygwin.h>
+#endif
+
+namespace quickbook {
+ extern bool ms_errors;
+}
+
+namespace quickbook {
+namespace detail {
+
+// This is used for converting paths to UTF-8 on cygwin.
+// Might be better not to use a Windows API for this.
+#if QUICKBOOK_WIDE_PATHS || QUICKBOOK_WIDE_STREAMS
+ namespace {
+ std::string to_utf8(std::wstring const& x)
+ {
+ int buffer_count = WideCharToMultiByte(CP_UTF8, 0, x.c_str(), -1, 0, 0, 0, 0);
+
+ if (!buffer_count)
+ throw conversion_error("Error converting wide string to utf-8.");
+
+ boost::scoped_array<char> buffer(new char[buffer_count]);
+
+ if (!WideCharToMultiByte(CP_UTF8, 0, x.c_str(), -1, buffer.get(), buffer_count, 0, 0))
+ throw conversion_error("Error converting wide string to utf-8.");
+
+ return std::string(buffer.get());
+ }
+
+ std::wstring from_utf8(boost::string_ref text)
+ {
+ std::string x(text.begin(), text.end());
+ int buffer_count = MultiByteToWideChar(CP_UTF8, 0, x.c_str(), -1, 0, 0);
+
+ if (!buffer_count)
+ throw conversion_error("Error converting utf-8 to wide string.");
+
+ boost::scoped_array<wchar_t> buffer(new wchar_t[buffer_count]);
+
+ if (!MultiByteToWideChar(CP_UTF8, 0, x.c_str(), -1, buffer.get(), buffer_count))
+ throw conversion_error("Error converting utf-8 to wide string.");
+
+ return std::wstring(buffer.get());
+ }
+ }
+#endif
+
+#if QUICKBOOK_WIDE_PATHS
+ std::string command_line_to_utf8(command_line_string const& x)
+ {
+ return to_utf8(x);
+ }
+#else
+ std::string command_line_to_utf8(command_line_string const& x)
+ {
+ return x;
+ }
+#endif
+
+#if QUICKBOOK_WIDE_PATHS
+ fs::path generic_to_path(boost::string_ref x)
+ {
+ return fs::path(from_utf8(x));
+ }
+
+ std::string path_to_generic(fs::path const& x)
+ {
+ return to_utf8(x.generic_wstring());
+ }
+#else
+ fs::path generic_to_path(boost::string_ref x)
+ {
+ return fs::path(x.begin(), x.end());
+ }
+
+ std::string path_to_generic(fs::path const& x)
+ {
+ return x.generic_string();
+ }
+
+#endif
+
+#if QUICKBOOK_CYGWIN_PATHS
+ fs::path command_line_to_path(command_line_string const& path)
+ {
+ cygwin_conv_path_t flags = CCP_POSIX_TO_WIN_W | CCP_RELATIVE;
+
+ ssize_t size = cygwin_conv_path(flags, path.c_str(), NULL, 0);
+
+ if (size < 0)
+ throw conversion_error("Error converting cygwin path to windows.");
+
+ boost::scoped_array<char> result(new char[size]);
+ void* ptr = result.get();
+
+ if(cygwin_conv_path(flags, path.c_str(), ptr, size))
+ throw conversion_error("Error converting cygwin path to windows.");
+
+ return fs::path(static_cast<wchar_t*>(ptr));
+ }
+
+ ostream::string path_to_stream(fs::path const& path)
+ {
+ cygwin_conv_path_t flags = CCP_WIN_W_TO_POSIX | CCP_RELATIVE;
+
+ ssize_t size = cygwin_conv_path(flags, path.native().c_str(), NULL, 0);
+
+ if (size < 0)
+ throw conversion_error("Error converting windows path to cygwin.");
+
+ boost::scoped_array<char> result(new char[size]);
+
+ if(cygwin_conv_path(flags, path.native().c_str(), result.get(), size))
+ throw conversion_error("Error converting windows path to cygwin.");
+
+ return std::string(result.get());
+ }
+#else
+ fs::path command_line_to_path(command_line_string const& path)
+ {
+ return fs::path(path);
+ }
+
+#if QUICKBOOK_WIDE_PATHS && !QUICKBOOK_WIDE_STREAMS
+ ostream::string path_to_stream(fs::path const& path)
+ {
+ return path.string();
+ }
+#else
+ ostream::string path_to_stream(fs::path const& path)
+ {
+ return path.native();
+ }
+#endif
+
+#endif // QUICKBOOK_CYGWIN_PATHS
+
+#if QUICKBOOK_WIDE_STREAMS
+
+ void initialise_output()
+ {
+ if (_isatty(_fileno(stdout))) _setmode(_fileno(stdout), _O_U16TEXT);
+ if (_isatty(_fileno(stderr))) _setmode(_fileno(stderr), _O_U16TEXT);
+ }
+
+ void write_utf8(ostream::base_ostream& out, boost::string_ref x)
+ {
+ out << from_utf8(x);
+ }
+
+ ostream& out()
+ {
+ static ostream x(std::wcout);
+ return x;
+ }
+
+ namespace
+ {
+ inline ostream& error_stream()
+ {
+ static ostream x(std::wcerr);
+ return x;
+ }
+ }
+
+#else
+
+ void initialise_output()
+ {
+ }
+
+ void write_utf8(ostream::base_ostream& out, boost::string_ref x)
+ {
+ out << x;
+ }
+
+ ostream& out()
+ {
+ static ostream x(std::cout);
+ return x;
+ }
+
+ namespace
+ {
+ inline ostream& error_stream()
+ {
+ static ostream x(std::clog);
+ return x;
+ }
+ }
+
+#endif
+
+ ostream& outerr()
+ {
+ return error_stream() << "Error: ";
+ }
+
+ ostream& outerr(fs::path const& file, int line)
+ {
+ if (line >= 0)
+ {
+ if (ms_errors)
+ return error_stream() << path_to_stream(file) << "(" << line << "): error: ";
+ else
+ return error_stream() << path_to_stream(file) << ":" << line << ": error: ";
+ }
+ else
+ {
+ return error_stream() << path_to_stream(file) << ": error: ";
+ }
+ }
+
+ ostream& outerr(file_ptr const& f, string_iterator pos)
+ {
+ return outerr(f->path, f->position_of(pos).line);
+ }
+
+ ostream& outwarn(fs::path const& file, int line)
+ {
+ if (line >= 0)
+ {
+ if (ms_errors)
+ return error_stream() << path_to_stream(file) << "(" << line << "): warning: ";
+ else
+ return error_stream() << path_to_stream(file) << ":" << line << ": warning: ";
+ }
+ else
+ {
+ return error_stream() << path_to_stream(file) << ": warning: ";
+ }
+ }
+
+ ostream& outwarn(file_ptr const& f, string_iterator pos)
+ {
+ return outwarn(f->path, f->position_of(pos).line);
+ }
+
+ ostream& ostream::operator<<(char c) {
+ assert(c > 0 && c <= 127);
+ base << c;
+ return *this;
+ }
+
+ inline bool check_ascii(char const* x) {
+ for(;*x;++x) if(*x <= 0 || *x > 127) return false;
+ return true;
+ }
+
+ ostream& ostream::operator<<(char const* x) {
+ assert(check_ascii(x));
+ base << x;
+ return *this;
+ }
+
+ ostream& ostream::operator<<(std::string const& x) {
+ write_utf8(base, x);
+ return *this;
+ }
+
+ ostream& ostream::operator<<(boost::string_ref x) {
+ write_utf8(base, x);
+ return *this;
+ }
+
+ ostream& ostream::operator<<(int x) {
+ base << x;
+ return *this;
+ }
+
+ ostream& ostream::operator<<(unsigned int x) {
+ base << x;
+ return *this;
+ }
+
+ ostream& ostream::operator<<(long x) {
+ base << x;
+ return *this;
+ }
+
+ ostream& ostream::operator<<(unsigned long x) {
+ base << x;
+ return *this;
+ }
+
+#if !defined(BOOST_NO_LONG_LONG)
+ ostream& ostream::operator<<(long long x) {
+ base << x;
+ return *this;
+ }
+
+ ostream& ostream::operator<<(unsigned long long x) {
+ base << x;
+ return *this;
+ }
+#endif
+
+ ostream& ostream::operator<<(fs::path const& x) {
+ base << path_to_stream(x);
+ return *this;
+ }
+
+ ostream& ostream::operator<<(base_ostream& (*x)(base_ostream&)) {
+ base << x;
+ return *this;
+ }
+
+ ostream& ostream::operator<<(base_ios& (*x)(base_ios&)) {
+ base << x;
+ return *this;
+ }
+}}
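outerr() and outwarn() keep the same two IDE-parsable layouts as the deleted input_path.cpp, switched by the ms_errors global. For a hypothetical file and message, the difference is only in the line-number punctuation:

    example.qbk(12): error: unmatched close bracket    (ms_errors set: Visual Studio style)
    example.qbk:12: error: unmatched close bracket     (default: GCC style)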
diff --git a/tools/quickbook/src/native_text.hpp b/tools/quickbook/src/native_text.hpp
new file mode 100644
index 0000000000..11b990da0d
--- /dev/null
+++ b/tools/quickbook/src/native_text.hpp
@@ -0,0 +1,144 @@
+/*=============================================================================
+ Copyright (c) 2009 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+// For handling native strings and streams.
+
+#if !defined(BOOST_QUICKBOOK_DETAIL_NATIVE_TEXT_HPP)
+#define BOOST_QUICKBOOK_DETAIL_NATIVE_TEXT_HPP
+
+#include <boost/config.hpp>
+#include <boost/filesystem/path.hpp>
+#include <boost/utility/string_ref.hpp>
+#include <string>
+#include <stdexcept>
+#include <iostream>
+#include "fwd.hpp"
+
+#if defined(__cygwin__) || defined(__CYGWIN__)
+# define QUICKBOOK_CYGWIN_PATHS 1
+#elif defined(_WIN32)
+# define QUICKBOOK_WIDE_PATHS 1
+# if defined(BOOST_MSVC) && BOOST_MSVC >= 1400
+# define QUICKBOOK_WIDE_STREAMS 1
+# endif
+#endif
+
+#if !defined(QUICKBOOK_WIDE_PATHS)
+#define QUICKBOOK_WIDE_PATHS 0
+#endif
+
+#if !defined(QUICKBOOK_WIDE_STREAMS)
+#define QUICKBOOK_WIDE_STREAMS 0
+#endif
+
+#if !defined(QUICKBOOK_CYGWIN_PATHS)
+#define QUICKBOOK_CYGWIN_PATHS 0
+#endif
+
+namespace quickbook
+{
+ namespace fs = boost::filesystem;
+
+ namespace detail
+ {
+ struct conversion_error : std::runtime_error
+ {
+ conversion_error(char const* m) : std::runtime_error(m) {}
+ };
+
+ // 'generic': Paths in quickbook source and the generated boostbook.
+ // Always UTF-8.
+ // 'command_line':
+ // Paths (or other parameters) from the command line and
+ // possibly other sources in the future. Wide strings on
+ // normal windows, UTF-8 for cygwin and other platforms
+ // (hopefully).
+ // 'path': Stored as a boost::filesystem::path. Since
+ // Boost.Filesystem doesn't support cygwin, this
+ // is always wide on windows. UTF-8 on other
+ // platforms (again, hopefully).
+
+#if QUICKBOOK_WIDE_PATHS
+ typedef std::wstring command_line_string;
+ typedef boost::wstring_ref command_line_string_ref;
+#else
+ typedef std::string command_line_string;
+ typedef boost::string_ref command_line_string_ref;
+#endif
+
+ // A light wrapper around C++'s streams that gets things right
+ // in the quickbook context.
+ //
+ // This is far from perfect but it fixes some issues.
+ struct ostream
+ {
+#if QUICKBOOK_WIDE_STREAMS
+ typedef std::wostream base_ostream;
+ typedef std::wios base_ios;
+ typedef std::wstring string;
+ typedef boost::wstring_ref string_ref;
+#else
+ typedef std::ostream base_ostream;
+ typedef std::ios base_ios;
+ typedef std::string string;
+ typedef boost::string_ref string_ref;
+#endif
+ base_ostream& base;
+
+ explicit ostream(base_ostream& x) : base(x) {}
+
+ // C strings should always be ascii.
+ ostream& operator<<(char);
+ ostream& operator<<(char const*);
+
+ // std::string should be UTF-8 (what a mess!)
+ ostream& operator<<(std::string const&);
+ ostream& operator<<(boost::string_ref);
+
+ // Other value types.
+ ostream& operator<<(int x);
+ ostream& operator<<(unsigned int x);
+ ostream& operator<<(long x);
+ ostream& operator<<(unsigned long x);
+
+#if !defined(BOOST_NO_LONG_LONG)
+ ostream& operator<<(long long x);
+ ostream& operator<<(unsigned long long x);
+#endif
+
+ ostream& operator<<(fs::path const&);
+
+ // Modifiers
+ ostream& operator<<(base_ostream& (*)(base_ostream&));
+ ostream& operator<<(base_ios& (*)(base_ios&));
+ };
+
+
+ std::string command_line_to_utf8(command_line_string const&);
+ fs::path command_line_to_path(command_line_string const&);
+
+ std::string path_to_generic(fs::path const&);
+ fs::path generic_to_path(boost::string_ref);
+
+ void initialise_output();
+
+ ostream& out();
+
+ // Preformats an error/warning message so that it can be parsed by
+ // common IDEs. Uses the ms_errors global to determine if VS format
+ // or GCC format. Returns the stream to continue output of the verbose
+ // error message.
+ ostream& outerr();
+ ostream& outerr(fs::path const& file, int line = -1);
+ ostream& outwarn(fs::path const& file, int line = -1);
+ ostream& outerr(file_ptr const&, string_iterator);
+ ostream& outwarn(file_ptr const&, string_iterator);
+ }
+}
+
+#endif
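For orientation, a minimal call site for the interface declared above might look like the sketch below (the file name, line number and messages are invented). The point of the wrapper is that std::string goes through write_utf8() and fs::path through path_to_stream(), so callers never deal with the wide/narrow distinction themselves:

    // Sketch only - a hypothetical caller of the native_text.hpp interface.
    #include "native_text.hpp"

    void report(quickbook::fs::path const& file)
    {
        using namespace quickbook::detail;

        out() << "Processing: " << file << std::endl;
        outerr(file, 12) << "something went wrong" << std::endl;
        outwarn(file) << "no line number available" << std::endl;
    }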
diff --git a/tools/quickbook/src/phrase_element_grammar.cpp b/tools/quickbook/src/phrase_element_grammar.cpp
index b287d63621..2c0a597ff5 100644
--- a/tools/quickbook/src/phrase_element_grammar.cpp
+++ b/tools/quickbook/src/phrase_element_grammar.cpp
@@ -19,6 +19,7 @@
#include <boost/spirit/include/classic_if.hpp>
#include <boost/spirit/include/phoenix1_primitives.hpp>
#include <boost/spirit/include/phoenix1_casts.hpp>
+#include <boost/foreach.hpp>
namespace quickbook
{
@@ -38,7 +39,7 @@ namespace quickbook
new phrase_element_grammar_local);
error_action error(state);
- raw_char_action raw_char(state.phrase);
+ raw_char_action raw_char(state);
scoped_parser<cond_phrase_push> scoped_cond_phrase(state);
scoped_parser<to_value_scoped_action> to_value(state);
@@ -146,13 +147,15 @@ namespace quickbook
blank
>> ( qbk_ver(0, 106u)
>> (*(cl::anychar_p - phrase_end)) [state.values.entry(ph::arg1, ph::arg2)]
- | qbk_ver(106u)
+ | qbk_ver(106u, 107u)
>> to_value()
[ *( raw_escape
| (cl::anychar_p - phrase_end)
[raw_char]
)
]
+ | qbk_ver(107u)
+ >> to_value() [attribute_value_1_7]
)
;
@@ -171,16 +174,14 @@ namespace quickbook
;
local.source_mode =
- ( cl::str_p("c++")
- | "python"
- | "teletype"
- ) [state.values.entry(ph::arg1, ph::arg2)];
+ cl::eps_p [state.values.entry(ph::arg1, ph::arg2)]
+ >> source_modes [state.values.entry(ph::arg1)];
- elements.add
- ("c++", element_info(element_info::phrase, &local.empty, source_mode_tags::cpp))
- ("python", element_info(element_info::phrase, &local.empty, source_mode_tags::python))
- ("teletype", element_info(element_info::phrase, &local.empty, source_mode_tags::teletype))
- ;
+ BOOST_FOREACH(int tag, source_mode_tags::tags()) {
+ source_modes.add(source_mode_tags::name(tag), tag);
+ elements.add(source_mode_tags::name(tag),
+ element_info(element_info::phrase, &local.empty, tag));
+ }
elements.add
("role", element_info(element_info::phrase, &local.role, phrase_tags::role, 106u))
@@ -188,7 +189,11 @@ namespace quickbook
local.role
= space
- >> (+(cl::alnum_p | '_')) [state.values.entry(ph::arg1, ph::arg2)]
+ >> ( qbk_ver(0, 107u)
+ >> (+(cl::alnum_p | '_')) [state.values.entry(ph::arg1, ph::arg2)]
+ | qbk_ver(107u)
+ >> to_value() [attribute_value_1_7]
+ )
>> hard_space
>> local.inner_phrase
;
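The BOOST_FOREACH loop replaces the three hand-written entries ("c++", "python", "teletype") with a single registration pass over source_mode_tags::tags(), so adding a source mode now only needs one change. A standalone sketch of the same pattern, with a std::map standing in for Spirit's symbol table and invented tag values:

    // Illustration only: drive the name -> tag table from one list instead of
    // repeating each entry by hand. Values are stand-ins for source_mode_tags.
    #include <map>
    #include <string>
    #include <utility>

    int main()
    {
        const std::pair<std::string, int> modes[] = {
            std::make_pair(std::string("c++"), 0),
            std::make_pair(std::string("python"), 1),
            std::make_pair(std::string("teletype"), 2)
        };

        std::map<std::string, int> source_modes;   // stand-in for cl::symbols<>
        for (std::size_t i = 0; i != sizeof(modes) / sizeof(modes[0]); ++i)
            source_modes.insert(modes[i]);

        return source_modes.size() == 3 ? 0 : 1;
    }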
diff --git a/tools/quickbook/src/quickbook.cpp b/tools/quickbook/src/quickbook.cpp
index 5a359ed3e1..f867f3583e 100644
--- a/tools/quickbook/src/quickbook.cpp
+++ b/tools/quickbook/src/quickbook.cpp
@@ -14,8 +14,8 @@
#include "post_process.hpp"
#include "utils.hpp"
#include "files.hpp"
-#include "input_path.hpp"
-#include "id_manager.hpp"
+#include "native_text.hpp"
+#include "document_state.hpp"
#include <boost/program_options.hpp>
#include <boost/filesystem/path.hpp>
#include <boost/filesystem/operations.hpp>
@@ -24,6 +24,8 @@
#include <boost/ref.hpp>
#include <boost/version.hpp>
#include <boost/foreach.hpp>
+#include <boost/algorithm/string/split.hpp>
+#include <boost/algorithm/string/classification.hpp>
#include <stdexcept>
#include <vector>
@@ -38,7 +40,7 @@
#pragma warning(disable:4355)
#endif
-#define QUICKBOOK_VERSION "Quickbook Version 1.5.8"
+#define QUICKBOOK_VERSION "Quickbook Version 1.6.1"
namespace quickbook
{
@@ -61,8 +63,9 @@ namespace quickbook
end = preset_defines.end();
it != end; ++it)
{
- parse_iterator first(it->begin());
- parse_iterator last(it->end());
+ boost::string_ref val(*it);
+ parse_iterator first(val.begin());
+ parse_iterator last(val.end());
cl::parse_info<parse_iterator> info =
cl::parse(first, last, state.grammar().command_line_macro);
@@ -85,8 +88,8 @@ namespace quickbook
///////////////////////////////////////////////////////////////////////////
void parse_file(quickbook::state& state, value include_doc_id, bool nested_file)
{
- parse_iterator first(state.current_file->source.begin());
- parse_iterator last(state.current_file->source.end());
+ parse_iterator first(state.current_file->source().begin());
+ parse_iterator last(state.current_file->source().end());
cl::parse_info<parse_iterator> info = cl::parse(first, last, state.grammar().doc_info);
assert(info.hit);
@@ -96,7 +99,7 @@ namespace quickbook
parse_iterator pos = info.stop;
std::string doc_type = pre(state, pos, include_doc_id, nested_file);
- info = cl::parse(info.hit ? info.stop : first, last, state.grammar().block);
+ info = cl::parse(info.hit ? info.stop : first, last, state.grammar().block_start);
post(state, doc_type);
@@ -110,28 +113,41 @@ namespace quickbook
}
}
+ struct parse_document_options
+ {
+ parse_document_options() :
+ indent(-1),
+ linewidth(-1),
+ pretty_print(true),
+ deps_out_flags(quickbook::dependency_tracker::default_)
+ {}
+
+ int indent;
+ int linewidth;
+ bool pretty_print;
+ fs::path deps_out;
+ quickbook::dependency_tracker::flags deps_out_flags;
+ fs::path locations_out;
+ fs::path xinclude_base;
+ };
+
static int
parse_document(
fs::path const& filein_
, fs::path const& fileout_
- , fs::path const& deps_out_
- , fs::path const& locations_out_
- , fs::path const& xinclude_base_
- , int indent
- , int linewidth
- , bool pretty_print)
+ , parse_document_options const& options_)
{
string_stream buffer;
- id_manager ids;
+ document_state output;
int result = 0;
try {
- quickbook::state state(filein_, xinclude_base_, buffer, ids);
+ quickbook::state state(filein_, options_.xinclude_base, buffer, output);
set_macros(state);
if (state.error_count == 0) {
- state.add_dependency(filein_);
+ state.dependencies.add_dependency(filein_);
state.current_file = load(filein_); // Throws load_error
parse_file(state);
@@ -144,37 +160,31 @@ namespace quickbook
result = state.error_count ? 1 : 0;
- if (!deps_out_.empty())
+ if (!options_.deps_out.empty())
{
- fs::ofstream out(deps_out_);
- BOOST_FOREACH(quickbook::state::dependency_list::value_type
- const& d, state.dependencies)
- {
- if (d.second) {
- out << detail::path_to_generic(d.first) << std::endl;
- }
- }
+ state.dependencies.write_dependencies(options_.deps_out,
+ options_.deps_out_flags);
}
- if (!locations_out_.empty())
+ if (!options_.locations_out.empty())
{
- fs::ofstream out(locations_out_);
- BOOST_FOREACH(quickbook::state::dependency_list::value_type
- const& d, state.dependencies)
- {
- out << (d.second ? "+ " : "- ")
- << detail::path_to_generic(d.first) << std::endl;
- }
+ fs::ofstream out(options_.locations_out);
+ state.dependencies.write_dependencies(options_.locations_out,
+ dependency_tracker::checked);
}
}
catch (load_error& e) {
detail::outerr(filein_) << e.what() << std::endl;
result = 1;
}
+ catch (std::runtime_error& e) {
+ detail::outerr() << e.what() << std::endl;
+ result = 1;
+ }
if (!fileout_.empty() && result == 0)
{
- std::string stage2 = ids.replace_placeholders(buffer.str());
+ std::string stage2 = output.replace_placeholders(buffer.str());
fs::ofstream fileout(fileout_);
@@ -187,11 +197,12 @@ namespace quickbook
return 1;
}
- if (pretty_print)
+ if (options_.pretty_print)
{
try
{
- fileout << post_process(stage2, indent, linewidth);
+ fileout << post_process(stage2, options_.indent,
+ options_.linewidth);
}
catch (quickbook::post_process_failure&)
{
@@ -244,7 +255,7 @@ main(int argc, char* argv[])
using boost::program_options::notify;
using boost::program_options::positional_options_description;
- using quickbook::detail::input_string;
+ using quickbook::detail::command_line_string;
// First thing, the filesystem should record the current working directory.
fs::initial_path<fs::path>();
@@ -253,6 +264,8 @@ main(int argc, char* argv[])
quickbook::detail::initialise_output();
quickbook::detail::initialise_markups();
+ // Declare the program options
+
options_description desc("Allowed options");
options_description hidden("Hidden options");
options_description all("All options");
@@ -270,27 +283,32 @@ main(int argc, char* argv[])
("no-self-linked-headers", "stop headers linking to themselves")
("indent", PO_VALUE<int>(), "indent spaces")
("linewidth", PO_VALUE<int>(), "line width")
- ("input-file", PO_VALUE<input_string>(), "input file")
- ("output-file", PO_VALUE<input_string>(), "output file")
- ("output-deps", PO_VALUE<input_string>(), "output dependency file")
+ ("input-file", PO_VALUE<command_line_string>(), "input file")
+ ("output-file", PO_VALUE<command_line_string>(), "output file")
+ ("output-deps", PO_VALUE<command_line_string>(), "output dependency file")
("debug", "debug mode (for developers)")
("ms-errors", "use Microsoft Visual Studio style error & warn message format")
- ("include-path,I", PO_VALUE< std::vector<input_string> >(), "include path")
- ("define,D", PO_VALUE< std::vector<input_string> >(), "define macro")
- ("image-location", PO_VALUE<input_string>(), "image location")
+ ("include-path,I", PO_VALUE< std::vector<command_line_string> >(), "include path")
+ ("define,D", PO_VALUE< std::vector<command_line_string> >(), "define macro")
+ ("image-location", PO_VALUE<command_line_string>(), "image location")
;
hidden.add_options()
("expect-errors",
"Succeed if the input file contains a correctly handled "
"error, fail otherwise.")
- ("xinclude-base", PO_VALUE<input_string>(),
+ ("xinclude-base", PO_VALUE<command_line_string>(),
"Generate xincludes as if generating for this target "
"directory.")
- ("output-checked-locations", PO_VALUE<input_string>(),
+ ("output-deps-format", PO_VALUE<command_line_string>(),
+ "Comma separated list of formatting options for output-deps, "
+ "options are: escaped, checked")
+ ("output-checked-locations", PO_VALUE<command_line_string>(),
"Writes a file listing all the file locations that were "
"checked, starting with '+' if they were found, or '-' "
- "if they weren't.")
+ "if they weren't.\n"
+ "This is deprecated, use 'output-deps-format=checked' to "
+ "write the deps file in this format.")
;
all.add(desc).add(hidden);
@@ -298,10 +316,9 @@ main(int argc, char* argv[])
positional_options_description p;
p.add("input-file", -1);
+ // Read option from the command line
+
variables_map vm;
- int indent = -1;
- int linewidth = -1;
- bool pretty_print = true;
#if QUICKBOOK_WIDE_PATHS
quickbook::ignore_variable(&argc);
@@ -331,6 +348,9 @@ main(int argc, char* argv[])
notify(vm);
+ // Process the command line options
+
+ quickbook::parse_document_options parse_document_options;
bool expect_errors = vm.count("expect-errors");
int error_count = 0;
@@ -362,15 +382,15 @@ main(int argc, char* argv[])
quickbook::ms_errors = true;
if (vm.count("no-pretty-print"))
- pretty_print = false;
+ parse_document_options.pretty_print = false;
quickbook::self_linked_headers = !vm.count("no-self-link-headers");
if (vm.count("indent"))
- indent = vm["indent"].as<int>();
+ parse_document_options.indent = vm["indent"].as<int>();
if (vm.count("linewidth"))
- linewidth = vm["linewidth"].as<int>();
+ parse_document_options.linewidth = vm["linewidth"].as<int>();
if (vm.count("debug"))
{
@@ -401,48 +421,82 @@ main(int argc, char* argv[])
if (vm.count("include-path"))
{
boost::transform(
- vm["include-path"].as<std::vector<input_string> >(),
+ vm["include-path"].as<std::vector<command_line_string> >(),
std::back_inserter(quickbook::include_path),
- quickbook::detail::input_to_path);
+ quickbook::detail::command_line_to_path);
}
quickbook::preset_defines.clear();
if (vm.count("define"))
{
boost::transform(
- vm["define"].as<std::vector<input_string> >(),
+ vm["define"].as<std::vector<command_line_string> >(),
std::back_inserter(quickbook::preset_defines),
- quickbook::detail::input_to_utf8);
+ quickbook::detail::command_line_to_utf8);
}
if (vm.count("input-file"))
{
- fs::path filein = quickbook::detail::input_to_path(
- vm["input-file"].as<input_string>());
+ fs::path filein = quickbook::detail::command_line_to_path(
+ vm["input-file"].as<command_line_string>());
fs::path fileout;
- fs::path deps_out;
- fs::path locations_out;
bool default_output = true;
if (vm.count("output-deps"))
{
- deps_out = quickbook::detail::input_to_path(
- vm["output-deps"].as<input_string>());
+ parse_document_options.deps_out =
+ quickbook::detail::command_line_to_path(
+ vm["output-deps"].as<command_line_string>());
default_output = false;
}
+ if (vm.count("output-deps-format"))
+ {
+ std::string format_flags =
+ quickbook::detail::command_line_to_utf8(
+ vm["output-deps-format"].as<command_line_string>());
+
+ std::vector<std::string> flag_names;
+ boost::algorithm::split(flag_names, format_flags,
+ boost::algorithm::is_any_of(", "),
+ boost::algorithm::token_compress_on);
+
+ unsigned flags = 0;
+
+ BOOST_FOREACH(std::string const& flag, flag_names) {
+ if (flag == "checked") {
+ flags |= quickbook::dependency_tracker::checked;
+ }
+ else if (flag == "escaped") {
+ flags |= quickbook::dependency_tracker::escaped;
+ }
+ else if (!flag.empty()) {
+ quickbook::detail::outerr()
+ << "Unknown dependency format flag: "
+ << flag
+ <<std::endl;
+
+ ++error_count;
+ }
+ }
+
+ parse_document_options.deps_out_flags =
+ quickbook::dependency_tracker::flags(flags);
+ }
+
if (vm.count("output-checked-locations"))
{
- locations_out = quickbook::detail::input_to_path(
- vm["output-checked-locations"].as<input_string>());
+ parse_document_options.locations_out =
+ quickbook::detail::command_line_to_path(
+ vm["output-checked-locations"].as<command_line_string>());
default_output = false;
}
if (vm.count("output-file"))
{
- fileout = quickbook::detail::input_to_path(
- vm["output-file"].as<input_string>());
+ fileout = quickbook::detail::command_line_to_path(
+ vm["output-file"].as<command_line_string>());
}
else if (default_output)
{
@@ -450,20 +504,20 @@ main(int argc, char* argv[])
fileout.replace_extension(".xml");
}
- fs::path xinclude_base;
if (vm.count("xinclude-base"))
{
- xinclude_base = quickbook::detail::input_to_path(
- vm["xinclude-base"].as<input_string>());
+ parse_document_options.xinclude_base =
+ quickbook::detail::command_line_to_path(
+ vm["xinclude-base"].as<command_line_string>());
}
else
{
- xinclude_base = fileout.parent_path();
- if (xinclude_base.empty())
- xinclude_base = ".";
+ parse_document_options.xinclude_base = fileout.parent_path();
+ if (parse_document_options.xinclude_base.empty())
+ parse_document_options.xinclude_base = ".";
}
- if (!fs::is_directory(xinclude_base))
+ if (!fs::is_directory(parse_document_options.xinclude_base))
{
quickbook::detail::outerr()
<< (vm.count("xinclude-base") ?
@@ -474,8 +528,8 @@ main(int argc, char* argv[])
if (vm.count("image-location"))
{
- quickbook::image_location = quickbook::detail::input_to_path(
- vm["image-location"].as<input_string>());
+ quickbook::image_location = quickbook::detail::command_line_to_path(
+ vm["image-location"].as<command_line_string>());
}
else
{
@@ -490,8 +544,7 @@ main(int argc, char* argv[])
if (!error_count)
error_count += quickbook::parse_document(
- filein, fileout, deps_out, locations_out,
- xinclude_base, indent, linewidth, pretty_print);
+ filein, fileout, parse_document_options);
if (expect_errors)
{
diff --git a/tools/quickbook/src/state.cpp b/tools/quickbook/src/state.cpp
index d16200d626..be14604f71 100644
--- a/tools/quickbook/src/state.cpp
+++ b/tools/quickbook/src/state.cpp
@@ -10,10 +10,13 @@
=============================================================================*/
#include "state.hpp"
#include "state_save.hpp"
+#include "document_state.hpp"
#include "quickbook.hpp"
#include "grammar.hpp"
-#include "input_path.hpp"
-#include <boost/filesystem/operations.hpp>
+#include "native_text.hpp"
+#include "utils.hpp"
+#include "phrase_tags.hpp"
+#include <boost/foreach.hpp>
#if (defined(BOOST_MSVC) && (BOOST_MSVC <= 1310))
#pragma warning(disable:4355)
@@ -27,9 +30,10 @@ namespace quickbook
unsigned qbk_version_n = 0; // qbk_major_version * 100 + qbk_minor_version
state::state(fs::path const& filein_, fs::path const& xinclude_base_,
- string_stream& out_, id_manager& ids)
+ string_stream& out_, document_state& document)
: grammar_()
+ , order_pos(0)
, xinclude_base(xinclude_base_)
, templates()
@@ -37,30 +41,37 @@ namespace quickbook
, anchors()
, warned_about_breaks(false)
, conditional(true)
- , ids(ids)
+ , document(document)
, callouts()
, callout_depth(0)
+ , dependencies()
+ , explicit_list(false)
, imported(false)
, macro()
- , source_mode("c++")
+ , source_mode()
, source_mode_next()
+ , source_mode_next_pos()
, current_file(0)
- , filename_relative(filein_.filename())
+ , current_path(filein_, 0, filein_.filename())
, template_depth(0)
, min_section_level(1)
+ , in_list(false)
+ , in_list_save()
, out(out_)
, phrase()
+
, values(&current_file)
{
// add the predefined macros
macro.add
("__DATE__", std::string(quickbook_get_date))
("__TIME__", std::string(quickbook_get_time))
- ("__FILENAME__", detail::path_to_generic(filename_relative))
+ ("__FILENAME__", std::string())
;
+ update_filename_macro();
boost::scoped_ptr<quickbook_grammar> g(
new quickbook_grammar(*this));
@@ -71,89 +82,100 @@ namespace quickbook
return *grammar_;
}
- bool state::add_dependency(fs::path const& f) {
- fs::path p = fs::absolute(f);
- bool found = fs::exists(fs::status(p));
-
- // Pop path sections from path until we find an existing
- // path, adjusting for any dot path sections.
- fs::path extra;
- int parent_count = 0;
- while (!fs::exists(fs::status(p))) {
- fs::path name = p.filename();
- p = p.parent_path();
- if (name == "..") {
- ++parent_count;
- }
- else if (name == ".") {
- }
- else if (parent_count) {
- --parent_count;
- }
- else {
- extra = name / extra;
- }
+ void state::update_filename_macro() {
+ *boost::spirit::classic::find(macro, "__FILENAME__")
+ = detail::encode_string(
+ detail::path_to_generic(current_path.abstract_file_path));
+ }
+
+ unsigned state::get_new_order_pos() {
+ return ++order_pos;
+ }
+
+ void state::push_output() {
+ out.push();
+ phrase.push();
+ in_list_save.push(in_list);
+ }
+
+ void state::pop_output() {
+ phrase.pop();
+ out.pop();
+ in_list = in_list_save.top();
+ in_list_save.pop();
+ }
+
+ source_mode_info state::tagged_source_mode() const {
+ source_mode_info result;
+
+ BOOST_FOREACH(source_mode_info const& s, tagged_source_mode_stack) {
+ result.update(s);
}
- // If there are any left over ".." sections, then add them
- // on to the end of the real path, and trust Boost.Filesystem
- // to sort them out.
- while (parent_count) {
- p = p / "..";
- --parent_count;
+ return result;
+ }
+
+ source_mode_info state::current_source_mode() const {
+ source_mode_info result = source_mode;
+
+ result.update(document.section_source_mode());
+
+ BOOST_FOREACH(source_mode_info const& s, tagged_source_mode_stack) {
+ result.update(s);
}
- p = fs::canonical(p) / extra;
- dependencies[p] |= found;
- return found;
+ return result;
+ }
+
+ void state::change_source_mode(source_mode_type s) {
+ source_mode = source_mode_info(s, get_new_order_pos());
}
- file_state::file_state(quickbook::state& state, scope_flags scope)
+ void state::push_tagged_source_mode(source_mode_type s) {
+ tagged_source_mode_stack.push_back(
+ source_mode_info(s, s ? get_new_order_pos() : 0));
+ }
+
+ void state::pop_tagged_source_mode() {
+ assert(!tagged_source_mode_stack.empty());
+ tagged_source_mode_stack.pop_back();
+ }
+
+ state_save::state_save(quickbook::state& state, scope_flags scope)
: state(state)
, scope(scope)
, qbk_version(qbk_version_n)
, imported(state.imported)
, current_file(state.current_file)
- , filename_relative(state.filename_relative)
+ , current_path(state.current_path)
, xinclude_base(state.xinclude_base)
, source_mode(state.source_mode)
, macro()
+ , template_depth(state.template_depth)
+ , min_section_level(state.min_section_level)
{
if (scope & scope_macros) macro = state.macro;
if (scope & scope_templates) state.templates.push();
if (scope & scope_output) {
- state.out.push();
- state.phrase.push();
+ state.push_output();
}
state.values.builder.save();
}
- file_state::~file_state()
+ state_save::~state_save()
{
state.values.builder.restore();
boost::swap(qbk_version_n, qbk_version);
boost::swap(state.imported, imported);
boost::swap(state.current_file, current_file);
- boost::swap(state.filename_relative, filename_relative);
+ boost::swap(state.current_path, current_path);
boost::swap(state.xinclude_base, xinclude_base);
boost::swap(state.source_mode, source_mode);
if (scope & scope_output) {
- state.out.pop();
- state.phrase.pop();
+ state.pop_output();
}
if (scope & scope_templates) state.templates.pop();
if (scope & scope_macros) state.macro = macro;
- }
-
- template_state::template_state(quickbook::state& state)
- : file_state(state, file_state::scope_all)
- , template_depth(state.template_depth)
- , min_section_level(state.min_section_level)
- {
- }
-
- template_state::~template_state()
- {
boost::swap(state.template_depth, template_depth);
boost::swap(state.min_section_level, min_section_level);
}
diff --git a/tools/quickbook/src/state.hpp b/tools/quickbook/src/state.hpp
index b750904588..e4271906ad 100644
--- a/tools/quickbook/src/state.hpp
+++ b/tools/quickbook/src/state.hpp
@@ -17,6 +17,9 @@
#include "collector.hpp"
#include "template_stack.hpp"
#include "symbols.hpp"
+#include "dependency_tracker.hpp"
+#include "syntax_highlight.hpp"
+#include "include_paths.hpp"
namespace quickbook
{
@@ -26,7 +29,7 @@ namespace quickbook
struct state
{
state(fs::path const& filein_, fs::path const& xinclude_base, string_stream& out_,
- id_manager&);
+ document_state&);
private:
boost::scoped_ptr<quickbook_grammar> grammar_;
@@ -37,39 +40,46 @@ namespace quickbook
///////////////////////////////////////////////////////////////////////////
typedef std::vector<std::string> string_list;
- typedef std::map<fs::path, bool> dependency_list;
static int const max_template_depth = 100;
// global state
+ unsigned order_pos;
fs::path xinclude_base;
template_stack templates;
int error_count;
string_list anchors;
bool warned_about_breaks;
bool conditional;
- id_manager& ids;
+ document_state& document;
value_builder callouts; // callouts are global as
int callout_depth; // they don't nest.
- dependency_list dependencies;
+ dependency_tracker dependencies;
+ bool explicit_list; // set when using a list
// state saved for files and templates.
bool imported;
string_symbols macro;
- std::string source_mode;
- value source_mode_next;
+ source_mode_info source_mode;
+ source_mode_type source_mode_next;
+ value source_mode_next_pos;
+ std::vector<source_mode_info>
+ tagged_source_mode_stack;
file_ptr current_file;
- fs::path filename_relative; // for the __FILENAME__ macro.
- // (relative to the original file
- // or include path).
+ quickbook_path current_path;
// state saved for templates.
int template_depth;
int min_section_level;
// output state - scoped by templates and grammar
+ bool in_list; // generating a list
+ std::stack<bool> in_list_save; // save the in_list state
+ // TODO: Something better...
collector out; // main output stream
collector phrase; // phrase output stream
+
+ // values state - scoped by everything.
value_parser values; // parsed values
quickbook_grammar& grammar() const;
@@ -78,9 +88,12 @@ namespace quickbook
// actions
///////////////////////////////////////////////////////////////////////////
- // Call this before loading any file so that it will be included in the
- // list of dependencies. Returns true if file exists.
- bool add_dependency(fs::path const&);
+ void update_filename_macro();
+
+ unsigned get_new_order_pos();
+
+ void push_output();
+ void pop_output();
void start_list(char mark);
void end_list(char mark);
@@ -90,6 +103,12 @@ namespace quickbook
void start_callouts();
std::string add_callout(value);
std::string end_callouts();
+
+ source_mode_info current_source_mode() const;
+ source_mode_info tagged_source_mode() const;
+ void change_source_mode(source_mode_type);
+ void push_tagged_source_mode(source_mode_type);
+ void pop_tagged_source_mode();
};
extern unsigned qbk_version_n; // qbk_major_version * 100 + qbk_minor_version
diff --git a/tools/quickbook/src/state_save.hpp b/tools/quickbook/src/state_save.hpp
index f8b53e7c8e..8cae8cd1b2 100644
--- a/tools/quickbook/src/state_save.hpp
+++ b/tools/quickbook/src/state_save.hpp
@@ -18,7 +18,7 @@ namespace quickbook
//
// Defined in state.cpp
- struct file_state
+ struct state_save
{
enum scope_flags {
scope_none = 0,
@@ -29,8 +29,8 @@ namespace quickbook
scope_all = scope_callables + scope_output
};
- explicit file_state(quickbook::state&, scope_flags);
- ~file_state();
+ explicit state_save(quickbook::state&, scope_flags);
+ ~state_save();
quickbook::state& state;
scope_flags scope;
@@ -38,22 +38,15 @@ namespace quickbook
bool imported;
std::string doc_type;
file_ptr current_file;
- fs::path filename_relative;
+ quickbook_path current_path;
fs::path xinclude_base;
- std::string source_mode;
+ source_mode_info source_mode;
string_symbols macro;
- private:
- file_state(file_state const&);
- file_state& operator=(file_state const&);
- };
-
- struct template_state : file_state
- {
- explicit template_state(quickbook::state&);
- ~template_state();
-
int template_depth;
int min_section_level;
+ private:
+ state_save(state_save const&);
+ state_save& operator=(state_save const&);
};
}
diff --git a/tools/quickbook/src/string_ref.cpp b/tools/quickbook/src/string_ref.cpp
deleted file mode 100644
index 6c33df1260..0000000000
--- a/tools/quickbook/src/string_ref.cpp
+++ /dev/null
@@ -1,37 +0,0 @@
-/*=============================================================================
- Copyright (c) 2011 Daniel James
-
- Use, modification and distribution is subject to the Boost Software
- License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#include "string_ref.hpp"
-#include <boost/range/algorithm/equal.hpp>
-#include <boost/range/algorithm/lexicographical_compare.hpp>
-#include <boost/utility/swap.hpp>
-#include <ostream>
-
-namespace quickbook
-{
- void string_ref::swap(string_ref& x)
- {
- boost::swap(begin_, x.begin_);
- boost::swap(end_, x.end_);
- }
-
- bool operator==(string_ref const& x, string_ref const& y)
- {
- return boost::equal(x, y);
- }
-
- bool operator<(string_ref const& x, string_ref const& y)
- {
- return boost::lexicographical_compare(x, y);
- }
-
- std::ostream& operator<<(std::ostream& out, string_ref const& x)
- {
- return out.write(&*x.begin(), x.end() - x.begin());
- }
-}
diff --git a/tools/quickbook/src/string_ref.hpp b/tools/quickbook/src/string_ref.hpp
deleted file mode 100644
index ffb95bf71d..0000000000
--- a/tools/quickbook/src/string_ref.hpp
+++ /dev/null
@@ -1,89 +0,0 @@
-/*=============================================================================
- Copyright (c) 2011 Daniel James
-
- Use, modification and distribution is subject to the Boost Software
- License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
- http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(BOOST_QUICKBOOK_STRING_REF_HPP)
-#define BOOST_QUICKBOOK_STRING_REF_HPP
-
-#include <boost/operators.hpp>
-#include <string>
-#include <iosfwd>
-
-namespace quickbook
-{
- struct string_ref
- : boost::less_than_comparable<string_ref,
- boost::less_than_comparable<string_ref, std::string,
- boost::equality_comparable<string_ref,
- boost::equality_comparable<string_ref, std::string> > > >
- {
- public:
- typedef std::string::const_iterator iterator;
- typedef std::string::const_iterator const_iterator;
-
- private:
- iterator begin_, end_;
-
- public:
- string_ref() : begin_(), end_() {}
-
- explicit string_ref(iterator b, iterator e)
- : begin_(b), end_(e) {}
-
- explicit string_ref(std::string const& x)
- : begin_(x.begin()), end_(x.end()) {}
-
- void swap(string_ref&);
-
- void clear() {
- begin_ = end_ = iterator();
- }
-
- operator std::string() const {
- return std::string(begin_, end_);
- }
-
- iterator begin() const { return begin_; }
- iterator end() const { return end_; }
-
- std::size_t size() const
- {
- return static_cast<std::size_t>(end_ - begin_);
- }
-
- bool empty() const
- {
- return begin_ == end_;
- }
- };
-
- bool operator==(string_ref const& x, string_ref const& y);
- bool operator<(string_ref const& x, string_ref const& y);
- std::ostream& operator<<(std::ostream&, string_ref const& x);
-
- inline bool operator==(string_ref const& x, std::string const& y)
- {
- return x == string_ref(y);
- }
-
- inline bool operator<(string_ref const& x, std::string const& y)
- {
- return x < string_ref(y);
- }
-
- inline bool operator>(string_ref const& x, std::string const& y)
- {
- return x > string_ref(y);
- }
-
- inline void swap(string_ref& x, string_ref& y)
- {
- x.swap(y);
- }
-}
-
-#endif
diff --git a/tools/quickbook/src/syntax_highlight.cpp b/tools/quickbook/src/syntax_highlight.cpp
index e618c07dd6..12e2c5e077 100644
--- a/tools/quickbook/src/syntax_highlight.cpp
+++ b/tools/quickbook/src/syntax_highlight.cpp
@@ -13,91 +13,32 @@
#include <boost/spirit/include/classic_symbols.hpp>
#include <boost/spirit/include/classic_loops.hpp>
#include "grammar.hpp"
-#include "grammar_impl.hpp" // Just for context stuff. Should move?
#include "state.hpp"
#include "actions.hpp"
+#include "syntax_highlight.hpp"
#include "utils.hpp"
#include "files.hpp"
-#include "input_path.hpp"
+#include "native_text.hpp"
+#include "phrase_tags.hpp"
namespace quickbook
{
namespace cl = boost::spirit::classic;
- template <typename T, typename Value>
- struct member_action_value
- {
- typedef void(T::*member_function)(Value);
-
- T& l;
- member_function mf;
-
- member_action_value(T& l, member_function mf) : l(l), mf(mf) {}
-
- void operator()(Value v) const {
- (l.*mf)(v);
- }
- };
-
- template <typename T>
- struct member_action
- {
- typedef void(T::*member_function)(parse_iterator, parse_iterator);
-
- T& l;
- member_function mf;
-
- member_action(T& l, member_function mf) : l(l), mf(mf) {}
-
- void operator()(parse_iterator first, parse_iterator last) const {
- (l.*mf)(first, last);
- }
- };
-
- template <typename T, typename Arg1>
- struct member_action1
- {
- typedef void(T::*member_function)(parse_iterator, parse_iterator, Arg1);
-
- T& l;
- member_function mf;
-
- member_action1(T& l, member_function mf) : l(l), mf(mf) {}
-
- struct impl
- {
- member_action1 a;
- Arg1 value;
-
- impl(member_action1& a, Arg1 value) :
- a(a), value(value)
- {}
-
- void operator()(parse_iterator first, parse_iterator last) const {
- (a.l.*a.mf)(first, last, value);
- }
- };
-
- impl operator()(Arg1 a1) {
- return impl(*this, a1);
- }
- };
-
// Syntax Highlight Actions
struct syntax_highlight_actions
{
- quickbook::collector out;
quickbook::state& state;
do_macro_action do_macro_impl;
// State
bool support_callouts;
- string_ref marked_text;
+ boost::string_ref marked_text;
syntax_highlight_actions(quickbook::state& state, bool is_block) :
- out(), state(state),
- do_macro_impl(out, state),
+ state(state),
+ do_macro_impl(state),
support_callouts(is_block && (qbk_version_n >= 107u ||
state.current_file->is_code_snippets)),
marked_text()
@@ -119,26 +60,26 @@ namespace quickbook
void syntax_highlight_actions::span(parse_iterator first,
parse_iterator last, char const* name)
{
- out << "<phrase role=\"" << name << "\">";
+ state.phrase << "<phrase role=\"" << name << "\">";
while (first != last)
- detail::print_char(*first++, out.get());
- out << "</phrase>";
+ detail::print_char(*first++, state.phrase.get());
+ state.phrase << "</phrase>";
}
void syntax_highlight_actions::span_start(parse_iterator first,
parse_iterator last, char const* name)
{
- out << "<phrase role=\"" << name << "\">";
+ state.phrase << "<phrase role=\"" << name << "\">";
while (first != last)
- detail::print_char(*first++, out.get());
+ detail::print_char(*first++, state.phrase.get());
}
void syntax_highlight_actions::span_end(parse_iterator first,
parse_iterator last)
{
while (first != last)
- detail::print_char(*first++, out.get());
- out << "</phrase>";
+ detail::print_char(*first++, state.phrase.get());
+ state.phrase << "</phrase>";
}
void syntax_highlight_actions::unexpected_char(parse_iterator first,
@@ -152,30 +93,32 @@ namespace quickbook
<< "\n";
// print out an unexpected character
- out << "<phrase role=\"error\">";
+ state.phrase << "<phrase role=\"error\">";
while (first != last)
- detail::print_char(*first++, out.get());
- out << "</phrase>";
+ detail::print_char(*first++, state.phrase.get());
+ state.phrase << "</phrase>";
}
void syntax_highlight_actions::plain_char(parse_iterator first,
parse_iterator last)
{
while (first != last)
- detail::print_char(*first++, out.get());
+ detail::print_char(*first++, state.phrase.get());
}
void syntax_highlight_actions::pre_escape_back(parse_iterator,
parse_iterator)
{
- state.phrase.push(); // save the stream
+ state.push_output(); // save the stream
}
void syntax_highlight_actions::post_escape_back(parse_iterator,
parse_iterator)
{
- out << state.phrase.str();
- state.phrase.pop(); // restore the stream
+ std::string tmp;
+ state.phrase.swap(tmp);
+ state.pop_output(); // restore the stream
+ state.phrase << tmp;
}
void syntax_highlight_actions::do_macro(std::string const& v)
@@ -186,12 +129,12 @@ namespace quickbook
void syntax_highlight_actions::mark_text(parse_iterator first,
parse_iterator last)
{
- marked_text = string_ref(first.base(), last.base());
+ marked_text = boost::string_ref(first.base(), last.base() - first.base());
}
void syntax_highlight_actions::callout(parse_iterator, parse_iterator)
{
- out << state.add_callout(qbk_value(state.current_file,
+ state.phrase << state.add_callout(qbk_value(state.current_file,
marked_text.begin(), marked_text.end()));
marked_text.clear();
}
@@ -205,20 +148,23 @@ namespace quickbook
keywords_holder()
{
cpp
- = "and_eq", "and", "asm", "auto", "bitand", "bitor",
- "bool", "break", "case", "catch", "char", "class",
- "compl", "const_cast", "const", "continue", "default",
- "delete", "do", "double", "dynamic_cast", "else",
- "enum", "explicit", "export", "extern", "false",
- "float", "for", "friend", "goto", "if", "inline",
- "int", "long", "mutable", "namespace", "new", "not_eq",
- "not", "operator", "or_eq", "or", "private",
- "protected", "public", "register", "reinterpret_cast",
- "return", "short", "signed", "sizeof", "static",
+ = "alignas", "alignof", "and_eq", "and", "asm", "auto",
+ "bitand", "bitor", "bool", "break", "case", "catch",
+ "char", "char16_t", "char32_t", "class", "compl",
+ "const", "const_cast", "constexpr", "continue",
+ "decltype", "default", "delete", "do", "double",
+ "dynamic_cast", "else", "enum", "explicit", "export",
+ "extern", "false", "float", "for", "friend", "goto",
+ "if", "inline", "int", "long", "mutable", "namespace",
+ "new", "noexcept", "not_eq", "not", "nullptr",
+ "operator", "or_eq", "or", "private", "protected",
+ "public", "register", "reinterpret_cast", "return",
+ "short", "signed", "sizeof", "static", "static_assert",
"static_cast", "struct", "switch", "template", "this",
- "throw", "true", "try", "typedef", "typeid",
- "typename", "union", "unsigned", "using", "virtual",
- "void", "volatile", "wchar_t", "while", "xor_eq", "xor"
+ "thread_local", "throw", "true", "try", "typedef",
+ "typeid", "typename", "union", "unsigned", "using",
+ "virtual", "void", "volatile", "wchar_t", "while",
+ "xor_eq", "xor"
;
python
@@ -270,12 +216,21 @@ namespace quickbook
do_macro(self.actions, &syntax_highlight_actions::do_macro);
error_action error(self.actions.state);
- program
- =
- *( (+cl::space_p) [plain_char]
+ program =
+ *( (*cl::space_p) [plain_char]
+ >> (line_start | rest_of_line)
+ >> *rest_of_line
+ )
+ ;
+
+ line_start =
+ preprocessor [span("preprocessor")]
+ ;
+
+ rest_of_line =
+ (+cl::blank_p) [plain_char]
| macro
| escape
- | preprocessor [span("preprocessor")]
| cl::eps_p(ph::var(self.actions.support_callouts))
>> ( line_callout [callout]
| inline_callout [callout]
@@ -287,8 +242,8 @@ namespace quickbook
| string_ [span("string")]
| char_ [span("char")]
| number [span("number")]
- | u8_codepoint_p [unexpected_char]
- )
+ | ~cl::eps_p(cl::eol_p)
+ >> u8_codepoint_p [unexpected_char]
;
macro =
@@ -306,7 +261,7 @@ namespace quickbook
(
(
(+(cl::anychar_p - "``") >> cl::eps_p("``"))
- & g.phrase
+ & g.phrase_start
)
>> cl::str_p("``")
)
@@ -359,7 +314,7 @@ namespace quickbook
; // make sure we recognize whole words only
special
- = +cl::chset_p("~!%^&*()+={[}]:;,<.>?/|\\-")
+ = +cl::chset_p("~!%^&*()+={[}]:;,<.>?/|\\#-")
;
string_char = ('\\' >> u8_codepoint_p) | (cl::anychar_p - '\\');
@@ -387,7 +342,7 @@ namespace quickbook
}
cl::rule<Scanner>
- program, macro, preprocessor,
+ program, line_start, rest_of_line, macro, preprocessor,
inline_callout, line_callout, comment,
special, string_,
char_, number, identifier, keyword, escape,
@@ -461,7 +416,7 @@ namespace quickbook
(
(
(+(cl::anychar_p - "``") >> cl::eps_p("``"))
- & g.phrase
+ & g.phrase_start
)
>> cl::str_p("``")
)
@@ -583,7 +538,7 @@ namespace quickbook
(
(
(+(cl::anychar_p - "``") >> cl::eps_p("``"))
- & g.phrase
+ & g.phrase_start
)
>> cl::str_p("``")
)
@@ -607,39 +562,35 @@ namespace quickbook
syntax_highlight_actions& actions;
};
- std::string syntax_highlight(
+ void syntax_highlight(
parse_iterator first,
parse_iterator last,
quickbook::state& state,
- std::string const& source_mode,
+ source_mode_type source_mode,
bool is_block)
{
syntax_highlight_actions syn_actions(state, is_block);
// print the code with syntax coloring
- if (source_mode == "c++")
- {
- cpp_highlight cpp_p(syn_actions);
- boost::spirit::classic::parse(first, last, cpp_p);
- }
- else if (source_mode == "python")
+ switch(source_mode)
{
- python_highlight python_p(syn_actions);
- boost::spirit::classic::parse(first, last, python_p);
- }
- else if (source_mode == "teletype")
- {
- teletype_highlight teletype_p(syn_actions);
- boost::spirit::classic::parse(first, last, teletype_p);
- }
- else
- {
- BOOST_ASSERT(0);
+ case source_mode_tags::cpp: {
+ cpp_highlight cpp_p(syn_actions);
+ boost::spirit::classic::parse(first, last, cpp_p);
+ break;
+ }
+ case source_mode_tags::python: {
+ python_highlight python_p(syn_actions);
+ boost::spirit::classic::parse(first, last, python_p);
+ break;
+ }
+ case source_mode_tags::teletype: {
+ teletype_highlight teletype_p(syn_actions);
+ boost::spirit::classic::parse(first, last, teletype_p);
+ break;
+ }
+ default:
+ BOOST_ASSERT(0);
}
-
- std::string str;
- syn_actions.out.swap(str);
-
- return str;
}
}
diff --git a/tools/quickbook/src/syntax_highlight.hpp b/tools/quickbook/src/syntax_highlight.hpp
new file mode 100644
index 0000000000..12b3a63ae0
--- /dev/null
+++ b/tools/quickbook/src/syntax_highlight.hpp
@@ -0,0 +1,58 @@
+/*=============================================================================
+ Copyright (c) 2011,2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#if !defined(BOOST_QUICKBOOK_SYNTAX_HIGHLIGHT_HPP)
+#define BOOST_QUICKBOOK_SYNTAX_HIGHLIGHT_HPP
+
+#include "fwd.hpp"
+#include "phrase_tags.hpp"
+#include <boost/swap.hpp>
+
+namespace quickbook
+{
+ //
+ // source_mode_info
+ //
+ // The source mode is stored in a few places, so the order needs to also be
+ // stored to work out which is the current source mode.
+
+ struct source_mode_info {
+ source_mode_type source_mode;
+ unsigned order;
+
+ source_mode_info() : source_mode(source_mode_tags::cpp), order(0) {}
+
+ source_mode_info(source_mode_type source_mode, unsigned order) :
+ source_mode(source_mode),
+ order(order) {}
+
+ void update(source_mode_info const& x) {
+ if (x.order > order) {
+ source_mode = x.source_mode;
+ order = x.order;
+ }
+ }
+
+ void swap(source_mode_info& x) {
+ boost::swap(source_mode, x.source_mode);
+ boost::swap(order, x.order);
+ }
+ };
+
+ inline void swap(source_mode_info& x, source_mode_info& y) {
+ x.swap(y);
+ }
+
+ void syntax_highlight(
+ parse_iterator first, parse_iterator last,
+ quickbook::state& state,
+ source_mode_type source_mode,
+ bool is_block);
+}
+
+#endif
diff --git a/tools/quickbook/src/template_tags.hpp b/tools/quickbook/src/template_tags.hpp
index a47158dae6..d895707bc9 100644
--- a/tools/quickbook/src/template_tags.hpp
+++ b/tools/quickbook/src/template_tags.hpp
@@ -15,6 +15,7 @@ namespace quickbook
{
QUICKBOOK_VALUE_TAGS(template_tags, 0x100,
(template_)
+ (attribute_template)
(escape)
(identifier)
(block)
diff --git a/tools/quickbook/src/utils.cpp b/tools/quickbook/src/utils.cpp
index 3a5ee42e4d..6f3b49acda 100644
--- a/tools/quickbook/src/utils.cpp
+++ b/tools/quickbook/src/utils.cpp
@@ -15,6 +15,27 @@
namespace quickbook { namespace detail
{
+ std::string encode_string(boost::string_ref str)
+ {
+ std::string result;
+ result.reserve(str.size());
+
+ for (boost::string_ref::const_iterator it = str.begin();
+ it != str.end(); ++it)
+ {
+ switch (*it)
+ {
+ case '<': result += "&lt;"; break;
+ case '>': result += "&gt;"; break;
+ case '&': result += "&amp;"; break;
+ case '"': result += "&quot;"; break;
+ default: result += *it; break;
+ }
+ }
+
+ return result;
+ }
+
void print_char(char ch, std::ostream& out)
{
switch (ch)
@@ -29,9 +50,9 @@ namespace quickbook { namespace detail
}
}
- void print_string(std::basic_string<char> const& str, std::ostream& out)
+ void print_string(boost::string_ref str, std::ostream& out)
{
- for (std::string::const_iterator cur = str.begin();
+ for (boost::string_ref::const_iterator cur = str.begin();
cur != str.end(); ++cur)
{
print_char(*cur, out);
@@ -45,21 +66,45 @@ namespace quickbook { namespace detail
return static_cast<char>(std::tolower(static_cast<unsigned char>(ch)));
}
- std::string escape_uri(std::string uri)
+ static std::string escape_uri_impl(std::string& uri_param, char const* mark)
{
+ // Extra capital characters for validating percent escapes.
+ static char const hex[] = "0123456789abcdefABCDEF";
+
+ std::string uri;
+ uri.swap(uri_param);
+
for (std::string::size_type n = 0; n < uri.size(); ++n)
{
- static char const mark[] = "-_.!~*'()?\\/";
- if((!std::isalnum(static_cast<unsigned char>(uri[n])) || 127 < static_cast<unsigned char>(uri[n]))
- && 0 == std::strchr(mark, uri[n]))
+ if (static_cast<unsigned char>(uri[n]) > 127 ||
+ (!std::isalnum(static_cast<unsigned char>(uri[n])) &&
+ !std::strchr(mark, uri[n])) ||
+ (uri[n] == '%' && !(n + 2 < uri.size() &&
+ std::strchr(hex, uri[n+1]) &&
+ std::strchr(hex, uri[n+2]))))
{
- static char const hex[] = "0123456789abcdef";
char escape[] = { hex[uri[n] / 16], hex[uri[n] % 16] };
uri.insert(n + 1, escape, 2);
uri[n] = '%';
n += 2;
}
+ else if (uri[n] == '%')
+ {
+ n += 2;
+ }
}
+
return uri;
}
+
+ std::string escape_uri(std::string uri_param)
+ {
+ // TODO: I don't understand this choice of characters.....
+ return escape_uri_impl(uri_param, "-_.!~*'()?\\/");
+ }
+
+ std::string partially_escape_uri(std::string uri_param)
+ {
+ return escape_uri_impl(uri_param, "-_.!~*'()?\\/:&=#%+");
+ }
}}
diff --git a/tools/quickbook/src/utils.hpp b/tools/quickbook/src/utils.hpp
index 9170f81043..3f5c5455dc 100644
--- a/tools/quickbook/src/utils.hpp
+++ b/tools/quickbook/src/utils.hpp
@@ -14,10 +14,12 @@
#include <ostream>
#include <boost/range/algorithm_ext/push_back.hpp>
#include <boost/range/adaptor/transformed.hpp>
+#include <boost/utility/string_ref.hpp>
namespace quickbook { namespace detail {
+ std::string encode_string(boost::string_ref);
void print_char(char ch, std::ostream& out);
- void print_string(std::basic_string<char> const& str, std::ostream& out);
+ void print_string(boost::string_ref str, std::ostream& out);
char filter_identifier_char(char ch);
template <typename Range>
@@ -32,7 +34,21 @@ namespace quickbook { namespace detail {
return out_name;
}
+ // URI escape string
std::string escape_uri(std::string uri);
+ inline std::string escape_uri(boost::string_ref uri) {
+ return escape_uri(std::string(uri.begin(), uri.end()));
+ }
+
+ // URI escape string, leaving characters generally used in URIs.
+ std::string partially_escape_uri(std::string uri);
+ inline std::string partially_escape_uri(boost::string_ref uri) {
+ return escape_uri(std::string(uri.begin(), uri.end()));
+ }
+
+ inline std::string to_s(boost::string_ref x) {
+ return std::string(x.begin(), x.end());
+ }
}}
#endif // BOOST_SPIRIT_QUICKBOOK_UTILS_HPP
diff --git a/tools/quickbook/src/values.cpp b/tools/quickbook/src/values.cpp
index 904cc8b38f..59c11cfa48 100644
--- a/tools/quickbook/src/values.cpp
+++ b/tools/quickbook/src/values.cpp
@@ -50,7 +50,7 @@ namespace quickbook
file_ptr value_node::get_file() const { UNDEFINED_ERROR(); }
string_iterator value_node::get_position() const { UNDEFINED_ERROR(); }
int value_node::get_int() const { UNDEFINED_ERROR(); }
- string_ref value_node::get_quickbook() const { UNDEFINED_ERROR(); }
+ boost::string_ref value_node::get_quickbook() const { UNDEFINED_ERROR(); }
std::string value_node::get_encoded() const { UNDEFINED_ERROR(); }
value_node* value_node::get_list() const { UNDEFINED_ERROR(); }
@@ -332,7 +332,7 @@ namespace quickbook
virtual value_node* clone() const;
virtual file_ptr get_file() const;
virtual string_iterator get_position() const;
- virtual string_ref get_quickbook() const;
+ virtual boost::string_ref get_quickbook() const;
virtual bool empty() const;
virtual bool equals(value_node*) const;
@@ -354,7 +354,7 @@ namespace quickbook
virtual value_node* clone() const;
virtual file_ptr get_file() const;
virtual string_iterator get_position() const;
- virtual string_ref get_quickbook() const;
+ virtual boost::string_ref get_quickbook() const;
virtual std::string get_encoded() const;
virtual bool empty() const;
virtual bool is_encoded() const;
@@ -433,8 +433,8 @@ namespace quickbook
string_iterator qbk_value_impl::get_position() const
{ return begin_; }
- string_ref qbk_value_impl::get_quickbook() const
- { return string_ref(begin_, end_); }
+ boost::string_ref qbk_value_impl::get_quickbook() const
+ { return boost::string_ref(begin_, end_ - begin_); }
bool qbk_value_impl::empty() const
{ return begin_ == end_; }
@@ -481,8 +481,8 @@ namespace quickbook
string_iterator encoded_qbk_value_impl::get_position() const
{ return begin_; }
- string_ref encoded_qbk_value_impl::get_quickbook() const
- { return string_ref(begin_, end_); }
+ boost::string_ref encoded_qbk_value_impl::get_quickbook() const
+ { return boost::string_ref(begin_, end_ - begin_); }
std::string encoded_qbk_value_impl::get_encoded() const
{ return encoded_value_; }
diff --git a/tools/quickbook/src/values.hpp b/tools/quickbook/src/values.hpp
index d637ea2adb..e681c8a75f 100644
--- a/tools/quickbook/src/values.hpp
+++ b/tools/quickbook/src/values.hpp
@@ -16,9 +16,9 @@
#include <cassert>
#include <boost/scoped_ptr.hpp>
#include <boost/iterator/iterator_traits.hpp>
+#include <boost/utility/string_ref.hpp>
#include <stdexcept>
#include "fwd.hpp"
-#include "string_ref.hpp"
#include "files.hpp"
namespace quickbook
@@ -51,7 +51,7 @@ namespace quickbook
virtual file_ptr get_file() const;
virtual string_iterator get_position() const;
- virtual string_ref get_quickbook() const;
+ virtual boost::string_ref get_quickbook() const;
virtual std::string get_encoded() const;
virtual int get_int() const;
@@ -113,7 +113,7 @@ namespace quickbook
{ return value_->get_file(); }
string_iterator get_position() const
{ return value_->get_position(); }
- string_ref get_quickbook() const
+ boost::string_ref get_quickbook() const
{ return value_->get_quickbook(); }
std::string get_encoded() const
{ return value_->get_encoded(); }
diff --git a/tools/quickbook/test/Jamfile.v2 b/tools/quickbook/test/Jamfile.v2
index 900b4758a2..505b58ff1a 100644
--- a/tools/quickbook/test/Jamfile.v2
+++ b/tools/quickbook/test/Jamfile.v2
@@ -6,7 +6,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project test
+project quickbook/test
: requirements
<toolset>msvc:<debug-symbols>off
;
@@ -24,6 +24,7 @@ import quickbook-testing : quickbook-test quickbook-error-test ;
test-suite quickbook.test :
[ quickbook-test anchor-1_1 ]
[ quickbook-test anchor-1_6 ]
+ [ quickbook-test anchor-1_7 ]
[ quickbook-test blocks-1_5 ]
[ quickbook-test callouts-1_5 ]
[ quickbook-test callouts-1_7 ]
@@ -52,6 +53,7 @@ test-suite quickbook.test :
[ quickbook-test heading-1_3 ]
[ quickbook-test heading-1_5 ]
[ quickbook-test heading-1_6 ]
+ [ quickbook-test heading-1_7 ]
[ quickbook-error-test heading_unclosed-1_4-fail ]
[ quickbook-test hr-1_5 ]
[ quickbook-test hr-1_6 ]
@@ -63,33 +65,48 @@ test-suite quickbook.test :
[ quickbook-error-test include-1_1-fail ]
[ quickbook-test include-1_5 ]
[ quickbook-test include-1_6 ]
+ [ quickbook-test include-1_7 ]
[ quickbook-test include2-1_6 ]
[ quickbook-error-test include_win_path-1_6-fail ]
+ [ quickbook-error-test include_invalid_path1-1_7-fail ]
+ [ quickbook-error-test include_invalid_path2-1_7-fail ]
+ [ quickbook-error-test include_invalid_path3-1_7-fail ]
+ [ quickbook-error-test include_unicode_glob-1_7-fail ]
[ quickbook-test link-1_1 ]
[ quickbook-test link-1_6 ]
[ quickbook-test link-1_7 ]
+ [ quickbook-error-test link-1_7-fail ]
+ [ quickbook-error-test link-1_7-fail2 ]
[ quickbook-test list_test-1_5 ]
[ quickbook-test list_test-1_6 ]
+ [ quickbook-error-test list_test-1_6-fail ]
+ [ quickbook-test list_test-1_7 ]
+ [ quickbook-error-test list_test-1_7-fail1 ]
[ quickbook-test macro-1_5 ]
[ quickbook-test macro-1_6 ]
[ quickbook-error-test mismatched_brackets-1_1-fail ]
[ quickbook-test mismatched_brackets1-1_1 ]
[ quickbook-test mismatched_brackets2-1_1 ]
+ [ quickbook-test mismatched_brackets3-1_1 ]
[ quickbook-test newline-1_1 ]
[ quickbook-test para_test-1_5 ]
[ quickbook-error-test post_process-fail ]
[ quickbook-test preformatted-1_1 ]
[ quickbook-test preformatted-1_6 ]
[ quickbook-test role-1_6 ]
+ [ quickbook-test role-1_7 ]
+ [ quickbook-error-test role-1_7-fail ]
[ quickbook-test section-1_4 ]
[ quickbook-test section-1_5-unclosed ]
[ quickbook-test section-1_5 ]
+ [ quickbook-test section-1_7 ]
[ quickbook-test simple_markup-1_5 ]
[ quickbook-test source_mode-1_7 ]
[ quickbook-test svg-1_1 ]
[ quickbook-test table-1_3 ]
[ quickbook-test table-1_5 ]
[ quickbook-test table-1_6 ]
+ [ quickbook-test table-1_7 ]
[ quickbook-error-test template_arguments1-1_1-fail ]
[ quickbook-error-test template_arguments2-1_1-fail ]
[ quickbook-error-test template_arguments3-1_1-fail ]
@@ -100,6 +117,11 @@ test-suite quickbook.test :
[ quickbook-test templates-1_3 ]
[ quickbook-test templates-1_4 ]
[ quickbook-test templates-1_5 ]
+ [ quickbook-test templates-1_6 ]
+ [ quickbook-error-test templates-1_6-fail1 ]
+ [ quickbook-test templates-1_7 ]
+ [ quickbook-error-test templates-1_7-fail1 ]
+ [ quickbook-error-test templates-1_7-fail2 ]
[ quickbook-test unicode_escape-1_5 ]
[ quickbook-test unmatched_element-1_5 ]
[ quickbook-test unmatched_element-1_6 ]
diff --git a/tools/quickbook/test/anchor-1_7.gold b/tools/quickbook/test/anchor-1_7.gold
new file mode 100644
index 0000000000..71e22458da
--- /dev/null
+++ b/tools/quickbook/test/anchor-1_7.gold
@@ -0,0 +1,151 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="anchor_test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Anchor Test</title>
+ <section id="anchor_test.anchors">
+ <title><link linkend="anchor_test.anchors">Anchors</link></title>
+ <para>
+ <anchor id="a1"/>A paragraph containing several anchors. <anchor id="a2"/>We
+ want to make sure they appear in the correct place. <anchor id="a3"/>
+ </para>
+ <bridgehead renderas="sect3" id="anchor_test.anchors.h0">
+ <phrase id="anchor_test.anchors.this_heading_shouldn_t_pick_up_t"/><link linkend="anchor_test.anchors.this_heading_shouldn_t_pick_up_t">This
+ heading shouldn't pick up the previous anchor</link>
+ </bridgehead>
+ <anchor id="a4"/>
+ <bridgehead renderas="sect3" id="anchor_test.anchors.h1">
+ <phrase id="anchor_test.anchors.this_heading_should_pick_up_the_"/><link linkend="anchor_test.anchors.this_heading_should_pick_up_the_">This
+ heading should pick up the previous anchor</link>
+ </bridgehead>
+ <anchor id="a5"/>
+ <bridgehead renderas="sect3" id="anchor_test.anchors.h2">
+ <phrase id="anchor_test.anchors.and_this_one"/><link linkend="anchor_test.anchors.and_this_one">And
+ this one</link>
+ </bridgehead>
+ <anchor id="a6"/>
+ <bridgehead renderas="sect3" id="anchor_test.anchors.h3">
+ <phrase id="anchor_test.anchors.also_this_one"/><link linkend="anchor_test.anchors.also_this_one">Also
+ this one</link>
+ </bridgehead>
+ <anchor id="a7"/>
+ <bridgehead renderas="sect3" id="anchor_test.anchors.h4">
+ <phrase id="anchor_test.anchors.finally_this"/><link linkend="anchor_test.anchors.finally_this">Finally
+ this</link>
+ </bridgehead>
+ <anchor id="a8"/>
+ </section>
+ <section id="anchor_test.section_anchor">
+ <title><anchor id="a9"/><link linkend="anchor_test.section_anchor">Section Anchor</link></title>
+ <section id="anchor_test.section_anchor.nested_section">
+ <title><anchor id="a10"/><link linkend="anchor_test.section_anchor.nested_section">Nested
+ Section</link></title>
+ </section>
+ <anchor id="a11"/>
+ </section>
+ <section id="anchor_test.conditional_section_anchor">
+ <title><anchor id="a12"/><link linkend="anchor_test.conditional_section_anchor">Conditional
+ Section Anchor</link></title>
+ </section>
+ <section id="anchor_test.lists">
+ <title><link linkend="anchor_test.lists">Lists</link></title> <anchor id="a14"/>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ Item 1
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Item 2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Nested List <anchor id="a15"/>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ Nested Item 1
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Nested Item 2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ <anchor id="a16"/>Nested Item 3
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Item 3
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
+ <section id="anchor_test.anchors_in_templates">
+ <title><link linkend="anchor_test.anchors_in_templates">Anchors in templates</link></title>
+ <para>
+ <anchor id="t1"/>Some text.
+ </para>
+ <para>
+ <anchor id="t2"/>Text content
+ </para>
+ </section>
+ <section id="anchor_test.anchors_in_syntax_highlighted_co">
+ <title><link linkend="anchor_test.anchors_in_syntax_highlighted_co">Anchors in
+ syntax highlighted code</link></title>
+<programlisting><phrase role="keyword">int</phrase> <anchor id="s1"/><phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase>
+</programlisting>
+ </section>
+ <section id="anchor_test.nested_anchors">
+ <title><link linkend="anchor_test.nested_anchors">Nested anchors</link></title>
+ <table frame="all" id="anchor_test.nested_anchors.table_with_anchors">
+ <title>Table with anchors</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ <anchor id="table1"/>Cell 1
+ </para>
+ </entry>
+ </row>
+ <row>
+ <entry>
+ <para>
+ <anchor id="table2"/>Cell 2
+ </para>
+ </entry>
+ </row>
+ <row>
+ <entry>
+ <para>
+ Cell 3<anchor id="table3"/>
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ </section>
+ <section id="anchor_test.templates">
+ <title><link linkend="anchor_test.templates">Anchors with templates</link></title>
+ <anchor id="anchor1"/><anchor id="anchor2"/><anchor id="x1y"/><anchor id="x12y"/>
+ </section>
+</article>
diff --git a/tools/quickbook/test/anchor-1_7.quickbook b/tools/quickbook/test/anchor-1_7.quickbook
new file mode 100644
index 0000000000..f2b5c13db5
--- /dev/null
+++ b/tools/quickbook/test/anchor-1_7.quickbook
@@ -0,0 +1,97 @@
+[article Anchor Test
+[quickbook 1.7]
+]
+
+[section Anchors]
+
+[#a1] A paragraph containing several anchors. [#a2] We want to make sure
+they appear in the correct place. [#a3]
+
+[heading This heading shouldn't pick up the previous anchor]
+
+[#a4]
+
+[heading This heading should pick up the previous anchor]
+
+[#a5]
+[heading And this one]
+
+[#a6][heading Also this one]
+
+[#a7][h3 Finally this]
+
+[#a8]
+
+[endsect]
+
+[#a9]
+[section Section Anchor]
+[#a10][section Nested Section]
+[endsect]
+[/ This anchor is invalid, I'm not sure what to do with it]
+[#a11]
+[endsect]
+
+[#a12][?__not_defined__ #a13]
+[section Conditional Section Anchor]
+[endsect]
+
+[section Lists]
+
+[#a14]
+* Item 1
+* Item 2
+* Nested List
+ [#a15]
+ * Nested Item 1
+ * Nested Item 2
+ * [#a16] Nested Item 3
+* Item 3
+
+[endsect]
+
+[section Anchors in templates]
+
+[template anchor1[][#t1]]
+[template para[] Text content]
+
+[anchor1]
+
+Some text.
+
+[#t2]
+
+[para]
+
+[endsect]
+
+[section Anchors in syntax highlighted code]
+
+ int ``[#s1]``main() {}
+
+[endsect]
+
+[section Nested anchors]
+
+[table Table with anchors
+ [[Heading]]
+ [[[#table1]Cell 1]]
+ [[[#table2] Cell 2]]
+ [[Cell 3[#table3]]]
+]
+[endsect]
+
+[section:templates Anchors with templates]
+
+[template a1 anchor1]
+[template a2 anchor2]
+
+[#[a1]]
+[#[a2]]
+
+[template anchor[name] [#x[name]y]]
+
+[anchor 1]
+[anchor 12]
+
+[endsect] [/ templates]
diff --git a/tools/quickbook/test/code_cpp-1_5.gold b/tools/quickbook/test/code_cpp-1_5.gold
index be33bfdfd3..d0bc79a3f6 100644
--- a/tools/quickbook/test/code_cpp-1_5.gold
+++ b/tools/quickbook/test/code_cpp-1_5.gold
@@ -3,7 +3,11 @@
<article id="c___code_blocks" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
xmlns:xi="http://www.w3.org/2001/XInclude">
<title>C++ Code Blocks</title>
-<programlisting><phrase role="comment">// No escape</phrase>
+<programlisting><phrase role="preprocessor">#include</phrase> <phrase role="special">&lt;</phrase><phrase role="identifier">some_header</phrase><phrase role="special">&gt;</phrase>
+ <phrase role="preprocessor">#include</phrase> <phrase role="string">&quot;another_header.hpp&quot;</phrase>
+<phrase role="preprocessor"># define</phrase> <phrase role="identifier">A_MACRO</phrase> <phrase role="identifier">value</phrase>
+<phrase role="preprocessor">#define</phrase> <phrase role="identifier">stringize</phrase><phrase role="special">(</phrase><phrase role="identifier">hello</phrase><phrase role="special">)</phrase> <phrase role="special">#</phrase><phrase role="identifier">hello</phrase>
+<phrase role="comment">// No escape</phrase>
<phrase role="comment">/* No escape */</phrase>
<phrase role="comment">/* No escape
* with newlines
@@ -13,6 +17,7 @@
<phrase role="comment">/* Multiple escapes: <emphasis>italic</emphasis>
* <emphasis role="underline">underline</emphasis><emphasis role="bold">bold</emphasis>
*/</phrase>
+<phrase role="comment">/* Token pasting: */</phrase> <phrase role="identifier">a</phrase><phrase role="special">##</phrase><phrase role="identifier">b</phrase>
</programlisting>
<para>
A badly formed comment:
@@ -24,4 +29,12 @@
</para>
<programlisting><phrase role="comment">/* Oh dear <emphasis role="bold">bold</emphasis>
</phrase></programlisting>
+ <para>
+ Just some code:
+ </para>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{</phrase>
+ <phrase role="keyword">constexpr</phrase> <phrase role="keyword">char32_t</phrase> <phrase role="identifier">x</phrase> <phrase role="special">=</phrase> <phrase role="char">'a'</phrase><phrase role="special">;</phrase>
+ <phrase role="keyword">const</phrase> <phrase role="keyword">auto</phrase> <phrase role="identifier">y</phrase> <phrase role="special">=</phrase> <phrase role="identifier">x</phrase> <phrase role="special">-</phrase> <phrase role="char">' '</phrase><phrase role="special">;</phrase>
+<phrase role="special">}</phrase>
+</programlisting>
</article>
diff --git a/tools/quickbook/test/code_cpp-1_5.quickbook b/tools/quickbook/test/code_cpp-1_5.quickbook
index be8043a018..fdaa926bbb 100644
--- a/tools/quickbook/test/code_cpp-1_5.quickbook
+++ b/tools/quickbook/test/code_cpp-1_5.quickbook
@@ -2,6 +2,10 @@
[quickbook 1.5]
]
+ #include <some_header>
+ #include "another_header.hpp"
+ # define A_MACRO value
+ #define stringize(hello) #hello
// No escape
/* No escape */
/* No escape
@@ -12,6 +16,7 @@
/* Multiple escapes: ``/italic/``
* ``_underline_````*bold*``
*/
+ /* Token pasting: */ a##b
A badly formed comment:
@@ -20,3 +25,10 @@ A badly formed comment:
A badly formed comment with an escape:
/* Oh dear ``*bold*``
+
+Just some code:
+
+ int main() {
+ constexpr char32_t x = 'a';
+ const auto y = x - ' ';
+ }
diff --git a/tools/quickbook/test/command-line/Jamfile.v2 b/tools/quickbook/test/command-line/Jamfile.v2
index 052ec82afb..9e838e0a4e 100644
--- a/tools/quickbook/test/command-line/Jamfile.v2
+++ b/tools/quickbook/test/command-line/Jamfile.v2
@@ -7,7 +7,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project quickook/tests/command-line ;
+project quickbook/tests/command-line ;
import quickbook-testing : quickbook-test quickbook-fail-test quickbook-error-test ;
@@ -25,4 +25,4 @@ test-suite command-line.test :
output_nested_in_file :
basic-1_6.quickbook :
<testing.arg>--output-file=basic-1_6.quickbook/basic.xml ]
- ; \ No newline at end of file
+ ;
diff --git a/tools/quickbook/test/doc-info/Jamfile.v2 b/tools/quickbook/test/doc-info/Jamfile.v2
index 9d4913b81c..a35229c09b 100644
--- a/tools/quickbook/test/doc-info/Jamfile.v2
+++ b/tools/quickbook/test/doc-info/Jamfile.v2
@@ -6,7 +6,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project quickook/tests/doc-info ;
+project quickbook/tests/doc-info ;
import quickbook-testing : quickbook-test quickbook-error-test ;
@@ -20,6 +20,8 @@ test-suite quickbook.test :
[ quickbook-test escaped_attributes2-1_7 ]
[ quickbook-test duplicates-1.1 ]
[ quickbook-test duplicates-1.5 ]
+ [ quickbook-test macros1-1_5 ]
+ [ quickbook-test macros1-1_6 ]
[ quickbook-test source-mode-1.4 ]
[ quickbook-test source-mode-1.5 ]
[ quickbook-test source-mode-1.6 ]
diff --git a/tools/quickbook/test/doc-info/macros1-1_5.gold b/tools/quickbook/test/doc-info/macros1-1_5.gold
new file mode 100644
index 0000000000..0b76b9da15
--- /dev/null
+++ b/tools/quickbook/test/doc-info/macros1-1_5.gold
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="macro_shouldn_t_expand____date__" last-revision="__DATE__" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Macro shouldn't expand: __DATE__</title>
+ <articleinfo>
+ <articlepurpose>
+ Unfortunately this does expand: 2000-Dec-20, but I'm not fixing it, as I don't
+ think it'll cause any real problems.
+ </articlepurpose>
+ </articleinfo>
+</article>
diff --git a/tools/quickbook/test/doc-info/macros1-1_5.quickbook b/tools/quickbook/test/doc-info/macros1-1_5.quickbook
new file mode 100644
index 0000000000..c66a1de6d5
--- /dev/null
+++ b/tools/quickbook/test/doc-info/macros1-1_5.quickbook
@@ -0,0 +1,6 @@
+[article Macro shouldn't expand: __DATE__
+ [quickbook 1.5]
+ [last-revision __DATE__]
+ [purpose Unfortunately this does expand: __DATE__, but I'm not fixing it,
+ as I don't think it'll cause any real problems.]
+]
diff --git a/tools/quickbook/test/doc-info/macros1-1_6.gold b/tools/quickbook/test/doc-info/macros1-1_6.gold
new file mode 100644
index 0000000000..92ac1a191d
--- /dev/null
+++ b/tools/quickbook/test/doc-info/macros1-1_6.gold
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="testing_date_date" last-revision="2000-Dec-20" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Testing date: 2000-Dec-20</title>
+ <chapter id="testing_macro_date_nested_macro" last-revision="5 May 2013" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Testing macro date: 5 May 2013</title>
+ </chapter>
+</article>
diff --git a/tools/quickbook/test/doc-info/macros1-1_6.quickbook b/tools/quickbook/test/doc-info/macros1-1_6.quickbook
new file mode 100644
index 0000000000..058c349547
--- /dev/null
+++ b/tools/quickbook/test/doc-info/macros1-1_6.quickbook
@@ -0,0 +1,7 @@
+[article Testing date: __DATE__
+ [quickbook 1.6]
+ [last-revision __DATE__]
+]
+
+[def NESTED_MACRO 5 May 2013]
+[include macros1-inc_1_6.quickbook]
diff --git a/tools/quickbook/test/doc-info/macros1-inc_1_6.quickbook b/tools/quickbook/test/doc-info/macros1-inc_1_6.quickbook
new file mode 100644
index 0000000000..47fc84a813
--- /dev/null
+++ b/tools/quickbook/test/doc-info/macros1-inc_1_6.quickbook
@@ -0,0 +1,4 @@
+[chapter Testing macro date: NESTED_MACRO
+ [quickbook 1.6]
+ [last-revision NESTED_MACRO]
+]
diff --git a/tools/quickbook/test/elements-1_6.gold b/tools/quickbook/test/elements-1_6.gold
index bc9ee06f31..59a90af5cf 100644
--- a/tools/quickbook/test/elements-1_6.gold
+++ b/tools/quickbook/test/elements-1_6.gold
@@ -5,28 +5,36 @@
<title>1.6 Elements</title>
<orderedlist>
<listitem>
- <para>
+ <simpara>
item1
- </para>
+ </simpara>
</listitem>
<listitem>
- <para>
+ <simpara>
item2
- </para>
+ </simpara>
</listitem>
</orderedlist>
<itemizedlist>
<listitem>
- <para>
+ <simpara>
item1
- </para>
+ </simpara>
</listitem>
<listitem>
- <para>
+ <simpara>
item2
- </para>
+ </simpara>
</listitem>
</itemizedlist>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ Check that <emphasis role="bold">bold text</emphasis> isn't confused with
+ a list.
+ </simpara>
+ </listitem>
+ </orderedlist>
<simplesect><title>A <emphasis role="bold">simplesect</emphasis>!</title></simplesect>
</article>
diff --git a/tools/quickbook/test/elements-1_6.quickbook b/tools/quickbook/test/elements-1_6.quickbook
index 68372d1238..e313cd98d6 100644
--- a/tools/quickbook/test/elements-1_6.quickbook
+++ b/tools/quickbook/test/elements-1_6.quickbook
@@ -8,6 +8,14 @@
[itemized_list [item1][item2]]
+[ordered_list
+ [
+ Check that
+ *bold text*
+ isn't confused with a list.
+ ]
+]
+
[block'''<simplesect><title>'''A *simplesect*!'''</title>''']
-[block'''</simplesect>'''] \ No newline at end of file
+[block'''</simplesect>''']
diff --git a/tools/build/v2/test/dependency-test/x.foo b/tools/quickbook/test/empty-inc.quickbook
index e69de29bb2..e69de29bb2 100644
--- a/tools/build/v2/test/dependency-test/x.foo
+++ b/tools/quickbook/test/empty-inc.quickbook
diff --git a/tools/quickbook/test/empty.qbk b/tools/quickbook/test/empty.qbk
deleted file mode 100644
index e69de29bb2..0000000000
--- a/tools/quickbook/test/empty.qbk
+++ /dev/null
diff --git a/tools/quickbook/test/heading-1_7.gold b/tools/quickbook/test/heading-1_7.gold
new file mode 100644
index 0000000000..52f8291ab5
--- /dev/null
+++ b/tools/quickbook/test/heading-1_7.gold
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="heading_test_1_7" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Heading Test 1.7</title>
+ <bridgehead renderas="sect2" id="heading_test_1_7.h0">
+ <phrase id="heading_test_1_7.generic_header"/><link linkend="heading_test_1_7.generic_header">Generic
+ header</link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h2">
+ <phrase id="heading_test_1_7.level_1"/><link linkend="heading_test_1_7.level_1">Level
+ 1</link>
+ </bridgehead>
+ <bridgehead renderas="sect2" id="heading_test_1_7.h3">
+ <phrase id="heading_test_1_7.level_2"/><link linkend="heading_test_1_7.level_2">Level
+ 2</link>
+ </bridgehead>
+ <bridgehead renderas="sect3" id="heading_test_1_7.h4">
+ <phrase id="heading_test_1_7.level_3"/><link linkend="heading_test_1_7.level_3">Level
+ 3</link>
+ </bridgehead>
+ <bridgehead renderas="sect4" id="heading_test_1_7.h5">
+ <phrase id="heading_test_1_7.level_4"/><link linkend="heading_test_1_7.level_4">Level
+ 4</link>
+ </bridgehead>
+ <bridgehead renderas="sect5" id="heading_test_1_7.h6">
+ <phrase id="heading_test_1_7.level_5"/><link linkend="heading_test_1_7.level_5">Level
+ 5</link>
+ </bridgehead>
+ <bridgehead renderas="sect6" id="heading_test_1_7.h8">
+ <phrase id="heading_test_1_7.level_6"/><link linkend="heading_test_1_7.level_6">Level
+ 6</link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h9">
+ <phrase id="heading_test_1_7.bold"/><link linkend="heading_test_1_7.bold"><emphasis
+ role="bold">Bold</emphasis></link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h10">
+ <phrase id="heading_test_1_7.comment"/><link linkend="heading_test_1_7.comment">Comment</link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h11">
+ <phrase id="heading_test_1_7.anchor_anchor_heading"/><link linkend="heading_test_1_7.anchor_anchor_heading"><anchor
+ id="anchor"/>Anchor heading</link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h12">
+ <phrase id="heading_test_1_7.link_anchor_link_heading"/><link linkend="heading_test_1_7.link_anchor_link_heading"><link
+ linkend="anchor">Link heading</link></link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h13">
+ <phrase id="heading_test_1_7.h1"/><link linkend="heading_test_1_7.h1">H1</link>
+ </bridgehead>
+ <section id="heading_test_1_7.s1">
+ <title><link linkend="heading_test_1_7.s1">S1</link></title>
+ <bridgehead renderas="sect3" id="heading_test_1_7.s1.h0">
+ <phrase id="heading_test_1_7.s1.h2"/><link linkend="heading_test_1_7.s1.h2">H2</link>
+ </bridgehead>
+ <section id="heading_test_1_7.s1.s2">
+ <title><link linkend="heading_test_1_7.s1.s2">S2</link></title>
+ <bridgehead renderas="sect3" id="heading_test_1_7.s1.s2.h0">
+ <phrase id="heading_test_1_7.s1.s2.h3"/><link linkend="heading_test_1_7.s1.s2.h3">H3</link>
+ </bridgehead>
+ </section>
+ <bridgehead renderas="sect2" id="heading_test_1_7.s1.h1">
+ <phrase id="heading_test_1_7.s1.h4"/><link linkend="heading_test_1_7.s1.h4">H4</link>
+ </bridgehead>
+ <section id="heading_test_1_7.s1.s3">
+ <title><link linkend="heading_test_1_7.s1.s3">S3</link></title>
+ <bridgehead renderas="sect4" id="heading_test_1_7.s1.s3.h0">
+ <phrase id="heading_test_1_7.s1.s3.h5"/><link linkend="heading_test_1_7.s1.s3.h5">H5</link>
+ </bridgehead>
+ </section>
+ <bridgehead renderas="sect2" id="heading_test_1_7.s1.h3">
+ <phrase id="heading_test_1_7.s1.h6"/><link linkend="heading_test_1_7.s1.h6">H6</link>
+ </bridgehead>
+ </section>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h14">
+ <phrase id="heading_test_1_7.h7"/><link linkend="heading_test_1_7.h7">H7</link>
+ </bridgehead>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h15">
+ <phrase id="heading_test_1_7.a1"/><link linkend="heading_test_1_7.a1">H1</link>
+ </bridgehead>
+ <section id="heading_test_1_7.s1_0">
+ <title><link linkend="heading_test_1_7.s1_0">S1</link></title>
+ <bridgehead renderas="sect2" id="heading_test_1_7.s1_0.h0">
+ <phrase id="heading_test_1_7.s1_0.a2"/><link linkend="heading_test_1_7.s1_0.a2">H2</link>
+ </bridgehead>
+ <section id="heading_test_1_7.s1_0.s2">
+ <title><link linkend="heading_test_1_7.s1_0.s2">S2</link></title>
+ <bridgehead renderas="sect3" id="heading_test_1_7.s1_0.s2.h0">
+ <phrase id="heading_test_1_7.s1_0.s2.a3"/><link linkend="heading_test_1_7.s1_0.s2.a3">H3</link>
+ </bridgehead>
+ </section>
+ <bridgehead renderas="sect2" id="heading_test_1_7.s1_0.h1">
+ <phrase id="heading_test_1_7.s1_0.a4"/><link linkend="heading_test_1_7.s1_0.a4">H4</link>
+ </bridgehead>
+ <section id="heading_test_1_7.s1_0.s3">
+ <title><link linkend="heading_test_1_7.s1_0.s3">S3</link></title>
+ <bridgehead renderas="sect3" id="heading_test_1_7.s1_0.s3.h0">
+ <phrase id="heading_test_1_7.s1_0.s3.a5"/><link linkend="heading_test_1_7.s1_0.s3.a5">H5</link>
+ </bridgehead>
+ </section>
+ <bridgehead renderas="sect3" id="heading_test_1_7.s1_0.h2">
+ <phrase id="heading_test_1_7.s1_0.a6"/><link linkend="heading_test_1_7.s1_0.a6">H6</link>
+ </bridgehead>
+ </section>
+ <bridgehead renderas="sect1" id="heading_test_1_7.h16">
+ <phrase id="heading_test_1_7.a7"/><link linkend="heading_test_1_7.a7">H7</link>
+ </bridgehead>
+ <bridgehead renderas="sect2" id="heading_test_1_7.h17">
+ <phrase id="heading_test_1_7.abc-2"/><link linkend="heading_test_1_7.abc-2">Template
+ Id</link>
+ </bridgehead>
+</article>
diff --git a/tools/quickbook/test/heading-1_7.quickbook b/tools/quickbook/test/heading-1_7.quickbook
new file mode 100644
index 0000000000..47f878add2
--- /dev/null
+++ b/tools/quickbook/test/heading-1_7.quickbook
@@ -0,0 +1,83 @@
+[article Heading Test 1.7
+[quickbook 1.7]
+]
+
+[/ Basic headers ]
+
+[heading Generic header]
+[h1 Level 1]
+[h2 Level 2]
+[h3 Level 3]
+[h4 Level 4]
+[h5 Level 5]
+[h6 Level 6]
+
+[/ Test how heading ids are generated when different types of markup are present]
+
+[h1 *Bold*]
+[h1 [/]Comment[/]]
+
+[/ Test how ids are generated for headings containing things like anchors
+ and links ]
+
+[h1 [#anchor]Anchor heading]
+[h1 [link anchor Link heading]]
+
+[/ Test how heading ids are generated inside sections]
+
+[h1 H1]
+
+[section:s1 S1]
+
+[heading H2]
+
+[section:s2 S2]
+
+[h3 H3]
+
+[endsect]
+
+[h2 H4]
+
+[section:s3 S3]
+
+[heading H5]
+
+[endsect]
+
+[h2 H6]
+
+[endsect]
+
+[h1 H7]
+
+[/ Repeat with explicit ids]
+
+[h1:a1 H1]
+
+[section:s1 S1]
+
+[h2:a2 H2]
+
+[section:s2 S2]
+
+[h3:a3 H3]
+
+[endsect]
+
+[h2:a4 H4]
+
+[section:s3 S3]
+
+[h3:a5 H5]
+
+[endsect]
+
+[heading:a6 H6]
+
+[endsect]
+
+[h1:a7 H7]
+
+[template thing[] abc]
+[heading:[thing]-2 Template Id]
diff --git a/tools/quickbook/test/include-1_5.quickbook b/tools/quickbook/test/include-1_5.quickbook
index 849d19ceca..9720121925 100644
--- a/tools/quickbook/test/include-1_5.quickbook
+++ b/tools/quickbook/test/include-1_5.quickbook
@@ -3,15 +3,15 @@
[id include-test]
]
-[include include_sub-1_5.qbk]
-[include .\include_sub.qbk]
-[include:foo include_sub.qbk]
-[include empty.qbk]
+[include include-inc-1_5.quickbook]
+[include .\include-inc.quickbook]
+[include:foo include-inc.quickbook]
+[include empty-inc.quickbook]
[def __defined__]
-[? __undefined__ [include:foo1 include_sub.qbk] ]
-[? __undefined__ [include:foo1 not_a_file.qbk] ]
+[? __undefined__ [include:foo1 include-inc.quickbook] ]
+[? __undefined__ [include:foo1 not_a_file.quickbook] ]
[? __defined__
- Just trying including in a conditional macro. [include:foo2 include_sub.qbk]
+ Just trying including in a conditional macro. [include:foo2 include-inc.quickbook]
With some text around it.]
diff --git a/tools/quickbook/test/include-1_6.quickbook b/tools/quickbook/test/include-1_6.quickbook
index 00d6033e3b..29a7611926 100644
--- a/tools/quickbook/test/include-1_6.quickbook
+++ b/tools/quickbook/test/include-1_6.quickbook
@@ -3,13 +3,13 @@
[id include-test]
]
-[include include_sub.qbk]
-[include:foo include_sub.qbk]
+[include include-inc.quickbook]
+[include:foo include-inc.quickbook]
[def __defined__]
-[? __undefined__ [include:foo1 include_sub.qbk] ]
-[? __undefined__ [include:foo1 not-a-file.qbk] ]
+[? __undefined__ [include:foo1 include-inc.quickbook] ]
+[? __undefined__ [include:foo1 not-a-file.quickbook] ]
[? __defined__
- Just trying including in a conditional macro. [include:foo2 include_sub.qbk]
+ Just trying including in a conditional macro. [include:foo2 include-inc.quickbook]
With some text around it.]
diff --git a/tools/quickbook/test/include-1_7.gold b/tools/quickbook/test/include-1_7.gold
new file mode 100644
index 0000000000..206509bf20
--- /dev/null
+++ b/tools/quickbook/test/include-1_7.gold
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="include-test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Include Test</title>
+ <section id="include-test.test">
+ <title><link linkend="include-test.test">Test</link></title>
+ <para>
+ Just testing.
+ </para>
+ </section>
+ <section id="foo.test">
+ <title><link linkend="foo.test">Test</link></title>
+ <para>
+ Just testing.
+ </para>
+ </section>
+ <section id="foo0.test">
+ <title><link linkend="foo0.test">Test</link></title>
+ <para>
+ Just testing.
+ </para>
+ </section>
+ <para>
+ Just trying including in a conditional macro.
+ </para>
+ <section id="foo2.test">
+ <title><link linkend="foo2.test">Test</link></title>
+ <para>
+ Just testing.
+ </para>
+ </section>
+ <para>
+ With some text around it.
+ </para>
+</article>
diff --git a/tools/quickbook/test/include-1_7.quickbook b/tools/quickbook/test/include-1_7.quickbook
new file mode 100644
index 0000000000..481e4130ae
--- /dev/null
+++ b/tools/quickbook/test/include-1_7.quickbook
@@ -0,0 +1,18 @@
+[article Include Test
+ [quickbook 1.7]
+ [id include-test]
+]
+
+[include include-inc.quickbook]
+[include:foo include-inc.quickbook]
+
+[template id2[] include-inc]
+[include:foo0 [id2].quickbook]
+
+[def __defined__]
+
+[? __undefined__ [include:foo1 include-inc.quickbook] ]
+[? __undefined__ [include:foo1 not-a-file.quickbook] ]
+[? __defined__
+ Just trying including in a conditional macro. [include:foo2 include-inc.quickbook]
+ With some text around it.]
diff --git a/tools/quickbook/test/include_sub-1_5.qbk b/tools/quickbook/test/include-inc-1_5.quickbook
index 1dcbff9ff4..1dcbff9ff4 100644
--- a/tools/quickbook/test/include_sub-1_5.qbk
+++ b/tools/quickbook/test/include-inc-1_5.quickbook
diff --git a/tools/quickbook/test/include_sub.qbk b/tools/quickbook/test/include-inc.quickbook
index 1fa61608b8..1fa61608b8 100644
--- a/tools/quickbook/test/include_sub.qbk
+++ b/tools/quickbook/test/include-inc.quickbook
diff --git a/tools/quickbook/test/include/Jamfile.v2 b/tools/quickbook/test/include/Jamfile.v2
index 3ab39b201a..a40bf55430 100644
--- a/tools/quickbook/test/include/Jamfile.v2
+++ b/tools/quickbook/test/include/Jamfile.v2
@@ -6,7 +6,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project test/includes
+project quickbook/test/includes
: requirements
<toolset>msvc:<debug-symbols>off
;
@@ -16,7 +16,9 @@ import quickbook-testing : quickbook-test quickbook-error-test ;
test-suite quickbook.test :
[ quickbook-test import-basic-1.6 ]
[ quickbook-test filename ]
+ [ quickbook-test filename-1_7 ]
[ quickbook-test filename-path : : : <quickbook-test-include>sub ]
+ [ quickbook-test filename_path-1_7 : : : <quickbook-test-include>sub ]
[ quickbook-test doc-title1-1.5 ]
[ quickbook-test doc-title1a-1.5 ]
[ quickbook-test section ]
@@ -41,4 +43,6 @@ test-suite quickbook.test :
[ quickbook-test source_mode-1_6 ]
[ quickbook-test nested_compatibility-1_5 ]
[ quickbook-test nested_compatibility-1_6 ]
+ [ quickbook-test template_include-1_7 ]
+ [ quickbook-test glob-1_7 ]
;
diff --git a/tools/quickbook/test/include/compatibility-1_1.quickbook b/tools/quickbook/test/include/compatibility-1_1.quickbook
index ec5d62f55b..afdd39dc41 100644
--- a/tools/quickbook/test/include/compatibility-1_1.quickbook
+++ b/tools/quickbook/test/include/compatibility-1_1.quickbook
@@ -5,14 +5,14 @@
]
[section:collision Outer]
-[include compatibility-inc.qbk]
+[include compatibility-inc.quickbook]
[endsect]
[section:collision Include compatibility 1.1]
-[include compatibility-inc_1_1.qbk]
+[include compatibility-inc_1_1.quickbook]
[endsect]
[section:collision Include compatibility 1.5]
-[include compatibility-inc_1_5.qbk]
+[include compatibility-inc_1_5.quickbook]
[endsect]
[section:collision Include compatibility 1.6]
-[include compatibility-inc_1_6.qbk]
+[include compatibility-inc_1_6.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/compatibility-1_5.quickbook b/tools/quickbook/test/include/compatibility-1_5.quickbook
index c16ddac5e9..9593df720b 100644
--- a/tools/quickbook/test/include/compatibility-1_5.quickbook
+++ b/tools/quickbook/test/include/compatibility-1_5.quickbook
@@ -5,14 +5,14 @@
]
[section:collision Outer]
-[include compatibility-inc.qbk]
+[include compatibility-inc.quickbook]
[endsect]
[section:collision Include compatibility 1.1]
-[include compatibility-inc_1_1.qbk]
+[include compatibility-inc_1_1.quickbook]
[endsect]
[section:collision Include compatibility 1.5]
-[include compatibility-inc_1_5.qbk]
+[include compatibility-inc_1_5.quickbook]
[endsect]
[section:collision Include compatibility 1.6]
-[include compatibility-inc_1_6.qbk]
+[include compatibility-inc_1_6.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/compatibility-1_6.quickbook b/tools/quickbook/test/include/compatibility-1_6.quickbook
index 276674afca..46880a8c39 100644
--- a/tools/quickbook/test/include/compatibility-1_6.quickbook
+++ b/tools/quickbook/test/include/compatibility-1_6.quickbook
@@ -4,14 +4,14 @@
]
[section:collision Outer]
-[include compatibility-inc.qbk]
+[include compatibility-inc.quickbook]
[endsect]
[section:collision Include compatibility 1.1]
-[include compatibility-inc_1_1.qbk]
+[include compatibility-inc_1_1.quickbook]
[endsect]
[section:collision Include compatibility 1.5]
-[include compatibility-inc_1_5.qbk]
+[include compatibility-inc_1_5.quickbook]
[endsect]
[section:collision Include compatibility 1.6]
-[include compatibility-inc_1_6.qbk]
+[include compatibility-inc_1_6.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/compatibility-inc.qbk b/tools/quickbook/test/include/compatibility-inc.quickbook
index 66fbb54eee..66fbb54eee 100644
--- a/tools/quickbook/test/include/compatibility-inc.qbk
+++ b/tools/quickbook/test/include/compatibility-inc.quickbook
diff --git a/tools/quickbook/test/include/compatibility-inc_1_1.qbk b/tools/quickbook/test/include/compatibility-inc_1_1.quickbook
index 89b4d6554a..89b4d6554a 100644
--- a/tools/quickbook/test/include/compatibility-inc_1_1.qbk
+++ b/tools/quickbook/test/include/compatibility-inc_1_1.quickbook
diff --git a/tools/quickbook/test/include/compatibility-inc_1_5.qbk b/tools/quickbook/test/include/compatibility-inc_1_5.quickbook
index 5551da5b37..5551da5b37 100644
--- a/tools/quickbook/test/include/compatibility-inc_1_5.qbk
+++ b/tools/quickbook/test/include/compatibility-inc_1_5.quickbook
diff --git a/tools/quickbook/test/include/compatibility-inc_1_6.qbk b/tools/quickbook/test/include/compatibility-inc_1_6.quickbook
index 61f3c7bcd1..61f3c7bcd1 100644
--- a/tools/quickbook/test/include/compatibility-inc_1_6.qbk
+++ b/tools/quickbook/test/include/compatibility-inc_1_6.quickbook
diff --git a/tools/quickbook/test/include/filename-1_7.gold b/tools/quickbook/test/include/filename-1_7.gold
new file mode 100644
index 0000000000..935b426422
--- /dev/null
+++ b/tools/quickbook/test/include/filename-1_7.gold
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="filename_test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Filename Test</title>
+ <para>
+ filename-1_7.quickbook
+ </para>
+ <bridgehead renderas="sect2" id="filename_test.h0">
+ <phrase id="filename_test.test_1"/><link linkend="filename_test.test_1">Test
+ 1</link>
+ </bridgehead>
+ <para>
+ sub/filename_include1.quickbook
+ </para>
+ <para>
+ sub/../filename_include2.quickbook
+ </para>
+ <bridgehead renderas="sect2" id="filename_test.h1">
+ <phrase id="filename_test.test_2"/><link linkend="filename_test.test_2">Test
+ 2</link>
+ </bridgehead>
+ <para>
+ filename_include2.quickbook
+ </para>
+ <bridgehead renderas="sect2" id="filename_test.h2">
+ <phrase id="filename_test.test_3"/><link linkend="filename_test.test_3">Test
+ 3</link>
+ </bridgehead>
+ <para>
+ sub/filename_include1.quickbook
+ </para>
+ <para>
+ sub/../filename_include2.quickbook
+ </para>
+ <bridgehead renderas="sect2" id="filename_test.h3">
+ <phrase id="filename_test.test_4"/><link linkend="filename_test.test_4">Test
+ 4</link>
+ </bridgehead>
+ <para>
+ sub/filename_include1.quickbook
+ </para>
+ <para>
+ sub/../filename_include2.quickbook
+ </para>
+</article>
diff --git a/tools/quickbook/test/include/filename-1_7.quickbook b/tools/quickbook/test/include/filename-1_7.quickbook
new file mode 100644
index 0000000000..9d462af89f
--- /dev/null
+++ b/tools/quickbook/test/include/filename-1_7.quickbook
@@ -0,0 +1,21 @@
+[article Filename Test
+[quickbook 1.7]
+]
+
+__FILENAME__
+
+[heading Test 1]
+
+[include sub/*.quickbook]
+
+[heading Test 2]
+
+[include filename_include?.quickbook]
+
+[heading Test 3]
+
+[include su\[b\]/filename\\_include1.quickbook]
+
+[heading Test 4]
+
+[include su\\b/*.quickbook]
diff --git a/tools/quickbook/test/include/filename_path-1_7.gold b/tools/quickbook/test/include/filename_path-1_7.gold
new file mode 100644
index 0000000000..6f52dafc1d
--- /dev/null
+++ b/tools/quickbook/test/include/filename_path-1_7.gold
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="filename_test_with_include_path" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Filename test with include path</title>
+ <para>
+ filename_path-1_7.quickbook
+ </para>
+ <para>
+ filename_include1.quickbook
+ </para>
+ <para>
+ ../filename_include2.quickbook
+ </para>
+ <para>
+ filename_include2.quickbook
+ </para>
+</article>
diff --git a/tools/quickbook/test/include/filename_path-1_7.quickbook b/tools/quickbook/test/include/filename_path-1_7.quickbook
new file mode 100644
index 0000000000..741d76ad57
--- /dev/null
+++ b/tools/quickbook/test/include/filename_path-1_7.quickbook
@@ -0,0 +1,7 @@
+[article Filename test with include path
+[quickbook 1.7]
+]
+
+__FILENAME__
+
+[include filename_include?.quickbook]
diff --git a/tools/quickbook/test/include/glob-1_7.gold b/tools/quickbook/test/include/glob-1_7.gold
new file mode 100644
index 0000000000..ba31ae9615
--- /dev/null
+++ b/tools/quickbook/test/include/glob-1_7.gold
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="glob_test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Glob Test</title>
+ <section id="glob_test.t1_0">
+ <title><link linkend="glob_test.t1_0">Test 1.0</link></title>
+ </section>
+ <section id="glob_test.t1_1">
+ <title><link linkend="glob_test.t1_1">Test 1.1</link></title>
+ <para>
+ A
+ </para>
+ </section>
+ <section id="glob_test.t1_2">
+ <title><link linkend="glob_test.t1_2">Test 1.2</link></title>
+ <para>
+ B
+ </para>
+ </section>
+ <section id="glob_test.t1_3">
+ <title><link linkend="glob_test.t1_3">Test 1.3</link></title>
+ <para>
+ B
+ </para>
+ </section>
+ <section id="glob_test.t2_1">
+ <title><link linkend="glob_test.t2_1">Test 2.1</link></title>
+ <para>
+ A
+ </para>
+ <para>
+ B
+ </para>
+ </section>
+ <section id="glob_test.t2_2">
+ <title><link linkend="glob_test.t2_2">Test 2.2</link></title>
+ <para>
+ B
+ </para>
+ </section>
+</article>
diff --git a/tools/quickbook/test/include/glob-1_7.quickbook b/tools/quickbook/test/include/glob-1_7.quickbook
new file mode 100644
index 0000000000..826c12be81
--- /dev/null
+++ b/tools/quickbook/test/include/glob-1_7.quickbook
@@ -0,0 +1,39 @@
+[article Glob Test
+[quickbook 1.7]
+]
+
+[section:t1_0 Test 1.0]
+
+[include glob0/*]
+
+[endsect] [/t1_0]
+
+[section:t1_1 Test 1.1]
+
+[include glob1/*]
+
+[endsect] [/t1_1]
+
+[section:t1_2 Test 1.2]
+
+[include glob1/*/*]
+
+[endsect] [/t1_2]
+
+[section:t1_3 Test 1.3]
+
+[include glob1/*/b.qbk]
+
+[endsect] [/t1_3]
+
+[section:t2_1 Test 2.1]
+
+[include glob2/*]
+
+[endsect] [/t2_1]
+
+[section:t2_2 Test 2.2]
+
+[include glob2/*/*]
+
+[endsect] [/t2_2]
diff --git a/tools/quickbook/test/include/glob1/a.qbk b/tools/quickbook/test/include/glob1/a.qbk
new file mode 100644
index 0000000000..f70f10e4db
--- /dev/null
+++ b/tools/quickbook/test/include/glob1/a.qbk
@@ -0,0 +1 @@
+A
diff --git a/tools/quickbook/test/include/glob1/glob1-1/b.qbk b/tools/quickbook/test/include/glob1/glob1-1/b.qbk
new file mode 100644
index 0000000000..223b7836fb
--- /dev/null
+++ b/tools/quickbook/test/include/glob1/glob1-1/b.qbk
@@ -0,0 +1 @@
+B
diff --git a/tools/quickbook/test/include/glob2/a.qbk b/tools/quickbook/test/include/glob2/a.qbk
new file mode 100644
index 0000000000..1605808010
--- /dev/null
+++ b/tools/quickbook/test/include/glob2/a.qbk
@@ -0,0 +1,3 @@
+A
+
+[include */*.qbk]
diff --git a/tools/quickbook/test/include/glob2/glob2-1/b.qbk b/tools/quickbook/test/include/glob2/glob2-1/b.qbk
new file mode 100644
index 0000000000..223b7836fb
--- /dev/null
+++ b/tools/quickbook/test/include/glob2/glob2-1/b.qbk
@@ -0,0 +1 @@
+B
diff --git a/tools/quickbook/test/include/in_section-1_5.gold b/tools/quickbook/test/include/in_section-1_5.gold
index 1dfeaff908..3548e2c479 100644
--- a/tools/quickbook/test/include/in_section-1_5.gold
+++ b/tools/quickbook/test/include/in_section-1_5.gold
@@ -13,6 +13,11 @@
<bridgehead renderas="sect1" id="include_in_section_include_1.container.inner.h0">
<phrase id="include_in_section_include_1.container.inner.test2"/><link linkend="include_in_section_include_1.container.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect4" id="include_in_section_include_1.container.inner.h1">
+ <phrase id="include_in_section_include_1.container.inner.simple_include"/><link
+ linkend="include_in_section_include_1.container.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
<bridgehead renderas="sect1" id="section_include.container.h0">
<phrase id="section_include.container.test1"/><link linkend="section_include.container.test1">Test1</link>
@@ -22,6 +27,11 @@
<bridgehead renderas="sect1" id="section_include.container.inner.h0">
<phrase id="section_include.container.inner.test2"/><link linkend="section_include.container.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect4" id="include_in_section_include_2.container.inner.h0">
+ <phrase id="include_in_section_include_2.container.inner.simple_include"/><link
+ linkend="include_in_section_include_2.container.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</section>
<bridgehead renderas="sect1" id="include_in_section_include_1.h0">
@@ -32,6 +42,10 @@
<bridgehead renderas="sect1" id="include_in_section_include_1.inner.h0">
<phrase id="include_in_section_include_1.inner.test2"/><link linkend="include_in_section_include_1.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="include_in_section_include_1.inner.h1">
+ <phrase id="include_in_section_include_1.inner.simple_include"/><link linkend="include_in_section_include_1.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
<bridgehead renderas="sect1" id="section_include.h0">
<phrase id="section_include.test1"/><link linkend="section_include.test1">Test1</link>
@@ -41,6 +55,10 @@
<bridgehead renderas="sect1" id="section_include.inner.h0">
<phrase id="section_include.inner.test2"/><link linkend="section_include.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="include_in_section_include_2.inner.h0">
+ <phrase id="include_in_section_include_2.inner.simple_include"/><link linkend="include_in_section_include_2.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
<section id="include_in_section_1_5.container2">
<title><link linkend="include_in_section_1_5.container2">Container2</link></title>
@@ -52,6 +70,11 @@
<bridgehead renderas="sect1" id="include_in_section_include_1.container2.inner.h0">
<phrase id="include_in_section_include_1.container2.inner.test2"/><link linkend="include_in_section_include_1.container2.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect4" id="include_in_section_include_1.container2.inner.h1">
+ <phrase id="include_in_section_include_1.container2.inner.simple_include"/><link
+ linkend="include_in_section_include_1.container2.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
<bridgehead renderas="sect1" id="section_include.container2.h0">
<phrase id="section_include.container2.test1"/><link linkend="section_include.container2.test1">Test1</link>
@@ -61,6 +84,11 @@
<bridgehead renderas="sect1" id="section_include.container2.inner.h0">
<phrase id="section_include.container2.inner.test2"/><link linkend="section_include.container2.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect4" id="include_in_section_include_2.container2.inner.h0">
+ <phrase id="include_in_section_include_2.container2.inner.simple_include"/><link
+ linkend="include_in_section_include_2.container2.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</section>
</article>
diff --git a/tools/quickbook/test/include/in_section-1_5.quickbook b/tools/quickbook/test/include/in_section-1_5.quickbook
index 9cd15f61d9..28183096a0 100644
--- a/tools/quickbook/test/include/in_section-1_5.quickbook
+++ b/tools/quickbook/test/include/in_section-1_5.quickbook
@@ -3,14 +3,14 @@
]
[section:container Container]
-[include in_section-include1.qbk]
-[include in_section-include2.qbk]
+[include in_section-inc1.quickbook]
+[include in_section-inc2.quickbook]
[endsect]
-[include in_section-include1.qbk]
-[include in_section-include2.qbk]
+[include in_section-inc1.quickbook]
+[include in_section-inc2.quickbook]
[section:container2 Container2]
-[include in_section-include1.qbk]
-[include in_section-include2.qbk]
+[include in_section-inc1.quickbook]
+[include in_section-inc2.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/in_section-1_6.gold b/tools/quickbook/test/include/in_section-1_6.gold
index 7601df44f7..3ffa601709 100644
--- a/tools/quickbook/test/include/in_section-1_6.gold
+++ b/tools/quickbook/test/include/in_section-1_6.gold
@@ -16,6 +16,10 @@
<bridgehead renderas="sect1" id="include_in_section_include_1.inner.h0">
<phrase id="include_in_section_include_1.inner.test2"/><link linkend="include_in_section_include_1.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="include_in_section_include_1.inner.h1">
+ <phrase id="include_in_section_include_1.inner.simple_include"/><link linkend="include_in_section_include_1.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</article>
<article id="section_include" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
@@ -29,6 +33,10 @@
<bridgehead renderas="sect1" id="section_include.inner.h0">
<phrase id="section_include.inner.test2"/><link linkend="section_include.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="section_include.inner.h1">
+ <phrase id="section_include.inner.simple_include"/><link linkend="section_include.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</article>
</section>
@@ -43,6 +51,10 @@
<bridgehead renderas="sect1" id="include_in_section_include_1_0.inner.h0">
<phrase id="include_in_section_include_1_0.inner.test2"/><link linkend="include_in_section_include_1_0.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="include_in_section_include_1_0.inner.h1">
+ <phrase id="include_in_section_include_1_0.inner.simple_include"/><link linkend="include_in_section_include_1_0.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</article>
<article id="section_include0" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
@@ -56,6 +68,10 @@
<bridgehead renderas="sect1" id="section_include0.inner.h0">
<phrase id="section_include0.inner.test2"/><link linkend="section_include0.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="section_include0.inner.h1">
+ <phrase id="section_include0.inner.simple_include"/><link linkend="section_include0.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</article>
<section id="include_in_section_1_6.container2">
@@ -71,6 +87,10 @@
<bridgehead renderas="sect1" id="include_in_section_include_1_1.inner.h0">
<phrase id="include_in_section_include_1_1.inner.test2"/><link linkend="include_in_section_include_1_1.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="include_in_section_include_1_1.inner.h1">
+ <phrase id="include_in_section_include_1_1.inner.simple_include"/><link
+ linkend="include_in_section_include_1_1.inner.simple_include">Simple include</link>
+ </bridgehead>
</section>
</article>
<article id="section_include1" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
@@ -84,6 +104,10 @@
<bridgehead renderas="sect1" id="section_include1.inner.h0">
<phrase id="section_include1.inner.test2"/><link linkend="section_include1.inner.test2">Test2</link>
</bridgehead>
+ <bridgehead renderas="sect3" id="section_include1.inner.h1">
+ <phrase id="section_include1.inner.simple_include"/><link linkend="section_include1.inner.simple_include">Simple
+ include</link>
+ </bridgehead>
</section>
</article>
</section>
diff --git a/tools/quickbook/test/include/in_section-1_6.quickbook b/tools/quickbook/test/include/in_section-1_6.quickbook
index 45be4411b1..686f97ec4d 100644
--- a/tools/quickbook/test/include/in_section-1_6.quickbook
+++ b/tools/quickbook/test/include/in_section-1_6.quickbook
@@ -3,14 +3,14 @@
]
[section:container Container]
-[include in_section-include1.qbk]
-[include in_section-include2.qbk]
+[include in_section-inc1.quickbook]
+[include in_section-inc2.quickbook]
[endsect]
-[include in_section-include1.qbk]
-[include in_section-include2.qbk]
+[include in_section-inc1.quickbook]
+[include in_section-inc2.quickbook]
[section:container2 Container2]
-[include in_section-include1.qbk]
-[include in_section-include2.qbk]
+[include in_section-inc1.quickbook]
+[include in_section-inc2.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/in_section-inc1.quickbook b/tools/quickbook/test/include/in_section-inc1.quickbook
new file mode 100644
index 0000000000..f74bf5dbd9
--- /dev/null
+++ b/tools/quickbook/test/include/in_section-inc1.quickbook
@@ -0,0 +1,13 @@
+[article Include in section include 1
+[quickbook 1.6]
+]
+
+[h1 Test1]
+
+[section:inner]
+
+[h1 Test2]
+
+[include include-id-inc1.quickbook]
+
+[endsect]
diff --git a/tools/quickbook/test/include/in_section-inc2.quickbook b/tools/quickbook/test/include/in_section-inc2.quickbook
new file mode 100644
index 0000000000..0a30aaa744
--- /dev/null
+++ b/tools/quickbook/test/include/in_section-inc2.quickbook
@@ -0,0 +1,14 @@
+[article Include in section include 2
+[quickbook 1.6]
+[id section_include]
+]
+
+[h1 Test1]
+
+[section:inner]
+
+[h1 Test2]
+
+[include include-id-inc1.quickbook]
+
+[endsect]
diff --git a/tools/quickbook/test/include/in_section-include1.qbk b/tools/quickbook/test/include/in_section-include1.qbk
deleted file mode 100644
index f8fab4d12a..0000000000
--- a/tools/quickbook/test/include/in_section-include1.qbk
+++ /dev/null
@@ -1,11 +0,0 @@
-[article Include in section include 1
-[quickbook 1.6]
-]
-
-[h1 Test1]
-
-[section:inner]
-
-[h1 Test2]
-
-[endsect] \ No newline at end of file
diff --git a/tools/quickbook/test/include/in_section-include2.qbk b/tools/quickbook/test/include/in_section-include2.qbk
deleted file mode 100644
index 9d8ee51698..0000000000
--- a/tools/quickbook/test/include/in_section-include2.qbk
+++ /dev/null
@@ -1,12 +0,0 @@
-[article Include in section include 2
-[quickbook 1.6]
-[id section_include]
-]
-
-[h1 Test1]
-
-[section:inner]
-
-[h1 Test2]
-
-[endsect] \ No newline at end of file
diff --git a/tools/quickbook/test/include/include_id_unbalanced-1_6.quickbook b/tools/quickbook/test/include/include_id_unbalanced-1_6.quickbook
index 0b299a606b..b1568d4f9b 100644
--- a/tools/quickbook/test/include/include_id_unbalanced-1_6.quickbook
+++ b/tools/quickbook/test/include/include_id_unbalanced-1_6.quickbook
@@ -5,7 +5,7 @@
[/ Sections start in included files ]
-[include:include1 include_id_unbalanced-inc1.qbk]
+[include:include1 include_id_unbalanced-inc1.quickbook]
[heading:x1 X1]
[endsect]
[heading:x2 X2]
@@ -16,4 +16,4 @@
[heading:x1 X1]
[section:sect2 Section 2]
[heading:x2 X2]
-[include:include2 include_id_unbalanced-inc2.qbk]
+[include:include2 include_id_unbalanced-inc2.quickbook]
diff --git a/tools/quickbook/test/include/include_id_unbalanced-inc1.qbk b/tools/quickbook/test/include/include_id_unbalanced-inc1.qbk
deleted file mode 100644
index 3170f2601d..0000000000
--- a/tools/quickbook/test/include/include_id_unbalanced-inc1.qbk
+++ /dev/null
@@ -1,4 +0,0 @@
-[section:inc1 Include 1]
-[heading:inc1_1 Heading 1]
-[include:include1a include_id_unbalanced-inc1a.qbk]
-[heading:inc1_2 Heading 2]
diff --git a/tools/quickbook/test/include/include_id_unbalanced-inc1.quickbook b/tools/quickbook/test/include/include_id_unbalanced-inc1.quickbook
new file mode 100644
index 0000000000..d3214811a0
--- /dev/null
+++ b/tools/quickbook/test/include/include_id_unbalanced-inc1.quickbook
@@ -0,0 +1,4 @@
+[section:inc1 Include 1]
+[heading:inc1_1 Heading 1]
+[include:include1a include_id_unbalanced-inc1a.quickbook]
+[heading:inc1_2 Heading 2]
diff --git a/tools/quickbook/test/include/include_id_unbalanced-inc1a.qbk b/tools/quickbook/test/include/include_id_unbalanced-inc1a.quickbook
index 1bc46b4f57..1bc46b4f57 100644
--- a/tools/quickbook/test/include/include_id_unbalanced-inc1a.qbk
+++ b/tools/quickbook/test/include/include_id_unbalanced-inc1a.quickbook
diff --git a/tools/quickbook/test/include/include_id_unbalanced-inc2.qbk b/tools/quickbook/test/include/include_id_unbalanced-inc2.qbk
deleted file mode 100644
index b71932423a..0000000000
--- a/tools/quickbook/test/include/include_id_unbalanced-inc2.qbk
+++ /dev/null
@@ -1,5 +0,0 @@
-[heading:inc2_1 Heading 1]
-[endsect]
-[heading:inc2_2 Heading 2]
-[include:include2a include_id_unbalanced-inc2a.qbk]
-[heading:inc2_3 Heading 3]
diff --git a/tools/quickbook/test/include/include_id_unbalanced-inc2.quickbook b/tools/quickbook/test/include/include_id_unbalanced-inc2.quickbook
new file mode 100644
index 0000000000..37c313cae0
--- /dev/null
+++ b/tools/quickbook/test/include/include_id_unbalanced-inc2.quickbook
@@ -0,0 +1,5 @@
+[heading:inc2_1 Heading 1]
+[endsect]
+[heading:inc2_2 Heading 2]
+[include:include2a include_id_unbalanced-inc2a.quickbook]
+[heading:inc2_3 Heading 3]
diff --git a/tools/quickbook/test/include/include_id_unbalanced-inc2a.qbk b/tools/quickbook/test/include/include_id_unbalanced-inc2a.quickbook
index 780fb40572..780fb40572 100644
--- a/tools/quickbook/test/include/include_id_unbalanced-inc2a.qbk
+++ b/tools/quickbook/test/include/include_id_unbalanced-inc2a.quickbook
diff --git a/tools/quickbook/test/include/nested_compatibility-1_5.quickbook b/tools/quickbook/test/include/nested_compatibility-1_5.quickbook
index 8276715faf..f9d78b1e46 100644
--- a/tools/quickbook/test/include/nested_compatibility-1_5.quickbook
+++ b/tools/quickbook/test/include/nested_compatibility-1_5.quickbook
@@ -2,11 +2,11 @@
[article Nested Compatibility Test]
[section Duplicate Name]
-[include nested_compatibility_inc-1_5.qbk]
-[include nested_compatibility_inc-1_6.qbk]
+[include nested_compatibility-inc-1_5.quickbook]
+[include nested_compatibility-inc-1_6.quickbook]
[endsect]
[section Duplicate Name]
-[include nested_compatibility_inc-1_5.qbk]
-[include nested_compatibility_inc-1_6.qbk]
+[include nested_compatibility-inc-1_5.quickbook]
+[include nested_compatibility-inc-1_6.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/nested_compatibility-1_6.quickbook b/tools/quickbook/test/include/nested_compatibility-1_6.quickbook
index 72c4b65512..8beeb8a064 100644
--- a/tools/quickbook/test/include/nested_compatibility-1_6.quickbook
+++ b/tools/quickbook/test/include/nested_compatibility-1_6.quickbook
@@ -2,11 +2,11 @@
[article Nested Compatibility Test]
[section Duplicate Name]
-[include nested_compatibility_inc-1_5.qbk]
-[include nested_compatibility_inc-1_6.qbk]
+[include nested_compatibility-inc-1_5.quickbook]
+[include nested_compatibility-inc-1_6.quickbook]
[endsect]
[section Duplicate Name]
-[include nested_compatibility_inc-1_5.qbk]
-[include nested_compatibility_inc-1_6.qbk]
+[include nested_compatibility-inc-1_5.quickbook]
+[include nested_compatibility-inc-1_6.quickbook]
[endsect]
diff --git a/tools/quickbook/test/include/nested_compatibility_inc-1_5.qbk b/tools/quickbook/test/include/nested_compatibility-inc-1_5.quickbook
index 3c79855a49..3c79855a49 100644
--- a/tools/quickbook/test/include/nested_compatibility_inc-1_5.qbk
+++ b/tools/quickbook/test/include/nested_compatibility-inc-1_5.quickbook
diff --git a/tools/quickbook/test/include/nested_compatibility_inc-1_6.qbk b/tools/quickbook/test/include/nested_compatibility-inc-1_6.quickbook
index d0790840cd..d0790840cd 100644
--- a/tools/quickbook/test/include/nested_compatibility_inc-1_6.qbk
+++ b/tools/quickbook/test/include/nested_compatibility-inc-1_6.quickbook
diff --git a/tools/quickbook/test/include/source_mode-1_5.gold b/tools/quickbook/test/include/source_mode-1_5.gold
index 4fbee6f63a..2ed6b7fee3 100644
--- a/tools/quickbook/test/include/source_mode-1_5.gold
+++ b/tools/quickbook/test/include/source_mode-1_5.gold
@@ -4,4 +4,6 @@
xmlns:xi="http://www.w3.org/2001/XInclude">
<title>Souce Mode Include</title>
<programlisting>void main() {}</programlisting>
+<programlisting>void main() {}
+</programlisting>
</article>
diff --git a/tools/quickbook/test/include/source_mode-1_5.quickbook b/tools/quickbook/test/include/source_mode-1_5.quickbook
index 24e52a01b9..d5df05bd4b 100644
--- a/tools/quickbook/test/include/source_mode-1_5.quickbook
+++ b/tools/quickbook/test/include/source_mode-1_5.quickbook
@@ -1,3 +1,4 @@
[article Souce Mode Include [source-mode teletype][quickbook 1.5]]
-[include source_mode-inc1.qbk] \ No newline at end of file
+[include source_mode-inc1.quickbook]
+[include source_mode-inc2.quickbook]
diff --git a/tools/quickbook/test/include/source_mode-1_6.gold b/tools/quickbook/test/include/source_mode-1_6.gold
index 6d20203e66..aa01d20575 100644
--- a/tools/quickbook/test/include/source_mode-1_6.gold
+++ b/tools/quickbook/test/include/source_mode-1_6.gold
@@ -8,4 +8,6 @@
<title>Source include with no source-mode</title>
<programlisting><phrase role="keyword">void</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
</article>
+<programlisting>void main() {}
+</programlisting>
</article>
diff --git a/tools/quickbook/test/include/source_mode-1_6.quickbook b/tools/quickbook/test/include/source_mode-1_6.quickbook
index 9eb81d9401..fc4243f3b1 100644
--- a/tools/quickbook/test/include/source_mode-1_6.quickbook
+++ b/tools/quickbook/test/include/source_mode-1_6.quickbook
@@ -1,4 +1,5 @@
[source-mode teletype][quickbook 1.6]
[article Souce Mode Include]
-[include source_mode-inc1.qbk] \ No newline at end of file
+[include source_mode-inc1.quickbook]
+[include source_mode-inc2.quickbook]
diff --git a/tools/quickbook/test/include/source_mode-inc1.qbk b/tools/quickbook/test/include/source_mode-inc1.quickbook
index 3cefbe6bc7..3cefbe6bc7 100644
--- a/tools/quickbook/test/include/source_mode-inc1.qbk
+++ b/tools/quickbook/test/include/source_mode-inc1.quickbook
diff --git a/tools/quickbook/test/include/source_mode-inc2.quickbook b/tools/quickbook/test/include/source_mode-inc2.quickbook
new file mode 100644
index 0000000000..81dba8a17d
--- /dev/null
+++ b/tools/quickbook/test/include/source_mode-inc2.quickbook
@@ -0,0 +1,3 @@
+[/ Source include with no source-mode, and no docinfo.]
+
+ void main() {}
diff --git a/tools/quickbook/test/include/template_include-1_7.gold b/tools/quickbook/test/include/template_include-1_7.gold
new file mode 100644
index 0000000000..163ff30eea
--- /dev/null
+++ b/tools/quickbook/test/include/template_include-1_7.gold
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="template_include_test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Template include test</title>
+ <bridgehead renderas="sect2" id="template_include_test.h0">
+ <phrase id="template_include_test.simple_include"/><link linkend="template_include_test.simple_include">Simple
+ include</link>
+ </bridgehead>
+<programlisting><phrase role="keyword">void</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase>
+</programlisting>
+</article>
diff --git a/tools/quickbook/test/include/template_include-1_7.quickbook b/tools/quickbook/test/include/template_include-1_7.quickbook
new file mode 100644
index 0000000000..19f58fab04
--- /dev/null
+++ b/tools/quickbook/test/include/template_include-1_7.quickbook
@@ -0,0 +1,8 @@
+[article Template include test
+[quickbook 1.7]
+]
+
+[template include_foo[name] [include [name].quickbook]]
+
+[include_foo include-id-inc1]
+[include_foo source_mode-inc2]
diff --git a/tools/quickbook/test/include2-1_6.quickbook b/tools/quickbook/test/include2-1_6.quickbook
index be78fcc7fd..66b4897ef5 100644
--- a/tools/quickbook/test/include2-1_6.quickbook
+++ b/tools/quickbook/test/include2-1_6.quickbook
@@ -3,6 +3,6 @@
[id include-test]
]
-[include include_sub-1_5.qbk]
-[include include_sub.qbk]
-[include:foo include_sub.qbk]
+[include include-inc-1_5.quickbook]
+[include include-inc.quickbook]
+[include:foo include-inc.quickbook]
diff --git a/tools/quickbook/test/include_invalid_path1-1_7-fail.quickbook b/tools/quickbook/test/include_invalid_path1-1_7-fail.quickbook
new file mode 100644
index 0000000000..fa7e8e3379
--- /dev/null
+++ b/tools/quickbook/test/include_invalid_path1-1_7-fail.quickbook
@@ -0,0 +1,5 @@
+[article Include invalid path fail
+[quickbook 1.7]
+]
+
+[include .\\\/empty-inc.quickbook]
diff --git a/tools/quickbook/test/include_invalid_path2-1_7-fail.quickbook b/tools/quickbook/test/include_invalid_path2-1_7-fail.quickbook
new file mode 100644
index 0000000000..4f26728403
--- /dev/null
+++ b/tools/quickbook/test/include_invalid_path2-1_7-fail.quickbook
@@ -0,0 +1,5 @@
+[article Include invalid path fail
+[quickbook 1.7]
+]
+
+[include .\\\\empty-inc.quickbook]
diff --git a/tools/quickbook/test/include_invalid_path3-1_7-fail.quickbook b/tools/quickbook/test/include_invalid_path3-1_7-fail.quickbook
new file mode 100644
index 0000000000..5c99c1143b
--- /dev/null
+++ b/tools/quickbook/test/include_invalid_path3-1_7-fail.quickbook
@@ -0,0 +1,5 @@
+[article Include invalid path fail
+[quickbook 1.7]
+]
+
+[include empty\[-/\]inc.quickbook]
diff --git a/tools/quickbook/test/include_unicode_glob-1_7-fail.quickbook b/tools/quickbook/test/include_unicode_glob-1_7-fail.quickbook
new file mode 100644
index 0000000000..79a9df667a
--- /dev/null
+++ b/tools/quickbook/test/include_unicode_glob-1_7-fail.quickbook
@@ -0,0 +1,5 @@
+[article Unicode glob
+[quickbook 1.7]
+]
+
+[include £*.quickbook]
diff --git a/tools/quickbook/test/include_win_path-1_6-fail.quickbook b/tools/quickbook/test/include_win_path-1_6-fail.quickbook
index 2e25423fd1..aff5fd2c30 100644
--- a/tools/quickbook/test/include_win_path-1_6-fail.quickbook
+++ b/tools/quickbook/test/include_win_path-1_6-fail.quickbook
@@ -2,4 +2,4 @@
[quickbook 1.6]
]
-[include .\empty.qbk]
+[include .\empty-inc.quickbook]
diff --git a/tools/quickbook/test/link-1_1.gold b/tools/quickbook/test/link-1_1.gold
index 70fd440f1e..955fdec767 100644
--- a/tools/quickbook/test/link-1_1.gold
+++ b/tools/quickbook/test/link-1_1.gold
@@ -31,4 +31,15 @@
as well.
</para>
</section>
+ <section id="link_tests.escaping_links">
+ <title>Escaping links</title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <ulink url="http://svn.boost.org/trac/boost/query?status=closed&amp;milestone=Boost+1.34.1">Boost
+ Trac</ulink>
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
</article>
diff --git a/tools/quickbook/test/link-1_1.quickbook b/tools/quickbook/test/link-1_1.quickbook
index 2ddb0bf218..a22ce70f08 100644
--- a/tools/quickbook/test/link-1_1.quickbook
+++ b/tools/quickbook/test/link-1_1.quickbook
@@ -39,3 +39,10 @@ whitespace when they appear together as in [link x] [link y]. Also in [link x]
as well.
[endsect]
+
+[section Escaping links]
+
+* [@http://svn.boost.org/trac/boost/query?status=closed&milestone=Boost+1.34.1
+ Boost Trac]
+
+[endsect]
diff --git a/tools/quickbook/test/link-1_6.gold b/tools/quickbook/test/link-1_6.gold
index 05af56162a..587b67be37 100644
--- a/tools/quickbook/test/link-1_6.gold
+++ b/tools/quickbook/test/link-1_6.gold
@@ -35,4 +35,15 @@
as well.
</para>
</section>
+ <section id="link_tests.escaping_links">
+ <title><link linkend="link_tests.escaping_links">Escaping links</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <ulink url="http://svn.boost.org/trac/boost/query?status=closed&amp;milestone=Boost+1.34.1">Boost
+ Trac</ulink>
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
</article>
diff --git a/tools/quickbook/test/link-1_6.quickbook b/tools/quickbook/test/link-1_6.quickbook
index 28e99dbfd0..c874a79c65 100644
--- a/tools/quickbook/test/link-1_6.quickbook
+++ b/tools/quickbook/test/link-1_6.quickbook
@@ -43,3 +43,10 @@ whitespace when they appear together as in [link x] [link y]. Also in [link x]
as well.
[endsect]
+
+[section Escaping links]
+
+* [@http://svn.boost.org/trac/boost/query?status=closed&milestone=Boost+1.34.1
+ Boost Trac]
+
+[endsect]
diff --git a/tools/quickbook/test/link-1_7-fail.quickbook b/tools/quickbook/test/link-1_7-fail.quickbook
new file mode 100644
index 0000000000..9f2db1fb1f
--- /dev/null
+++ b/tools/quickbook/test/link-1_7-fail.quickbook
@@ -0,0 +1,5 @@
+[article Link fail test
+[quickbook 1.7]
+]
+
+[link something [table]]
diff --git a/tools/quickbook/test/link-1_7-fail2.quickbook b/tools/quickbook/test/link-1_7-fail2.quickbook
new file mode 100644
index 0000000000..30ac49570b
--- /dev/null
+++ b/tools/quickbook/test/link-1_7-fail2.quickbook
@@ -0,0 +1,6 @@
+[article Link fail test
+[quickbook 1.7]
+]
+
+[/ Escapes aren't allowed in links.]
+[link '''escaped stuff''']
diff --git a/tools/quickbook/test/link-1_7.gold b/tools/quickbook/test/link-1_7.gold
index f3eec12e74..c2e5f0398d 100644
--- a/tools/quickbook/test/link-1_7.gold
+++ b/tools/quickbook/test/link-1_7.gold
@@ -21,9 +21,6 @@
<para>
<link linkend="link">description</link>
</para>
- <para>
- <link linkend="link[Hello]">description</link>
- </para>
</section>
<section id="link_tests.side_by_side_links">
<title><link linkend="link_tests.side_by_side_links">Side-by-side links</link></title>
@@ -40,5 +37,20 @@
<para>
<link linkend="blah.x2">Templated link?</link>
</para>
+ <para>
+ <link linkend="something-one-two">something-one-two</link> <link linkend="something-one-two">something-one-two</link>
+ <link linkend="something-one-two">something-one-two</link>
+ </para>
+ </section>
+ <section id="link_tests.escaping_links">
+ <title><link linkend="link_tests.escaping_links">Escaping links</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <ulink url="http://svn.boost.org/trac/boost/query?status=closed&amp;milestone=Boost+1.34.1">Boost
+ Trac</ulink>
+ </simpara>
+ </listitem>
+ </itemizedlist>
</section>
</article>
diff --git a/tools/quickbook/test/link-1_7.quickbook b/tools/quickbook/test/link-1_7.quickbook
index 8e146b9706..acfba31889 100644
--- a/tools/quickbook/test/link-1_7.quickbook
+++ b/tools/quickbook/test/link-1_7.quickbook
@@ -28,9 +28,6 @@
[link link[/ comment]description]
-[link link\[Hello\] description]
-
-
[endsect]
[section Side-by-side links]
@@ -49,4 +46,16 @@ as well.
[template thing[]x]
[link blah.[thing]2 Templated link?]
+[template link_in_template[x y] [link something-[x]-[y]]]
+[link_in_template one two]
+[link_in_template one..two]
+[link_in_template one .. two ]
+
+[endsect]
+
+[section Escaping links]
+
+* [@http://svn.boost.org/trac/boost/query?status=closed&milestone=Boost+1.34.1
+ Boost Trac]
+
[endsect]
diff --git a/tools/quickbook/test/list_test-1_6-fail.quickbook b/tools/quickbook/test/list_test-1_6-fail.quickbook
new file mode 100644
index 0000000000..7ca8a93993
--- /dev/null
+++ b/tools/quickbook/test/list_test-1_6-fail.quickbook
@@ -0,0 +1,46 @@
+[article List Test
+[quickbook 1.6]
+]
+
+Markup in list:
+
+* [table [[Heading]][[Cell]]]
+* [heading The heading for a list item]
+
+ The content of the list item.
+
+[section Paragraphs in list items]
+
+* A1
+
+ A2
+
+* B1
+ * C1
+
+ C2
+
+ B2
+
+* D1
+ * E1
+
+ E2
+
+ E3
+
+ D2
+
+[endsect]
+
+[section Indented code blocks in lists]
+
+* A
+
+ B
+ C
+* D
+
+ E
+* F
+[endsect]
diff --git a/tools/quickbook/test/list_test-1_6.gold b/tools/quickbook/test/list_test-1_6.gold
index f0055257ab..b6d3775a68 100644
--- a/tools/quickbook/test/list_test-1_6.gold
+++ b/tools/quickbook/test/list_test-1_6.gold
@@ -310,40 +310,51 @@
</simpara>
</listitem>
<listitem>
+ <informaltable frame="all">
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ Cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
<simpara>
- <informaltable frame="all">
- <tgroup cols="1">
- <thead>
- <row>
- <entry>
- <para>
- Heading
- </para>
- </entry>
- </row>
- </thead>
- <tbody>
- <row>
- <entry>
- <para>
- Cell
- </para>
- </entry>
- </row>
- </tbody>
- </tgroup>
- </informaltable>
+ Some text.
</simpara>
</listitem>
<listitem>
<simpara>
- <bridgehead renderas="sect2" id="list_test.h0">
- <phrase id="list_test.the_heading_for_a_list_item"/><link linkend="list_test.the_heading_for_a_list_item">The
- heading for a list item</link>
- </bridgehead>
- <para>
- The content of the list item.
- </para>
+ [section Doesn't expand] Blah, blah. [endsect]
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Paragraph cheat 1.
+ </simpara>
+ <simpara>
+ Paragraph cheat 2.
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Paragraph cheat 3.
+ </simpara>
+ <simpara>
+ Paragraph cheat 4.
</simpara>
</listitem>
</itemizedlist>
@@ -426,99 +437,12 @@
<itemizedlist>
<listitem>
<simpara>
- One
- </simpara>
- </listitem>
- </itemizedlist>
- <section id="list_test.list_immediately_following_mark0.nested_section">
- <title><link linkend="list_test.list_immediately_following_mark0.nested_section">Nested
- section</link></title>
- <itemizedlist>
- <listitem>
- <simpara>
- Two
- </simpara>
- </listitem>
- </itemizedlist>
- </section>
- </section>
- <section id="list_test.paragraphs_in_list_items">
- <title><link linkend="list_test.paragraphs_in_list_items">Paragraphs in list
- items</link></title>
- <itemizedlist>
- <listitem>
- <simpara>
- A1
- <para>
- A2
- </para>
- </simpara>
- </listitem>
- <listitem>
- <simpara>
- B1
- <itemizedlist>
- <listitem>
- <simpara>
- C1
- <para>
- C2
- </para>
- </simpara>
- </listitem>
- </itemizedlist>
- <para>
- B2
- </para>
- </simpara>
- </listitem>
- <listitem>
- <simpara>
- D1
- <itemizedlist>
- <listitem>
- <simpara>
- E1
- <para>
- E2
- </para>
- <para>
- E3
- </para>
- </simpara>
- </listitem>
- </itemizedlist>
- <para>
- D2
- </para>
- </simpara>
- </listitem>
- </itemizedlist>
- </section>
- <section id="list_test.indented_code_blocks_in_lists">
- <title><link linkend="list_test.indented_code_blocks_in_lists">Indented code
- blocks in lists</link></title>
- <itemizedlist>
- <listitem>
- <simpara>
- A
-<programlisting><phrase role="identifier">B</phrase>
-</programlisting>
- <para>
- C
- </para>
- </simpara>
- </listitem>
- <listitem>
- <simpara>
- D
-<programlisting><phrase role="identifier">E</phrase>
-</programlisting>
+ One [section Nested section]
</simpara>
</listitem>
<listitem>
<simpara>
- F
+ Two [endsect]
</simpara>
</listitem>
</itemizedlist>
diff --git a/tools/quickbook/test/list_test-1_6.quickbook b/tools/quickbook/test/list_test-1_6.quickbook
index dbba74258e..fe1e187568 100644
--- a/tools/quickbook/test/list_test-1_6.quickbook
+++ b/tools/quickbook/test/list_test-1_6.quickbook
@@ -74,9 +74,17 @@ Markup in list:
* ["Quoted]
* [footnote Footnote]
* [table [[Heading]][[Cell]]]
-* [heading The heading for a list item]
-
- The content of the list item.
+ Some text.
+* [section Doesn't expand]
+ Blah, blah.
+ [endsect]
+
+* Paragraph cheat 1.
+ [block]
+ Paragraph cheat 2.
+* Paragraph cheat 3.
+ [block]
+ Paragraph cheat 4.
Don't end list with comment 1:
@@ -108,40 +116,5 @@ Don't end list with comment 2:
[section Nested section]
* Two
[endsect]
-[endsect]
-
-[section Paragraphs in list items]
-
-* A1
-
- A2
-
-* B1
- * C1
-
- C2
-
- B2
-
-* D1
- * E1
-
- E2
-
- E3
-
- D2
-
-[endsect]
-
-[section Indented code blocks in lists]
-
-* A
-
- B
- C
-* D
- E
-* F
[endsect]
diff --git a/tools/quickbook/test/list_test-1_7-fail1.quickbook b/tools/quickbook/test/list_test-1_7-fail1.quickbook
new file mode 100644
index 0000000000..c8beed1c5c
--- /dev/null
+++ b/tools/quickbook/test/list_test-1_7-fail1.quickbook
@@ -0,0 +1,9 @@
+[article List Fail Test 1
+[quickbook 1.7]
+]
+
+[section List immediately following markup]
+* One
+* Two
+* Three
+[endsect]
diff --git a/tools/quickbook/test/list_test-1_7.gold b/tools/quickbook/test/list_test-1_7.gold
new file mode 100644
index 0000000000..dec074c74b
--- /dev/null
+++ b/tools/quickbook/test/list_test-1_7.gold
@@ -0,0 +1,479 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="list_test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $" xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>List Test</title>
+ <para>
+ Simple list:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para>
+ Simple list:
+ </para>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ </simpara>
+ </listitem>
+ </orderedlist>
+ <para>
+ Two level list:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para>
+ Two level list:
+ </para>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ A
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ </orderedlist>
+ <para>
+ Three level list:
+ </para>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ A
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ C
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ D
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ E
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ F
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ G
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ H
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ </orderedlist>
+ <para>
+ Three level list:
+ </para>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ A
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B
+ <orderedlist>
+ <listitem>
+ <simpara>
+ C
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ D
+ </simpara>
+ </listitem>
+ </orderedlist>
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ G
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ H
+ </simpara>
+ </listitem>
+ </orderedlist>
+ <para>
+ Inconsistent Indentation:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A1
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ B1
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B2
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ C1
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ C2
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B3
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B4
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B5
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ C3
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B6
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ A2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ A3
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para>
+ Markup in list:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <emphasis role="bold">Bold</emphasis>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ <emphasis role="bold">Bold</emphasis>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ <quote>Quoted</quote>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ <footnote id="list_test.f0">
+ <para>
+ Footnote
+ </para>
+ </footnote>
+ </simpara>
+ </listitem>
+ <listitem>
+ <informaltable frame="all">
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ Cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ </listitem>
+ <listitem>
+ <bridgehead renderas="sect2" id="list_test.h0">
+ <phrase id="list_test.the_heading_for_a_list_item"/><link linkend="list_test.the_heading_for_a_list_item">The
+ heading for a list item</link>
+ </bridgehead>
+ <simpara>
+ The content of the list item.
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para>
+ Don't end list with comment 1:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A1
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ A2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ A3
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ A4
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para>
+ Don't end list with comment 2:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A1
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ B1
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B3
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <section id="list_test.paragraphs_in_list_items">
+ <title><link linkend="list_test.paragraphs_in_list_items">Paragraphs in list
+ items</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A1
+ </simpara>
+ <simpara>
+ A2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ B1
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ C1
+ </simpara>
+ <simpara>
+ C2
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ <simpara>
+ B2
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ D1
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ E1
+ </simpara>
+ <simpara>
+ E2
+ </simpara>
+ <simpara>
+ E3
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ <simpara>
+ D2
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
+ <section id="list_test.indented_code_blocks_in_lists">
+ <title><link linkend="list_test.indented_code_blocks_in_lists">Indented code
+ blocks in lists</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ A
+ </simpara>
+<programlisting><phrase role="identifier">B</phrase>
+</programlisting>
+ <simpara>
+ C
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ D
+ </simpara>
+<programlisting><phrase role="identifier">E</phrase>
+</programlisting>
+ </listitem>
+ <listitem>
+ <simpara>
+ F
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
+</article>
diff --git a/tools/quickbook/test/list_test-1_7.quickbook b/tools/quickbook/test/list_test-1_7.quickbook
new file mode 100644
index 0000000000..8eac6a7182
--- /dev/null
+++ b/tools/quickbook/test/list_test-1_7.quickbook
@@ -0,0 +1,134 @@
+[article List Test
+[quickbook 1.7]
+]
+
+Simple list:
+
+* A
+* B
+
+Simple list:
+
+#A
+#B
+
+Two level list:
+
+* A
+ * A
+ * B
+* B
+ * A
+ * B
+
+Two level list:
+
+# A
+ * A
+ * B
+# B
+ * A
+ * B
+
+Three level list:
+
+# A
+ * A
+ * B
+ * C
+ * D
+ * E
+ * F
+ * G
+ * H
+
+Three level list:
+
+# A
+ * A
+ * B
+ # C
+ # D
+# G
+# H
+
+Inconsistent Indentation:
+
+* A1
+ * B1
+ * B2
+ * C1
+ * C2
+ * B3
+ * B4
+ * B5
+ * C3
+ * B6
+ * A2
+* A3
+
+Markup in list:
+
+* *Bold*
+* [*Bold]
+* ["Quoted]
+* [footnote Footnote]
+* [table [[Heading]][[Cell]]]
+* [heading The heading for a list item]
+
+ The content of the list item.
+
+Don't end list with comment 1:
+
+* A1
+* A2
+
+[/ End list?]
+* A3
+* A4
+
+Don't end list with comment 2:
+
+* A1
+ * B1
+
+[/ End list?]
+ * B2
+ * B3
+
+[section Paragraphs in list items]
+
+* A1
+
+ A2
+
+* B1
+ * C1
+
+ C2
+
+ B2
+
+* D1
+ * E1
+
+ E2
+
+ E3
+
+ D2
+
+[endsect]
+
+[section Indented code blocks in lists]
+
+* A
+
+ B
+ C
+* D
+
+ E
+* F
+
+[endsect]
diff --git a/tools/quickbook/test/mismatched_brackets3-1_1.gold b/tools/quickbook/test/mismatched_brackets3-1_1.gold
new file mode 100644
index 0000000000..a7ab11e833
--- /dev/null
+++ b/tools/quickbook/test/mismatched_brackets3-1_1.gold
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="mismatched_brackets" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Mismatched brackets</title>
+ <section id="mismatched_brackets.something">
+ <title>Something</title>
+ </section>
+ <para>
+ ]
+ </para>
+</article>
diff --git a/tools/quickbook/test/mismatched_brackets3-1_1.quickbook b/tools/quickbook/test/mismatched_brackets3-1_1.quickbook
new file mode 100644
index 0000000000..dadd42692a
--- /dev/null
+++ b/tools/quickbook/test/mismatched_brackets3-1_1.quickbook
@@ -0,0 +1,6 @@
+[article Mismatched brackets]
+
+[section Something]
+
+[endsect]
+]
diff --git a/tools/quickbook/test/python/include_glob.qbk b/tools/quickbook/test/python/include_glob.qbk
new file mode 100644
index 0000000000..6e1f5248a4
--- /dev/null
+++ b/tools/quickbook/test/python/include_glob.qbk
@@ -0,0 +1,11 @@
+[/
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
+[quickbook 1.7]
+[article Include Path]
+[include ?.qbk]
diff --git a/tools/quickbook/test/python/include_glob_deps.txt b/tools/quickbook/test/python/include_glob_deps.txt
new file mode 100644
index 0000000000..82fb66e68e
--- /dev/null
+++ b/tools/quickbook/test/python/include_glob_deps.txt
@@ -0,0 +1,6 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+include_glob.qbk
+sub1/a.qbk
+sub2/b.qbk
diff --git a/tools/quickbook/test/python/include_glob_locs.txt b/tools/quickbook/test/python/include_glob_locs.txt
new file mode 100644
index 0000000000..ccab607e02
--- /dev/null
+++ b/tools/quickbook/test/python/include_glob_locs.txt
@@ -0,0 +1,9 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
++ include_glob.qbk
+g ?.qbk
+g sub1/?.qbk
++ sub1/a.qbk
+g sub2/?.qbk
++ sub2/b.qbk
diff --git a/tools/quickbook/test/python/include_path.qbk b/tools/quickbook/test/python/include_path.qbk
index c9a4dcd981..3af3b37043 100644
--- a/tools/quickbook/test/python/include_path.qbk
+++ b/tools/quickbook/test/python/include_path.qbk
@@ -1,3 +1,11 @@
+[/
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
[quickbook 1.5]
[article Include Path]
[include a.qbk]
diff --git a/tools/quickbook/test/python/include_path_deps.txt b/tools/quickbook/test/python/include_path_deps.txt
index 994d776ee6..86875d79be 100644
--- a/tools/quickbook/test/python/include_path_deps.txt
+++ b/tools/quickbook/test/python/include_path_deps.txt
@@ -1,3 +1,6 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
include_path.qbk
sub1/a.qbk
sub2/b.qbk
diff --git a/tools/quickbook/test/python/include_path_locs.txt b/tools/quickbook/test/python/include_path_locs.txt
index aaffb8d13f..7926db6156 100644
--- a/tools/quickbook/test/python/include_path_locs.txt
+++ b/tools/quickbook/test/python/include_path_locs.txt
@@ -1,3 +1,6 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ include_path.qbk
- a.qbk
+ sub1/a.qbk
diff --git a/tools/quickbook/test/python/missing_relative.qbk b/tools/quickbook/test/python/missing_relative.qbk
index 8fdc0ee049..ff96b10881 100644
--- a/tools/quickbook/test/python/missing_relative.qbk
+++ b/tools/quickbook/test/python/missing_relative.qbk
@@ -1,3 +1,11 @@
+[/
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
[quickbook 1.5]
[article Missing Relative]
diff --git a/tools/quickbook/test/python/missing_relative_deps.txt b/tools/quickbook/test/python/missing_relative_deps.txt
index a9de670365..fb8f27fee3 100644
--- a/tools/quickbook/test/python/missing_relative_deps.txt
+++ b/tools/quickbook/test/python/missing_relative_deps.txt
@@ -1 +1,4 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
missing_relative.qbk
diff --git a/tools/quickbook/test/python/missing_relative_locs.txt b/tools/quickbook/test/python/missing_relative_locs.txt
index 69b51f28f4..ac85cec10f 100644
--- a/tools/quickbook/test/python/missing_relative_locs.txt
+++ b/tools/quickbook/test/python/missing_relative_locs.txt
@@ -1,3 +1,6 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ missing_relative.qbk
- ../missing.qbk
- missing-dir/x.qbk
diff --git a/tools/quickbook/test/python/output-deps.py b/tools/quickbook/test/python/output-deps.py
index 7b77c27d84..875a808db9 100644
--- a/tools/quickbook/test/python/output-deps.py
+++ b/tools/quickbook/test/python/output-deps.py
@@ -1,5 +1,9 @@
#!/usr/bin/env python
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+
import sys, os, subprocess, tempfile, re
def main(args, directory):
@@ -20,6 +24,10 @@ def main(args, directory):
deps_gold = 'include_path_deps.txt',
locations_gold = 'include_path_locs.txt',
input_path = ['sub1', 'sub2'])
+ failures += run_quickbook(quickbook_command, 'include_glob.qbk',
+ deps_gold = 'include_glob_deps.txt',
+ locations_gold = 'include_glob_locs.txt',
+ input_path = ['sub1', 'sub2'])
if failures == 0:
print "Success"
@@ -110,6 +118,7 @@ def load_dependencies(filename, adjust_paths = False):
dependencies = set()
f = open(filename, 'r')
for path in f:
+ if path[0] == '#': continue
if adjust_paths:
path = os.path.realpath(path)
if path in dependencies:
@@ -118,21 +127,34 @@ def load_dependencies(filename, adjust_paths = False):
return dependencies
def load_locations(filename, adjust_paths = False):
- line_matcher = re.compile("^([+-]) (.*)$")
+    line_matcher = re.compile("^([-+g]) (.*)$")
dependencies = {}
f = open(filename, 'r')
+ glob = None
+ globs = {}
for line in f:
+ if line[0] == '#': continue
m = line_matcher.match(line)
- if not m:
- raise Exception("Invalid dependency file: %1s" % filename)
- found = m.group(1) == '+'
+
path = m.group(2)
if adjust_paths:
path = os.path.realpath(path)
- if path in dependencies:
- raise Exception("Duplicate path (%1s) in %2s" % (path, filename))
- dependencies[path] = found
- return dependencies
+
+ if not m:
+ raise Exception("Invalid dependency file: %1s" % filename)
+ if m.group(1) == 'g':
+ globs[path] = []
+ glob = path
+ elif glob:
+ if m.group(1) != '+':
+ raise Exception("Negative match in glob.")
+ globs[glob].append(path)
+ else:
+ found = m.group(1) == '+'
+ if path in dependencies:
+ raise Exception("Duplicate path (%1s) in %2s" % (path, filename))
+ dependencies[path] = found
+ return { 'dependencies': dependencies, 'globs': globs }
def temp_filename(extension):
file = tempfile.mkstemp(suffix = extension)
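For reference, the new "g"/"+" lines in the *_locs.txt files above form a small line-oriented format: lines starting with '#' are license comments, "g <pattern>" records a glob that the document expanded, "+ <path>" records a file that was found (attached to the most recent glob, if any), and "- <path>" records a file that was looked for but not found. The following is a minimal standalone sketch of a parser for that format, written against the conventions visible above; it is not the test driver itself, the name parse_locations is illustrative, and it validates each line before reading the match groups.

    import re

    LOCATION_LINE = re.compile(r"^([-+g]) (.*)$")

    def parse_locations(filename):
        # '#' - comment (e.g. the license header)
        # 'g' - a glob pattern that was expanded
        # '+' - a file that was found; belongs to the last glob, if any
        # '-' - a file that was looked for but not found
        dependencies = {}
        globs = {}
        current_glob = None
        with open(filename) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                m = LOCATION_LINE.match(line.rstrip('\n'))
                if not m:
                    raise ValueError("Invalid location file: %s" % filename)
                mark, path = m.group(1), m.group(2)
                if mark == 'g':
                    globs[path] = []
                    current_glob = path
                elif current_glob is not None:
                    if mark != '+':
                        raise ValueError("Negative match inside a glob")
                    globs[current_glob].append(path)
                else:
                    if path in dependencies:
                        raise ValueError("Duplicate path %s in %s" % (path, filename))
                    dependencies[path] = (mark == '+')
        return {'dependencies': dependencies, 'globs': globs}

Run against include_glob_locs.txt above, this yields one plain dependency (include_glob.qbk) plus three glob patterns, with each matched file attached to the glob it was listed under.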
diff --git a/tools/quickbook/test/python/sub1/a.qbk b/tools/quickbook/test/python/sub1/a.qbk
index 7898192261..f3abe6ebf0 100644
--- a/tools/quickbook/test/python/sub1/a.qbk
+++ b/tools/quickbook/test/python/sub1/a.qbk
@@ -1 +1,9 @@
+[/
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
a
diff --git a/tools/quickbook/test/python/sub2/b.qbk b/tools/quickbook/test/python/sub2/b.qbk
index 6178079822..9b4336bdc3 100644
--- a/tools/quickbook/test/python/sub2/b.qbk
+++ b/tools/quickbook/test/python/sub2/b.qbk
@@ -1 +1,9 @@
+[/
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
b
diff --git a/tools/quickbook/test/python/svg_missing.qbk b/tools/quickbook/test/python/svg_missing.qbk
index 2b25c2f3b6..1b54ece45e 100644
--- a/tools/quickbook/test/python/svg_missing.qbk
+++ b/tools/quickbook/test/python/svg_missing.qbk
@@ -1,3 +1,11 @@
+[/
+ Copyright 2012-2013 Daniel James
+
+ Distributed under the Boost Software License, Version 1.0.
+ (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+]
+
[article Dependencies for missing svg]
[$missing.svg]
diff --git a/tools/quickbook/test/python/svg_missing_deps.txt b/tools/quickbook/test/python/svg_missing_deps.txt
index 25d1c0e502..91487e85e9 100644
--- a/tools/quickbook/test/python/svg_missing_deps.txt
+++ b/tools/quickbook/test/python/svg_missing_deps.txt
@@ -1 +1,4 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
svg_missing.qbk
diff --git a/tools/quickbook/test/python/svg_missing_locs.txt b/tools/quickbook/test/python/svg_missing_locs.txt
index 379d4142f0..0f4c9b8061 100644
--- a/tools/quickbook/test/python/svg_missing_locs.txt
+++ b/tools/quickbook/test/python/svg_missing_locs.txt
@@ -1,2 +1,5 @@
+# Copyright 2012-2013 Daniel James
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
- html/missing.svg
+ svg_missing.qbk
diff --git a/tools/quickbook/test/role-1_7-fail.quickbook b/tools/quickbook/test/role-1_7-fail.quickbook
new file mode 100644
index 0000000000..b01d16e23c
--- /dev/null
+++ b/tools/quickbook/test/role-1_7-fail.quickbook
@@ -0,0 +1,5 @@
+[article Quickbook Role Fail Test
+[quickbook 1.7]
+]
+
+[role]
diff --git a/tools/quickbook/test/role-1_7.gold b/tools/quickbook/test/role-1_7.gold
new file mode 100644
index 0000000000..4cd7d5bb79
--- /dev/null
+++ b/tools/quickbook/test/role-1_7.gold
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="quickbook_role_test" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Quickbook Role Test</title>
+ <para>
+ <phrase role="keyword">Keyword</phrase> <phrase role="keyword"></phrase>
+ </para>
+ <para>
+ road <phrase role="red">Red</phrase> <phrase role="red"></phrase> <phrase role="three-colours-red">Three
+ Colours Red</phrase> <phrase role="red-road">Red Road</phrase>
+ </para>
+</article>
diff --git a/tools/quickbook/test/role-1_7.quickbook b/tools/quickbook/test/role-1_7.quickbook
new file mode 100644
index 0000000000..17bf449522
--- /dev/null
+++ b/tools/quickbook/test/role-1_7.quickbook
@@ -0,0 +1,13 @@
+[article Quickbook Role Test
+[quickbook 1.7]
+]
+
+[role keyword Keyword] [role keyword]
+
+[template r red]
+[template r2 road]
+[r2]
+[role [r] Red] [role [r]]
+[role three-colours-[r] Three Colours Red]
+[role [r]-[r2] Red Road]
+
diff --git a/tools/quickbook/test/section-1_7.gold b/tools/quickbook/test/section-1_7.gold
new file mode 100644
index 0000000000..33c67675aa
--- /dev/null
+++ b/tools/quickbook/test/section-1_7.gold
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="section_id_1_7" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Section Id 1.7</title>
+ <section id="section_id_1_7.quickbook_section_no_id_test">
+ <title><link linkend="section_id_1_7.quickbook_section_no_id_test">Quickbook
+ section no id test</link></title>
+ </section>
+ <section id="section_id_1_7.id_test1">
+ <title><link linkend="section_id_1_7.id_test1">Quickbook section id test</link></title>
+ </section>
+ <section id="section_id_1_7.id_test2">
+ <title><link linkend="section_id_1_7.id_test2">Quickbook section id test</link></title>
+ </section>
+ <section id="section_id_1_7.id_test3_quickbook_section_id_te">
+ <title><link linkend="section_id_1_7.id_test3_quickbook_section_id_te">id_test3
+ Quickbook section id test</link></title>
+ </section>
+ <section id="section_id_1_7.--">
+ <title><link linkend="section_id_1_7.--">Quickbook section odd id test</link></title>
+ </section>
+ <section id="section_id_1_7.sect-abc">
+ <title><link linkend="section_id_1_7.sect-abc">Section with template in id</link></title>
+ </section>
+</article>
diff --git a/tools/quickbook/test/section-1_7.quickbook b/tools/quickbook/test/section-1_7.quickbook
new file mode 100644
index 0000000000..925e34b2ce
--- /dev/null
+++ b/tools/quickbook/test/section-1_7.quickbook
@@ -0,0 +1,18 @@
+[article Section Id 1.7
+ [quickbook 1.7]
+]
+
+[section Quickbook section no id test]
+[endsect]
+[section:id_test1 Quickbook section id test]
+[endsect]
+[section :id_test2 Quickbook section id test]
+[endsect]
+[section: id_test3 Quickbook section id test]
+[endsect]
+[section:-- Quickbook section odd id test]
+[endsect]
+
+[template thing[] abc]
+[section:sect-[thing] Section with template in id]
+[endsect]
diff --git a/tools/quickbook/test/snippets/Jamfile.v2 b/tools/quickbook/test/snippets/Jamfile.v2
index 4dfd0d0e8f..6765dff9e4 100644
--- a/tools/quickbook/test/snippets/Jamfile.v2
+++ b/tools/quickbook/test/snippets/Jamfile.v2
@@ -6,7 +6,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project quickook/tests/snippets ;
+project quickbook/test/snippets ;
import quickbook-testing : quickbook-test quickbook-error-test ;
diff --git a/tools/quickbook/test/snippets/unbalanced_snippet1.cpp b/tools/quickbook/test/snippets/unbalanced_snippet1.cpp
index ec49a78878..ee5983912a 100644
--- a/tools/quickbook/test/snippets/unbalanced_snippet1.cpp
+++ b/tools/quickbook/test/snippets/unbalanced_snippet1.cpp
@@ -1,3 +1,11 @@
+/*=============================================================================
+ Copyright (c) 2011 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
//[unclosed
-int main() {}
\ No newline at end of file
+int main() {}
diff --git a/tools/quickbook/test/snippets/unbalanced_snippet2.cpp b/tools/quickbook/test/snippets/unbalanced_snippet2.cpp
index b0575171ea..238b976357 100644
--- a/tools/quickbook/test/snippets/unbalanced_snippet2.cpp
+++ b/tools/quickbook/test/snippets/unbalanced_snippet2.cpp
@@ -1 +1,10 @@
-//]
\ No newline at end of file
+//]
+
+/*=============================================================================
+ Copyright (c) 2011 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
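The two unbalanced_snippet*.cpp files above exercise quickbook's handling of code-snippet markers: the first opens a snippet with "//[unclosed" and never closes it, while the second contains a stray "//]" with no opening marker. As a rough illustration only (not part of the test suite, and assuming just the simple "//[name" ... "//]" marker form used in these two files), a balance check over such markers could be sketched as:

    import re

    SNIPPET_OPEN = re.compile(r"^\s*//\[\s*(\w*)")
    SNIPPET_CLOSE = re.compile(r"^\s*//\]")

    def check_snippet_markers(text):
        # Report unbalanced //[ ... //] snippet markers in a source file.
        problems, open_snippets = [], []
        for lineno, line in enumerate(text.splitlines(), 1):
            m = SNIPPET_OPEN.match(line)
            if m:
                open_snippets.append((lineno, m.group(1)))
            elif SNIPPET_CLOSE.match(line):
                if open_snippets:
                    open_snippets.pop()
                else:
                    problems.append("line %d: '//]' with no matching '//['" % lineno)
        for lineno, name in open_snippets:
            problems.append("line %d: snippet '%s' is never closed" % (lineno, name))
        return problems

Against unbalanced_snippet1.cpp this reports the unclosed "unclosed" snippet; against unbalanced_snippet2.cpp it reports the stray closing marker.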
diff --git a/tools/quickbook/test/source_mode-1_7.gold b/tools/quickbook/test/source_mode-1_7.gold
index e40bae8608..2cc454a6dd 100644
--- a/tools/quickbook/test/source_mode-1_7.gold
+++ b/tools/quickbook/test/source_mode-1_7.gold
@@ -26,7 +26,14 @@
<itemizedlist>
<listitem>
<simpara>
- Sadly this doesn't work.
+ <code><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase
+ role="special">()</phrase> <phrase role="special">{}</phrase></code>
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ <code><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase
+ role="special">()</phrase> <phrase role="special">{}</phrase></code>
</simpara>
</listitem>
<listitem>
@@ -35,4 +42,22 @@
</simpara>
</listitem>
</itemizedlist>
+ <para>
+ <code><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase
+ role="special">()</phrase> <phrase role="special">{}</phrase></code> and <code><phrase
+ role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase>
+ <phrase role="special">{}</phrase></code> should both be C++ highlighted. <code>int
+ main() {}</code> shouldn't be, but <code><phrase role="keyword">int</phrase>
+ <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase
+ role="special">{}</phrase></code> should.
+ </para>
+ <para>
+ <code><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase
+ role="special">()</phrase> <phrase role="special">{}</phrase></code>. Not highlighted:
+ <code>int main() {}</code>.
+ </para>
+ <section id="source_mode_test.cpp">
+ <title><link linkend="source_mode_test.cpp">C++ section</link></title>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ </section>
</article>
diff --git a/tools/quickbook/test/source_mode-1_7.quickbook b/tools/quickbook/test/source_mode-1_7.quickbook
index f5b9ee37fe..9388867c80 100644
--- a/tools/quickbook/test/source_mode-1_7.quickbook
+++ b/tools/quickbook/test/source_mode-1_7.quickbook
@@ -8,5 +8,17 @@
[!c++]`int main() {}` but `plain text`.
[!c++]
-* Sadly this doesn't work.
* `int main() {}`
+* `int main() {}`
+* [teletype] `int main() {}` [/ shouldn't be highlighted]
+
+[!c++]
+`int main() {}` and `int main() {}` should both be C++ highlighted.
+[!teletype]`int main() {}` shouldn't be, but `int main() {}` should.
+
+[!c++] `int main() {}`. Not highlighted: `int main() {}`.
+
+[!c++]
+[section:cpp C++ section]
+``int main() {}``
+[endsect]
diff --git a/tools/quickbook/test/table-1_7.gold b/tools/quickbook/test/table-1_7.gold
new file mode 100644
index 0000000000..6b8eafabf7
--- /dev/null
+++ b/tools/quickbook/test/table-1_7.gold
@@ -0,0 +1,520 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="table_tests" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Table 1.7</title>
+ <table frame="all" id="table_tests.table1">
+ <title>Table 1</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.table_2">
+ <title>Table 2</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <informaltable frame="all">
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <informaltable frame="all" id="table_tests.table4">
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <informaltable frame="all" id="table_tests.-table5-">
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <table frame="all" id="table_tests.title">
+ <title>Title</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.title0">
+ <title>Title</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.title_containing_a_comment">
+ <title>Title containing a comment</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.title1">
+ <title>Title</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <informaltable frame="all">
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ Cell 1
+ </para>
+ </entry>
+ </row>
+ <row>
+ <entry>
+ <para>
+ Cell 2
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <table frame="all" id="table_tests.title_on_multiple_lines_with_bol">
+ <title>Title on multiple lines with <emphasis role="bold">bold</emphasis> text?</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ Cell 1
+ </para>
+ </entry>
+ </row>
+ <row>
+ <entry>
+ <para>
+ Cell 2
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <section id="table_tests.section1">
+ <title><link linkend="table_tests.section1">Section 1</link></title>
+ <table frame="all" id="table_tests.section1.table1">
+ <title>Table 1</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Heading
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ cell
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.section1.a_b">
+ <title>A &amp; B</title>
+ <tgroup cols="2">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ A
+ </para>
+ </entry>
+ <entry>
+ <para>
+ B
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ a
+ </para>
+ </entry>
+ <entry>
+ <para>
+ b
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.section1.empty_table">
+ <title>Empty Table</title>
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.section1.table_with_an_empty_cell">
+ <title>Table with an empty cell</title>
+ <tgroup cols="1">
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ x
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.section1.indentation">
+ <title>Indentation</title>
+ <tgroup cols="2">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Header 1. Paragraph 1
+ </para>
+ <para>
+ Header 1. Paragraph 2
+ </para>
+ </entry>
+ <entry>
+ <para>
+ Header 2
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ Row 1. Cell 1.
+ </para>
+ </entry>
+ <entry>
+ <para>
+ Row 1. Cell 2.
+ </para>
+ <para>
+ Row 1. Cell 2. Paragraph 2.
+ </para>
+ </entry>
+ </row>
+ <row>
+ <entry>
+ <para>
+ Row 2. Cell 1.
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ Row 2. Cell 1. List item 1.
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Row 2. Cell 1. List item 2.
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </entry>
+ <entry>
+ <para>
+ Row 2. Cell 2.
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ Row 2. Cell 2. List item 1.
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ Row 2. Cell 2. List item 2.
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.section1.nested_tables">
+ <title>Nested Tables</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ Header 1
+ </para>
+ </entry>
+ <entry>
+ <para>
+ Header 2
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <table frame="all" id="table_tests.section1.inner_table">
+ <title>Inner Table</title>
+ <tgroup cols="2">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ 1.1
+ </para>
+ </entry>
+ <entry>
+ <para>
+ 1.2
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ 2.1
+ </para>
+ </entry>
+ <entry>
+ <para>
+ 2.2
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ </entry>
+ </row>
+ <row>
+ <entry>
+ <para>
+ Something.
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <anchor id="id1"/>
+ <table frame="all" id="table_tests.section1.table_with_anchors">
+ <title>Table with anchors</title>
+ <tgroup cols="1">
+ <thead>
+ <row>
+ <entry>
+ <para>
+ <anchor id="id2"/>a<anchor id="id3"/>
+ </para>
+ </entry>
+ </row>
+ </thead>
+ <tbody>
+ <row>
+ <entry>
+ <para>
+ b
+ </para>
+ </entry>
+ </row>
+ </tbody>
+ </tgroup>
+ </table>
+ <table frame="all" id="table_tests.section1.id123">
+ <title>Table with template id</title>
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </table>
+ </section>
+</article>
diff --git a/tools/quickbook/test/table-1_7.quickbook b/tools/quickbook/test/table-1_7.quickbook
new file mode 100644
index 0000000000..84d68f71d4
--- /dev/null
+++ b/tools/quickbook/test/table-1_7.quickbook
@@ -0,0 +1,151 @@
+[article Table 1.7
+ [quickbook 1.7]
+ [id table_tests]
+]
+
+[table:table1 Table 1 [[Heading]][[cell]]]
+
+[table Table 2
+ [[Heading]]
+ [[cell]]
+]
+
+[table
+ [[Heading]]
+ [[cell]]
+]
+
+[table:table4
+ [[Heading]]
+ [[cell]]
+]
+
+[table:-table5-
+ [[Heading]]
+ [[cell]]
+]
+
+[table [/ Comment?] Title
+ [[Heading]]
+ [[cell]]
+]
+
+[table [/ Multi line
+comment] Title
+ [[Heading]]
+ [[cell]]
+]
+
+[table Title [/ ] containing a comment
+ [[Heading]]
+ [[cell]]
+]
+
+[table [/ Multi line
+comment]
+ Title
+ [[Heading]]
+ [[cell]]
+]
+
+[table [/ Multi line
+comment]
+ [[Heading]]
+ [[Cell 1]]
+ [[Cell 2]]
+]
+
+[table Title on multiple
+ lines with *bold* text?
+ [[Heading]]
+ [[Cell 1]]
+ [[Cell 2]]
+]
+
+[section:section1 Section 1]
+
+[table:table1 Table 1
+ [[Heading]]
+ [[cell]]
+]
+
+[table A & B
+ [[A][B]]
+ [[a][b]]
+]
+
+[table Empty Table]
+
+[table Table with an empty cell
+[[x]]]
+
+[table Indentation
+ [
+ [
+ Header 1. Paragraph 1
+
+ Header 1. Paragraph 2
+ ]
+ [
+ Header 2
+ ]
+ ]
+ [
+ [
+ Row 1. Cell 1.
+ ]
+ [
+ Row 1. Cell 2.
+
+ Row 1. Cell 2. Paragraph 2.
+ ]
+ ]
+ [
+ [
+Row 2. Cell 1.
+
+* Row 2. Cell 1. List item 1.
+* Row 2. Cell 1. List item 2.
+ ]
+ [
+ Row 2. Cell 2.
+
+ * Row 2. Cell 2. List item 1.
+ * Row 2. Cell 2. List item 2.
+ ]
+ ]
+]
+
+[table Nested Tables
+ [
+ [
+ Header 1
+ ]
+ [
+ Header 2
+ ]
+ ]
+ [
+ [
+ [table Inner Table
+ [[1.1][1.2]]
+ [[2.1][2.2]]
+ ]
+ ]
+ ]
+ [
+ [
+ Something.
+ ]
+ ]
+]
+
+[#id1]
+[table Table with anchors
+[[[#id2]a[#id3]]][[b]]
+]
+
+[template id_value[] id123]
+[table:[id_value] Table with template id]
+
+[endsect]
diff --git a/tools/quickbook/test/templates-1_3.gold b/tools/quickbook/test/templates-1_3.gold
index c53fef1e62..ed3668fdaf 100644
--- a/tools/quickbook/test/templates-1_3.gold
+++ b/tools/quickbook/test/templates-1_3.gold
@@ -167,25 +167,26 @@
</simpara>
</listitem>
<listitem>
- <simpara>
- <para/>
- </simpara>
+ <para/>
</listitem>
<listitem>
- <simpara>
<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
- </simpara>
</listitem>
<listitem>
<simpara>
- <para>
- Paragraphs 1
- </para>
- <para>
- Paragraphs 2
- </para>
+ Paragraphs 1
+ </simpara>
+ <simpara>
+ Paragraphs 2
</simpara>
</listitem>
</itemizedlist>
</section>
+ <section id="templates.escaped_templates">
+ <title><link linkend="templates.escaped_templates">Escaped templates</link></title>
+ <para>
+ <thingbob>Not real boostbook</thingbob> <thingbob>Also not real boostbook</thingbob>
+ <thingbob>More fake boostbook</thingbob> <thingbob>Final fake boostbook</thingbob>
+ </para>
+ </section>
</article>
diff --git a/tools/quickbook/test/templates-1_3.quickbook b/tools/quickbook/test/templates-1_3.quickbook
index 457cd157d7..a6d4348788 100644
--- a/tools/quickbook/test/templates-1_3.quickbook
+++ b/tools/quickbook/test/templates-1_3.quickbook
@@ -198,3 +198,19 @@ Paragraphs 2
* [paragraphs]
[endsect]
+
+[/----------------------------------- Escaped templates ]
+
+[section Escaped templates]
+
+[template raw_markup <thingbob>Not real boostbook</thingbob>]
+[template | <thingbob>Also not real boostbook</thingbob>]
+[template escaped1 [|] [`|]]
+[template escaped2 [x] [`x]]
+
+[`raw_markup]
+[`|]
+[escaped1 <thingbob>More fake boostbook</thingbob>]
+[escaped2 <thingbob>Final fake boostbook</thingbob>]
+
+[endsect]
diff --git a/tools/quickbook/test/templates-1_4.gold b/tools/quickbook/test/templates-1_4.gold
index 72e33e0bfa..40c2443690 100644
--- a/tools/quickbook/test/templates-1_4.gold
+++ b/tools/quickbook/test/templates-1_4.gold
@@ -18,4 +18,11 @@
<para>
{1-2-3} {1-2-3} {1-2-3 4} {1 2-3-4} {[1-2-3}
</para>
+ <section id="template_1_4.escaped_templates">
+ <title><link linkend="template_1_4.escaped_templates">Escaped templates</link></title>
+ <para>
+ <thingbob>Not real boostbook</thingbob> <thingbob>Also not real boostbook</thingbob>
+ <thingbob>More fake boostbook</thingbob> <thingbob>Final fake boostbook</thingbob>
+ </para>
+ </section>
</article>
diff --git a/tools/quickbook/test/templates-1_4.quickbook b/tools/quickbook/test/templates-1_4.quickbook
index fc808554e8..3fb8937481 100644
--- a/tools/quickbook/test/templates-1_4.quickbook
+++ b/tools/quickbook/test/templates-1_4.quickbook
@@ -34,4 +34,20 @@
[ternary 1 2 3] [/ {1-2-3} ]
[ternary 1..2 3 4] [/ {1-2-3 4} ]
[ternary 1 2..3 4] [/ {1 2-3-4} ]
-[ternary [1..2..3] [/ {(1-2-3} (but with a square bracket) ]
\ No newline at end of file
+[ternary [1..2..3] [/ {(1-2-3} (but with a square bracket) ]
+
+[/----------------------------------- Escaped templates ]
+
+[section Escaped templates]
+
+[template raw_markup <thingbob>Not real boostbook</thingbob>]
+[template | <thingbob>Also not real boostbook</thingbob>]
+[template escaped1 [|] [`|]]
+[template escaped2 [x] [`x]]
+
+[`raw_markup]
+[`|]
+[escaped1 <thingbob>More fake boostbook</thingbob>]
+[escaped2 <thingbob>Final fake boostbook</thingbob>]
+
+[endsect]
diff --git a/tools/quickbook/test/templates-1_5.gold b/tools/quickbook/test/templates-1_5.gold
index bb05aaca96..9577f1a915 100644
--- a/tools/quickbook/test/templates-1_5.gold
+++ b/tools/quickbook/test/templates-1_5.gold
@@ -3,40 +3,237 @@
<article id="template_1_5" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
xmlns:xi="http://www.w3.org/2001/XInclude">
<title>Template 1.5</title>
- <para>
- static scoping
- </para>
- <para>
- [a]
- </para>
- <para>
- new
- </para>
- <para>
- foo foo
- </para>
- <para>
- {1-2} {1-2} {1-2 3 4} {1 2-3 4} {1 2 3-4} {1..2-3} {1..2-3}
- </para>
- <para>
- { {1 2-3}-4} { {1 2-3}-4} { {1-2 3}-4}
- </para>
- <para>
- {[1-2] 3} {[1-2] 3} {[1-2}
- </para>
- <para>
- {1-2-3} {1-2-3}
- </para>
- <para>
- Some <emphasis role="bold">text</emphasis>
- </para>
- <para>
- A &lt;emphasis&gt;paragraph&lt;/emphasis&gt;.
- </para>
- <para>
- Some *text* A <emphasis>paragraph</emphasis>.
- </para>
- <para>
- <index type="things"><title>Things</title></index>
- </para>
+ <section id="template_1_5.templates">
+ <title><link linkend="template_1_5.templates">Templates</link></title>
+ <para>
+ nullary_arg
+ </para>
+ <para>
+ foo baz
+ </para>
+ <para>
+ foo baz
+ </para>
+ <para>
+ This is a complete paragraph. kalamazoo kalamazoo kalamazoo kalamazoo kalamazoo
+ kalamazoo kalamazoo kalamazoo kalamazoo.... blah blah blah......
+ </para>
+ <para>
+ <hey>baz</hey>
+ </para>
+ <para>
+ This is a complete paragraph. madagascar madagascar madagascar madagascar madagascar
+ madagascar madagascar madagascar madagascar.... blah blah blah......
+ </para>
+ <para>
+ zoom peanut zoom
+ </para>
+ <para>
+ exactly xanadu
+ </para>
+ <para>
+ wx
+ </para>
+ <para>
+ wxyz wxyz trail
+ </para>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase>
+<phrase role="special">{</phrase>
+ <phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">cout</phrase> <phrase role="special">&lt;&lt;</phrase> &quot;Hello, World&quot; <phrase role="special">&lt;&lt;</phrase> <phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">endl</phrase><phrase role="special">;</phrase>
+<phrase role="special">}</phrase>
+</programlisting>
+ <para>
+ x<superscript>2</superscript>
+ </para>
+ <para>
+ &alpha;<superscript>2</superscript>
+ </para>
+ <para>
+ x<superscript>2</superscript>
+ </para>
+ <para>
+ got a banana?
+ </para>
+ <para>
+ .0 00
+ </para>
+ <para>
+ [fool]
+ </para>
+ </section>
+ <section id="template_1_5.empty_templates">
+ <title><link linkend="template_1_5.empty_templates">Empty Templates</link></title>
+ </section>
+ <section id="template_1_5.nested_templates">
+ <title><link linkend="template_1_5.nested_templates">Nested Templates</link></title>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Hello!
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Start phrase template. Hello! End phrase template.
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start phrase template.
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Hello!
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ End phrase template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre Start phrase template. Start phrase template. Hello! End phrase template.
+ End phrase template. Post
+ </para>
+ </section>
+ <section id="template_1_5.block_markup">
+ <title><link linkend="template_1_5.block_markup">Block Markup</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ a
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ b
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para/>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ <para>
+ Paragraphs 1
+ </para>
+ <para>
+ Paragraphs 2
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ a
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ b
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <para/>
+ </listitem>
+ <listitem>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ </listitem>
+ <listitem>
+ <simpara>
+ Paragraphs 1
+ </simpara>
+ <simpara>
+ Paragraphs 2
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
+ <section id="template_1_5.static_scoping">
+ <title><link linkend="template_1_5.static_scoping">Static Scoping</link></title>
+ <para>
+ static scoping
+ </para>
+ <para>
+ [a]
+ </para>
+ <para>
+ new
+ </para>
+ <para>
+ foo foo
+ </para>
+ </section>
+ <section id="template_1_5.template_arguments">
+ <title><link linkend="template_1_5.template_arguments">Template Arguments</link></title>
+ <para>
+ {1-2} {1-2} {1-2 3 4} {1 2-3 4} {1 2 3-4} {1..2-3} {1..2-3}
+ </para>
+ <para>
+ { {1 2-3}-4} { {1 2-3}-4} { {1-2 3}-4}
+ </para>
+ <para>
+ {[1-2] 3} {[1-2] 3} {[1-2}
+ </para>
+ <para>
+ {1-2-3} {1-2-3}
+ </para>
+ </section>
+ <section id="template_1_5.block_and_phrase_templates">
+ <title><link linkend="template_1_5.block_and_phrase_templates">Block and phrase
+ templates</link></title>
+ <para>
+ Some <emphasis role="bold">text</emphasis>
+ </para>
+ <para>
+ A &lt;emphasis&gt;paragraph&lt;/emphasis&gt;.
+ </para>
+ <para>
+ Some *text* A <emphasis>paragraph</emphasis>.
+ </para>
+ <para>
+ <index type="things"><title>Things</title></index>
+ </para>
+ </section>
+ <section id="template_1_5.escaped_templates">
+ <title><link linkend="template_1_5.escaped_templates">Escaped templates</link></title>
+ <para>
+ <thingbob>Not real boostbook</thingbob> <thingbob>Also not real boostbook</thingbob>
+ <thingbob>More fake boostbook</thingbob> <thingbob>Final fake boostbook</thingbob>
+ </para>
+ </section>
</article>
diff --git a/tools/quickbook/test/templates-1_5.quickbook b/tools/quickbook/test/templates-1_5.quickbook
index 038b8b3f05..55ec031fde 100644
--- a/tools/quickbook/test/templates-1_5.quickbook
+++ b/tools/quickbook/test/templates-1_5.quickbook
@@ -2,12 +2,211 @@
[quickbook 1.5]
]
-[/ 1.5 uses static scoping ]
+[section Templates]
+
+[/-------------------------------- nullary arg ]
+
+[template foo0 nullary_arg]
+
+[foo0]
+
+[/-------------------------------- unary arg, phrase style ]
+
+[template foo1[bar] foo [bar]]
+
+[foo1 baz]
+
+[/-------------------------------- unary arg, block style ]
+
+[template foo2[bar]
+foo [bar]
+]
+
+[foo2 baz]
+
+[template foo3[bar]
+This is a complete paragraph. [bar] [bar] [bar] [bar] [bar]
+[bar] [bar] [bar] [bar].... blah blah blah......
+]
+
+[foo3 kalamazoo]
+
+[/-------------------------------- unary arg, using punctuation ]
+
+[template ![bar] '''<hey>'''[bar]'''</hey>''']
+
+[!baz]
+
+[/-------------------------------- recursive templates ]
+
+[template foo4[bar]
+[foo3 [bar]]
+]
+
+[foo3 madagascar]
+
+[template foo5[x] zoom [x]]
+[template foo6[x] [x] zoom]
+
+[foo6[foo5 peanut]]
+
+[template kinda_weird[x y] [x] [y]]
+
+[kinda_weird exactly..xanadu]
+
+
+[/-------------------------------- space delimited args ]
+
+[template simple1[a b] [a][b]]
+
+[simple1 w x]
+
+[template simple2[a b c d] [a][b][c][d]]
+
+[simple2 w x y z][simple2 w x y z trail]
+
+[/-------------------------------- John's templates ]
+
+[template code[x]
+ int main()
+ {
+ std::cout << ``[x]`` << std::endl;
+ }
+]
+
+[code "Hello\, World"]
+
+[template alpha '''&alpha;''']
+[template pow[a b] [a]'''<superscript>'''[b]'''</superscript>''' ]
+[template super[text]'''<superscript>'''[text]'''</superscript>''' ]
+
+[pow x 2]
+
+[pow [alpha] 2]
+
+x[super 2]
+
+[/-------------------------------- Some more ]
+
+[template banana got a banana?]
+[template plantation[bananarama] [bananarama]]
+
+[plantation [banana]]
+
+[/-------------------------------- Not a bug (there was a syntax error here) ]
+
+[template join1[a b] [b][a]]
+[template join2[a b] [a][b]]
+[template test[x] [join1 [join2 0 [x]]...0]]
+[test 0]
+
+[/-------------------------------- Mismatched template ]
+
+[template foo 1]
+[fool]
+
+[template blah 10]
+
+[endsect]
+
+[section Empty Templates]
+
+[template empty1]
+[template empty2 ]
+[template empty3 [/comment]]
+[template empty4 [/comment]
+
+]
+[template empty5
+]
+[template empty6
+
+]
+[template empty7
+[/comment]
+]
+[template empty8
+
+[/comment]
+]
+[template empty_arg1[x]]
+[template empty_arg2[x y]]
+
+[empty1]
+[empty2]
+[empty3]
+[empty4]
+[empty5]
+[empty6]
+[empty7]
+[empty8]
+[empty_arg1 1]
+[empty_arg2 1 2]
+
+[endsect]
+
+[/----------------------------------- Nested templates ]
+
+[section Nested Templates]
+
+[template block[content]
+
+Start block template.
+
+[content]
+
+End block template.
+]
+
+[template phrase[content] Start phrase template. [content] End phrase template.]
+
+Pre [block [block Hello!]] Post
+
+Pre [block [phrase Hello!]] Post
+
+Pre [phrase [block Hello!]] Post
+
+Pre [phrase [phrase Hello!]] Post
+
+[endsect]
+
+[/----------------------------------- Block Markup ]
+
+[section Block Markup]
+
+[template list
+* a
+* b]
+
+[template horizontal
+----]
+
+[template codeblock
+ int main() {}]
+
+[template paragraphs
+Paragraphs 1
+
+Paragraphs 2
+]
+
+[list][horizontal][codeblock][paragraphs]
+
+* [list]
+* [horizontal]
+* [codeblock]
+* [paragraphs]
+
+[endsect]
+
+[/----------------------------------- 1.5+ use static scoping ]
+
+[section Static Scoping]
[template x static scoping]
-[template foo1[] [x]]
-[template foo2[x] [foo1]]
-[foo2 dynamic scoping]
+[template static_test1[] [x]]
+[template static_test2[x] [static_test1]]
+[static_test2 dynamic scoping]
[/ This should be '[a]' because [a] isn't matched. ]
[template test1[] [a]]
@@ -17,8 +216,8 @@
[/ In 1.5 template arguments are scoped at the point they are defined]
[template y new]
-[template foo3[a y] [a]]
-[foo3 [y] old]
+[template static_test3[a y] [a]]
+[static_test3 [y] old]
[/ From https://svn.boost.org/trac/boost/ticket/2034 ]
@@ -27,7 +226,11 @@
[template echo_twice[x] [echo [same [x]]..[same [x]]]]
[echo_twice foo]
-[/ 1.5 template arguments]
+[endsect]
+
+[/----------------------------------- 1.5+ template arguments ]
+
+[section Template Arguments]
[template binary[x y] {[x]-[y]}]
[binary 1..2] [/ {1-2} ]
@@ -50,18 +253,22 @@
[ternary 1..2..3] [/ {1-2-3} ]
[ternary 1 2 3] [/ {1-2-3} ]
-[/ Block vs. phrase templates ]
+[endsect]
-[template phrase[] Some *text*]
-[template block[]
+[/----------------------------------- Block and phrases ]
+
+[section Block and phrase templates]
+
+[template phrase_template[] Some *text*]
+[template block_template[]
A <emphasis>paragraph</emphasis>.
]
-[phrase]
-[block]
-[`phrase]
-[`block]
+[phrase_template]
+[block_template]
+[`phrase_template]
+[`block_template]
[/ Trailing newline shouldn't be included]
@@ -69,4 +276,22 @@ A <emphasis>paragraph</emphasis>.
'''<index type="'''[type]'''"><title>'''[title]'''</title></index>'''
]
-[named_index things Things]
\ No newline at end of file
+[named_index things Things]
+
+[endsect]
+
+[/----------------------------------- Escaped templates ]
+
+[section Escaped templates]
+
+[template raw_markup <thingbob>Not real boostbook</thingbob>]
+[template | <thingbob>Also not real boostbook</thingbob>]
+[template escaped1 [|] [`|]]
+[template escaped2 [x] [`x]]
+
+[`raw_markup]
+[`|]
+[escaped1 <thingbob>More fake boostbook</thingbob>]
+[escaped2 <thingbob>Final fake boostbook</thingbob>]
+
+[endsect]
diff --git a/tools/quickbook/test/templates-1_6-fail1.quickbook b/tools/quickbook/test/templates-1_6-fail1.quickbook
new file mode 100644
index 0000000000..0fd039b1e7
--- /dev/null
+++ b/tools/quickbook/test/templates-1_6-fail1.quickbook
@@ -0,0 +1,8 @@
+[article Template 1.6 fail 1
+ [quickbook 1.6]
+]
+
+[/ I've removed support for escaping punctuation templates in 1.6. ]
+
+[template ~ body]
+[`~]
diff --git a/tools/quickbook/test/templates-1_6.gold b/tools/quickbook/test/templates-1_6.gold
new file mode 100644
index 0000000000..b37604e75f
--- /dev/null
+++ b/tools/quickbook/test/templates-1_6.gold
@@ -0,0 +1,257 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="template_1_6" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Template 1.6</title>
+ <section id="template_1_6.templates">
+ <title><link linkend="template_1_6.templates">Templates</link></title>
+ <para>
+ nullary_arg
+ </para>
+ <para>
+ foo baz
+ </para>
+ <para>
+ foo baz
+ </para>
+ <para>
+ This is a complete paragraph. kalamazoo kalamazoo kalamazoo kalamazoo kalamazoo
+ kalamazoo kalamazoo kalamazoo kalamazoo.... blah blah blah......
+ </para>
+ <para>
+ <hey>baz</hey>
+ </para>
+ <para>
+ This is a complete paragraph. madagascar madagascar madagascar madagascar madagascar
+ madagascar madagascar madagascar madagascar.... blah blah blah......
+ </para>
+ <para>
+ zoom peanut zoom
+ </para>
+ <para>
+ exactly xanadu
+ </para>
+ <para>
+ wx
+ </para>
+ <para>
+ wxyz wxyz trail
+ </para>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase>
+<phrase role="special">{</phrase>
+ <phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">cout</phrase> <phrase role="special">&lt;&lt;</phrase> &quot;Hello, World&quot; <phrase role="special">&lt;&lt;</phrase> <phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">endl</phrase><phrase role="special">;</phrase>
+<phrase role="special">}</phrase>
+</programlisting>
+ <para>
+ x<superscript>2</superscript>
+ </para>
+ <para>
+ &alpha;<superscript>2</superscript>
+ </para>
+ <para>
+ x<superscript>2</superscript>
+ </para>
+ <para>
+ got a banana?
+ </para>
+ <para>
+ .0 00
+ </para>
+ <para>
+ [fool]
+ </para>
+ </section>
+ <section id="template_1_6.empty_templates">
+ <title><link linkend="template_1_6.empty_templates">Empty Templates</link></title>
+ </section>
+ <section id="template_1_6.nested_templates">
+ <title><link linkend="template_1_6.nested_templates">Nested Templates</link></title>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Hello!
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Start phrase template. Hello! End phrase template.
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start phrase template.
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Hello!
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ End phrase template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre Start phrase template. Start phrase template. Hello! End phrase template.
+ End phrase template. Post
+ </para>
+ </section>
+ <section id="template_1_6.block_markup">
+ <title><link linkend="template_1_6.block_markup">Block Markup</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ a
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ b
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para/>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ <para>
+ Paragraphs 1
+ </para>
+ <para>
+ Paragraphs 2
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ a
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ b
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <para/>
+ </listitem>
+ <listitem>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ </listitem>
+ <listitem>
+ <simpara>
+ Paragraphs 1
+ </simpara>
+ <simpara>
+ Paragraphs 2
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
+ <section id="template_1_6.static_scoping">
+ <title><link linkend="template_1_6.static_scoping">Static Scoping</link></title>
+ <para>
+ static scoping
+ </para>
+ <para>
+ [a]
+ </para>
+ <para>
+ new
+ </para>
+ <para>
+ foo foo
+ </para>
+ </section>
+ <section id="template_1_6.template_arguments">
+ <title><link linkend="template_1_6.template_arguments">Template Arguments</link></title>
+ <para>
+ {1-2} {1-2} {1-2 3 4} {1 2-3 4} {1 2 3-4} {1..2-3} {1..2-3}
+ </para>
+ <para>
+ { {1 2-3}-4} { {1 2-3}-4} { {1-2 3}-4}
+ </para>
+ <para>
+ {[1-2] 3} {[1-2] 3} {[1-2}
+ </para>
+ <para>
+ {1-2-3} {1-2-3}
+ </para>
+ </section>
+ <section id="template_1_6.block_and_phrase_templates">
+ <title><link linkend="template_1_6.block_and_phrase_templates">Block and phrase
+ templates</link></title>
+ <para>
+ Some <emphasis role="bold">text</emphasis>
+ </para>
+ <para>
+ A &lt;emphasis&gt;paragraph&lt;/emphasis&gt;.
+ </para>
+ <para>
+ Some *text* A <emphasis>paragraph</emphasis>.
+ </para>
+ <para>
+ <index type="things"><title>Things</title></index>
+ </para>
+ </section>
+ <section id="template_1_6.template_body">
+ <title><link linkend="template_1_6.template_body">Skipping the template body
+ correctly</link></title>
+ <para>
+ <emphasis>Argument</emphasis>
+ </para>
+ <orderedlist>
+ <listitem>
+<programlisting><phrase role="identifier">code</phrase></programlisting>
+ </listitem>
+ </orderedlist>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ <code><phrase role="identifier">code</phrase></code>
+ </simpara>
+ </listitem>
+ </orderedlist>
+ </section>
+ <section id="template_1_6.escaped_templates">
+ <title><link linkend="template_1_6.escaped_templates">Escaped templates</link></title>
+ <para>
+ <thingbob>Not real boostbook</thingbob> <thingbob>More fake boostbook</thingbob>
+ </para>
+ </section>
+</article>
diff --git a/tools/quickbook/test/templates-1_6.quickbook b/tools/quickbook/test/templates-1_6.quickbook
new file mode 100644
index 0000000000..627bd7fd87
--- /dev/null
+++ b/tools/quickbook/test/templates-1_6.quickbook
@@ -0,0 +1,313 @@
+[article Template 1.6
+ [quickbook 1.6]
+]
+
+[section Templates]
+
+[/-------------------------------- nullary arg ]
+
+[template foo0 nullary_arg]
+
+[foo0]
+
+[/-------------------------------- unary arg, phrase style ]
+
+[template foo1[bar] foo [bar]]
+
+[foo1 baz]
+
+[/-------------------------------- unary arg, block style ]
+
+[template foo2[bar]
+foo [bar]
+]
+
+[foo2 baz]
+
+[template foo3[bar]
+This is a complete paragraph. [bar] [bar] [bar] [bar] [bar]
+[bar] [bar] [bar] [bar].... blah blah blah......
+]
+
+[foo3 kalamazoo]
+
+[/-------------------------------- unary arg, using punctuation ]
+
+[template ![bar] '''<hey>'''[bar]'''</hey>''']
+
+[!baz]
+
+[/-------------------------------- recursive templates ]
+
+[template foo4[bar]
+[foo3 [bar]]
+]
+
+[foo3 madagascar]
+
+[template foo5[x] zoom [x]]
+[template foo6[x] [x] zoom]
+
+[foo6[foo5 peanut]]
+
+[template kinda_weird[x y] [x] [y]]
+
+[kinda_weird exactly..xanadu]
+
+
+[/-------------------------------- space delimited args ]
+
+[template simple1[a b] [a][b]]
+
+[simple1 w x]
+
+[template simple2[a b c d] [a][b][c][d]]
+
+[simple2 w x y z][simple2 w x y z trail]
+
+[/-------------------------------- John's templates ]
+
+[template code[x]
+ int main()
+ {
+ std::cout << ``[x]`` << std::endl;
+ }
+]
+
+[code "Hello\, World"]
+
+[template alpha '''&alpha;''']
+[template pow[a b] [a]'''<superscript>'''[b]'''</superscript>''' ]
+[template super[text]'''<superscript>'''[text]'''</superscript>''' ]
+
+[pow x 2]
+
+[pow [alpha] 2]
+
+x[super 2]
+
+[/-------------------------------- Some more ]
+
+[template banana got a banana?]
+[template plantation[bananarama] [bananarama]]
+
+[plantation [banana]]
+
+[/-------------------------------- Not a bug (there was a syntax error here) ]
+
+[template join1[a b] [b][a]]
+[template join2[a b] [a][b]]
+[template test[x] [join1 [join2 0 [x]]...0]]
+[test 0]
+
+[/-------------------------------- Mismatched template ]
+
+[template foo 1]
+[fool]
+
+[template blah 10]
+
+[endsect]
+
+[section Empty Templates]
+
+[template empty1]
+[template empty2 ]
+[template empty3 [/comment]]
+[template empty4 [/comment]
+
+]
+[template empty5
+]
+[template empty6
+
+]
+[template empty7
+[/comment]
+]
+[template empty8
+
+[/comment]
+]
+[template empty_arg1[x]]
+[template empty_arg2[x y]]
+
+[empty1]
+[empty2]
+[empty3]
+[empty4]
+[empty5]
+[empty6]
+[empty7]
+[empty8]
+[empty_arg1 1]
+[empty_arg2 1 2]
+
+[endsect]
+
+[/----------------------------------- Nested templates ]
+
+[section Nested Templates]
+
+[template block_foo[content]
+
+Start block template.
+
+[content]
+
+End block template.
+]
+
+[template phrase_foo[content] Start phrase template. [content] End phrase template.]
+
+Pre [block_foo [block_foo Hello!]] Post
+
+Pre [block_foo [phrase_foo Hello!]] Post
+
+Pre [phrase_foo [block_foo Hello!]] Post
+
+Pre [phrase_foo [phrase_foo Hello!]] Post
+
+[endsect]
+
+[/----------------------------------- Block Markup ]
+
+[section Block Markup]
+
+[template list
+* a
+* b]
+
+[template horizontal
+----]
+
+[template codeblock
+ int main() {}]
+
+[template paragraphs
+Paragraphs 1
+
+Paragraphs 2
+]
+
+[list][horizontal][codeblock][paragraphs]
+
+* [list]
+* [horizontal]
+* [codeblock]
+* [paragraphs]
+
+[endsect]
+
+[/----------------------------------- 1.5+ use static scoping ]
+
+[section Static Scoping]
+
+[template x static scoping]
+[template static_test1[] [x]]
+[template static_test2[x] [static_test1]]
+[static_test2 dynamic scoping]
+
+[/ This should be '[a]' because [a] isn't matched. ]
+[template test1[] [a]]
+[template test2[a] [test1]]
+[test2 1]
+
+[/ In 1.5 template arguments are scoped at the point they are defined]
+
+[template y new]
+[template static_test3[a y] [a]]
+[static_test3 [y] old]
+
+[/ From https://svn.boost.org/trac/boost/ticket/2034 ]
+
+[template same[x] [x]]
+[template echo[a b] [a] [b]]
+[template echo_twice[x] [echo [same [x]]..[same [x]]]]
+[echo_twice foo]
+
+[endsect]
+
+[/----------------------------------- 1.5+ template arguments ]
+
+[section Template Arguments]
+
+[template binary[x y] {[x]-[y]}]
+[binary 1..2] [/ {1-2} ]
+[binary 1 2] [/ {1-2} ]
+[binary 1..2 3 4] [/ {1-2 3 4} ]
+[binary 1 2..3 4] [/ {1 2-3 4} ]
+[binary 1 2 3..4] [/ {1 2 3-4} ]
+[binary 1.\.2..3] [/ {1..2-3} ]
+[binary 1.\.2 3] [/ {1..2-3} ]
+
+[binary [binary 1 2..3]..4] [/ { {1 2-3}-4} ]
+[binary [binary 1 2..3] 4] [/ { {1 2-3}-4} ]
+[binary [binary 1 2 3]..4] [/ { {1-2 3}-4} ]
+
+[binary \[1 2\] 3] [/ {[1-2] 3} ]
+[binary \[1..2\] 3] [/ {[1-2] 3} ]
+[binary \[1 2] [/ {[1-2} ]
+
+[template ternary[x y z] {[x]-[y]-[z]}]
+[ternary 1..2..3] [/ {1-2-3} ]
+[ternary 1 2 3] [/ {1-2-3} ]
+
+[endsect]
+
+[/----------------------------------- Block and phrases ]
+
+[section Block and phrase templates]
+
+[template phrase_template[] Some *text*]
+[template block_template[]
+
+A <emphasis>paragraph</emphasis>.
+]
+
+[phrase_template]
+[block_template]
+[`phrase_template]
+[`block_template]
+
+[/ Trailing newline shouldn't be included]
+
+[template named_index[type title]
+'''<index type="'''[type]'''"><title>'''[title]'''</title></index>'''
+]
+
+[named_index things Things]
+
+[endsect]
+
+[/-------------------------------- Skipping template body ]
+
+[section:template_body Skipping the template body correctly]
+
+[template args1[x] [`x]]
+[template args2[]
+[ordered_list [``code``]]
+]
+[/ Due to a bug in the template parser we need to stop the parser
+   from thinking that the code is an escaped template. ]
+[template args3[]
+[ordered_list [\ `code`]]
+]
+
+[args1 <emphasis>Argument</emphasis>]
+[args2]
+[args3]
+
+[endsect]
+
+[/----------------------------------- Escaped templates ]
+
+[section Escaped templates]
+
+[template raw_markup <thingbob>Not real boostbook</thingbob>]
+[template escaped1 [x] [`x]]
+
+[`raw_markup]
+[escaped1 <thingbob>More fake boostbook</thingbob>]
+
+[endsect]
diff --git a/tools/quickbook/test/templates-1_7-fail1.quickbook b/tools/quickbook/test/templates-1_7-fail1.quickbook
new file mode 100644
index 0000000000..0618bebb95
--- /dev/null
+++ b/tools/quickbook/test/templates-1_7-fail1.quickbook
@@ -0,0 +1,12 @@
+[article Template 1.7 fail
+[quickbook 1.7]
+]
+
+[/ This should fail because it's a phrase template containing a paragraph
+ separator. ]
+
+[template fail[] Phrase content.
+
+Paragraph content.]
+
+[fail]
diff --git a/tools/quickbook/test/templates-1_7-fail2.quickbook b/tools/quickbook/test/templates-1_7-fail2.quickbook
new file mode 100644
index 0000000000..548884b16a
--- /dev/null
+++ b/tools/quickbook/test/templates-1_7-fail2.quickbook
@@ -0,0 +1,11 @@
+[article Template 1.7 fail
+[quickbook 1.7]
+]
+
+[/ This should fail because it's a block template called from an attribute ]
+
+[template fail[]
+
+Block template.]
+
+[link this-should-[fail]]
diff --git a/tools/quickbook/test/templates-1_7.gold b/tools/quickbook/test/templates-1_7.gold
new file mode 100644
index 0000000000..c7d4c64e09
--- /dev/null
+++ b/tools/quickbook/test/templates-1_7.gold
@@ -0,0 +1,302 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE article PUBLIC "-//Boost//DTD BoostBook XML V1.0//EN" "http://www.boost.org/tools/boostbook/dtd/boostbook.dtd">
+<article id="template_1_7" last-revision="DEBUG MODE Date: 2000/12/20 12:00:00 $"
+ xmlns:xi="http://www.w3.org/2001/XInclude">
+ <title>Template 1.7</title>
+ <section id="template_1_7.templates">
+ <title><link linkend="template_1_7.templates">Templates</link></title>
+ <para>
+ nullary_arg
+ </para>
+ <para>
+ foo baz
+ </para>
+ <para>
+ foo baz
+ </para>
+ <para>
+ This is a complete paragraph. kalamazoo kalamazoo kalamazoo kalamazoo kalamazoo
+ kalamazoo kalamazoo kalamazoo kalamazoo.... blah blah blah......
+ </para>
+ <para>
+ <hey>baz</hey>
+ </para>
+ <para>
+ This is a complete paragraph. madagascar madagascar madagascar madagascar madagascar
+ madagascar madagascar madagascar madagascar.... blah blah blah......
+ </para>
+ <para>
+ zoom peanut zoom
+ </para>
+ <para>
+ exactly xanadu
+ </para>
+ <para>
+ wx
+ </para>
+ <para>
+ wxyz wxyz trail
+ </para>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase>
+<phrase role="special">{</phrase>
+ <phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">cout</phrase> <phrase role="special">&lt;&lt;</phrase> &quot;Hello, World&quot; <phrase role="special">&lt;&lt;</phrase> <phrase role="identifier">std</phrase><phrase role="special">::</phrase><phrase role="identifier">endl</phrase><phrase role="special">;</phrase>
+<phrase role="special">}</phrase>
+</programlisting>
+ <para>
+ x<superscript>2</superscript>
+ </para>
+ <para>
+ &alpha;<superscript>2</superscript>
+ </para>
+ <para>
+ x<superscript>2</superscript>
+ </para>
+ <para>
+ got a banana?
+ </para>
+ <para>
+ .0 00
+ </para>
+ <para>
+ [fool]
+ </para>
+ </section>
+ <section id="template_1_7.empty_templates">
+ <title><link linkend="template_1_7.empty_templates">Empty Templates</link></title>
+ </section>
+ <section id="template_1_7.nested_templates">
+ <title><link linkend="template_1_7.nested_templates">Nested Templates</link></title>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Hello!
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Start phrase template. Hello! End phrase template.
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ Post
+ </para>
+ <para>
+ Pre Start phrase template.
+ </para>
+ <para>
+ Start block template.
+ </para>
+ <para>
+ Hello!
+ </para>
+ <para>
+ End block template.
+ </para>
+ <para>
+ End phrase template. Post
+ </para>
+ <para>
+ Pre Start phrase template. Start phrase template. Hello! End phrase template.
+ End phrase template. Post
+ </para>
+ </section>
+ <section id="template_1_7.block_markup">
+ <title><link linkend="template_1_7.block_markup">Block Markup</link></title>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ a
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ b
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ <para/>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ <para>
+ Paragraphs 1
+ </para>
+ <para>
+ Paragraphs 2
+ </para>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ <itemizedlist>
+ <listitem>
+ <simpara>
+ a
+ </simpara>
+ </listitem>
+ <listitem>
+ <simpara>
+ b
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </simpara>
+ </listitem>
+ <listitem>
+ <para/>
+ </listitem>
+ <listitem>
+<programlisting><phrase role="keyword">int</phrase> <phrase role="identifier">main</phrase><phrase role="special">()</phrase> <phrase role="special">{}</phrase></programlisting>
+ </listitem>
+ <listitem>
+ <simpara>
+ Paragraphs 1
+ </simpara>
+ <simpara>
+ Paragraphs 2
+ </simpara>
+ </listitem>
+ </itemizedlist>
+ </section>
+ <section id="template_1_7.static_scoping">
+ <title><link linkend="template_1_7.static_scoping">Static Scoping</link></title>
+ <para>
+ static scoping
+ </para>
+ <para>
+ [a]
+ </para>
+ <para>
+ new
+ </para>
+ <para>
+ foo foo
+ </para>
+ </section>
+ <section id="template_1_7.template_arguments">
+ <title><link linkend="template_1_7.template_arguments">Template Arguments</link></title>
+ <para>
+ {1-2} {1-2} {1-2 3 4} {1 2-3 4} {1 2 3-4} {1..2-3} {1..2-3}
+ </para>
+ <para>
+ { {1 2-3}-4} { {1 2-3}-4} { {1-2 3}-4}
+ </para>
+ <para>
+ {[1-2] 3} {[1-2] 3} {[1-2}
+ </para>
+ <para>
+ {1-2-3} {1-2-3}
+ </para>
+ </section>
+ <section id="template_1_7.block_and_phrase_templates">
+ <title><link linkend="template_1_7.block_and_phrase_templates">Block and phrase
+ templates</link></title>
+ <para>
+ Some <emphasis role="bold">text</emphasis>
+ </para>
+ <para>
+ A &lt;emphasis&gt;paragraph&lt;/emphasis&gt;.
+ </para>
+ <para>
+ Some *text* A <emphasis>paragraph</emphasis>.
+ </para>
+ <para>
+ <index type="things"><title>Things</title></index>
+ </para>
+ <informaltable frame="all">
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <informaltable frame="all">
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <para>
+ Text2 afterwards. Text3 before.
+ </para>
+ <informaltable frame="all">
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <para>
+ Text4 before.
+ </para>
+ <informaltable frame="all">
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <para>
+ Text4 afterwards.
+ </para>
+ <informaltable frame="all">
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <para>
+ * Not a list.
+ </para>
+ <informaltable frame="all">
+ <tgroup cols="0">
+ <tbody>
+ </tbody>
+ </tgroup>
+ </informaltable>
+ <para>
+ * Not a list.
+ </para>
+ </section>
+ <section id="template_1_7.template_body">
+ <title><link linkend="template_1_7.template_body">Skipping the template body
+ correctly</link></title>
+ <para>
+ <emphasis>Argument</emphasis>
+ </para>
+ <orderedlist>
+ <listitem>
+<programlisting><phrase role="identifier">code</phrase></programlisting>
+ </listitem>
+ </orderedlist>
+ <orderedlist>
+ <listitem>
+ <simpara>
+ <code><phrase role="identifier">code</phrase></code>
+ </simpara>
+ </listitem>
+ </orderedlist>
+ </section>
+ <section id="template_1_7.escaped_templates">
+ <title><link linkend="template_1_7.escaped_templates">Escaped templates</link></title>
+ <para>
+ <thingbob>Not real boostbook</thingbob> <thingbob>More fake boostbook</thingbob>
+ </para>
+ </section>
+</article>
diff --git a/tools/quickbook/test/templates-1_7.quickbook b/tools/quickbook/test/templates-1_7.quickbook
new file mode 100644
index 0000000000..bf1feaca2c
--- /dev/null
+++ b/tools/quickbook/test/templates-1_7.quickbook
@@ -0,0 +1,322 @@
+[article Template 1.7
+ [quickbook 1.7]
+]
+
+[section Templates]
+
+[/-------------------------------- nullary arg ]
+
+[template foo0 nullary_arg]
+
+[foo0]
+
+[/-------------------------------- unary arg, phrase style ]
+
+[template foo1[bar] foo [bar]]
+
+[foo1 baz]
+
+[/-------------------------------- unary arg, block style ]
+
+[template foo2[bar]
+foo [bar]
+]
+
+[foo2 baz]
+
+[template foo3[bar]
+This is a complete paragraph. [bar] [bar] [bar] [bar] [bar]
+[bar] [bar] [bar] [bar].... blah blah blah......
+]
+
+[foo3 kalamazoo]
+
+[/-------------------------------- unary arg, using punctuation ]
+
+[template |[bar] '''<hey>'''[bar]'''</hey>''']
+
+[|baz]
+
+[/-------------------------------- recursive templates ]
+
+[template foo4[bar]
+[foo3 [bar]]
+]
+
+[foo3 madagascar]
+
+[template foo5[x] zoom [x]]
+[template foo6[x] [x] zoom]
+
+[foo6[foo5 peanut]]
+
+[template kinda_weird[x y] [x] [y]]
+
+[kinda_weird exactly..xanadu]
+
+
+[/-------------------------------- space delimited args ]
+
+[template simple1[a b] [a][b]]
+
+[simple1 w x]
+
+[template simple2[a b c d] [a][b][c][d]]
+
+[simple2 w x y z][simple2 w x y z trail]
+
+[/-------------------------------- John's templates ]
+
+[template code[x]
+ int main()
+ {
+ std::cout << ``[x]`` << std::endl;
+ }
+]
+
+[code "Hello\, World"]
+
+[template alpha '''&alpha;''']
+[template pow[a b] [a]'''<superscript>'''[b]'''</superscript>''' ]
+[template super[text]'''<superscript>'''[text]'''</superscript>''' ]
+
+[pow x 2]
+
+[pow [alpha] 2]
+
+x[super 2]
+
+[/-------------------------------- Some more ]
+
+[template banana got a banana?]
+[template plantation[bananarama] [bananarama]]
+
+[plantation [banana]]
+
+[/-------------------------------- Not a bug (there was a syntax error here) ]
+
+[template join1[a b] [b][a]]
+[template join2[a b] [a][b]]
+[template test[x] [join1 [join2 0 [x]]...0]]
+[test 0]
+
+[/-------------------------------- Mismatched template ]
+
+[template foo 1]
+[fool]
+
+[template blah 10]
+
+[endsect]
+
+[section Empty Templates]
+
+[template empty1]
+[template empty2 ]
+[template empty3 [/comment]]
+[template empty4 [/comment]
+
+]
+[template empty5
+]
+[template empty6
+
+]
+[template empty7
+[/comment]
+]
+[template empty8
+
+[/comment]
+]
+[template empty_arg1[x]]
+[template empty_arg2[x y]]
+
+[empty1]
+[empty2]
+[empty3]
+[empty4]
+[empty5]
+[empty6]
+[empty7]
+[empty8]
+[empty_arg1 1]
+[empty_arg2 1 2]
+
+[endsect]
+
+[/----------------------------------- Nested templates ]
+
+[section Nested Templates]
+
+[template block_foo[content]
+
+Start block template.
+
+[content]
+
+End block template.
+]
+
+[template phrase_foo[content] Start phrase template. [content] End phrase template.]
+
+Pre [block_foo [block_foo Hello!]] Post
+
+Pre [block_foo [phrase_foo Hello!]] Post
+
+Pre [phrase_foo [block_foo Hello!]] Post
+
+Pre [phrase_foo [phrase_foo Hello!]] Post
+
+[endsect]
+
+[/----------------------------------- Block Markup ]
+
+[section Block Markup]
+
+[template list
+* a
+* b]
+
+[template horizontal
+----]
+
+[template codeblock
+ int main() {}]
+
+[template paragraphs
+Paragraphs 1
+
+Paragraphs 2
+]
+
+[list][horizontal][codeblock][paragraphs]
+
+* [list]
+* [horizontal]
+* [codeblock]
+* [paragraphs]
+
+[endsect]
+
+[/----------------------------------- 1.5+ use static scoping ]
+
+[section Static Scoping]
+
+[template x static scoping]
+[template static_test1[] [x]]
+[template static_test2[x] [static_test1]]
+[static_test2 dynamic scoping]
+
+[/ This should be '[a]' because [a] isn't matched. ]
+[template test1[] [a]]
+[template test2[a] [test1]]
+[test2 1]
+
+[/ In 1.5 template arguments are scoped at the point they are defined]
+
+[template y new]
+[template static_test3[a y] [a]]
+[static_test3 [y] old]
+
+[/ From https://svn.boost.org/trac/boost/ticket/2034 ]
+
+[template same[x] [x]]
+[template echo[a b] [a] [b]]
+[template echo_twice[x] [echo [same [x]]..[same [x]]]]
+[echo_twice foo]
+
+[endsect]
+
+[/----------------------------------- 1.5+ template arguments ]
+
+[section Template Arguments]
+
+[template binary[x y] {[x]-[y]}]
+[binary 1..2] [/ {1-2} ]
+[binary 1 2] [/ {1-2} ]
+[binary 1..2 3 4] [/ {1-2 3 4} ]
+[binary 1 2..3 4] [/ {1 2-3 4} ]
+[binary 1 2 3..4] [/ {1 2 3-4} ]
+[binary 1.\.2..3] [/ {1..2-3} ]
+[binary 1.\.2 3] [/ {1..2-3} ]
+
+[binary [binary 1 2..3]..4] [/ { {1 2-3}-4} ]
+[binary [binary 1 2..3] 4] [/ { {1 2-3}-4} ]
+[binary [binary 1 2 3]..4] [/ { {1-2 3}-4} ]
+
+[binary \[1 2\] 3] [/ {[1-2] 3} ]
+[binary \[1..2\] 3] [/ {[1-2] 3} ]
+[binary \[1 2] [/ {[1-2} ]
+
+[template ternary[x y z] {[x]-[y]-[z]}]
+[ternary 1..2..3] [/ {1-2-3} ]
+[ternary 1 2 3] [/ {1-2-3} ]
+
+[endsect]
+
+[/----------------------------------- Block and phrases ]
+
+[section Block and phrase templates]
+
+[template phrase_template[] Some *text*]
+[template block_template[]
+
+A <emphasis>paragraph</emphasis>.
+]
+
+[phrase_template]
+[block_template]
+[`phrase_template]
+[`block_template]
+
+[/ Trailing newline shouldn't be included]
+
+[template named_index[type title]
+'''<index type="'''[type]'''"><title>'''[title]'''</title></index>'''
+]
+
+[named_index things Things]
+
+[/ Blocks in phrase templates ]
+
+[template phrase_block1[] [table]]
+[template phrase_block2[] [table] Text2 afterwards.]
+[template phrase_block3[] Text3 before. [table]]
+[template phrase_block4[] Text4 before. [table] Text4 afterwards.]
+[template phrase_block5[] [table] * Not a list.]
+[template phrase_block6[] [table]
+* Not a list.]
+
+[phrase_block1][phrase_block2][phrase_block3][phrase_block4]
+[phrase_block5][phrase_block6]
+
+[endsect]
+
+[/-------------------------------- Skipping template body ]
+
+[section:template_body Skipping the template body correctly]
+
+[template args1[x] [`x]]
+[template args2[] [ordered_list [``code``]]]
+[/ Due to a bug in the template parser we need to stop the parser
+   from thinking that the code is an escaped template. ]
+[template args3[] [ordered_list [\ `code`]]]
+
+[args1 <emphasis>Argument</emphasis>]
+[args2]
+[args3]
+
+[endsect]
+
+[/----------------------------------- Escaped templates ]
+
+[section Escaped templates]
+
+[template raw_markup <thingbob>Not real boostbook</thingbob>]
+[template escaped1 [x] [`x]]
+
+[`raw_markup]
+[escaped1 <thingbob>More fake boostbook</thingbob>]
+
+[endsect]
diff --git a/tools/quickbook/test/unit/Jamfile.v2 b/tools/quickbook/test/unit/Jamfile.v2
index 0b583b796b..68262d65c0 100644
--- a/tools/quickbook/test/unit/Jamfile.v2
+++ b/tools/quickbook/test/unit/Jamfile.v2
@@ -8,7 +8,7 @@
import testing ;
-project quickbook-unit-tests
+project quickbook/test/unit-tests
: requirements
<include>../../src
<warnings>all
@@ -20,8 +20,10 @@ project quickbook-unit-tests
<toolset>darwin:<define>BOOST_DETAIL_CONTAINER_FWD
;
-run values_test.cpp ../../src/values.cpp ../../src/files.cpp ../../src/string_ref.cpp ;
+run values_test.cpp ../../src/values.cpp ../../src/files.cpp ;
run post_process_test.cpp ../../src/post_process.cpp ;
+run source_map_test.cpp ../../src/files.cpp ;
+run glob_test.cpp ../../src/glob.cpp ;
# Copied from spirit
run symbols_tests.cpp ;
diff --git a/tools/quickbook/test/unit/glob_test.cpp b/tools/quickbook/test/unit/glob_test.cpp
new file mode 100644
index 0000000000..fa2578116c
--- /dev/null
+++ b/tools/quickbook/test/unit/glob_test.cpp
@@ -0,0 +1,122 @@
+/*=============================================================================
+ Copyright (c) 2013 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "glob.hpp"
+#include <boost/detail/lightweight_test.hpp>
+
+void glob_tests() {
+ BOOST_TEST(quickbook::glob("", ""));
+
+ BOOST_TEST(!quickbook::glob("*", ""));
+ BOOST_TEST(quickbook::glob("*", "a"));
+ BOOST_TEST(quickbook::glob("*b", "b"));
+ BOOST_TEST(quickbook::glob("*b", "ab"));
+ BOOST_TEST(quickbook::glob("*b", "bab"));
+ BOOST_TEST(quickbook::glob("*b*", "b"));
+ BOOST_TEST(quickbook::glob("*b*", "ab"));
+ BOOST_TEST(quickbook::glob("*b*", "bc"));
+ BOOST_TEST(quickbook::glob("*b*", "abc"));
+ BOOST_TEST(!quickbook::glob("*b*", ""));
+ BOOST_TEST(!quickbook::glob("*b*", "a"));
+ BOOST_TEST(!quickbook::glob("*b*", "ac"));
+
+ BOOST_TEST(quickbook::glob("hello.txt", "hello.txt"));
+ BOOST_TEST(!quickbook::glob("world.txt", "helloworld.txt"));
+ BOOST_TEST(quickbook::glob("*world.txt", "helloworld.txt"));
+ BOOST_TEST(!quickbook::glob("world.txt*", "helloworld.txt"));
+ BOOST_TEST(!quickbook::glob("hello", "helloworld.txt"));
+ BOOST_TEST(!quickbook::glob("*hello", "helloworld.txt"));
+ BOOST_TEST(quickbook::glob("hello*", "helloworld.txt"));
+ BOOST_TEST(quickbook::glob("*world*", "helloworld.txt"));
+
+ BOOST_TEST(quickbook::glob("?", "a"));
+ BOOST_TEST(!quickbook::glob("?", ""));
+ BOOST_TEST(!quickbook::glob("?", "ab"));
+ BOOST_TEST(quickbook::glob("a?", "ab"));
+ BOOST_TEST(quickbook::glob("?b", "ab"));
+ BOOST_TEST(quickbook::glob("?bc", "abc"));
+ BOOST_TEST(quickbook::glob("a?c", "abc"));
+ BOOST_TEST(quickbook::glob("ab?", "abc"));
+ BOOST_TEST(!quickbook::glob("?bc", "aac"));
+ BOOST_TEST(!quickbook::glob("a?c", "bbc"));
+ BOOST_TEST(!quickbook::glob("ab?", "abcd"));
+
+ BOOST_TEST(quickbook::glob("[a]", "a"));
+ BOOST_TEST(!quickbook::glob("[^a]", "a"));
+ BOOST_TEST(!quickbook::glob("[b]", "a"));
+ BOOST_TEST(quickbook::glob("[^b]", "a"));
+ BOOST_TEST(quickbook::glob("[a-z]", "a"));
+ BOOST_TEST(!quickbook::glob("[^a-z]", "a"));
+ BOOST_TEST(!quickbook::glob("[b-z]", "a"));
+ BOOST_TEST(quickbook::glob("[^b-z]", "a"));
+ BOOST_TEST(quickbook::glob("[-a]", "a"));
+ BOOST_TEST(quickbook::glob("[-a]", "-"));
+ BOOST_TEST(!quickbook::glob("[-a]", "b"));
+ BOOST_TEST(!quickbook::glob("[^-a]", "a"));
+ BOOST_TEST(!quickbook::glob("[^-a]", "-"));
+ BOOST_TEST(quickbook::glob("[^-a]", "b"));
+ BOOST_TEST(quickbook::glob("[a-]", "a"));
+ BOOST_TEST(quickbook::glob("[a-]", "-"));
+ BOOST_TEST(!quickbook::glob("[a-]", "b"));
+ BOOST_TEST(!quickbook::glob("[^a-]", "a"));
+ BOOST_TEST(!quickbook::glob("[^a-]", "-"));
+ BOOST_TEST(quickbook::glob("[^a-]", "b"));
+ BOOST_TEST(quickbook::glob("[a-ce-f]", "a"));
+ BOOST_TEST(!quickbook::glob("[a-ce-f]", "d"));
+ BOOST_TEST(quickbook::glob("[a-ce-f]", "f"));
+ BOOST_TEST(!quickbook::glob("[a-ce-f]", "g"));
+ BOOST_TEST(!quickbook::glob("[^a-ce-f]", "a"));
+ BOOST_TEST(quickbook::glob("[^a-ce-f]", "d"));
+ BOOST_TEST(!quickbook::glob("[^a-ce-f]", "f"));
+ BOOST_TEST(quickbook::glob("[^a-ce-f]", "g"));
+ BOOST_TEST(!quickbook::glob("[b]", "a"));
+ BOOST_TEST(quickbook::glob("[a]bc", "abc"));
+ BOOST_TEST(quickbook::glob("a[b]c", "abc"));
+ BOOST_TEST(quickbook::glob("ab[c]", "abc"));
+ BOOST_TEST(quickbook::glob("a[a-c]c", "abc"));
+ BOOST_TEST(quickbook::glob("*[b]*", "abc"));
+ BOOST_TEST(quickbook::glob("[\\]]", "]"));
+ BOOST_TEST(!quickbook::glob("[^\\]]", "]"));
+
+ BOOST_TEST(quickbook::glob("b*ana", "banana"));
+ BOOST_TEST(quickbook::glob("1234*1234*1234", "123412341234"));
+ BOOST_TEST(!quickbook::glob("1234*1234*1234", "1234123341234"));
+ BOOST_TEST(quickbook::glob("1234*1234*1234", "123412312312341231231234"));
+ BOOST_TEST(!quickbook::glob("1234*1234*1234", "12341231231234123123123"));
+}
+
+void check_glob_tests()
+{
+ BOOST_TEST(!quickbook::check_glob(""));
+ BOOST_TEST(!quickbook::check_glob("file"));
+ BOOST_TEST(!quickbook::check_glob("file\\[\\]"));
+ BOOST_TEST(quickbook::check_glob("[x]"));
+ BOOST_TEST(quickbook::check_glob("abc[x]"));
+ BOOST_TEST(quickbook::check_glob("[x]abd"));
+ BOOST_TEST_THROWS(quickbook::check_glob("["), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("[xyz"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("xyx["), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("]"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("abc]"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("]def"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("[]"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("[[]"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("[]]"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("**"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("[/]"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("\\"), quickbook::glob_error);
+ BOOST_TEST_THROWS(quickbook::check_glob("\\\\"), quickbook::glob_error);
+}
+
+int main()
+{
+ glob_tests();
+ check_glob_tests();
+
+ return boost::report_errors();
+}
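For orientation, a minimal sketch of how the glob interface exercised by this new test might be driven. It reuses only calls and argument values that already appear in glob_tests() and check_glob_tests(); the header name and namespace come from the test file itself, and everything else here is an assumption rather than documented API:

    #include "glob.hpp"   // same header the test above includes
    #include <iostream>

    int main()
    {
        // check_glob() reports whether a string uses glob syntax at all and
        // throws quickbook::glob_error on malformed patterns such as "[".
        if (quickbook::check_glob("abc[x]"))
            std::cout << "abc[x] is a glob pattern\n";

        // glob() matches a pattern against a candidate name; both calls
        // mirror assertions from glob_tests() above.
        std::cout << quickbook::glob("*world*", "helloworld.txt") << "\n";   // prints 1
        std::cout << quickbook::glob("world.txt", "helloworld.txt") << "\n"; // prints 0
        return 0;
    }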
diff --git a/tools/quickbook/test/unit/source_map_test.cpp b/tools/quickbook/test/unit/source_map_test.cpp
new file mode 100644
index 0000000000..3eb305741c
--- /dev/null
+++ b/tools/quickbook/test/unit/source_map_test.cpp
@@ -0,0 +1,395 @@
+/*=============================================================================
+ Copyright (c) 2012 Daniel James
+
+ Use, modification and distribution is subject to the Boost Software
+ License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
+ http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+
+#include "fwd.hpp"
+#include "files.hpp"
+#include <boost/utility/string_ref.hpp>
+#include <boost/detail/lightweight_test.hpp>
+#include <boost/range/algorithm/find.hpp>
+
+void simple_map_tests()
+{
+ boost::string_ref source("First Line\nSecond Line");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+
+ quickbook::string_iterator line1 = fake_file->source().begin();
+ quickbook::string_iterator line1_end = boost::find(fake_file->source(), '\n');
+ quickbook::string_iterator line2 = line1_end + 1;
+ quickbook::string_iterator line2_end = fake_file->source().end();
+
+ quickbook::mapped_file_builder builder;
+
+ { // Empty test
+ builder.start(fake_file);
+ BOOST_TEST(builder.empty());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST(f1->source().empty());
+ }
+
+ { // Add full text
+ builder.start(fake_file);
+ builder.add(boost::string_ref(line1, line2_end - line1));
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(), source);
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 2),
+ quickbook::file_position(1,3));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line1_end - line1)),
+ quickbook::file_position(1,line1_end - line1 + 1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2 - line1)),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ fake_file->position_of(fake_file->source().end()));
+ }
+
+ { // Add first line
+ builder.start(fake_file);
+ builder.add(boost::string_ref(line1, line1_end - line1));
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref(source.begin(), line1_end - line1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 2),
+ quickbook::file_position(1,3));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(1,line1_end - line1 + 1));
+ }
+
+ { // Add second line
+ builder.start(fake_file);
+ builder.add(boost::string_ref(line2, line2_end - line2));
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(), boost::string_ref("Second Line"));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 2),
+ quickbook::file_position(2,3));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(2,line2_end - line2 + 1));
+ }
+
+ { // Out of order
+ builder.start(fake_file);
+ builder.add(boost::string_ref(line2, line2_end - line2));
+ builder.add(boost::string_ref(line1_end, 1));
+ builder.add(boost::string_ref(line1, line1_end - line1));
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Second Line\nFirst Line"));
+
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 2),
+ quickbook::file_position(2,3));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2_end - line2 - 1)),
+ quickbook::file_position(2,line2_end - line2));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2_end - line2)),
+ quickbook::file_position(1,(line1_end - line1 + 1)));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2_end - line2 + 1)),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(1,line1_end - line1 + 1));
+ }
+
+ { // Repeated text
+ builder.start(fake_file);
+ builder.add(boost::string_ref(line2, line2_end - line2));
+ builder.add(boost::string_ref(line1_end, 1));
+ builder.add(boost::string_ref(line2, line2_end - line2));
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Second Line\nSecond Line"));
+
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 2),
+ quickbook::file_position(2,3));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2_end - line2 - 1)),
+ quickbook::file_position(2,line2_end - line2));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2_end - line2)),
+ quickbook::file_position(1,(line1_end - line1 + 1)));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + (line2_end - line2 + 1)),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(2,line2_end - line2 + 1));
+ }
+
+
+ { // Generated text
+ builder.start(fake_file);
+ builder.add_at_pos("------\n", line1);
+ builder.add(boost::string_ref(line1, line1_end - line1));
+ builder.add_at_pos("\n------\n", line1_end);
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("------\nFirst Line\n------\n"));
+
+ quickbook::string_iterator newline = boost::find(f1->source(), '\n');
+
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 2),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(newline),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(newline + 1),
+ quickbook::file_position(1,1));
+ BOOST_TEST_EQ(f1->position_of(newline + 2),
+ quickbook::file_position(1,2));
+ BOOST_TEST_EQ(f1->position_of(newline + (line1_end - line1)),
+ quickbook::file_position(1,line1_end - line1));
+ BOOST_TEST_EQ(f1->position_of(newline + (line1_end - line1 + 1)),
+ quickbook::file_position(1,line1_end - line1 + 1));
+ BOOST_TEST_EQ(f1->position_of(newline + (line1_end - line1 + 2)),
+ quickbook::file_position(1,line1_end - line1 + 1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(1,line1_end - line1 + 1));
+ }
+}
+
+void indented_map_tests()
+{
+ boost::string_ref source(
+ " Code line1\n"
+ " Code line2\n");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+
+ quickbook::mapped_file_builder builder;
+
+ {
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\nCode line2\n"));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,4));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 1),
+ quickbook::file_position(1,5));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 5),
+ quickbook::file_position(1,9));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 10),
+ quickbook::file_position(1,14));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 11),
+ quickbook::file_position(2,4));
+ // TODO: Shouldn't this be (3,1)? Does it matter?
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(3,1));
+ }
+
+ {
+ builder.start(fake_file);
+ {
+ quickbook::mapped_file_builder builder2;
+ builder2.start(fake_file);
+ builder2.unindent_and_add(fake_file->source());
+ builder.add(builder2);
+ }
+ quickbook::file_ptr f1 = builder.release();
+
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\nCode line2\n"));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,4));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 1),
+ quickbook::file_position(1,5));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 5),
+ quickbook::file_position(1,9));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 10),
+ quickbook::file_position(1,14));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 11),
+ quickbook::file_position(2,4));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(3,1));
+ }
+
+ {
+ builder.start(fake_file);
+ builder.unindent_and_add(boost::string_ref(
+ fake_file->source().begin() + 3,
+ fake_file->source().end() - (fake_file->source().begin() + 3)));
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n Code line2\n"));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,4));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 1),
+ quickbook::file_position(1,5));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 5),
+ quickbook::file_position(1,9));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 10),
+ quickbook::file_position(1,14));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 11),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().end()),
+ quickbook::file_position(3,1));
+ }
+}
+
+void indented_map_tests2()
+{
+ boost::string_ref source(
+ " Code line1\n"
+ "\n"
+ " Code line2\n");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+
+ quickbook::mapped_file_builder builder;
+
+ {
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n\nCode line2\n"));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin()),
+ quickbook::file_position(1,4));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 1),
+ quickbook::file_position(1,5));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 5),
+ quickbook::file_position(1,9));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 10),
+ quickbook::file_position(1,14));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 11),
+ quickbook::file_position(2,1));
+ BOOST_TEST_EQ(f1->position_of(f1->source().begin() + 12),
+ quickbook::file_position(3,4));
+ }
+}
+
+void indented_map_leading_blanks_test()
+{
+ quickbook::mapped_file_builder builder;
+
+ {
+ boost::string_ref source("\n\n Code line1\n");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n"));
+ }
+
+ {
+ boost::string_ref source(" \n \n Code line1\n");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n"));
+ }
+
+ {
+ boost::string_ref source(" Code line1\n \n Code line2");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n\nCode line2"));
+ }
+}
+
+void indented_map_trailing_blanks_test()
+{
+ quickbook::mapped_file_builder builder;
+
+ {
+ boost::string_ref source("\n\n Code line1\n ");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n"));
+ }
+
+ {
+ boost::string_ref source(" \n \n Code line1\n ");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n "));
+ }
+
+ {
+ boost::string_ref source(" Code line1\n \n Code line2\n ");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line1\n\nCode line2\n"));
+ }
+
+}
+
+void indented_map_mixed_test()
+{
+ quickbook::mapped_file_builder builder;
+
+ {
+ boost::string_ref source("\tCode line 1\n Code line 2\n\t Code line 3\n \tCode line 4");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line 1\nCode line 2\n Code line 3\n Code line 4"));
+ }
+
+ {
+ boost::string_ref source(" Code line 1\n\tCode line 2");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line 1\n Code line 2"));
+ }
+
+ {
+ boost::string_ref source(" Code line 1\n \tCode line 2");
+ quickbook::file_ptr fake_file = new quickbook::file(
+ "(fake file)", source, 105u);
+ builder.start(fake_file);
+ builder.unindent_and_add(fake_file->source());
+ quickbook::file_ptr f1 = builder.release();
+ BOOST_TEST_EQ(f1->source(),
+ boost::string_ref("Code line 1\n\tCode line 2"));
+ }
+}
+
+
+int main()
+{
+ simple_map_tests();
+ indented_map_tests();
+ indented_map_tests2();
+ indented_map_leading_blanks_test();
+ indented_map_trailing_blanks_test();
+ indented_map_mixed_test();
+ return boost::report_errors();
+}
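As a companion to the tests above, a condensed sketch of the mapped_file_builder round trip they repeat. The types, constructor arguments and member calls are copied from this test file; treat it as an illustration of the test pattern under those assumptions, not as a documented interface:

    #include "files.hpp"                      // quickbook::file, file_ptr, mapped_file_builder
    #include <boost/utility/string_ref.hpp>

    int main()
    {
        boost::string_ref source("First Line\nSecond Line");
        quickbook::file_ptr src = new quickbook::file("(fake file)", source, 105u);

        quickbook::mapped_file_builder builder;
        builder.start(src);                   // begin building a mapped view of 'src'
        builder.add(boost::string_ref(        // copy the whole source, keeping the
            src->source().begin(),            // mapping back to original positions
            src->source().end() - src->source().begin()));
        quickbook::file_ptr copy = builder.release();

        // position_of() maps an iterator in the copy back to a line/column in
        // the original file; the first character maps to position (1,1).
        bool ok = copy->position_of(copy->source().begin())
                  == quickbook::file_position(1, 1);
        return ok ? 0 : 1;
    }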
diff --git a/tools/quickbook/test/unit/values_test.cpp b/tools/quickbook/test/unit/values_test.cpp
index b6d186c555..d3da4a857e 100644
--- a/tools/quickbook/test/unit/values_test.cpp
+++ b/tools/quickbook/test/unit/values_test.cpp
@@ -32,10 +32,10 @@ void qbk_tests()
"(fake file)", source, 105u);
q = quickbook::qbk_value(
fake_file,
- fake_file->source.begin(),
- fake_file->source.end());
+ fake_file->source().begin(),
+ fake_file->source().end());
}
- BOOST_TEST_EQ(q.get_quickbook(), source);
+ BOOST_TEST_EQ(q.get_quickbook(), boost::string_ref(source));
}
void sort_test()
diff --git a/tools/quickbook/test/versions/Jamfile.v2 b/tools/quickbook/test/versions/Jamfile.v2
index ccb46dc7bf..dd185d5879 100644
--- a/tools/quickbook/test/versions/Jamfile.v2
+++ b/tools/quickbook/test/versions/Jamfile.v2
@@ -6,7 +6,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project test/versions
+project quickbook/test/versions
: requirements
<toolset>msvc:<debug-symbols>off
;
diff --git a/tools/quickbook/test/versions/invalid_macro-1_6.quickbook b/tools/quickbook/test/versions/invalid_macro-1_6.quickbook
index f579f7252e..860d58b718 100644
--- a/tools/quickbook/test/versions/invalid_macro-1_6.quickbook
+++ b/tools/quickbook/test/versions/invalid_macro-1_6.quickbook
@@ -4,11 +4,11 @@
Import:
-[import invalid_macro-inc-1_1.qbk]
+[import invalid_macro-inc-1_1.quickbook]
* __valid__
* __invalid\macro__
Include:
-[include invalid_macro-inc-1_1.qbk]
+[include invalid_macro-inc-1_1.quickbook]
diff --git a/tools/quickbook/test/versions/invalid_macro-inc-1_1.qbk b/tools/quickbook/test/versions/invalid_macro-inc-1_1.quickbook
index c44bd67417..c44bd67417 100644
--- a/tools/quickbook/test/versions/invalid_macro-inc-1_1.qbk
+++ b/tools/quickbook/test/versions/invalid_macro-inc-1_1.quickbook
diff --git a/tools/quickbook/test/versions/templates-1_1.qbk b/tools/quickbook/test/versions/templates-inc-1_1.quickbook
index be5365055d..be5365055d 100644
--- a/tools/quickbook/test/versions/templates-1_1.qbk
+++ b/tools/quickbook/test/versions/templates-inc-1_1.quickbook
diff --git a/tools/quickbook/test/versions/templates-1_4.qbk b/tools/quickbook/test/versions/templates-inc-1_4.quickbook
index 8b565512d4..8b565512d4 100644
--- a/tools/quickbook/test/versions/templates-1_4.qbk
+++ b/tools/quickbook/test/versions/templates-inc-1_4.quickbook
diff --git a/tools/quickbook/test/versions/templates-1_5.qbk b/tools/quickbook/test/versions/templates-inc-1_5.quickbook
index 9dd07f6939..9dd07f6939 100644
--- a/tools/quickbook/test/versions/templates-1_5.qbk
+++ b/tools/quickbook/test/versions/templates-inc-1_5.quickbook
diff --git a/tools/quickbook/test/versions/versions-1_6.quickbook b/tools/quickbook/test/versions/versions-1_6.quickbook
index baac8e08d6..fed5a93ea9 100644
--- a/tools/quickbook/test/versions/versions-1_6.quickbook
+++ b/tools/quickbook/test/versions/versions-1_6.quickbook
@@ -1,9 +1,9 @@
[quickbook 1.6]
[article Mixed version tests]
-[import templates-1_1.qbk]
-[import templates-1_4.qbk]
-[import templates-1_5.qbk]
+[import templates-inc-1_1.quickbook]
+[import templates-inc-1_4.quickbook]
+[import templates-inc-1_5.quickbook]
[/ This test calls a 1.4 template -> 1.5 template -> 1.4 template.
The name lookup in the 1.4 template should skip the 1.5 template
@@ -21,6 +21,6 @@
[/ For comparison]
-[include templates-1_1.qbk]
-[include templates-1_4.qbk]
-[include templates-1_5.qbk]
+[include templates-inc-1_1.quickbook]
+[include templates-inc-1_4.quickbook]
+[include templates-inc-1_5.quickbook]
diff --git a/tools/quickbook/test/xinclude/Jamfile.v2 b/tools/quickbook/test/xinclude/Jamfile.v2
index 2974e86089..fbc8981c4e 100644
--- a/tools/quickbook/test/xinclude/Jamfile.v2
+++ b/tools/quickbook/test/xinclude/Jamfile.v2
@@ -6,7 +6,7 @@
# http://www.boost.org/LICENSE_1_0.txt)
#
-project test/xinclude
+project quickbook/test/xinclude
: requirements
<toolset>msvc:<debug-symbols>off
;
diff --git a/tools/quickbook/test/xinclude/sub/boost-no-inspect b/tools/quickbook/test/xinclude/sub/boost-no-inspect
new file mode 100644
index 0000000000..099a233ec9
--- /dev/null
+++ b/tools/quickbook/test/xinclude/sub/boost-no-inspect
@@ -0,0 +1,2 @@
+This directory just contains simple test files, which might be disrupted by
+adding copyright etc.
diff --git a/tools/regression/build/Jamroot.jam b/tools/regression/build/Jamroot.jam
index e1abe768cd..61eae71210 100644
--- a/tools/regression/build/Jamroot.jam
+++ b/tools/regression/build/Jamroot.jam
@@ -37,7 +37,7 @@ obj tiny_xml
:
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
- <use>/boost//headers
+ <implicit-dependency>/boost//headers
:
release
;
@@ -51,7 +51,7 @@ exe process_jam_log
:
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
- <use>/boost//headers
+ <implicit-dependency>/boost//headers
:
release
;
@@ -64,7 +64,7 @@ exe compiler_status
/boost/filesystem//boost_filesystem/<link>static
:
<define>BOOST_ALL_NO_LIB=1
- <use>/boost//headers
+ <implicit-dependency>/boost//headers
:
release
;
@@ -77,8 +77,25 @@ exe library_status
/boost/filesystem//boost_filesystem/<link>static
:
<define>BOOST_ALL_NO_LIB=1
- <use>/boost//headers
+ <implicit-dependency>/boost//headers
:
release
;
explicit library_status ;
+
+exe boost_report
+ :
+ [ glob report/*.cpp ]
+ /boost/filesystem//boost_filesystem/<link>static
+ /boost//filesystem/<link>static
+ /boost//date_time/<link>static
+ /boost//regex/<link>static
+ /boost//program_options/<link>static
+ /boost//iostreams/<link>static
+ :
+ <define>BOOST_ALL_NO_LIB=1
+ <implicit-dependency>/boost//headers
+ :
+ release
+ ;
+explicit boost_report ;
diff --git a/tools/regression/doc/index.html b/tools/regression/doc/index.html
index 799ba5df41..14775eb533 100644
--- a/tools/regression/doc/index.html
+++ b/tools/regression/doc/index.html
@@ -43,7 +43,7 @@
</ul>
<hr />
- <p>Revised $Date: 2010-06-26 05:30:09 -0700 (Sat, 26 Jun 2010) $</p>
+ <p>Revised $Date$</p>
<p>Copyright Beman Dawes 2003.</p>
diff --git a/tools/regression/doc/library_status.html b/tools/regression/doc/library_status.html
index 0b83ca44e7..a8eb4827fc 100644
--- a/tools/regression/doc/library_status.html
+++ b/tools/regression/doc/library_status.html
@@ -459,6 +459,6 @@
License, Version 1.0. (See accompanying file LICENSE_1_0.txt or
http://www.boost.org/LICENSE_1_0.txt)</p>
- <p>Revised $Date: 2011-10-06 08:41:40 -0700 (Thu, 06 Oct 2011) $</p>
+ <p>Revised $Date$</p>
</body>
</html>
diff --git a/tools/regression/src/compiler_status.cpp b/tools/regression/src/compiler_status.cpp
index c4444bbcc6..63f813445e 100644
--- a/tools/regression/src/compiler_status.cpp
+++ b/tools/regression/src/compiler_status.cpp
@@ -34,7 +34,7 @@ namespace fs = boost::filesystem;
namespace xml = boost::tiny_xml;
#include <cstdlib> // for abort, exit
-#include <cctype> // for toupper
+#include <cctype> // for toupper, isdigit
#include <string>
#include <vector>
#include <set>
@@ -46,6 +46,15 @@ namespace xml = boost::tiny_xml;
#include <stdexcept>
#include <cassert>
+#include <stdio.h> // for popen, pclose
+#if defined(_MSC_VER)
+# define POPEN _popen
+# define PCLOSE _pclose
+#else
+# define POPEN popen
+# define PCLOSE pclose
+#endif
+
using std::string;
const string pass_msg( "Pass" );
@@ -107,15 +116,24 @@ namespace
string revision( const fs::path & boost_root )
{
string rev;
- fs::path entries( boost_root / ".svn" / "entries" );
- fs::ifstream entries_file( entries );
- if ( entries_file )
+ string command("cd ");
+ command += boost_root.string() + " & svn info";
+ FILE* fp = POPEN(command.c_str(), "r");
+ if (fp)
{
- std::getline( entries_file, rev );
- std::getline( entries_file, rev );
- std::getline( entries_file, rev );
- std::getline( entries_file, rev ); // revision number as a string
+ static const int line_max = 128;
+ char line[line_max];
+ while (fgets(line, line_max, fp) != NULL)
+ {
+ string ln(line);
+ if (ln.find("Revision: ") != string::npos)
+ {
+ for(auto itr = ln.begin()+10; itr != ln.end() && isdigit(*itr); ++itr)
+ rev += *itr;
+ }
+ }
}
+ std::cout << "Revision: " << rev << std::endl;
return rev;
}
diff --git a/tools/regression/src/library_status.cpp b/tools/regression/src/library_status.cpp
index d3ad418048..3faa0f63c9 100644
--- a/tools/regression/src/library_status.cpp
+++ b/tools/regression/src/library_status.cpp
@@ -24,15 +24,16 @@ program maintenance.
*******************************************************************************/
-#include "boost/filesystem/operations.hpp"
-#include "boost/filesystem/fstream.hpp"
+#include <boost/filesystem/operations.hpp>
+#include <boost/filesystem/fstream.hpp>
+#include <boost/foreach.hpp>
namespace fs = boost::filesystem;
#include "detail/tiny_xml.hpp"
namespace xml = boost::tiny_xml;
-#include "boost/iterator/transform_iterator.hpp"
+#include <boost/iterator/transform_iterator.hpp>
#include <cstdlib> // for abort, exit
#include <string>
@@ -40,39 +41,35 @@ namespace xml = boost::tiny_xml;
#include <set>
#include <utility> // for make_pair on STLPort
#include <map>
-#include <algorithm>
+#include <algorithm> // max_element, find_if
#include <iostream>
#include <fstream>
#include <ctime>
#include <stdexcept>
#include <cassert>
-#include <utility>
+#include <utility> // for pair
using std::string;
const string pass_msg( "Pass" );
const string warn_msg( "<i>Warn</i>" );
const string fail_msg( "<font color=\"#FF0000\"><i>Fail</i></font>" );
-const string note_msg( "<sup>*</sup>" );
const string missing_residue_msg( "<i>Missing</i>" );
const std::size_t max_compile_msg_size = 10000;
namespace
{
- fs::path boost_root; // boost-root complete path
fs::path locate_root; // locate-root (AKA ALL_LOCATE_TARGET) complete path
bool ignore_pass = false;
bool no_warn = false;
bool no_links = false;
- fs::directory_iterator end_itr;
-
// transform pathname to something html can accept
struct char_xlate {
typedef char result_type;
result_type operator()(char c) const{
- if(c == '/')
+ if(c == '/' || c == '\\')
return '-';
return c;
}
@@ -91,12 +88,13 @@ namespace
struct col_node {
int rows, cols;
- bool has_leaf;
+ bool is_leaf;
+ typedef std::pair<const std::string, col_node> subcolumn;
typedef std::map<std::string, col_node> subcolumns_t;
subcolumns_t m_subcolumns;
bool operator<(const col_node &cn) const;
col_node() :
- has_leaf(false)
+ is_leaf(false)
{}
std::pair<int, int> get_spans();
};
@@ -104,14 +102,16 @@ namespace
std::pair<int, int> col_node::get_spans(){
rows = 1;
cols = 0;
- if(has_leaf){
+ if(is_leaf){
cols = 1;
}
if(! m_subcolumns.empty()){
- subcolumns_t::iterator itr;
- for(itr = m_subcolumns.begin(); itr != m_subcolumns.end(); ++itr){
+ BOOST_FOREACH(
+ subcolumn & s,
+ m_subcolumns
+ ){
std::pair<int, int> spans;
- spans = itr->second.get_spans();
+ spans = s.second.get_spans();
rows = (std::max)(rows, spans.first);
cols += spans.second;
}
@@ -121,86 +121,40 @@ namespace
}
void build_node_tree(const fs::path & dir_root, col_node & node){
- fs::path xml_file_path( dir_root / "test_log.xml" );
- if (fs::exists( xml_file_path ) )
- {
- node.has_leaf = true;
- }
- fs::directory_iterator itr(dir_root);
- while(itr != end_itr){
- if(fs::is_directory(*itr)){
+ bool has_directories = false;
+ bool has_files = false;
+ BOOST_FOREACH(
+ fs::directory_entry & d,
+ std::make_pair(
+ fs::directory_iterator(dir_root),
+ fs::directory_iterator()
+ )
+ ){
+ if(fs::is_directory(d)){
+ has_directories = true;
std::pair<col_node::subcolumns_t::iterator, bool> result
= node.m_subcolumns.insert(
- std::make_pair(itr->path().native(), col_node())
+ std::make_pair(d.path().filename().string(), col_node())
);
- build_node_tree(*itr, result.first->second);
- }
- ++itr;
- }
+ build_node_tree(d, result.first->second);
+ }
+ else{
+ has_files = true;
+ }
+ }
+ if(has_directories && has_files)
+ throw std::string("invalid bin directory structure");
+ node.is_leaf = has_files;
}
fs::ofstream report;
fs::ofstream links_file;
string links_name;
- fs::path notes_path;
- string notes_html;
-
- fs::path notes_map_path;
- typedef std::multimap< string, string > notes_map; // key is test_name-toolset,
- // value is note bookmark
- notes_map notes;
-
string specific_compiler; // if running on one toolset only
const string empty_string;
- // build notes_bookmarks from notes HTML -----------------------------------//
-
- void build_notes_bookmarks()
- {
- if ( notes_map_path.empty() ) return;
- fs::ifstream notes_map_file( notes_map_path );
- if ( !notes_map_file )
- {
- std::cerr << "Could not open --notes-map input file: " << notes_map_path.string() << std::endl;
- std::exit( 1 );
- }
- string line;
- while( std::getline( notes_map_file, line ) )
- {
- string::size_type pos = 0;
- if ( (pos = line.find( ',', pos )) == string::npos ) continue;
- string key(line.substr( 0, pos ) );
- string bookmark( line.substr( pos+1 ) );
-
- // std::cout << "inserting \"" << key << "\",\"" << bookmark << "\"\n";
- notes.insert( notes_map::value_type( key, bookmark ) );
- }
- }
-
- // load_notes_html ---------------------------------------------------------//
-
- bool load_notes_html()
- {
- if ( notes_path.empty() ) return false;
- fs::ifstream notes_file( notes_path );
- if ( !notes_file )
- {
- std::cerr << "Could not open --notes input file: " << notes_path.string() << std::endl;
- std::exit( 1 );
- }
- string line;
- bool in_body( false );
- while( std::getline( notes_file, line ) )
- {
- if ( in_body && line.find( "</body>" ) != string::npos ) in_body = false;
- if ( in_body ) notes_html += line;
- else if ( line.find( "<body>" ) ) in_body = true;
- }
- return true;
- }
-
// extract object library name from target directory string ----------------//
string extract_object_library_name( const string & s )
@@ -213,49 +167,66 @@ namespace
return t.substr( pos, t.find( "/", pos ) - pos );
}
- // element_content ---------------------------------------------------------//
+ // find_element ------------------------------------------------------------//
- const string & element_content(
- const xml::element & root, const string & name )
- {
- const static string empty_string;
- xml::element_list::const_iterator itr;
- for ( itr = root.elements.begin();
- itr != root.elements.end() && (*itr)->name != name;
- ++itr ) {}
- return itr != root.elements.end() ? (*itr)->content : empty_string;
- }
+ struct element_equal {
+ const string & m_name;
+ element_equal(const string & name) :
+ m_name(name)
+ {}
+ bool operator()(const xml::element_ptr & xep) const {
+ return xep.get()->name == m_name;
+ }
+ };
- // find_element ------------------------------------------------------------//
+ xml::element_list::const_iterator find_element(
+ const xml::element & root, const string & name
+ ){
+ return std::find_if(
+ root.elements.begin(),
+ root.elements.end(),
+ element_equal(name)
+ );
+ }
- const xml::element & find_element(
- const xml::element & root, const string & name )
- {
- static const xml::element empty_element;
+ // element_content ---------------------------------------------------------//
+ const string & element_content(
+ const xml::element & root, const string & name
+ ){
xml::element_list::const_iterator itr;
- for ( itr = root.elements.begin();
- itr != root.elements.end() && (*itr)->name != name;
- ++itr ) {}
- return itr != root.elements.end() ? *((*itr).get()) : empty_element;
+ itr = find_element(root, name);
+ if(root.elements.end() == itr)
+ return empty_string;
+ return (*itr)->content;
}
// attribute_value ----------------------------------------------------------//
+ struct attribute_equal {
+ const string & m_name;
+ attribute_equal(const string & name) :
+ m_name(name)
+ {}
+ bool operator()(const xml::attribute & a) const {
+ return a.name == m_name;
+ }
+ };
+
const string & attribute_value(
const xml::element & element,
const string & attribute_name
){
- xml::attribute_list::const_iterator atr;
- for(
- atr = element.attributes.begin();
- atr != element.attributes.end();
- ++atr
- ){
- if(atr->name == attribute_name)
- return atr->value;
+ xml::attribute_list::const_iterator itr;
+ itr = std::find_if(
+ element.attributes.begin(),
+ element.attributes.end(),
+ attribute_equal(attribute_name)
+ );
+ if(element.attributes.end() == itr){
+ static const string empty_string;
+ return empty_string;
}
- static const string empty_string;
- return empty_string;
+ return itr->value;
}
// generate_report ---------------------------------------------------------//
@@ -303,15 +274,17 @@ namespace
compile += "...\n (remainder deleted because of excessive size)\n";
}
+ const string target_dir_string = target_dir.string();
+
links_file << "<h2><a name=\"";
links_file << std::make_pair(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end())
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
)
<< "\">"
<< std::make_pair(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end())
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
)
;
links_file << "</a></h2>\n";;
@@ -347,14 +320,14 @@ namespace
<< source_library_name << "-"
<< object_library_name << "-"
<< std::make_pair(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end())
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
)
<< source_library_name << " - "
<< object_library_name << " - "
<< std::make_pair(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end())
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
)
<< "</a>";
if ( failed_lib_target_dirs.find( lib ) == failed_lib_target_dirs.end() )
@@ -379,14 +352,14 @@ namespace
links_file << "<h2><a name=\""
<< object_library_name << "-"
<< std::make_pair(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end())
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
)
<< "\">"
<< object_library_name << " - "
<< std::make_pair(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end())
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end())
)
<< "</a></h2>\n"
<< "test_log.xml not found\n";
@@ -396,36 +369,18 @@ namespace
return result;
}
- // add_notes --------------------------------------------------------------//
-
- void add_notes( const string & key, bool fail, string & sep, string & target )
- {
- notes_map::const_iterator itr = notes.lower_bound( key );
- if ( itr != notes.end() && itr->first == key )
- {
- for ( ; itr != notes.end() && itr->first == key; ++itr )
- {
- string note_desc( itr->second[0] == '-'
- ? itr->second.substr( 1 ) : itr->second );
- if ( fail || itr->second[0] == '-' )
- {
- target += sep;
- sep = ",";
- target += "<a href=\"";
- target += "#";
- target += note_desc;
- target += "\">";
- target += note_desc;
- target += "</a>";
- }
- }
+ struct has_fail_result {
+ //bool operator()(const boost::shared_ptr<const xml::element> e) const {
+ bool operator()(const xml::element_ptr & e) const {
+ return attribute_value(*e, "result") == "fail";
}
- }
+ };
// do_cell ---------------------------------------------------------------//
bool do_cell(
const fs::path & target_dir,
const string & lib_name,
+ const string & test_name,
string & target,
bool profile
){
@@ -435,61 +390,34 @@ namespace
fs::path xml_file_path( target_dir / "test_log.xml" );
if ( !fs::exists( xml_file_path ) )
{
- // suppress message because there are too many of them.
- // "missing" is a legitmate result as its not a requirement
- // that every test be run in every figuration.
- //std::cerr << "Missing jam_log.xml in target:\n "
- // << target_dir.string() << "\n";
- target += "<td align=\"right\">" + missing_residue_msg + "</td>";
+ fs::path test_path = target_dir / (test_name + ".test");
+ target += "<td align=\"right\">";
+ target += fs::exists( test_path) ? pass_msg : fail_msg;
+ target += "</td>";
return true;
}
- int anything_generated = 0;
- bool note = false;
-
- fs::ifstream file( xml_file_path );
- if ( !file ) // could not open jam_log.xml
- {
- std::cerr << "Can't open jam_log.xml in target:\n "
- << target_dir.string() << "\n";
- target += "<td>" + missing_residue_msg + "</td>";
- return false;
- }
string test_type( "unknown" );
bool always_show_run_output( false );
+ fs::ifstream file( xml_file_path );
xml::element_ptr dbp = xml::parse( file, xml_file_path.string() );
const xml::element & db( *dbp );
- test_type = attribute_value( db, "test-type" );
+
always_show_run_output
= attribute_value( db, "show-run-output" ) == "true";
- std::string test_type_base( test_type );
- if ( test_type_base.size() > 5 )
- {
- const string::size_type trailer = test_type_base.size() - 5;
- if ( test_type_base.substr( trailer ) == "_fail" )
- {
- test_type_base.erase( trailer );
- }
- }
- if ( test_type_base.size() > 4 )
- {
- const string::size_type trailer = test_type_base.size() - 4;
- if ( test_type_base.substr( trailer ) == "_pyd" )
- {
- test_type_base.erase( trailer );
- }
- }
- const xml::element & test_type_element( find_element( db, test_type_base ) );
-
- pass = !test_type_element.name.empty()
- && attribute_value( test_type_element, "result" ) != "fail";
+ // if we don't find any element with result="fail",
+ // mark the test as a pass
+ pass = (db.elements.end() == std::find_if(
+ db.elements.begin(),
+ db.elements.end(),
+ has_fail_result()
+ ));
+ int anything_generated = 0;
if (!no_links){
- if(!test_type_element.name.empty())
- note = attribute_value( test_type_element, "result" ) == "note";
anything_generated =
generate_report(
db,
@@ -497,7 +425,7 @@ namespace
test_type,
target_dir,
pass,
- always_show_run_output || note
+ always_show_run_output
);
}
@@ -508,9 +436,10 @@ namespace
target += "<a href=\"";
target += links_name;
target += "#";
+ const string target_dir_string = target_dir.string();
std::copy(
- html_from_path(target_dir.string().begin()),
- html_from_path(target_dir.string().end()),
+ html_from_path(target_dir_string.begin()),
+ html_from_path(target_dir_string.end()),
std::back_inserter(target)
);
target += "\">";
@@ -518,7 +447,6 @@ namespace
? (anything_generated < 2 ? pass_msg : warn_msg)
: fail_msg;
target += "</a>";
- if ( pass && note ) target += note_msg;
}
else target += pass ? pass_msg : fail_msg;
@@ -529,11 +457,6 @@ namespace
target += (target_dir / "profile.txt").string();
target += "\"><i>Profile</i></a>";
}
-
- // if notes, generate the superscript HTML
-// if ( !notes.empty() )
-// target += get_notes( toolset, lib_name, test_name, !pass );
-
target += "</td>";
return (anything_generated != 0) || !pass;
}
@@ -542,32 +465,32 @@ namespace
const col_node & node,
fs::path dir_root,
const string & lib_name,
+ const string & test_name,
string & target,
bool profile
){
bool retval = false;
- if(node.has_leaf){
- retval = do_cell(
+ if(node.is_leaf){
+ return do_cell(
dir_root,
lib_name,
+ test_name,
target,
profile
);
}
-
- col_node::subcolumns_t::const_iterator col_itr;
- for(
- col_itr = node.m_subcolumns.begin();
- col_itr != node.m_subcolumns.end();
- ++col_itr
+ BOOST_FOREACH(
+ const col_node::subcolumn & s,
+ node.m_subcolumns
){
- fs::path subdir = dir_root / col_itr->first;
+ fs::path subdir = dir_root / s.first;
retval |= visit_node_tree(
- col_itr->second,
+ s.second,
subdir,
lib_name,
+ test_name,
target,
- col_itr->first == "profile"
+ s.first == "profile"
);
}
return retval;
@@ -588,20 +511,17 @@ namespace
target += "<td>";
//target += "<a href=\"" + url_prefix_dir_view + "/libs/" + lib_name + "\">";
target += test_name;
- target += "</a>";
+ //target += "</a>";
target += "</td>";
-// target += "<td>" + test_type + "</td>";
-
bool no_warn_save = no_warn;
-// if ( test_type.find( "fail" ) != string::npos ) no_warn = true;
-
// emit cells on this row
bool anything_to_report = visit_node_tree(
test_node,
test_dir,
lib_name,
+ test_name,
target,
false
);
@@ -625,25 +545,26 @@ namespace
// rows are held in a vector so they can be sorted, if desired.
std::vector<string> results;
- for ( fs::directory_iterator itr( test_lib_dir ); itr != end_itr; ++itr )
- {
- if(! fs::is_directory(*itr))
+ BOOST_FOREACH(
+ fs::directory_entry & d,
+ std::make_pair(
+ fs::directory_iterator(test_lib_dir),
+ fs::directory_iterator()
+ )
+ ){
+ if(! fs::is_directory(d))
continue;
-
- string test_name = itr->path().native();
+
// if the file name contains ".test"
- string::size_type s = test_name.find( ".test" );
- if(string::npos != s)
- // strip it off
- test_name.resize(s);
- else
- // if it doesn't - skip this directory
+ if(d.path().extension() != ".test")
continue;
+ string test_name = d.path().stem().string();
+
results.push_back( std::string() );
do_row(
root_node, //*test_node_itr++,
- *itr, // test dir
+ d, // test dir
lib_name,
test_name,
results[results.size()-1]
@@ -652,21 +573,18 @@ namespace
std::sort( results.begin(), results.end() );
- for (
- std::vector<string>::iterator v(results.begin());
- v != results.end();
- ++v
- ){
- report << *v << "\n";
- }
+ BOOST_FOREACH(string &s, results)
+ report << s << "\n";
}
// column header-----------------------------------------------------------//
int header_depth(const col_node & root){
- col_node::subcolumns_t::const_iterator itr;
int max_depth = 1;
- for(itr = root.m_subcolumns.begin(); itr != root.m_subcolumns.end(); ++itr){
- max_depth = (std::max)(max_depth, itr->second.rows);
+ BOOST_FOREACH(
+ const col_node::subcolumn &s,
+ root.m_subcolumns
+ ){
+ max_depth = (std::max)(max_depth, s.second.rows);
}
return max_depth;
}
@@ -692,23 +610,30 @@ namespace
){
if(current_row < display_row){
if(! node.m_subcolumns.empty()){
- col_node::subcolumns_t::const_iterator itr;
- for(itr = node.m_subcolumns.begin(); itr != node.m_subcolumns.end(); ++itr){
- emit_column_headers(itr->second, display_row, current_row + 1, row_count);
+ BOOST_FOREACH(
+ const col_node::subcolumn &s,
+ node.m_subcolumns
+ ){
+ emit_column_headers(
+ s.second,
+ display_row,
+ current_row + 1,
+ row_count
+ );
}
}
return;
}
- if(node.has_leaf && ! node.m_subcolumns.empty()){
+ /*
+ if(node.is_leaf && ! node.m_subcolumns.empty()){
header_cell(row_count - current_row, 1, std::string(""));
}
-
- col_node::subcolumns_t::const_iterator itr;
- for(itr = node.m_subcolumns.begin(); itr != node.m_subcolumns.end(); ++itr){
- if(1 == itr->second.rows)
- header_cell(row_count - current_row, itr->second.cols, itr->first);
+ */
+ BOOST_FOREACH(col_node::subcolumn s, node.m_subcolumns){
+ if(1 == s.second.rows)
+ header_cell(row_count - current_row, s.second.cols, s.first);
else
- header_cell(1, itr->second.cols, itr->first);
+ header_cell(1, s.second.cols, s.first);
}
}
@@ -716,9 +641,8 @@ namespace
// walk up from the path where we started until we find
// bin or bin.v2
- fs::path::const_iterator it = initial_path.end(), end = initial_path.end();
fs::path test_lib_dir = initial_path;
- for(;;){
+ do{
if(fs::is_directory( test_lib_dir / "bin.v2")){
test_lib_dir /= "bin.v2";
break;
@@ -726,44 +650,47 @@ namespace
if(fs::is_directory( test_lib_dir / "bin")){
// v1 includes the word boost
test_lib_dir /= "bin";
- test_lib_dir /= "boost";
+ if(fs::is_directory( test_lib_dir / "boost")){
+ test_lib_dir /= "boost";
+ }
break;
}
- if(test_lib_dir.empty())
- throw std::string("binary path not found");
- if(*it != "libs")
- --it;
- test_lib_dir.remove_filename();
- }
-
- if(it == end)
- throw std::string("must be run from within a library directory");
+ }while(! test_lib_dir.empty());
+ if(test_lib_dir.empty())
+ throw std::string("binary path not found");
- for(;it != end; ++it){
- test_lib_dir /= *it; // append "libs"
- }
return test_lib_dir;
}
- // note : uncomment the #if/#endif and what this compile !!!
string find_lib_name(fs::path lib_test_dir){
- unsigned int count;
+ // search the path backwards for the magic name "libs"
fs::path::iterator e_itr = lib_test_dir.end();
- for(count = 0;; ++count){
+ while(lib_test_dir.begin() != e_itr){
if(*--e_itr == "libs")
break;
- if(lib_test_dir.empty())
- throw std::string("must be run from within a library directory");
}
- string library_name;
- for(;;){
- library_name.append((*++e_itr).native());
- if(1 == --count)
- break;
- library_name.append("/");
+
+ // if it's found
+ if(lib_test_dir.begin() != e_itr){
+ // use the rest of the path after the "libs" component
+ ++e_itr;
+ }
+ // otherwise, just use the last two components
+ else{
+ e_itr = lib_test_dir.end();
+ if(e_itr != lib_test_dir.begin()){
+ if(--e_itr != lib_test_dir.begin()){
+ --e_itr;
+ }
+ }
+ }
+
+ fs::path library_name;
+ while(lib_test_dir.end() != e_itr){
+ library_name /= *e_itr++;
}
- return library_name;
+ return library_name.string();
}
fs::path find_boost_root(fs::path initial_path){
@@ -781,19 +708,25 @@ namespace
}
// do_table ----------------------------------------------------------------//
- void do_table(fs::path const& initial_path, const string & lib_name)
+ void do_table(const fs::path & lib_test_dir, const string & lib_name)
{
col_node root_node;
- fs::path lib_test_dir = find_lib_test_dir(initial_path);
-
- for ( fs::directory_iterator itr(lib_test_dir); itr != end_itr; ++itr )
- {
- if(! fs::is_directory(*itr))
+ BOOST_FOREACH(
+ fs::directory_entry & d,
+ std::make_pair(
+ fs::directory_iterator(lib_test_dir),
+ fs::directory_iterator()
+ )
+ ){
+ if(! fs::is_directory(d))
+ continue;
+ fs::path p = d.path();
+ if(p.extension() != ".test")
continue;
- build_node_tree(*itr, root_node);
+ build_node_tree(d, root_node);
}
-
+
// visit directory nodes and record nodetree
report << "<table border=\"1\" cellspacing=\"0\" cellpadding=\"5\">\n";
@@ -829,21 +762,12 @@ int cpp_main( int argc, char * argv[] ) // note name!
{
fs::path initial_path = fs::initial_path();
- fs::path comment_path;
while ( argc > 1 && *argv[1] == '-' )
{
if ( argc > 2 && std::strcmp( argv[1], "--compiler" ) == 0 )
{ specific_compiler = argv[2]; --argc; ++argv; }
else if ( argc > 2 && std::strcmp( argv[1], "--locate-root" ) == 0 )
{ locate_root = fs::path( argv[2] ); --argc; ++argv; }
- else if ( argc > 2 && std::strcmp( argv[1], "--boost-root" ) == 0 )
- { boost_root = fs::path( argv[2] ); --argc; ++argv; }
- else if ( argc > 2 && std::strcmp( argv[1], "--comment" ) == 0 )
- { comment_path = fs::path( argv[2] ); --argc; ++argv; }
- else if ( argc > 2 && std::strcmp( argv[1], "--notes" ) == 0 )
- { notes_path = fs::path( argv[2] ); --argc; ++argv; }
- else if ( argc > 2 && std::strcmp( argv[1], "--notes-map" ) == 0 )
- { notes_map_path = fs::path( argv[2] ); --argc; ++argv; }
else if ( std::strcmp( argv[1], "--ignore-pass" ) == 0 ) ignore_pass = true;
else if ( std::strcmp( argv[1], "--no-warn" ) == 0 ) no_warn = true;
else if ( std::strcmp( argv[1], "--v2" ) == 0 )
@@ -864,27 +788,19 @@ int cpp_main( int argc, char * argv[] ) // note name!
" options: --compiler name Run for named compiler only\n"
" --ignore-pass Do not report tests which pass all compilers\n"
" --no-warn Warnings not reported if test passes\n"
- " --boost-root path default derived from current path.\n"
" --locate-root path Path to ALL_LOCATE_TARGET for bjam;\n"
" default boost-root.\n"
- " --comment path Path to file containing HTML\n"
- " to be copied into status-file.\n"
- " --notes path Path to file containing HTML\n"
- " to be copied into status-file.\n"
- " --notes-map path Path to file of toolset/test,n lines, where\n"
- " n is number of note bookmark in --notes file.\n"
- "Example: compiler_status --compiler gcc /boost-root cs.html cs-links.html\n"
- "Note: Only the leaf of the links-file path and --notes file string are\n"
+ "Example: library_status --compiler gcc /boost-root cs.html cs-links.html\n"
+ "Note: Only the leaf of the links-file path is\n"
"used in status-file HTML links. Thus for browsing, status-file,\n"
- "links-file, and --notes file must all be in the same directory.\n"
+ "links-file must be in the same directory.\n"
;
return 1;
}
- if(boost_root.empty())
- boost_root = find_boost_root(initial_path);
- if ( locate_root.empty() )
- locate_root = boost_root;
+ if(locate_root.empty())
+ if(! fs::exists("bin") && ! fs::exists("bin.v2"))
+ locate_root = find_boost_root(initial_path);
report.open( fs::path( argv[1] ) );
if ( !report )
@@ -896,7 +812,7 @@ int cpp_main( int argc, char * argv[] ) // note name!
if ( argc == 3 )
{
fs::path links_path( argv[2] );
- links_name = links_path.native();
+ links_name = links_path.filename().string();
links_file.open( links_path );
if ( !links_file )
{
@@ -906,8 +822,6 @@ int cpp_main( int argc, char * argv[] ) // note name!
}
else no_links = true;
- build_notes_bookmarks();
-
const string library_name = find_lib_name(initial_path);
char run_date[128];
@@ -923,31 +837,12 @@ int cpp_main( int argc, char * argv[] ) // note name!
<< "</head>\n"
<< "<body bgcolor=\"#ffffff\" text=\"#000000\">\n"
<< "<table border=\"0\">\n"
- << "<tr>\n"
- << "<td><img border=\"0\" "
- << "src=\""
- << boost_root / "boost.png"
- << "\" width=\"277\" "
- << "height=\"86\"></td>\n"
- << "<td>\n"
<< "<h1>Library Status: " + library_name + "</h1>\n"
<< "<b>Run Date:</b> "
<< run_date
- << "\n"
+ << "\n<br>"
;
- if ( !comment_path.empty() )
- {
- fs::ifstream comment_file( comment_path );
- if ( !comment_file )
- {
- std::cerr << "Could not open \"--comment\" input file: " << comment_path.string() << std::endl;
- return 1;
- }
- char c;
- while ( comment_file.get( c ) ) { report.put( c ); }
- }
-
report << "</td>\n</table>\n<br>\n";
if ( !no_links )
@@ -959,22 +854,25 @@ int cpp_main( int argc, char * argv[] ) // note name!
<< "</head>\n"
<< "<body bgcolor=\"#ffffff\" text=\"#000000\">\n"
<< "<table border=\"0\">\n"
- << "<tr>\n"
- << "<td><img border=\"0\" src=\""
- << boost_root / "boost.png"
- << "\" width=\"277\" "
- << "height=\"86\"></td>\n"
- << "<td>\n"
<< "<h1>Library Status: " + library_name + "</h1>\n"
<< "<b>Run Date:</b> "
<< run_date
- << "\n</td>\n</table>\n<br>\n"
+ << "\n<br></table>\n<br>\n"
;
}
- do_table(initial_path, library_name);
+ // detect whether we are in a directory which looks like
+ // bin/<library name>/test
+ // or just
+ // bin
+ fs::path library_test_directory = find_lib_test_dir(locate_root);
+ // if a "libs" subdirectory exists, descend into libs/<library name>
+ if(fs::is_directory( library_test_directory / "libs")){
+ library_test_directory /= "libs";
+ library_test_directory /= library_name;
+ }
- if ( load_notes_html() ) report << notes_html << "\n";
+ do_table(library_test_directory, library_name);
report << "</body>\n"
"</html>\n"
diff --git a/tools/regression/src/regression.py b/tools/regression/src/regression.py
index 5bda7c4f7a..22bee2f9e3 100644
--- a/tools/regression/src/regression.py
+++ b/tools/regression/src/regression.py
@@ -26,7 +26,7 @@ repo_path = {
'trunk' : 'trunk',
'release' : 'branches/release',
'build' : 'trunk/tools/build/v2',
- 'jam' : 'trunk/tools/build/v2/engine',
+ 'jam' : 'trunk/tools/build/engine',
'regression' : 'trunk/tools/regression',
'boost-build.jam'
: 'trunk/boost-build.jam'
@@ -373,7 +373,7 @@ class runner:
fn = os.path.join(d, "test_log.xml")
cd = os.getcwd()
try:
- os.chdir (os.path.join (self.boost_root, 'tools/build/v2/test'));
+ os.chdir (os.path.join (self.boost_root, 'tools/build/test'));
bjam_path = os.path.dirname (self.tool_path( self.bjam ))
self.log( "Using bjam binary in '%s'" % (bjam_path))
os.putenv('PATH', bjam_path + os.pathsep + os.environ['PATH'])
@@ -549,8 +549,8 @@ class runner:
raise
def command_show_revision(self):
- modified = '$Date: 2011-10-06 08:41:40 -0700 (Thu, 06 Oct 2011) $'
- revision = '$Revision: 74759 $'
+ modified = '$Date$'
+ revision = '$Revision$'
import re
re_keyword_value = re.compile( r'^\$\w+:\s+(.*)\s+\$$' )
diff --git a/tools/regression/src/run_tests.sh b/tools/regression/src/run_tests.sh
index 483cecfeae..d4c82f7d61 100644
--- a/tools/regression/src/run_tests.sh
+++ b/tools/regression/src/run_tests.sh
@@ -75,7 +75,7 @@ exe_suffix=
# The location of the binary for running bjam. The default should work
# under most circumstances.
#
-bjam="$boost_root/tools/build/v2/engine/bin/bjam$exe_suffix"
+bjam="$boost_root/tools/build/engine/bin/bjam$exe_suffix"
#
# "process_jam_log", and "compiler_status" paths to built helper programs:
@@ -133,7 +133,7 @@ fi
# rebuild bjam if required:
#
echo building bjam:
-cd "$boost_root/tools/build/v2/engine" && \
+cd "$boost_root/tools/build/engine" && \
LOCATE_TARGET=bin sh ./build.sh
if test $? != 0 ; then
echo "bjam build failed."
diff --git a/tools/regression/test/test-cases/general/bjam.log b/tools/regression/test/test-cases/general/bjam.log
index 71013f7458..a896470f2e 100644
--- a/tools/regression/test/test-cases/general/bjam.log
+++ b/tools/regression/test/test-cases/general/bjam.log
@@ -46,7 +46,7 @@ MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\deb
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.obj
compile-fail~pass.cpp
-compile-fail~pass.cpp(9) : fatal error C1189: #error : example of a compile failure
+compile-fail~pass.cpp(9) : fatal error C1189: #error : example of a compile failure
(failed-as-expected) ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.obj
**passed** ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test
@@ -56,7 +56,7 @@ MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\li
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi\compile~fail.obj
compile~fail.cpp
-compile~fail.cpp(9) : fatal error C1189: #error : example of a compile failure
+compile~fail.cpp(9) : fatal error C1189: #error : example of a compile failure
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
cl /Zm800 -nologo @"..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi\compile~fail.obj.rsp"
@@ -145,7 +145,7 @@ MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~compile-fail.obj
run-fail~compile-fail.cpp
-run-fail~compile-fail.cpp(9) : fatal error C1189: #error : example of a compile failure
+run-fail~compile-fail.cpp(9) : fatal error C1189: #error : example of a compile failure
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
cl /Zm800 -nologo @"..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~compile-fail.obj.rsp"
diff --git a/tools/release/2release.bat b/tools/release/2release.bat
deleted file mode 100644
index 391ac2ea11..0000000000
--- a/tools/release/2release.bat
+++ /dev/null
@@ -1,16 +0,0 @@
-@echo off
-rem Copyright Beman Dawes 2011
-rem Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-if not %1$==$ goto usage_ok
-echo Usage: 2release path-relative-to-boost-root [svn-options]
-echo Path may be to file or directory
-echo Options include --dry-run
-echo WARNING: The current directory must be the directory in %BOOST_RELEASE%
-echo specified by the path-relative argument
-goto done
-
-:usage_ok
-svn merge %2 %3 %4 %5 %6 https://svn.boost.org/svn/boost/branches/release/%1 ^
- https://svn.boost.org/svn/boost/trunk/%1
-
-:done
diff --git a/tools/release/README b/tools/release/README
deleted file mode 100644
index bb8eaeca59..0000000000
--- a/tools/release/README
+++ /dev/null
@@ -1,11 +0,0 @@
-Boost SourceForge release folder README
----------------------------------------
-
-The only differences between the distribution files are the line endings
-and archive format. The .bz2 and .7z formats are preferred as more compact.
-
-Distribution files with extensions .gz and .bz2 use Unix line endings.
-
-Distribution files with extensions .zip and .7z use Windows line endings.
-
-See http://www.boost.org/users/history for information about each Boost release.
diff --git a/tools/release/bjam_warnings.bat b/tools/release/bjam_warnings.bat
deleted file mode 100644
index b45f4b785b..0000000000
--- a/tools/release/bjam_warnings.bat
+++ /dev/null
@@ -1,17 +0,0 @@
-@echo off
-rem Scan bjam/b2 log file for compile warnings
-
-rem Copyright 2011 Beman Dawes
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-if not %1$==$ goto usage_ok
-echo Usage: bjam_warnings log-path
-goto done
-
-:usage_ok
-
-grep -i "warning" %1 | grep -E "boost|libs" | sort | uniq
-
-:done
diff --git a/tools/release/build_docs.sh b/tools/release/build_docs.sh
deleted file mode 100755
index 6e00fb31a0..0000000000
--- a/tools/release/build_docs.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-
-# Build docs
-
-# Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-
-if [ $# -lt 1 ]
-then
- echo "invoke:" $0 "directory-name"
- echo "example:" $0 "posix"
- exit 1
-fi
-
-echo building $1 docs...
-pushd $1/doc
-bjam --v2 >../../$1-bjam.log
-ls html
-popd
-
diff --git a/tools/release/build_release.sh b/tools/release/build_release.sh
deleted file mode 100755
index 39a10df4db..0000000000
--- a/tools/release/build_release.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-
-# Build release packages
-
-# Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-
-if [ $# -lt 1 ]
-then
- echo "invoke:" $0 "release-name"
- echo "example:" $0 "boost_1_35_0_RC3"
- exit 1
-fi
-
-./load_posix.sh
-./load_windows.sh
-./build_docs.sh posix
-./build_docs.sh windows
-./build_release_packages.sh $1
-
diff --git a/tools/release/build_release_packages.bat b/tools/release/build_release_packages.bat
deleted file mode 100644
index 5f72e5157c..0000000000
--- a/tools/release/build_release_packages.bat
+++ /dev/null
@@ -1,47 +0,0 @@
-@echo off
-rem Build release packages
-
-rem Copyright Beman Dawes 2009
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo Build release packages...
-
-if not %1$==$ goto ok
-echo Usage: build_release_packages release-name
-echo Example: build_release_packages boost_1_38_0_Beta1
-goto done
-
-:ok
-
-echo Preping posix...
-rmdir /s /q posix\bin.v2 2>nul
-rmdir /s /q posix\dist 2>nul
-ren posix %1
-del %1.tar.gz 2>nul
-del %1.tar.bz2 2>nul
-echo Creating gz...
-tar cfz %1.tar.gz %1
-echo Creating bz2...
-gzip -d -c %1.tar.gz | bzip2 >%1.tar.bz2
-echo Cleaning up posix...
-ren %1 posix
-
-echo Preping windows...
-rmdir /s /q windows\bin.v2 2>nul
-rmdir /s /q windows\dist 2>nul
-ren windows %1
-del %1.zip 2>nul
-del %1.7z 2>nul
-echo Creating zip...
-zip -r -q %1.zip %1
-echo Creating 7z...
-7z a -r -bd %1.7z %1
-echo Cleaning up windows...
-ren %1 windows
-
-grep "Revision:" snapshot.log
-echo Build release packages complete
-
-:done
diff --git a/tools/release/build_release_packages.sh b/tools/release/build_release_packages.sh
deleted file mode 100755
index 2a126c01c0..0000000000
--- a/tools/release/build_release_packages.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env bash
-
-# Build release packages
-
-# Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-if [ $# -lt 1 ]
-then
- echo "invoke:" $0 "release-name"
- echo "example:" $0 "boost_1_35_0_RC3"
- exit 1
-fi
-
-echo "preping posix..."
-rm -r posix/bin.v2 2>/dev/null
-rm -r posix/dist 2>/dev/null
-mv posix $1
-rm -f $1.tar.gz 2>/dev/null
-rm -f $1.tar.bz2 2>/dev/null
-echo "creating gz..."
-tar cfz $1.tar.gz $1
-echo "creating bz2..."
-gzip -c $1.tar.gz | bzip2 >$1.tar.bz2
-echo "cleaning up..."
-mv $1 posix
-
-echo "preping windows..."
-rm -r windows/bin.v2 2>/dev/null
-rm -r windows/dist 2>/dev/null
-mv windows $1
-rm -f $1.zip 2>/dev/null
-rm -f $1.7z 2>/dev/null
-echo "creating zip..."
-zip -r $1.zip $1
-echo "creating 7z..."
-7z a -r $1.7z $1
-echo "cleaning up..."
-mv $1 windows
-
-exit 0
-
diff --git a/tools/release/index.html b/tools/release/index.html
deleted file mode 100644
index 8e850eab4f..0000000000
--- a/tools/release/index.html
+++ /dev/null
@@ -1,64 +0,0 @@
-<html>
-
-<head>
-<meta http-equiv="Content-Language" content="en-us">
-<meta name="GENERATOR" content="Microsoft FrontPage 5.0">
-<meta name="ProgId" content="FrontPage.Editor.Document">
-<meta http-equiv="Content-Type" content="text/html; charset=windows-1252">
-<title>Release Scripts</title>
-<link rel="stylesheet" type="text/css" href="../../doc/src/minimal.css">
-</head>
-
-<body>
-<table border="0" cellpadding="5" cellspacing="0" style="border-collapse: collapse" bordercolor="#111111" width="637">
- <tr>
- <td width="277">
-<a href="../../index.htm">
-<img src="../../boost.png" alt="boost.png (6897 bytes)" align="middle" width="277" height="86" border="0"></a></td>
- <td width="337" align="middle">
- <font size="7">Release Scripts</font>
- </td>
- </tr>
-</table>
-
-<h2>Introduction</h2>
-<p>The release scripts are used by the release management team to build the
-release distribution files, and perform related release management functions.</p>
-<p>The files being built include:</p>
-<table border="1" cellpadding="5" cellspacing="0" style="border-collapse: collapse" bordercolor="#111111">
- <tr>
- <td width="100%" colspan="2">
- <p align="center"><b><i>Files</i></b></td>
- </tr>
- <tr>
- <td width="50%">boost_x_xx_x.7z</td>
- <td width="50%">&nbsp;</td>
- </tr>
- <tr>
- <td width="50%">boost_x_xx_x.tar.bz2</td>
- <td width="50%">Unix-style line endings</td>
- </tr>
- <tr>
- <td width="50%">boost_x_xx_x.tar.gz</td>
- <td width="50%">Unix-style line endings</td>
- </tr>
- <tr>
- <td width="50%">boost_x_xx_x.zip</td>
- <td width="50%">Windows-style line endings</td>
- </tr>
-</table>
-<p>The content of all files is identical except for the line endings.</p>
-
-<hr>
-
-<p>© Copyright Beman Dawes, 2008<br>
-Distributed under the Boost Software License, Version 1.0. See
-<a href="http://www.boost.org/LICENSE_1_0.txt">www.boost.org/LICENSE_1_0.txt</a></p>
-
-<p>Revised
-<!--webbot bot="Timestamp" S-Type="EDITED" S-Format="%B %d, %Y" startspan -->January 18, 2008<!--webbot bot="Timestamp" endspan i-checksum="31853" --> </font>
-</p>
-
-</body>
-
-</html> \ No newline at end of file
diff --git a/tools/release/inspect.sh b/tools/release/inspect.sh
deleted file mode 100755
index 7c04fb58c2..0000000000
--- a/tools/release/inspect.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-# Inspect snapshot
-
-# © Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-# This script uses ftp, and thus assumes ~/.netrc contains a machine ... entry
-
-pushd posix/tools/inspect/build
-bjam
-popd
-echo inspect...
-pushd posix
-dist/bin/inspect >../inspect.html
-popd
-
-# create the ftp script
-echo create ftp script...
-echo "dir" >inspect.ftp
-echo "binary" >>inspect.ftp
-echo "put inspect.html" >>inspect.ftp
-echo "delete inspect-snapshot.html" >>inspect.ftp
-echo "rename inspect.html inspect-snapshot.html" >>inspect.ftp
-echo "dir" >>inspect.ftp
-echo "bye" >>inspect.ftp
-# use cygwin ftp rather than Windows ftp
-echo ftp...
-/usr/bin/ftp -v -i boost.cowic.de <inspect.ftp
-echo inspect.sh complete
diff --git a/tools/release/inspect_trunk.bat b/tools/release/inspect_trunk.bat
deleted file mode 100644
index 4a306a9302..0000000000
--- a/tools/release/inspect_trunk.bat
+++ /dev/null
@@ -1,42 +0,0 @@
-rem Inspect Trunk
-rem Copyright Beman Dawes 2008, 2009
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo Must be run in directory containing svn checkout of trunk
-
-echo Clean trunk working copy ...
-rem cleanup clears locks or other residual problems (we learned this the hard way!)
-svn cleanup
-echo Update trunk working copy...
-svn up --non-interactive --trust-server-cert
-pushd tools\inspect\build
-echo Build inspect program...
-bjam
-popd
-echo Copy inspect.exe to %UTIL% directory...
-copy /y dist\bin\inspect.exe %UTIL%
-echo Inspect...
-inspect >%TEMP%\trunk_inspect.html
-
-echo Create ftp script...
-pushd %TEMP%
-copy %BOOST_TRUNK%\..\user.txt inspect.ftp
-echo dir >>inspect.ftp
-echo binary >>inspect.ftp
-echo put trunk_inspect.html >>inspect.ftp
-echo dir >>inspect.ftp
-echo mdelete inspect-trunk.html >>inspect.ftp
-echo rename trunk_inspect.html inspect-trunk.html >>inspect.ftp
-echo dir >>inspect.ftp
-echo bye >>inspect.ftp
-
-echo Run ftp script...
-ftp -n -i -s:inspect.ftp boost.cowic.de
-popd
-
-echo Update script for next run
-copy /y tools\release\inspect_trunk.bat
-
-echo Inspect script complete
diff --git a/tools/release/linux_user-config.jam b/tools/release/linux_user-config.jam
deleted file mode 100644
index 6b62071b02..0000000000
--- a/tools/release/linux_user-config.jam
+++ /dev/null
@@ -1,21 +0,0 @@
-# Linux user-config.jam
-
-import toolset : using ;
-
-using gcc ;
-
-using python ; # requires pythonN.NN-dev be installed
-
-# Boost iostreams requires no user-config.jam entries,
-# but does require zliblg-dev, libbz2-dev, be installed
-
-using xsltproc ;
-
-using boostbook
- : /usr/share/xml/docbook/stylesheet/nwalsh
- : /usr/share/xml/docbook/schema/dtd/4.2
- ;
-
-# Remove this line if you're not using doxygen
-using doxygen ;
-using quickbook ;
diff --git a/tools/release/load_posix.sh b/tools/release/load_posix.sh
deleted file mode 100755
index eecdbed240..0000000000
--- a/tools/release/load_posix.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-# Load posix directory from branches/release snapshot, using LF line termination
-
-# Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-
-rm -r -f posix 2>/dev/null
-svn export --non-interactive --native-eol LF http://svn.boost.org/svn/boost/branches/release posix
-
diff --git a/tools/release/load_windows.sh b/tools/release/load_windows.sh
deleted file mode 100755
index 08967ddf9e..0000000000
--- a/tools/release/load_windows.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-# Load windows directory from branches/release snapshot, using CR/LF line termination
-
-# Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-
-rm -r -f windows 2>/dev/null
-svn export --non-interactive --native-eol CRLF http://svn.boost.org/svn/boost/branches/release windows
-
diff --git a/tools/release/make_packages.sh b/tools/release/make_packages.sh
deleted file mode 100755
index 3aeb17f8f5..0000000000
--- a/tools/release/make_packages.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/sh
-
-# Build branches/release packages
-
-# © Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-
-if [ $# -lt 1 ]
-then
- echo "invoke:" $0 "package-name"
- echo "example:" $0 "boost_1_35_0_RC3"
- exit 1
-fi
-
-echo "preping posix..."
-rm -r posix/bin.v2 2>/dev/null
-rm -r posix/dist 2>/dev/null
-mv posix $1
-rm -f $1.tar.gz 2>/dev/null
-rm -f $1.tar.bz2 2>/dev/null
-echo "creating gz..."
-tar cfz $1.tar.gz $1
-echo "creating bz2..."
-gunzip -c $1.tar.gz | bzip2 >$1.tar.bz2
-echo "cleaning up..."
-mv $1 posix
-
-echo "preping windows..."
-rm -r windows/bin.v2 2>/dev/null
-rm -r windows/dist 2>/dev/null
-mv windows $1
-rm -f $1.zip 2>/dev/null
-rm -f $1.7z 2>/dev/null
-echo "creating zip..."
-zip -r $1.zip $1
-echo "creating 7z..."
-7z a -r $1.7z $1
-echo "cleaning up..."
-mv $1 windows
-
-echo "done automatic processing; you must now upload packages manually"
-exit 0
-
-
-
diff --git a/tools/release/merge2release.bat b/tools/release/merge2release.bat
deleted file mode 100644
index 39bb01f216..0000000000
--- a/tools/release/merge2release.bat
+++ /dev/null
@@ -1,22 +0,0 @@
-rem @echo off
-rem Copyright Beman Dawes 2010
-rem Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-if not %1$==$ goto usage_ok
-echo Usage: merge2release library-name [svn-options]
-echo Options include --dry-run
-goto done
-
-:usage_ok
-pushd %BOOST_RELEASE%
-pushd boost
-call 2release boost/%1.hpp %2 %3 %4 %5 %6
-pushd %1
-call 2release boost/%1 %2 %3 %4 %5 %6
-popd
-popd
-pushd libs\%1
-call 2release libs/%1 %2 %3 %4 %5 %6
-popd
-popd
-
-:done
diff --git a/tools/release/merge_release_cycle_init.bat b/tools/release/merge_release_cycle_init.bat
deleted file mode 100644
index 3eb3bbe707..0000000000
--- a/tools/release/merge_release_cycle_init.bat
+++ /dev/null
@@ -1,16 +0,0 @@
-rem @echo off
-rem Copyright Beman Dawes 2011
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-pushd %BOOST_RELEASE%
-svn up
-call 2release Jamroot
-call 2release index.html
-pushd boost
-call 2release boost/version.hpp
-popd
-pushd more
-call 2release more
-popd
-popd
diff --git a/tools/release/release-mgt-msvc/compare_trees/compare_trees.vcproj b/tools/release/release-mgt-msvc/compare_trees/compare_trees.vcproj
deleted file mode 100644
index f52093006f..0000000000
--- a/tools/release/release-mgt-msvc/compare_trees/compare_trees.vcproj
+++ /dev/null
@@ -1,197 +0,0 @@
-<?xml version="1.0" encoding="Windows-1252"?>
-<VisualStudioProject
- ProjectType="Visual C++"
- Version="9.00"
- Name="compare_trees"
- ProjectGUID="{7E6AD5ED-4168-4613-A342-0217AA82DEC1}"
- RootNamespace="compare_trees"
- Keyword="Win32Proj"
- TargetFrameworkVersion="196613"
- >
- <Platforms>
- <Platform
- Name="Win32"
- />
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- <Configuration
- Name="Debug|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="0"
- AdditionalIncludeDirectories="..\..\..\.."
- PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
- MinimalRebuild="true"
- BasicRuntimeChecks="3"
- RuntimeLibrary="3"
- UsePrecompiledHeader="0"
- WarningLevel="3"
- DebugInformationFormat="4"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- LinkIncremental="2"
- AdditionalLibraryDirectories="..\..\..\..\stage\lib"
- GenerateDebugInformation="true"
- SubSystem="1"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- <Configuration
- Name="Release|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- WholeProgramOptimization="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="2"
- EnableIntrinsicFunctions="true"
- AdditionalIncludeDirectories="..\..\..\.."
- PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
- RuntimeLibrary="2"
- EnableFunctionLevelLinking="true"
- UsePrecompiledHeader="0"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- LinkIncremental="1"
- AdditionalLibraryDirectories="..\..\..\..\stage\lib"
- GenerateDebugInformation="true"
- SubSystem="1"
- OptimizeReferences="2"
- EnableCOMDATFolding="2"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- </Configurations>
- <References>
- </References>
- <Files>
- <Filter
- Name="Source Files"
- Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
- UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
- >
- <File
- RelativePath="..\..\compare_trees.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="Header Files"
- Filter="h;hpp;hxx;hm;inl;inc;xsd"
- UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
- >
- </Filter>
- <Filter
- Name="Resource Files"
- Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
- UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
- >
- </Filter>
- </Files>
- <Globals>
- </Globals>
-</VisualStudioProject>
diff --git a/tools/release/release-mgt-msvc/msvc.sln b/tools/release/release-mgt-msvc/msvc.sln
deleted file mode 100644
index ca70e06f63..0000000000
--- a/tools/release/release-mgt-msvc/msvc.sln
+++ /dev/null
@@ -1,26 +0,0 @@
-
-Microsoft Visual Studio Solution File, Format Version 10.00
-# Visual C++ Express 2008
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "compare_trees", "compare_trees\compare_trees.vcproj", "{7E6AD5ED-4168-4613-A342-0217AA82DEC1}"
-EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "strftime", "strftime\strftime.vcproj", "{4A82F955-7630-4B79-9C50-E45F27E28BA8}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Win32 = Debug|Win32
- Release|Win32 = Release|Win32
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {7E6AD5ED-4168-4613-A342-0217AA82DEC1}.Debug|Win32.ActiveCfg = Debug|Win32
- {7E6AD5ED-4168-4613-A342-0217AA82DEC1}.Debug|Win32.Build.0 = Debug|Win32
- {7E6AD5ED-4168-4613-A342-0217AA82DEC1}.Release|Win32.ActiveCfg = Release|Win32
- {7E6AD5ED-4168-4613-A342-0217AA82DEC1}.Release|Win32.Build.0 = Release|Win32
- {4A82F955-7630-4B79-9C50-E45F27E28BA8}.Debug|Win32.ActiveCfg = Debug|Win32
- {4A82F955-7630-4B79-9C50-E45F27E28BA8}.Debug|Win32.Build.0 = Debug|Win32
- {4A82F955-7630-4B79-9C50-E45F27E28BA8}.Release|Win32.ActiveCfg = Release|Win32
- {4A82F955-7630-4B79-9C50-E45F27E28BA8}.Release|Win32.Build.0 = Release|Win32
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
-EndGlobal
diff --git a/tools/release/release-mgt-msvc/strftime/strftime.vcproj b/tools/release/release-mgt-msvc/strftime/strftime.vcproj
deleted file mode 100644
index 4b8e9ab866..0000000000
--- a/tools/release/release-mgt-msvc/strftime/strftime.vcproj
+++ /dev/null
@@ -1,193 +0,0 @@
-<?xml version="1.0" encoding="Windows-1252"?>
-<VisualStudioProject
- ProjectType="Visual C++"
- Version="9.00"
- Name="strftime"
- ProjectGUID="{4A82F955-7630-4B79-9C50-E45F27E28BA8}"
- RootNamespace="strftime"
- Keyword="Win32Proj"
- TargetFrameworkVersion="196613"
- >
- <Platforms>
- <Platform
- Name="Win32"
- />
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- <Configuration
- Name="Debug|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="0"
- PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
- MinimalRebuild="true"
- BasicRuntimeChecks="3"
- RuntimeLibrary="3"
- UsePrecompiledHeader="0"
- WarningLevel="3"
- DebugInformationFormat="4"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- LinkIncremental="2"
- GenerateDebugInformation="true"
- SubSystem="1"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- <Configuration
- Name="Release|Win32"
- OutputDirectory="$(SolutionDir)$(ConfigurationName)"
- IntermediateDirectory="$(ConfigurationName)"
- ConfigurationType="1"
- CharacterSet="1"
- WholeProgramOptimization="1"
- >
- <Tool
- Name="VCPreBuildEventTool"
- />
- <Tool
- Name="VCCustomBuildTool"
- />
- <Tool
- Name="VCXMLDataGeneratorTool"
- />
- <Tool
- Name="VCWebServiceProxyGeneratorTool"
- />
- <Tool
- Name="VCMIDLTool"
- />
- <Tool
- Name="VCCLCompilerTool"
- Optimization="2"
- EnableIntrinsicFunctions="true"
- PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
- RuntimeLibrary="0"
- EnableFunctionLevelLinking="true"
- UsePrecompiledHeader="0"
- WarningLevel="3"
- DebugInformationFormat="3"
- />
- <Tool
- Name="VCManagedResourceCompilerTool"
- />
- <Tool
- Name="VCResourceCompilerTool"
- />
- <Tool
- Name="VCPreLinkEventTool"
- />
- <Tool
- Name="VCLinkerTool"
- LinkIncremental="1"
- GenerateDebugInformation="true"
- SubSystem="1"
- OptimizeReferences="2"
- EnableCOMDATFolding="2"
- TargetMachine="1"
- />
- <Tool
- Name="VCALinkTool"
- />
- <Tool
- Name="VCManifestTool"
- />
- <Tool
- Name="VCXDCMakeTool"
- />
- <Tool
- Name="VCBscMakeTool"
- />
- <Tool
- Name="VCFxCopTool"
- />
- <Tool
- Name="VCAppVerifierTool"
- />
- <Tool
- Name="VCPostBuildEventTool"
- />
- </Configuration>
- </Configurations>
- <References>
- </References>
- <Files>
- <Filter
- Name="Source Files"
- Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
- UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
- >
- <File
- RelativePath="..\..\strftime.cpp"
- >
- </File>
- </Filter>
- <Filter
- Name="Header Files"
- Filter="h;hpp;hxx;hm;inl;inc;xsd"
- UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
- >
- </Filter>
- <Filter
- Name="Resource Files"
- Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
- UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
- >
- </Filter>
- </Files>
- <Globals>
- </Globals>
-</VisualStudioProject>
diff --git a/tools/release/release_reports.sh b/tools/release/release_reports.sh
deleted file mode 100755
index 73d8ead91d..0000000000
--- a/tools/release/release_reports.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-# Run the release branch regression reporting system
-
-# Copyright 2008 Beman Dawes
-
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-# Requirements:
-#
-# * ~/boost/trunk is an svn working copy of the trunk
-# * ~/boost/release-reports is a directory devoted to release regression reporting
-# * ~/boost/release-reports/boost is an svn working of branches/release
-# * ~/boost/release-reports/release/ exists
-
-echo Updating ~/boost/trunk/tools/regression ...
-svn up ~/boost/trunk/tools/regression
-
-echo Updating ~/boost/trunk/tools/release ...
-svn up ~/boost/trunk/tools/release
-
-pushd ~/boost/release-reports
-
-echo Running build_results.sh ...
-date >report.log
-~/boost/trunk/tools/regression/xsl_reports/build_results.sh release 1>>report.log 2>>report.log
-date >>report.log
-
-popd
-echo Release regression reporting complete - see ~/boost/release-reports/report.log
diff --git a/tools/release/revision_number.bat b/tools/release/revision_number.bat
deleted file mode 100644
index 8f03bd0f16..0000000000
--- a/tools/release/revision_number.bat
+++ /dev/null
@@ -1,13 +0,0 @@
-rem Create revision information, to be used by other script
-
-rem Copyright 2011 Beman Dawes
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo Getting current subversion revision number...
-svn co --non-interactive --depth=files http://svn.boost.org/svn/boost/branches/release svn_info
-svn info svn_info
-
-svn info svn_info | grep Revision | sed "s/Revision: /set BOOST_REVISION_NUMBER=/" >generated_set_release.bat
-call generated_set_release.bat
diff --git a/tools/release/snapshot.bat b/tools/release/snapshot.bat
deleted file mode 100644
index 5029e3d73b..0000000000
--- a/tools/release/snapshot.bat
+++ /dev/null
@@ -1,39 +0,0 @@
-@echo off
-rem Run POSIX and Windows snapshots and inspection
-
-rem Copyright 2008 Beman Dawes
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-rem Must be run in a directory devoted to boost release snapshots
-
-echo Remove residue from prior runs...
-rem rmdir commands seem to finish before the deletes are necessarily complete.
-rem This can occasionally cause subsequent commands to fail because they expect
-rem the directory to be gone or empty. snapshot_posix and snapshot_windows
-rem are affected. Fix is to run rmdir here so that deletes are complete
-rem by the time snapshots are run.
-rmdir /s /q posix >nul
-rmdir /s /q windows >nul
-time /t
-
-echo Using %BOOST_TRUNK% as boost trunk
-time /t
-pushd %BOOST_TRUNK%
-echo Running svn cleanup on %BOOST_TRUNK%
-svn --non-interactive --trust-server-cert cleanup
-echo Running svn update on %BOOST_TRUNK%
-svn --non-interactive --trust-server-cert up
-popd
-call %BOOST_TRUNK%\tools\release\revision_number.bat
-time /t
-call %BOOST_TRUNK%\tools\release\snapshot_download_docs.bat
-time /t
-call %BOOST_TRUNK%\tools\release\snapshot_posix.bat
-time /t
-call %BOOST_TRUNK%\tools\release\snapshot_windows.bat
-time /t
-call %BOOST_TRUNK%\tools\release\snapshot_inspection.bat
-time /t
-echo Revision %BOOST_REVISION_NUMBER% snapshot complete
diff --git a/tools/release/snapshot.sh b/tools/release/snapshot.sh
deleted file mode 100755
index 8e45abbccc..0000000000
--- a/tools/release/snapshot.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-
-# © Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-sleep 5s
-echo Using $BOOST_TRUNK as boost trunk
-date
-$BOOST_TRUNK/tools/release/snapshot_posix.sh
-date
-$BOOST_TRUNK/tools/release/snapshot_windows.sh
-date
-$BOOST_TRUNK/tools/release/snapshot_inspect.sh
-date
-sleep 5s
diff --git a/tools/release/snapshot_download_docs.bat b/tools/release/snapshot_download_docs.bat
deleted file mode 100644
index 5bdae2b5a8..0000000000
--- a/tools/release/snapshot_download_docs.bat
+++ /dev/null
@@ -1,31 +0,0 @@
-@echo off
-rem Download and unpack boost-release-docs.7z
-
-rem Copyright 2008 Beman Dawes
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo Downloading docs subdirectory...
-
-echo Deleting old files and directories ...
-del boost-docs.7z 2>nul
-del boost-release-docs.7z 2>nul
-rmdir /s /q docs_temp 2>nul
-mkdir docs_temp
-
-echo Creating ftp script ...
-rem user.txt must be a single line: user userid password
-rem where "userid" and "password" are replace with the appropriate values
-copy user.txt download_docs.ftp
-echo binary >>download_docs.ftp
-echo get boost-release-docs.7z >>download_docs.ftp
-echo bye >>download_docs.ftp
-
-echo Running ftp script ...
-ftp -d -n -i -s:download_docs.ftp boost.cowic.de
-
-echo Unpacking 7z file ...
-7z x -y -odocs_temp boost-release-docs.7z
-
-echo Download and unpack boost-release-docs.7z complete! \ No newline at end of file
diff --git a/tools/release/snapshot_inspect.sh b/tools/release/snapshot_inspect.sh
deleted file mode 100755
index 7c04fb58c2..0000000000
--- a/tools/release/snapshot_inspect.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-# Inspect snapshot
-
-# © Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-# This script uses ftp, and thus assumes ~/.netrc contains a machine ... entry
-
-pushd posix/tools/inspect/build
-bjam
-popd
-echo inspect...
-pushd posix
-dist/bin/inspect >../inspect.html
-popd
-
-# create the ftp script
-echo create ftp script...
-echo "dir" >inspect.ftp
-echo "binary" >>inspect.ftp
-echo "put inspect.html" >>inspect.ftp
-echo "delete inspect-snapshot.html" >>inspect.ftp
-echo "rename inspect.html inspect-snapshot.html" >>inspect.ftp
-echo "dir" >>inspect.ftp
-echo "bye" >>inspect.ftp
-# use cygwin ftp rather than Windows ftp
-echo ftp...
-/usr/bin/ftp -v -i boost.cowic.de <inspect.ftp
-echo inspect.sh complete
diff --git a/tools/release/snapshot_inspection.bat b/tools/release/snapshot_inspection.bat
deleted file mode 100644
index 1a86aa4b8e..0000000000
--- a/tools/release/snapshot_inspection.bat
+++ /dev/null
@@ -1,28 +0,0 @@
-rem Inspect snapshot
-
-rem Copyright Beman Dawes 2008, 2011
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo inspect...
-pushd windows
-rem inspect_trunk.bat builds inspect program every day and copies it to %UTIL%
-%UTIL%%\inspect >..\inspect.html
-popd
-
-echo Create ftp script...
-copy user.txt inspect.ftp
-echo dir >>inspect.ftp
-echo binary >>inspect.ftp
-echo put inspect.html >>inspect.ftp
-echo dir >>inspect.ftp
-echo mdelete inspect-snapshot.html >>inspect.ftp
-echo rename inspect.html inspect-snapshot.html >>inspect.ftp
-echo dir >>inspect.ftp
-echo bye >>inspect.ftp
-
-echo Run ftp script...
-ftp -n -i -s:inspect.ftp boost.cowic.de
-
-echo Inspect script complete
diff --git a/tools/release/snapshot_posix.bat b/tools/release/snapshot_posix.bat
deleted file mode 100644
index 1a061dccba..0000000000
--- a/tools/release/snapshot_posix.bat
+++ /dev/null
@@ -1,69 +0,0 @@
-rem Build a branches/release snapshot for POSIX, using LF line termination
-
-rem Copyright 2008 Beman Dawes
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo Build a branches/release snapshot for POSIX, using LF line termination...
-echo Revision %BOOST_REVISION_NUMBER%
-echo Removing old files...
-rmdir /s /q posix >nul
-rmdir /s /q svn_info >nul
-del posix.tar.gz >nul
-del posix.tar.bz2 >nul
-
-echo Exporting files from subversion...
-svn export --non-interactive --native-eol LF -r %BOOST_REVISION_NUMBER% http://svn.boost.org/svn/boost/branches/release posix
-
-echo Copying docs into posix...
-pushd posix
-xcopy /s /y ..\docs_temp
-popd
-
-echo Setting SNAPSHOT_DATE
-strftime "%%Y-%%m-%%d" >date.txt
-set /p SNAPSHOT_DATE= <date.txt
-echo SNAPSHOT_DATE is %SNAPSHOT_DATE%
-
-echo Renaming root directory...
-ren posix boost-posix-%SNAPSHOT_DATE%
-
-echo Building .gz file...
-tar cfz posix.tar.gz --numeric-owner --group=0 --owner=0 boost-posix-%SNAPSHOT_DATE%
-echo Building .bz2 file...
-gzip -d -c posix.tar.gz | bzip2 >posix.tar.bz2
-
-ren boost-posix-%SNAPSHOT_DATE% posix
-
-echo The ftp transfer will be done in two steps because that has proved more
-echo reliable on Beman's Windows XP 64-bit system.
-
-echo Creating ftp script 1 ...
-copy user.txt posix.ftp
-echo dir >>posix.ftp
-echo binary >>posix.ftp
-
-rem echo put posix.tar.gz >>posix.ftp
-rem echo mdelete boost-posix*.gz >>posix.ftp
-rem echo rename posix.tar.gz boost-posix-%SNAPSHOT_DATE%.tar.gz >>posix.ftp
-
-echo put posix.tar.bz2 >>posix.ftp
-echo bye >>posix.ftp
-
-echo Running ftp script 1 ...
-ftp -n -i -s:posix.ftp boost.cowic.de
-
-echo Creating ftp script 2 ...
-copy user.txt posix.ftp
-echo dir >>posix.ftp
-echo mdelete boost-posix*.bz2 >>posix.ftp
-echo rename posix.tar.bz2 boost-posix-%SNAPSHOT_DATE%.tar.bz2 >>posix.ftp
-
-echo dir >>posix.ftp
-echo bye >>posix.ftp
-
-echo Running ftp script 2 ...
-ftp -n -i -s:posix.ftp boost.cowic.de
-
-echo POSIX snapshot complete!
diff --git a/tools/release/snapshot_posix.sh b/tools/release/snapshot_posix.sh
deleted file mode 100755
index ccc625a999..0000000000
--- a/tools/release/snapshot_posix.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env bash
-
-# Build a branches/release snapshot for Posix, using LF line termination
-
-# © Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-echo "Build a branches/release snapshot for POSIX, using LF line termination..."
-
-echo "Removing old files..."
-rm -r -f posix
-rm -r -f svn_info
-
-echo "Exporting files from subversion..."
-# leave an audit trail, which is used by inspect to determine revision number
-# use --non-recursive rather than --depth=files until the linux distros catch up
-svn co --non-recursive http://svn.boost.org/svn/boost/branches/release svn_info
-svn export --non-interactive --native-eol LF http://svn.boost.org/svn/boost/branches/release posix
-
-#echo "Building bjam..."
-# failure to use an up-to-date copy of bjam has caused much wasted effort.
-#pushd posix/tools/build/v2/engine
-#./build.sh gcc
-#popd
-#
-#echo "Building docs..."
-#pushd posix/doc
-#../tools/build/v2/engine/bin.cygwinx86/bjam --toolset=gcc &>../../posix-bjam.log
-#popd
-
-echo "Cleaning up and renaming..."
-#rm -r posix/bin.v2
-SNAPSHOT_DATE=`eval date +%Y-%m-%d`
-echo SNAPSHOT_DATE is $SNAPSHOT_DATE
-mv posix boost-posix-$SNAPSHOT_DATE
-rm -f posix.tar.gz
-rm -f posix.tar.bz2
-
-echo "Building .gz file..."
-tar cfz posix.tar.gz boost-posix-$SNAPSHOT_DATE
-echo "Building .bz2 file..."
-gunzip -c posix.tar.gz | bzip2 >posix.tar.bz2
-mv boost-posix-$SNAPSHOT_DATE posix
-
-echo "Creating ftp script..."
-echo "dir" >posix.ftp
-echo "binary" >>posix.ftp
-
-#echo "put posix.tar.gz" >>posix.ftp
-#echo "mdelete boost-posix*.gz" >>posix.ftp
-#echo "rename posix.tar.gz boost-posix-$SNAPSHOT_DATE.tar.gz" >>posix.ftp
-
-echo "put posix.tar.bz2" >>posix.ftp
-echo "mdelete boost-posix*.bz2" >>posix.ftp
-echo "rename posix.tar.bz2 boost-posix-$SNAPSHOT_DATE.tar.bz2" >>posix.ftp
-
-echo "dir" >>posix.ftp
-echo "bye" >>posix.ftp
-
-echo "Running ftp script..."
-# use cygwin ftp rather than Windows ftp
-/usr/bin/ftp -v -i boost.cowic.de <posix.ftp
-
-echo POSIX snapshot complete!
diff --git a/tools/release/snapshot_windows.bat b/tools/release/snapshot_windows.bat
deleted file mode 100644
index af31d62610..0000000000
--- a/tools/release/snapshot_windows.bat
+++ /dev/null
@@ -1,75 +0,0 @@
-rem Build a branches/release snapshot for Windows, using CRLF line termination
-
-rem Copyright 2008 Beman Dawes
-
-rem Distributed under the Boost Software License, Version 1.0.
-rem See http://www.boost.org/LICENSE_1_0.txt
-
-echo Build a branches/release snapshot for Windows, using CRLF line termination...
-echo Revision %BOOST_REVISION_NUMBER%
-
-echo Removing old files...
-rmdir /s /q windows >nul
-rmdir /s /q svn_info >nul
-del windows.7z >nul
-del windows.zip >nul
-
-echo Exporting files from subversion...
-svn export --non-interactive --native-eol CRLF -r %BOOST_REVISION_NUMBER% http://svn.boost.org/svn/boost/branches/release windows
-
-echo Copying docs into windows...
-pushd windows
-xcopy /s /y ..\docs_temp
-popd
-
-echo Setting SNAPSHOT_DATE
-strftime "%%Y-%%m-%%d" >date.txt
-set /p SNAPSHOT_DATE= <date.txt
-echo SNAPSHOT_DATE is %SNAPSHOT_DATE%
-
-echo Renaming root directory...
-ren windows boost-windows-%SNAPSHOT_DATE%
-
-echo Building .7z file...
-rem On Windows, 7z comes from the 7-Zip package, not Cygwin,
-rem so path must include C:\Program Files\7-Zip
-7z a -r windows.7z boost-windows-%SNAPSHOT_DATE%
-
-rem Building .zip file...
-rem zip -r windows.zip boost-windows-%SNAPSHOT_DATE%
-
-ren boost-windows-%SNAPSHOT_DATE% windows
-
-echo The ftp transfer will be done in two steps because that has proved more
-echo reliable on Beman's Windows XP 64-bit system.
-
-echo Creating ftp script 1 ...
-rem user.txt must be a single line: user userid password
-rem where "userid" and "password" are replaced with the appropriate values
-copy user.txt windows.ftp
-echo dir >>windows.ftp
-echo binary >>windows.ftp
-
-rem echo put windows.zip >>windows.ftp
-rem echo mdelete boost-windows*.zip >>windows.ftp
-rem echo rename windows.zip boost-windows-%SNAPSHOT_DATE%.zip >>windows.ftp
-
-echo put windows.7z >>windows.ftp
-echo bye >>windows.ftp
-
-echo Running ftp script 1 ...
-ftp -n -i -s:windows.ftp boost.cowic.de
-
-echo Creating ftp script 2 ...
-copy user.txt windows.ftp
-echo dir >>windows.ftp
-echo mdelete boost-windows*.7z >>windows.ftp
-echo rename windows.7z boost-windows-%SNAPSHOT_DATE%.7z >>windows.ftp
-
-echo dir >>windows.ftp
-echo bye >>windows.ftp
-
-echo Running ftp script 2 ...
-ftp -n -i -s:windows.ftp boost.cowic.de
-
-echo Windows snapshot complete!
diff --git a/tools/release/snapshot_windows.sh b/tools/release/snapshot_windows.sh
deleted file mode 100755
index 14de67f147..0000000000
--- a/tools/release/snapshot_windows.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-
-# Build a branches/release snapshot for Windows, using CRLF line termination
-
-# © Copyright 2008 Beman Dawes
-# Distributed under the Boost Software License, Version 1.0.
-# See http://www.boost.org/LICENSE_1_0.txt
-
-# This script uses ftp, and thus assumes ~/.netrc contains a machine ... entry
-
-echo "Build a branches/release snapshot for Windows, using CRLF line termination..."
-
-echo "Removing old files..."
-rm -r -f windows
-
-echo "Exporting files from subversion..."
-svn export --non-interactive --native-eol CRLF http://svn.boost.org/svn/boost/branches/release windows
-
-#echo "Copying docs from posix tree..."
-#cp --recursive posix/doc/html windows/doc
-
-echo "Renaming..."
-SNAPSHOT_DATE=`eval date +%Y-%m-%d`
-echo SNAPSHOT_DATE is $SNAPSHOT_DATE
-mv windows boost-windows-$SNAPSHOT_DATE
-
-#rm -f windows.zip
-#zip -r windows.zip boost-windows-$SNAPSHOT_DATE
-
-echo "Building .7z..."
-rm -f windows.7z
-# On Windows, 7z comes from the 7-Zip package, not Cygwin,
-# so path must include C:\Program Files\7-Zip.
-7z a -r windows.7z boost-windows-$SNAPSHOT_DATE
-
-echo "Reverting name..."
-mv boost-windows-$SNAPSHOT_DATE windows
-
-echo "Creating ftp script..."
-cat <user.txt >windows.ftp
-echo "dir" >>windows.ftp
-echo "binary" >>windows.ftp
-
-#echo "put windows.zip" >>windows.ftp
-#echo "mdelete boost-windows*.zip" >>windows.ftp
-#echo "rename windows.zip boost-windows-$SNAPSHOT_DATE.zip" >>windows.ftp
-
-echo "put windows.7z" >>windows.ftp
-echo "mdelete boost-windows*.7z" >>windows.ftp
-echo "rename windows.7z boost-windows-$SNAPSHOT_DATE.7z" >>windows.ftp
-echo "dir" >>windows.ftp
-echo "bye" >>windows.ftp
-
-echo "Running ftp script..."
-# This is the Windows ftp client
-ftp -n -i -d -s:windows.ftp boost.cowic.de
-
-echo "Windows snapshot complete!"
diff --git a/tools/release/strftime.cpp b/tools/release/strftime.cpp
deleted file mode 100644
index 57d66099ae..0000000000
--- a/tools/release/strftime.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-// Command line utility to output the date under control of a format string
-
-// Copyright 2008 Beman Dawes
-
-// Distributed under the Boost Software License, Version 1.0.
-// See http://www.boost.org/LICENSE_1_0.txt
-
-#define _CRT_SECURE_NO_WARNINGS
-
-#include <ctime>
-#include <string>
-#include <iostream>
-#include <cstdlib>
-
-using namespace std;
-
-int main(int argc, char * argv[])
-{
- if (argc != 2 )
- {
- cerr <<
- "Invoke: strftime format\n"
-      "Example: strftime \"The date is %Y-%m-%d in ISO format\"\n"
- "The format codes are:\n"
- " %a Abbreviated weekday name\n"
- " %A Full weekday name\n"
- " %b Abbreviated month name\n"
- " %B Full month name\n"
- " %c Date and time representation appropriate for locale\n"
- " %d Day of month as decimal number (01 - 31)\n"
- " %H Hour in 24-hour format (00 - 23)\n"
- " %I Hour in 12-hour format (01 - 12)\n"
- " %j Day of year as decimal number (001 - 366)\n"
- " %m Month as decimal number (01 - 12)\n"
- " %M Minute as decimal number (00 - 59)\n"
- " %p Current locale's A.M./P.M. indicator for 12-hour clock\n"
- " %S Second as decimal number (00 - 59)\n"
- " %U Week of year as decimal number, with Sunday as first day of week (00 - 53)\n"
- " %w Weekday as decimal number (0 - 6; Sunday is 0)\n"
- " %W Week of year as decimal number, with Monday as first day of week (00 - 53)\n"
- " %x Date representation for current locale\n"
- " %X Time representation for current locale\n"
- " %y Year without century, as decimal number (00 - 99)\n"
- " %Y Year with century, as decimal number\n"
- " %z, %Z Either the time-zone name or time zone abbreviation, depending on registry settings; no characters if time zone is unknown\n"
- " %% Percent sign\n"
- ;
- return 1;
- }
-
- string format = argv[1];
- time_t t = time(0);
- tm * tod = localtime(&t);
- if (!tod)
- {
- cerr << "error: localtime function failed\n";
- return 1;
- }
- char* s = new char [format.size() + 256];
- if (strftime( s, format.size() + 256, format.c_str(), tod ) == 0 )
- {
- cerr << "error: buffer overflow\n";
- return 1;
- }
-
- cout << s;
- return 0;
-}
diff --git a/tools/release/unmerged.bat b/tools/release/unmerged.bat
deleted file mode 100644
index 9a3581b8ab..0000000000
--- a/tools/release/unmerged.bat
+++ /dev/null
@@ -1,17 +0,0 @@
-@echo off
-rem Copyright Beman Dawes 2009
-rem Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-if not %1$==$ goto usage_ok
-echo Usage: unmerged library-name [svn-options]
-echo Options include --summarize to show paths only, i.e. to suppress line-by-line diffs
-goto done
-
-:usage_ok
-svn diff %2 %3 %4 %5 %6 http://svn.boost.org/svn/boost/branches/release/boost/%1.hpp ^
- http://svn.boost.org/svn/boost/trunk/boost/%1.hpp
-svn diff %2 %3 %4 %5 %6 http://svn.boost.org/svn/boost/branches/release/boost/%1 ^
- http://svn.boost.org/svn/boost/trunk/boost/%1
-svn diff %2 %3 %4 %5 %6 http://svn.boost.org/svn/boost/branches/release/libs/%1 ^
- http://svn.boost.org/svn/boost/trunk/libs/%1
-
-:done
diff --git a/tools/release/unmerged_all.bat b/tools/release/unmerged_all.bat
deleted file mode 100644
index 6bb19dd9f1..0000000000
--- a/tools/release/unmerged_all.bat
+++ /dev/null
@@ -1,86 +0,0 @@
-rem Copyright Beman Dawes 2009
-rem Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-call unmerged accumulators --summarize
-call unmerged algorithm --summarize
-call unmerged any --summarize
-call unmerged array --summarize
-call unmerged asio --summarize
-call unmerged assign --summarize
-call unmerged bimap --summarize
-call unmerged bind --summarize
-call unmerged circular_buffer --summarize
-call unmerged compatibility --summarize
-call unmerged compose --summarize
-call unmerged concept --summarize
-call unmerged concept_check --summarize
-call unmerged config --summarize
-call unmerged conversion --summarize
-call unmerged crc --summarize
-call unmerged date_time --summarize
-call unmerged detail --summarize
-call unmerged disjoint_sets --summarize
-call unmerged dynamic_bitset --summarize
-call unmerged exception --summarize
-call unmerged filesystem --summarize
-call unmerged flyweight --summarize
-call unmerged foreach --summarize
-call unmerged format --summarize
-call unmerged function --summarize
-call unmerged functional --summarize
-call unmerged function_types --summarize
-call unmerged fusion --summarize
-call unmerged gil --summarize
-call unmerged graph --summarize
-call unmerged graph_parallel --summarize
-call unmerged integer --summarize
-call unmerged interprocess --summarize
-call unmerged intrusive --summarize
-call unmerged io --summarize
-call unmerged iostreams --summarize
-call unmerged iterator --summarize
-call unmerged lambda --summarize
-call unmerged logic --summarize
-call unmerged math --summarize
-call unmerged mem_fn --summarize
-call unmerged mpi --summarize
-call unmerged mpl --summarize
-call unmerged multi_array --summarize
-call unmerged multi_index --summarize
-call unmerged numeric --summarize
-call unmerged optional --summarize
-call unmerged parameter --summarize
-call unmerged pool --summarize
-call unmerged preprocessor --summarize
-call unmerged program_options --summarize
-call unmerged property_map --summarize
-call unmerged property_tree --summarize
-call unmerged proto --summarize
-call unmerged ptr_container --summarize
-call unmerged python --summarize
-call unmerged random --summarize
-call unmerged range --summarize
-call unmerged rational --summarize
-call unmerged regex --summarize
-call unmerged scope_exit --summarize
-call unmerged serialization --summarize
-call unmerged signals --summarize
-call unmerged signals2 --summarize
-call unmerged smart_ptr --summarize
-call unmerged spirit --summarize
-call unmerged statechart --summarize
-call unmerged static_assert --summarize
-call unmerged system --summarize
-call unmerged test --summarize
-call unmerged thread --summarize
-call unmerged timer --summarize
-call unmerged tokenizer --summarize
-call unmerged tr1 --summarize
-call unmerged tuple --summarize
-call unmerged typeof --summarize
-call unmerged type_traits --summarize
-call unmerged units --summarize
-call unmerged unordered --summarize
-call unmerged utility --summarize
-call unmerged variant --summarize
-call unmerged wave --summarize
-call unmerged xpressive --summarize
diff --git a/tools/release/unmerged_whatever.bat b/tools/release/unmerged_whatever.bat
deleted file mode 100644
index d49e468b2e..0000000000
--- a/tools/release/unmerged_whatever.bat
+++ /dev/null
@@ -1,13 +0,0 @@
-@echo off
-rem Copyright Beman Dawes 2011
-rem Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-if not %1$==$ goto usage_ok
-echo Usage: unmerged_whatever path-from-root [svn-options]
-echo Options include --summarize to show paths only, i.e. to suppress line-by-line diffs
-goto done
-
-:usage_ok
-svn diff %2 %3 %4 %5 %6 http://svn.boost.org/svn/boost/branches/release/%1 ^
- http://svn.boost.org/svn/boost/trunk/%1
-
-:done
diff --git a/tools/release/upload2sourceforge.bat b/tools/release/upload2sourceforge.bat
deleted file mode 100644
index f16a2e809b..0000000000
--- a/tools/release/upload2sourceforge.bat
+++ /dev/null
@@ -1,13 +0,0 @@
-rem Copyright Beman Dawes 2009
-rem Distributed under the Boost Software License, Version 1.0. See http://www.boost.org/LICENSE_1_0.txt
-if not %1$==$ goto usage_ok
-echo Usage: upload2sourceforge release-folder
-echo Example: upload2sourceforge 1.40.0
-goto done
-
-:usage_ok
-dir boost_*
-pause Are these the correct files to upload? [Ctrl-C to interrupt]
-rsync -avP -e ssh boost_* beman_dawes,boost@frs.sourceforge.net:/home/frs/project/b/bo/boost/boost/%1/
-
-:done
diff --git a/tools/wave/build/Jamfile.v2 b/tools/wave/build/Jamfile.v2
deleted file mode 100644
index 3e839eeaed..0000000000
--- a/tools/wave/build/Jamfile.v2
+++ /dev/null
@@ -1,69 +0,0 @@
-# Wave: A Standard compliant C++ preprocessor
-#
-# Boost Wave Library Build Jamfile
-#
-# http://www.boost.org/
-#
-# Copyright (c) 2001-2010 Hartmut Kaiser. Distributed under the Boost
-# Software License, Version 1.0. (See accompanying file
-# LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-
-# Make sure all of Wave is compiled with threading disabled. We still need
-# to link with Boost.Thread, but no multi-threaded features are being used
-# in the Wave tool anyway.
-
-import feature ;
-
-feature.feature wavetool
- : on
- : optional composite propagated
- ;
-
-feature.compose <wavetool>on
- : <define>BOOST_WAVE_SUPPORT_THREADING=0
- ;
-
-###############################################################################
-project
- : requirements
- <toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
- <toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
- ;
-
-exe wave
- :
- ../cpp.cpp
- /boost//wave
- /boost//program_options
- /boost//filesystem
- /boost//serialization
- /boost//system
- /boost//thread
- /boost//date_time
- :
- <threading>multi
-# <debug-symbols>on
- <wavetool>on
- :
- release
- ;
-
-install dist-bin
- :
- wave
- :
- <install-type>EXE
- <location>../../../dist/bin
- :
- release
- ;
-
-install dist-lib
- :
- wave
- :
- <install-type>LIB
- <location>../../../dist/lib
- :
- release
- ;
diff --git a/tools/wave/cpp.cpp b/tools/wave/cpp.cpp
deleted file mode 100644
index 32bdd62ba0..0000000000
--- a/tools/wave/cpp.cpp
+++ /dev/null
@@ -1,1473 +0,0 @@
-/*=============================================================================
- Boost.Wave: A Standard compliant C++ preprocessor library
-
- http://www.boost.org/
-
- Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
- Software License, Version 1.0. (See accompanying file
- LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#define BOOST_WAVE_SERIALIZATION 0 // enable serialization
-#define BOOST_WAVE_BINARY_SERIALIZATION 0 // use binary archives
-#define BOOST_WAVE_XML_SERIALIZATION 1 // use XML archives
-
-#include "cpp.hpp" // global configuration
-
-///////////////////////////////////////////////////////////////////////////////
-// Include additional Boost libraries
-#include <boost/filesystem/path.hpp>
-#include <boost/filesystem/convenience.hpp>
-#include <boost/timer.hpp>
-#include <boost/any.hpp>
-
-///////////////////////////////////////////////////////////////////////////////
-// Include Wave itself
-#include <boost/wave.hpp>
-
-///////////////////////////////////////////////////////////////////////////////
-// Include the lexer related stuff
-#include <boost/wave/cpplexer/cpp_lex_token.hpp> // token type
-#include <boost/wave/cpplexer/cpp_lex_iterator.hpp> // lexer type
-
-///////////////////////////////////////////////////////////////////////////////
-// Include serialization support, if requested
-#if BOOST_WAVE_SERIALIZATION != 0
-#include <boost/serialization/serialization.hpp>
-#if BOOST_WAVE_BINARY_SERIALIZATION != 0
-#include <boost/archive/binary_iarchive.hpp>
-#include <boost/archive/binary_oarchive.hpp>
-typedef boost::archive::binary_iarchive iarchive;
-typedef boost::archive::binary_oarchive oarchive;
-#elif BOOST_WAVE_XML_SERIALIZATION != 0
-#include <boost/archive/xml_iarchive.hpp>
-#include <boost/archive/xml_oarchive.hpp>
-typedef boost::archive::xml_iarchive iarchive;
-typedef boost::archive::xml_oarchive oarchive;
-#else
-#include <boost/archive/text_iarchive.hpp>
-#include <boost/archive/text_oarchive.hpp>
-typedef boost::archive::text_iarchive iarchive;
-typedef boost::archive::text_oarchive oarchive;
-#endif
-#endif
-
-///////////////////////////////////////////////////////////////////////////////
-// Include the context policies to use
-#include "trace_macro_expansion.hpp"
-
-///////////////////////////////////////////////////////////////////////////////
-// Include lexer specifics, import lexer names
-#if BOOST_WAVE_SEPARATE_LEXER_INSTANTIATION == 0
-#include <boost/wave/cpplexer/re2clex/cpp_re2c_lexer.hpp>
-#endif
-
-///////////////////////////////////////////////////////////////////////////////
-// Include the grammar definitions, if these shouldn't be compiled separately
-// (ATTENTION: _very_ large compilation times!)
-#if BOOST_WAVE_SEPARATE_GRAMMAR_INSTANTIATION == 0
-#include <boost/wave/grammars/cpp_intlit_grammar.hpp>
-#include <boost/wave/grammars/cpp_chlit_grammar.hpp>
-#include <boost/wave/grammars/cpp_grammar.hpp>
-#include <boost/wave/grammars/cpp_expression_grammar.hpp>
-#include <boost/wave/grammars/cpp_predef_macros_grammar.hpp>
-#include <boost/wave/grammars/cpp_defined_grammar.hpp>
-#endif
-
-///////////////////////////////////////////////////////////////////////////////
-// Import required names
-using namespace boost::spirit::classic;
-
-using std::pair;
-using std::vector;
-using std::getline;
-using std::ofstream;
-using std::cout;
-using std::cerr;
-using std::endl;
-using std::ostream;
-using std::istreambuf_iterator;
-
-///////////////////////////////////////////////////////////////////////////////
-//
-// This application uses the lex_iterator and lex_token types predefined
-// with the Wave library, but it is possible to use your own types.
-//
-// You may want to have a look at the other samples to see how this can
-// be achieved.
- typedef boost::wave::cpplexer::lex_token<> token_type;
- typedef boost::wave::cpplexer::lex_iterator<token_type>
- lex_iterator_type;
-
-// The C++ preprocessor iterators shouldn't be constructed directly. They
-// are to be generated through a boost::wave::context<> object. This
-// boost::wave::context object is also used to initialize and configure the
-// parameters of the actual preprocessing.
- typedef boost::wave::context<
- std::string::iterator, lex_iterator_type,
- boost::wave::iteration_context_policies::load_file_to_string,
- trace_macro_expansion<token_type> >
- context_type;
-
-///////////////////////////////////////////////////////////////////////////////
-// print the current version
-std::string get_version()
-{
- std::string version (context_type::get_version_string());
- version = version.substr(1, version.size()-2); // strip quotes
- version += std::string(" (" CPP_VERSION_DATE_STR ")"); // add date
- return version;
-}
-
-///////////////////////////////////////////////////////////////////////////////
-// print the current version for interactive sessions
-int print_interactive_version()
-{
- cout << "Wave: A Standard conformant C++ preprocessor based on the Boost.Wave library" << endl;
- cout << "Version: " << get_version() << endl;
- return 0;
-}
-
-///////////////////////////////////////////////////////////////////////////////
-// print the copyright statement
-int print_copyright()
-{
- char const *copyright[] = {
- "",
- "Wave: A Standard conformant C++ preprocessor based on the Boost.Wave library",
- "http://www.boost.org/",
- "",
- "Copyright (c) 2001-2012 Hartmut Kaiser, Distributed under the Boost",
- "Software License, Version 1.0. (See accompanying file",
- "LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)",
- 0
- };
-
- for (int i = 0; 0 != copyright[i]; ++i)
- cout << copyright[i] << endl;
-
- return 0; // exit app
-}
-
-///////////////////////////////////////////////////////////////////////////////
-// forward declarations only
-namespace cmd_line_utils
-{
- class include_paths;
-}
-
-namespace boost { namespace program_options {
-
- void validate(boost::any &v, std::vector<std::string> const &s,
- cmd_line_utils::include_paths *, long);
-
-}} // boost::program_options
-
-///////////////////////////////////////////////////////////////////////////////
-#include <boost/program_options.hpp>
-
-namespace po = boost::program_options;
-namespace fs = boost::filesystem;
-
-///////////////////////////////////////////////////////////////////////////////
-namespace cmd_line_utils {
-
- // Additional command line parser which interprets '@something' as an
- // option "config-file" with the value "something".
- inline pair<std::string, std::string>
- at_option_parser(std::string const&s)
- {
- if ('@' == s[0])
- return std::make_pair(std::string("config-file"), s.substr(1));
- else
- return pair<std::string, std::string>();
- }
-
- // class, which keeps include file information read from the command line
- class include_paths {
- public:
- include_paths() : seen_separator(false) {}
-
- vector<std::string> paths; // stores user paths
- vector<std::string> syspaths; // stores system paths
- bool seen_separator; // command line contains a '-I-' option
-
- // Function which validates additional tokens from command line.
- static void
- validate(boost::any &v, vector<std::string> const &tokens)
- {
- if (v.empty())
- v = boost::any(include_paths());
-
- include_paths *p = boost::any_cast<include_paths>(&v);
-
- BOOST_ASSERT(p);
- // Assume only one path per '-I' occurrence.
- std::string const& t = po::validators::get_single_string(tokens);
- if (t == "-") {
- // found -I- option, so switch behaviour
- p->seen_separator = true;
- }
- else if (p->seen_separator) {
- // store this path as a system path
- p->syspaths.push_back(t);
- }
- else {
-            // store this path as a user path
- p->paths.push_back(t);
- }
- }
- };
-
- // Read all options from a given config file, parse and add them to the
- // given variables_map
- bool read_config_file_options(std::string const &filename,
- po::options_description const &desc, po::variables_map &vm,
- bool may_fail = false)
- {
- std::ifstream ifs(filename.c_str());
-
- if (!ifs.is_open()) {
- if (!may_fail) {
- cerr << filename
- << ": command line warning: config file not found"
- << endl;
- }
- return false;
- }
-
- vector<std::string> options;
- std::string line;
-
- while (std::getline(ifs, line)) {
- // skip empty lines
- std::string::size_type pos = line.find_first_not_of(" \t");
- if (pos == std::string::npos)
- continue;
-
- // skip comment lines
- if ('#' != line[pos]) {
- // strip leading and trailing whitespace
- std::string::size_type endpos = line.find_last_not_of(" \t");
- BOOST_ASSERT(endpos != std::string::npos);
- options.push_back(line.substr(pos, endpos-pos+1));
- }
- }
-
- if (options.size() > 0) {
- using namespace boost::program_options::command_line_style;
- po::store(po::command_line_parser(options)
- .options(desc).style(unix_style).run(), vm);
- po::notify(vm);
- }
- return true;
- }
-
- // predicate to extract all positional arguments from the command line
- struct is_argument {
- bool operator()(po::option const &opt)
- {
- return (opt.position_key == -1) ? true : false;
- }
- };
-
- // trim quotes from path names, if any
- std::string trim_quotes(std::string const& file)
- {
- if (('"' == file[0] || '\'' == file[0]) && file[0] == file[file.size()-1])
- {
- return file.substr(1, file.size()-2);
- }
- return file;
- }
-
-///////////////////////////////////////////////////////////////////////////////
-}
-
-///////////////////////////////////////////////////////////////////////////////
-//
-// Special validator overload, which allows handling the -I- syntax for
-// switching the semantics of an -I option.
-//
-///////////////////////////////////////////////////////////////////////////////
-namespace boost { namespace program_options {
-
- void validate(boost::any &v, std::vector<std::string> const &s,
- cmd_line_utils::include_paths *, long)
- {
- cmd_line_utils::include_paths::validate(v, s);
- }
-
-}} // namespace boost::program_options
-
-///////////////////////////////////////////////////////////////////////////////
-namespace {
-
- class auto_stop_watch : public stop_watch
- {
- public:
- auto_stop_watch(std::ostream &outstrm_)
- : print_time(false), outstrm(outstrm_)
- {
- }
-
- ~auto_stop_watch()
- {
- if (print_time) {
- outstrm << "Elapsed time: "
- << this->format_elapsed_time()
- << std::endl;
- }
- }
-
- void set_print_time(bool print_time_)
- {
- print_time = print_time_;
- }
-
- private:
- bool print_time;
- std::ostream &outstrm;
- };
-
- ///////////////////////////////////////////////////////////////////////////
- inline std::string
- report_iostate_error(std::ios::iostate state)
- {
- BOOST_ASSERT(state & (std::ios::badbit | std::ios::failbit | std::ios::eofbit));
- std::string result;
- if (state & std::ios::badbit) {
- result += " the reported problem was: "
- "loss of integrity of the stream buffer\n";
- }
- if (state & std::ios::failbit) {
- result += " the reported problem was: "
- "an operation was not processed correctly\n";
- }
- if (state & std::ios::eofbit) {
- result += " the reported problem was: "
- "end-of-file while writing to the stream\n";
- }
- return result;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // Retrieve the position of a macro definition
- template <typename Context>
- inline bool
- get_macro_position(Context &ctx,
- typename Context::token_type::string_type const& name,
- typename Context::position_type &pos)
- {
- bool has_parameters = false;
- bool is_predefined = false;
- std::vector<typename Context::token_type> parameters;
- typename Context::token_sequence_type definition;
-
- return ctx.get_macro_definition(name, has_parameters, is_predefined,
- pos, parameters, definition);
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // Generate some meaningful error messages
- template <typename Exception>
- inline int
- report_error_message(Exception const &e)
- {
- // default error reporting
- cerr
- << e.file_name() << ":" << e.line_no() << ":" << e.column_no()
- << ": " << e.description() << endl;
-
- // errors count as one
- return (e.get_severity() == boost::wave::util::severity_error ||
- e.get_severity() == boost::wave::util::severity_fatal) ? 1 : 0;
- }
-
- template <typename Context>
- inline int
- report_error_message(Context &ctx, boost::wave::cpp_exception const &e)
- {
- // default error reporting
- int result = report_error_message(e);
-
- using boost::wave::preprocess_exception;
- switch(e.get_errorcode()) {
- case preprocess_exception::macro_redefinition:
- {
- // report the point of the initial macro definition
- typename Context::position_type pos;
- if (get_macro_position(ctx, e.get_related_name(), pos)) {
- cerr
- << pos << ": "
- << preprocess_exception::severity_text(e.get_severity())
- << ": this is the location of the previous definition."
- << endl;
- }
- else {
- cerr
- << e.file_name() << ":" << e.line_no() << ":"
- << e.column_no() << ": "
- << preprocess_exception::severity_text(e.get_severity())
- << ": not able to retrieve the location of the previous "
- << "definition." << endl;
- }
- }
- break;
-
- default:
- break;
- }
-
- return result;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // Read one logical line of text
- inline bool
- read_a_line (std::istream &instream, std::string &instring)
- {
- bool eol = true;
- do {
- std::string line;
- std::getline(instream, line);
- if (instream.rdstate() & std::ios::failbit)
- return false; // nothing to do
-
- eol = true;
- if (line.find_last_of('\\') == line.size()-1)
- eol = false;
-
- instring += line + '\n';
- } while (!eol);
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // Load and save the internal tables of the wave::context object
- template <typename Context>
- inline void
- load_state(po::variables_map const &vm, Context &ctx)
- {
-#if BOOST_WAVE_SERIALIZATION != 0
- try {
- if (vm.count("state") > 0) {
- fs::path state_file (
- boost::wave::util::create_path(vm["state"].as<std::string>()));
- if (state_file == "-")
- state_file = boost::wave::util::create_path("wave.state");
-
- std::ios::openmode mode = std::ios::in;
-
-#if BOOST_WAVE_BINARY_SERIALIZATION != 0
- mode = (std::ios::openmode)(mode | std::ios::binary);
-#endif
- std::ifstream ifs (state_file.string().c_str(), mode);
- if (ifs.is_open()) {
- using namespace boost::serialization;
- iarchive ia(ifs);
- std::string version;
-
- ia >> make_nvp("version", version); // load version
- if (version == CPP_VERSION_FULL_STR)
- ia >> make_nvp("state", ctx); // load the internal tables from disc
- else {
- cerr << "wave: detected version mismatch while loading state, state was not loaded." << endl;
- cerr << " loaded version: " << version << endl;
- cerr << " expected version: " << CPP_VERSION_FULL_STR << endl;
- }
- }
- }
- }
- catch (boost::archive::archive_exception const& e) {
- cerr << "wave: error while loading state: "
- << e.what() << endl;
- }
- catch (boost::wave::preprocess_exception const& e) {
- cerr << "wave: error while loading state: "
- << e.description() << endl;
- }
-#endif
- }
-
- template <typename Context>
- inline void
- save_state(po::variables_map const &vm, Context const &ctx)
- {
-#if BOOST_WAVE_SERIALIZATION != 0
- try {
- if (vm.count("state") > 0) {
- fs::path state_file (boost::wave::util::create_path(
- vm["state"].as<std::string>()));
- if (state_file == "-")
- state_file = boost::wave::util::create_path("wave.state");
-
- std::ios::openmode mode = std::ios::out;
-
-#if BOOST_WAVE_BINARY_SERIALIZATION != 0
- mode = (std::ios::openmode)(mode | std::ios::binary);
-#endif
- ofstream ofs(state_file.string().c_str(), mode);
- if (!ofs.is_open()) {
- cerr << "wave: could not open state file for writing: "
- << state_file.string() << endl;
- // this is non-fatal
- }
- else {
- using namespace boost::serialization;
- oarchive oa(ofs);
- std::string version(CPP_VERSION_FULL_STR);
- oa << make_nvp("version", version); // write version
- oa << make_nvp("state", ctx); // write the internal tables to disc
- }
- }
- }
- catch (boost::archive::archive_exception const& e) {
- cerr << "wave: error while writing state: "
- << e.what() << endl;
- }
-#endif
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // list all defined macros
- bool list_macro_names(context_type const& ctx, std::string filename)
- {
- // open file for macro names listing
- std::ofstream macronames_out;
- fs::path macronames_file (boost::wave::util::create_path(filename));
-
- if (macronames_file != "-") {
- macronames_file = boost::wave::util::complete_path(macronames_file);
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(macronames_file));
- macronames_out.open(macronames_file.string().c_str());
- if (!macronames_out.is_open()) {
- cerr << "wave: could not open file for macro name listing: "
- << macronames_file.string() << endl;
- return false;
- }
- }
- else {
- macronames_out.copyfmt(cout);
- macronames_out.clear(cout.rdstate());
- static_cast<std::basic_ios<char> &>(macronames_out).rdbuf(cout.rdbuf());
- }
-
-        // simply list all defined macros and their definitions
- typedef context_type::const_name_iterator name_iterator;
- name_iterator end = ctx.macro_names_end();
- for (name_iterator it = ctx.macro_names_begin(); it != end; ++it)
- {
- typedef std::vector<context_type::token_type> parameters_type;
-
- bool has_pars = false;
- bool predef = false;
- context_type::position_type pos;
- parameters_type pars;
- context_type::token_sequence_type def;
-
- if (ctx.get_macro_definition(*it, has_pars, predef, pos, pars, def))
- {
- macronames_out << (predef ? "-P" : "-D") << *it;
- if (has_pars) {
- // list the parameter names for function style macros
- macronames_out << "(";
- parameters_type::const_iterator pend = pars.end();
- for (parameters_type::const_iterator pit = pars.begin();
- pit != pend; /**/)
- {
- macronames_out << (*pit).get_value();
- if (++pit != pend)
- macronames_out << ", ";
- }
- macronames_out << ")";
- }
- macronames_out << "=";
-
- // print the macro definition
- context_type::token_sequence_type::const_iterator dend = def.end();
- for (context_type::token_sequence_type::const_iterator dit = def.begin();
- dit != dend; ++dit)
- {
- macronames_out << (*dit).get_value();
- }
-
- macronames_out << std::endl;
- }
- }
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // list macro invocation counts
- bool list_macro_counts(context_type const& ctx, std::string filename)
- {
- // open file for macro invocation count listing
- std::ofstream macrocounts_out;
- fs::path macrocounts_file (boost::wave::util::create_path(filename));
-
- if (macrocounts_file != "-") {
- macrocounts_file = boost::wave::util::complete_path(macrocounts_file);
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(macrocounts_file));
- macrocounts_out.open(macrocounts_file.string().c_str());
- if (!macrocounts_out.is_open()) {
- cerr << "wave: could not open file for macro invocation count listing: "
- << macrocounts_file.string() << endl;
- return false;
- }
- }
- else {
- macrocounts_out.copyfmt(cout);
- macrocounts_out.clear(cout.rdstate());
- static_cast<std::basic_ios<char> &>(macrocounts_out).rdbuf(cout.rdbuf());
- }
-
- // list all expanded macro names and their counts in alphabetical order
- std::map<std::string, std::size_t> const& counts =
- ctx.get_hooks().get_macro_counts();
-
- typedef std::map<std::string, std::size_t>::const_iterator iterator;
- iterator end = counts.end();
- for (iterator it = counts.begin(); it != end; ++it)
- macrocounts_out << (*it).first << "," << (*it).second << std::endl;
-
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // read all of a file into a string
- std::string read_entire_file(std::istream& instream)
- {
- std::string content;
-
- instream.unsetf(std::ios::skipws);
-
-#if defined(BOOST_NO_TEMPLATED_ITERATOR_CONSTRUCTORS)
- // this is known to be very slow for large files on some systems
- copy (std::istream_iterator<char>(instream),
- std::istream_iterator<char>(),
- std::inserter(content, content.end()));
-#else
- content = std::string(std::istreambuf_iterator<char>(instream.rdbuf()),
- std::istreambuf_iterator<char>());
-#endif
- return content;
- }
-} // anonymous namespace
-
-///////////////////////////////////////////////////////////////////////////////
-// do the actual preprocessing
-int
-do_actual_work (std::string file_name, std::istream &instream,
- po::variables_map const &vm, bool input_is_stdin)
-{
-// current file position is saved for exception handling
-boost::wave::util::file_position_type current_position;
-auto_stop_watch elapsed_time(cerr);
-int error_count = 0;
-
- try {
- // process the given file
- std::string instring;
-
- instream.unsetf(std::ios::skipws);
- if (!input_is_stdin)
- instring = read_entire_file(instream);
-
- // The preprocessing of the input stream is done on the fly behind the
- // scenes during iteration over the context_type::iterator_type stream.
- std::ofstream output;
- std::ofstream traceout;
- std::ofstream includelistout;
- std::ofstream listguardsout;
-
- trace_flags enable_trace = trace_nothing;
-
- if (vm.count("traceto")) {
- // try to open the file, where to put the trace output
- fs::path trace_file (boost::wave::util::create_path(
- vm["traceto"].as<std::string>()));
-
- if (trace_file != "-") {
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(trace_file));
- traceout.open(trace_file.string().c_str());
- if (!traceout.is_open()) {
- cerr << "wave: could not open trace file: " << trace_file
- << endl;
- return -1;
- }
- }
- enable_trace = trace_macros;
- }
- if ((enable_trace & trace_macros) && !traceout.is_open()) {
- // by default trace to std::cerr
- traceout.copyfmt(cerr);
- traceout.clear(cerr.rdstate());
- static_cast<std::basic_ios<char> &>(traceout).rdbuf(cerr.rdbuf());
- }
-
- // Open the stream where to output the list of included file names
- if (vm.count("listincludes")) {
- // try to open the file, where to put the include list
- fs::path includes_file(boost::wave::util::create_path(
- vm["listincludes"].as<std::string>()));
-
- if (includes_file != "-") {
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(includes_file));
- includelistout.open(includes_file.string().c_str());
- if (!includelistout.is_open()) {
- cerr << "wave: could not open include list file: "
- << includes_file.string() << endl;
- return -1;
- }
- }
- enable_trace = trace_flags(enable_trace | trace_includes);
- }
- if ((enable_trace & trace_includes) && !includelistout.is_open()) {
- // by default list included names to std::cout
- includelistout.copyfmt(cout);
- includelistout.clear(cout.rdstate());
- static_cast<std::basic_ios<char> &>(includelistout).
- rdbuf(cout.rdbuf());
- }
-
-        // Open the stream where to output the list of include guard names
- if (vm.count("listguards")) {
-            // try to open the file, where to put the include guard list
- fs::path listguards_file(boost::wave::util::create_path(
- vm["listguards"].as<std::string>()));
-
- if (listguards_file != "-") {
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(listguards_file));
- listguardsout.open(listguards_file.string().c_str());
- if (!listguardsout.is_open()) {
- cerr << "wave: could not open include guard list file: "
- << listguards_file.string() << endl;
- return -1;
- }
- }
- enable_trace = trace_flags(enable_trace | trace_guards);
- }
- if ((enable_trace & trace_guards) && !listguardsout.is_open()) {
-            // by default list include guard names to std::cout
- listguardsout.copyfmt(cout);
- listguardsout.clear(cout.rdstate());
- static_cast<std::basic_ios<char> &>(listguardsout).
- rdbuf(cout.rdbuf());
- }
-
- // enable preserving comments mode
- bool preserve_comments = false;
- bool preserve_whitespace = false;
- bool preserve_bol_whitespace = false;
-
- if (vm.count("preserve")) {
- int preserve = vm["preserve"].as<int>();
-
- switch(preserve) {
- case 0: break; // preserve no whitespace
- case 3: // preserve all whitespace
- preserve_whitespace = true;
- preserve_comments = true;
- preserve_bol_whitespace = true;
- break;
-
- case 2: // preserve comments and BOL whitespace only
- preserve_comments = true;
- preserve_bol_whitespace = true;
- break;
-
- case 1: // preserve BOL whitespace only
- preserve_bol_whitespace = true;
- break;
-
- default:
- cerr << "wave: bogus preserve whitespace option value: "
- << preserve << ", should be 0, 1, 2, or 3" << endl;
- return -1;
- }
- }
-
-        // Since the #pragma wave system() directive poses a potential security
-        // risk, it has to be enabled explicitly by --extended or -x
- bool enable_system_command = false;
-
- if (vm.count("extended"))
- enable_system_command = true;
-
-        // This is the central piece of the Wave library; it provides you with
-        // the iterators to get the preprocessed tokens and allows you to
-        // configure the preprocessing stage in advance.
- bool allow_output = true; // will be manipulated from inside the hooks object
- std::string default_outfile; // will be used from inside the hooks object
- trace_macro_expansion<token_type> hooks(preserve_whitespace,
- preserve_bol_whitespace, output, traceout, includelistout,
- listguardsout, enable_trace, enable_system_command, allow_output,
- default_outfile);
-
- // enable macro invocation count, if appropriate
- if (vm.count("macrocounts"))
- hooks.enable_macro_counting();
-
- // check, if we have a license file to prepend
- std::string license;
-
- if (vm.count ("license")) {
- // try to open the file, where to put the preprocessed output
- std::string license_file(vm["license"].as<std::string>());
- std::ifstream license_stream(license_file.c_str());
-
- if (!license_stream.is_open()) {
- cerr << "wave: could not open specified license file: "
- << license_file << endl;
- return -1;
- }
- license = read_entire_file(license_stream);
- hooks.set_license_info(license);
- }
-
- context_type ctx (instring.begin(), instring.end(), file_name.c_str(), hooks);
-
-#if BOOST_WAVE_SUPPORT_VARIADICS_PLACEMARKERS != 0
- // enable C99 mode, if appropriate (implies variadics)
- if (vm.count("c99")) {
-#if BOOST_WAVE_SUPPORT_CPP0X != 0
- if (vm.count("c++11")) {
- cerr << "wave: multiple language options specified: --c99 "
- "and --c++11" << endl;
- return -1;
- }
-#endif
- ctx.set_language(
- boost::wave::language_support(
- boost::wave::support_c99
- | boost::wave::support_option_convert_trigraphs
- | boost::wave::support_option_emit_line_directives
-#if BOOST_WAVE_SUPPORT_PRAGMA_ONCE != 0
- | boost::wave::support_option_include_guard_detection
-#endif
-#if BOOST_WAVE_EMIT_PRAGMA_DIRECTIVES != 0
- | boost::wave::support_option_emit_pragma_directives
-#endif
- | boost::wave::support_option_insert_whitespace
- ));
- }
- else if (vm.count("variadics")) {
- // enable variadics and placemarkers, if appropriate
- ctx.set_language(boost::wave::enable_variadics(ctx.get_language()));
- }
-#endif // BOOST_WAVE_SUPPORT_VARIADICS_PLACEMARKERS != 0
-#if BOOST_WAVE_SUPPORT_CPP0X != 0
- if (vm.count("c++11")) {
- if (vm.count("c99")) {
- cerr << "wave: multiple language options specified: --c99 "
- "and --c++11" << endl;
- return -1;
- }
- ctx.set_language(
- boost::wave::language_support(
- boost::wave::support_cpp0x
- | boost::wave::support_option_convert_trigraphs
- | boost::wave::support_option_long_long
- | boost::wave::support_option_emit_line_directives
-#if BOOST_WAVE_SUPPORT_PRAGMA_ONCE != 0
- | boost::wave::support_option_include_guard_detection
-#endif
-#if BOOST_WAVE_EMIT_PRAGMA_DIRECTIVES != 0
- | boost::wave::support_option_emit_pragma_directives
-#endif
- | boost::wave::support_option_insert_whitespace
- ));
- }
-#endif // BOOST_WAVE_SUPPORT_CPP0X != 0
-
- // enable long long support, if appropriate
- if (vm.count("long_long")) {
- ctx.set_language(
- boost::wave::enable_long_long(ctx.get_language()));
- }
-
-#if BOOST_WAVE_SUPPORT_PRAGMA_ONCE != 0
-// disable include guard detection
- if (vm.count("noguard")) {
- ctx.set_language(
- boost::wave::enable_include_guard_detection(
- ctx.get_language(), false));
- }
-#endif
-
- // enable preserving comments mode
- if (preserve_comments) {
- ctx.set_language(
- boost::wave::enable_preserve_comments(ctx.get_language()));
- }
-
- // control the generation of #line directives
- if (vm.count("line")) {
- int lineopt = vm["line"].as<int>();
- if (0 != lineopt && 1 != lineopt && 2 != lineopt) {
- cerr << "wave: bogus value for --line command line option: "
- << lineopt << endl;
- return -1;
- }
- ctx.set_language(
- boost::wave::enable_emit_line_directives(ctx.get_language(),
- lineopt != 0));
-
- if (2 == lineopt)
- ctx.get_hooks().enable_relative_names_in_line_directives(true);
- }
-
- // control whether whitespace should be inserted to disambiguate output
- if (vm.count("disambiguate")) {
- int disambiguateopt = vm["disambiguate"].as<int>();
- if (0 != disambiguateopt && 1 != disambiguateopt) {
- cerr << "wave: bogus value for --disambiguate command line option: "
- << disambiguateopt << endl;
- return -1;
- }
- ctx.set_language(
- boost::wave::enable_insert_whitespace(ctx.get_language(),
- disambiguateopt != 0));
- }
-
- // add include directories to the system include search paths
- if (vm.count("sysinclude")) {
- vector<std::string> syspaths = vm["sysinclude"].as<vector<std::string> >();
-
- vector<std::string>::const_iterator end = syspaths.end();
- for (vector<std::string>::const_iterator cit = syspaths.begin();
- cit != end; ++cit)
- {
- ctx.add_sysinclude_path(cmd_line_utils::trim_quotes(*cit).c_str());
- }
- }
-
- // add include directories to the include search paths
- if (vm.count("include")) {
- cmd_line_utils::include_paths const &ip =
- vm["include"].as<cmd_line_utils::include_paths>();
- vector<std::string>::const_iterator end = ip.paths.end();
-
- for (vector<std::string>::const_iterator cit = ip.paths.begin();
- cit != end; ++cit)
- {
- ctx.add_include_path(cmd_line_utils::trim_quotes(*cit).c_str());
- }
-
- // if -I- was given on the command line, this has to be propagated
- if (ip.seen_separator)
- ctx.set_sysinclude_delimiter();
-
- // add system include directories to the include path
- vector<std::string>::const_iterator sysend = ip.syspaths.end();
- for (vector<std::string>::const_iterator syscit = ip.syspaths.begin();
- syscit != sysend; ++syscit)
- {
- ctx.add_sysinclude_path(cmd_line_utils::trim_quotes(*syscit).c_str());
- }
- }
-
- // add additional defined macros
- if (vm.count("define")) {
- vector<std::string> const &macros = vm["define"].as<vector<std::string> >();
- vector<std::string>::const_iterator end = macros.end();
- for (vector<std::string>::const_iterator cit = macros.begin();
- cit != end; ++cit)
- {
- ctx.add_macro_definition(*cit);
- }
- }
-
- // add additional predefined macros
- if (vm.count("predefine")) {
- vector<std::string> const &predefmacros =
- vm["predefine"].as<vector<std::string> >();
- vector<std::string>::const_iterator end = predefmacros.end();
- for (vector<std::string>::const_iterator cit = predefmacros.begin();
- cit != end; ++cit)
- {
- ctx.add_macro_definition(*cit, true);
- }
- }
-
- // undefine specified macros
- if (vm.count("undefine")) {
- vector<std::string> const &undefmacros =
- vm["undefine"].as<vector<std::string> >();
- vector<std::string>::const_iterator end = undefmacros.end();
- for (vector<std::string>::const_iterator cit = undefmacros.begin();
- cit != end; ++cit)
- {
- ctx.remove_macro_definition(*cit, true);
- }
- }
-
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS == 0
- // suppress expansion of specified macros
- if (vm.count("noexpand")) {
- vector<std::string> const &noexpandmacros =
- vm["noexpand"].as<vector<std::string> >();
- vector<std::string>::const_iterator end = noexpandmacros.end();
- for (vector<std::string>::const_iterator cit = noexpandmacros.begin();
- cit != end; ++cit)
- {
- ctx.get_hooks().add_noexpandmacro(*cit);
- }
- }
-#endif
-
- // maximal include nesting depth
- if (vm.count("nesting")) {
- int max_depth = vm["nesting"].as<int>();
- if (max_depth < 1 || max_depth > 100000) {
- cerr << "wave: bogus maximal include nesting depth: "
- << max_depth << endl;
- return -1;
- }
- ctx.set_max_include_nesting_depth(max_depth);
- }
-
- // open the output file
- if (vm.count("output")) {
- // try to open the file, where to put the preprocessed output
- fs::path out_file (boost::wave::util::create_path(
- vm["output"].as<std::string>()));
-
- if (out_file == "-") {
- allow_output = false; // inhibit output initially
- default_outfile = "-";
- }
- else {
- out_file = boost::wave::util::complete_path(out_file);
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(out_file));
- output.open(out_file.string().c_str());
- if (!output.is_open()) {
- cerr << "wave: could not open output file: "
- << out_file.string() << endl;
- return -1;
- }
- if (!license.empty())
- output << license;
- default_outfile = out_file.string();
- }
- }
- else if (!input_is_stdin && vm.count("autooutput")) {
- // generate output in the file <input_base_name>.i
- fs::path out_file (boost::wave::util::create_path(file_name));
- std::string basename (boost::wave::util::leaf(out_file));
- std::string::size_type pos = basename.find_last_of(".");
-
- if (std::string::npos != pos)
- basename = basename.substr(0, pos);
- out_file = boost::wave::util::branch_path(out_file) / (basename + ".i");
-
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(out_file));
- output.open(out_file.string().c_str());
- if (!output.is_open()) {
- cerr << "wave: could not open output file: "
- << out_file.string() << endl;
- return -1;
- }
- if (!license.empty())
- output << license;
- default_outfile = out_file.string();
- }
-
- // we assume the session to be interactive if input is stdin and output is
- // stdout and the output is not inhibited
- bool is_interactive = input_is_stdin && !output.is_open() && allow_output;
-
- if (is_interactive) {
- // if interactive we don't warn for missing endif's etc.
- ctx.set_language(
- boost::wave::enable_single_line(ctx.get_language()), false);
- }
-
- // analyze the input file
- context_type::iterator_type first = ctx.begin();
- context_type::iterator_type last = ctx.end();
-
- // preprocess the required include files
- if (vm.count("forceinclude")) {
- // add the filenames to force as include files in _reverse_ order
- // the second parameter 'is_last' of the force_include function should
- // be set to true for the last (first given) file.
- std::vector<std::string> const &force =
- vm["forceinclude"].as<std::vector<std::string> >();
- std::vector<std::string>::const_reverse_iterator rend = force.rend();
- for (std::vector<std::string>::const_reverse_iterator cit = force.rbegin();
- cit != rend; /**/)
- {
- std::string filename(*cit);
- first.force_include(filename.c_str(), ++cit == rend);
- }
- }
-
- elapsed_time.set_print_time(!input_is_stdin && vm.count("timer") > 0);
- if (is_interactive) {
- print_interactive_version(); // print welcome message
- load_state(vm, ctx); // load the internal tables from disc
- }
- else if (vm.count("state")) {
- // the option "state" is usable in interactive mode only
- cerr << "wave: ignoring the command line option 'state', "
- << "use it in interactive mode only." << endl;
- }
-
- // >>>>>>>>>>>>> The actual preprocessing happens here. <<<<<<<<<<<<<<<<<<<
- // loop over the input lines if reading from stdin, otherwise this loop
- // will be executed once
- do {
- // loop over all generated tokens outputting the generated text
- bool finished = false;
-
- if (input_is_stdin) {
- if (is_interactive)
- cout << ">>> "; // prompt if is interactive
-
- // read next line and continue
- instring.clear();
- if (!read_a_line(instream, instring))
- break; // end of input reached
- first = ctx.begin(instring.begin(), instring.end());
- }
-
-            bool need_to_advance = false;
-
- do {
- try {
-                    if (need_to_advance) {
-                        ++first;
-                        need_to_advance = false;
- }
-
- while (first != last) {
- // store the last known good token position
- current_position = (*first).get_position();
-
- // print out the current token value
- if (allow_output) {
- if (!output.good()) {
- cerr << "wave: problem writing to the current "
- << "output file" << endl;
- cerr << report_iostate_error(output.rdstate());
- break;
- }
- if (output.is_open())
- output << (*first).get_value();
- else
- cout << (*first).get_value();
- }
-
- // advance to the next token
- ++first;
- }
- finished = true;
- }
- catch (boost::wave::cpp_exception const &e) {
- // some preprocessing error
- if (is_interactive || boost::wave::is_recoverable(e)) {
- error_count += report_error_message(ctx, e);
-                    need_to_advance = true;     // advance to the next token
- }
- else {
- throw; // re-throw for non-recoverable errors
- }
- }
- catch (boost::wave::cpplexer::lexing_exception const &e) {
- // some preprocessing error
- if (is_interactive ||
- boost::wave::cpplexer::is_recoverable(e))
- {
- error_count += report_error_message(e);
-                    need_to_advance = true;     // advance to the next token
- }
- else {
- throw; // re-throw for non-recoverable errors
- }
- }
- } while (!finished);
- } while (input_is_stdin);
-
- if (is_interactive)
- save_state(vm, ctx); // write the internal tables to disc
-
- // list all defined macros at the end of the preprocessing
- if (vm.count("macronames")) {
- if (!list_macro_names(ctx, vm["macronames"].as<std::string>()))
- return -1;
- }
- if (vm.count("macrocounts")) {
- if (!list_macro_counts(ctx, vm["macrocounts"].as<std::string>()))
- return -1;
- }
- }
- catch (boost::wave::cpp_exception const &e) {
- // some preprocessing error
- report_error_message(e);
- return 1;
- }
- catch (boost::wave::cpplexer::lexing_exception const &e) {
- // some lexing error
- report_error_message(e);
- return 2;
- }
- catch (std::exception const &e) {
- // use last recognized token to retrieve the error position
- cerr
- << current_position << ": "
- << "exception caught: " << e.what()
- << endl;
- return 3;
- }
- catch (...) {
- // use last recognized token to retrieve the error position
- cerr
- << current_position << ": "
- << "unexpected exception caught." << endl;
- return 4;
- }
- return -error_count; // returns the number of errors as a negative integer
-}
-
-///////////////////////////////////////////////////////////////////////////////
-// main entry point
-int
-main (int argc, char *argv[])
-{
- // test Wave compilation configuration
- if (!BOOST_WAVE_TEST_CONFIGURATION()) {
- cout << "wave: warning: the library this application was linked against was compiled "
- << endl
- << " using a different configuration (see wave_config.hpp)."
- << endl;
- }
-
- // analyze the command line options and arguments
- try {
- // declare the options allowed on the command line only
- po::options_description desc_cmdline ("Options allowed on the command line only");
-
- desc_cmdline.add_options()
- ("help,h", "print out program usage (this message)")
- ("version,v", "print the version number")
- ("copyright", "print out the copyright statement")
- ("config-file", po::value<vector<std::string> >()->composing(),
- "specify a config file (alternatively: @filepath)")
- ;
-
- // declare the options allowed on command line and in config files
- po::options_description desc_generic ("Options allowed additionally in a config file");
-
- desc_generic.add_options()
- ("output,o", po::value<std::string>(),
- "specify a file [arg] to use for output instead of stdout or "
- "disable output [-]")
- ("autooutput,E",
- "output goes into a file named <input_basename>.i")
- ("license", po::value<std::string>(),
- "prepend the content of the specified file to each created file")
- ("include,I", po::value<cmd_line_utils::include_paths>()->composing(),
- "specify an additional include directory")
- ("sysinclude,S", po::value<vector<std::string> >()->composing(),
- "specify an additional system include directory")
- ("forceinclude,F", po::value<std::vector<std::string> >()->composing(),
- "force inclusion of the given file")
- ("define,D", po::value<std::vector<std::string> >()->composing(),
- "specify a macro to define (as macro[=[value]])")
- ("predefine,P", po::value<std::vector<std::string> >()->composing(),
- "specify a macro to predefine (as macro[=[value]])")
- ("undefine,U", po::value<std::vector<std::string> >()->composing(),
- "specify a macro to undefine")
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS == 0
- ("noexpand,N", po::value<std::vector<std::string> >()->composing(),
- "specify a macro name, which should not be expanded")
-#endif
- ("nesting,n", po::value<int>(),
- "specify a new maximal include nesting depth")
- ;
-
- po::options_description desc_ext ("Extended options (allowed everywhere)");
-
- desc_ext.add_options()
- ("traceto,t", po::value<std::string>(),
- "output macro expansion tracing information to a file [arg] "
- "or to stderr [-]")
- ("timer", "output overall elapsed computing time to stderr")
- ("long_long", "enable long long support in C++ mode")
-#if BOOST_WAVE_SUPPORT_VARIADICS_PLACEMARKERS != 0
- ("variadics", "enable certain C99 extensions in C++ mode")
- ("c99", "enable C99 mode (implies --variadics)")
-#endif
-#if BOOST_WAVE_SUPPORT_CPP0X != 0
- ("c++11", "enable C++11 mode (implies --variadics and --long_long)")
-#endif
- ("listincludes,l", po::value<std::string>(),
- "list names of included files to a file [arg] or to stdout [-]")
- ("macronames,m", po::value<std::string>(),
- "list all defined macros to a file [arg] or to stdout [-]")
- ("macrocounts,c", po::value<std::string>(),
- "list macro invocation counts to a file [arg] or to stdout [-]")
- ("preserve,p", po::value<int>()->default_value(0),
- "preserve whitespace\n"
- "0: no whitespace is preserved (default),\n"
- "1: begin of line whitespace is preserved,\n"
-                "2: comments and begin of line whitespace are preserved,\n"
- "3: all whitespace is preserved")
- ("line,L", po::value<int>()->default_value(1),
- "control the generation of #line directives\n"
- "0: no #line directives are generated,\n"
- "1: #line directives will be emitted (default),\n"
- "2: #line directives will be emitted using relative\n"
- " filenames")
- ("disambiguate", po::value<int>()->default_value(1),
- "control whitespace insertion to disambiguate\n"
- "consecutive tokens\n"
- "0: no additional whitespace is generated,\n"
- "1: whitespace is used to disambiguate output (default)")
- ("extended,x", "enable the #pragma wave system() directive")
-#if BOOST_WAVE_SUPPORT_PRAGMA_ONCE != 0
- ("noguard,G", "disable include guard detection")
- ("listguards,g", po::value<std::string>(),
- "list names of files flagged as 'include once' to a file [arg] "
- "or to stdout [-]")
-#endif
-#if BOOST_WAVE_SERIALIZATION != 0
- ("state,s", po::value<std::string>(),
- "load and save state information from/to the given file [arg] "
- "or 'wave.state' [-] (interactive mode only)")
-#endif
- ;
-
- // combine the options for the different usage schemes
- po::options_description desc_overall_cmdline;
- po::options_description desc_overall_cfgfile;
-
- desc_overall_cmdline.add(desc_cmdline).add(desc_generic).add(desc_ext);
- desc_overall_cfgfile.add(desc_generic).add(desc_ext);
-
- // parse command line and store results
- using namespace boost::program_options::command_line_style;
-
- po::parsed_options opts(po::parse_command_line(argc, argv,
- desc_overall_cmdline, unix_style, cmd_line_utils::at_option_parser));
- po::variables_map vm;
-
- po::store(opts, vm);
- po::notify(vm);
-
-// // Try to find a wave.cfg in the same directory as the executable was
-// // started from. If this exists, treat it as a wave config file
-// fs::path filename(argv[0]);
-//
-// filename = filename.branch_path() / "wave.cfg";
-// cmd_line_utils::read_config_file_options(filename.string(),
-// desc_overall_cfgfile, vm, true);
-
- // extract the arguments from the parsed command line
- vector<po::option> arguments;
-
- std::remove_copy_if(opts.options.begin(), opts.options.end(),
- back_inserter(arguments), cmd_line_utils::is_argument());
-
-    // try to find a config file somewhere up the filesystem hierarchy,
-    // starting with the input file path. This allows using a general wave.cfg
-    // file for all files in a given project.
- if (arguments.size() > 0 && arguments[0].value[0] != "-") {
- // construct full path of input file
- fs::path input_dir (boost::wave::util::complete_path(
- boost::wave::util::create_path(arguments[0].value[0])));
-
-        // chop off the file name
- input_dir = boost::wave::util::branch_path(
- boost::wave::util::normalize(input_dir));
-
- // walk up the hierarchy, trying to find a file wave.cfg
- while (!input_dir.empty()) {
- fs::path filename = input_dir / "wave.cfg";
- if (cmd_line_utils::read_config_file_options(filename.string(),
- desc_overall_cfgfile, vm, true))
- {
- break; // break on the first cfg file found
- }
- input_dir = boost::wave::util::branch_path(input_dir);
- }
- }
-
-    // if at least one config file is specified, parse it and add the
-    // options to the main variables_map
- if (vm.count("config-file")) {
- vector<std::string> const &cfg_files =
- vm["config-file"].as<vector<std::string> >();
- vector<std::string>::const_iterator end = cfg_files.end();
- for (vector<std::string>::const_iterator cit = cfg_files.begin();
- cit != end; ++cit)
- {
- // parse a single config file and store the results
- cmd_line_utils::read_config_file_options(*cit,
- desc_overall_cfgfile, vm);
- }
- }
-
- // ... act as required
- if (vm.count("help")) {
- po::options_description desc_help (
- "Usage: wave [options] [@config-file(s)] [file]");
-
- desc_help.add(desc_cmdline).add(desc_generic).add(desc_ext);
- cout << desc_help << endl;
- return 1;
- }
-
- if (vm.count("version")) {
- cout << get_version() << endl;
- return 0;
- }
-
- if (vm.count("copyright")) {
- return print_copyright();
- }
-
- // if there is no input file given, then take input from stdin
- if (0 == arguments.size() || 0 == arguments[0].value.size() ||
- arguments[0].value[0] == "-")
- {
- // preprocess the given input from stdin
- return do_actual_work("<stdin>", std::cin, vm, true);
- }
- else {
- if (arguments.size() > 1) {
-            // this driver can parse only one input file
- cerr << "wave: more than one input file specified, "
- << "ignoring all but the first!" << endl;
- }
-
- std::string file_name(arguments[0].value[0]);
- std::ifstream instream(file_name.c_str());
-
- // preprocess the given input file
- if (!instream.is_open()) {
- cerr << "wave: could not open input file: " << file_name << endl;
- return -1;
- }
- return do_actual_work(file_name, instream, vm, false);
- }
- }
- catch (std::exception const &e) {
- cout << "wave: exception caught: " << e.what() << endl;
- return 6;
- }
- catch (...) {
- cerr << "wave: unexpected exception caught." << endl;
- return 7;
- }
-}
-
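
The driver code above layers several Boost.Program_options descriptions (command-line-only, generic, extended) and stores the parsed command line before any wave.cfg contents, so for non-composing options the command-line value wins. A minimal, self-contained sketch of that layering pattern, not taken from the driver itself (the option names and the wave.cfg file name are illustrative only):

#include <boost/program_options.hpp>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

namespace po = boost::program_options;

int main(int argc, char* argv[])
{
    // options accepted both on the command line and in a config file
    po::options_description desc("Allowed options");
    desc.add_options()
        ("output,o", po::value<std::string>(), "file to write output to")
        ("include,I", po::value<std::vector<std::string> >()->composing(),
            "additional include directory (may be given several times)")
        ;

    po::variables_map vm;

    // stored first: command line values take precedence for non-composing options
    po::store(po::parse_command_line(argc, argv, desc), vm);

    // stored second: the config file only fills in what is still missing
    std::ifstream cfg("wave.cfg");
    if (cfg.is_open())
        po::store(po::parse_config_file(cfg, desc), vm);

    po::notify(vm);

    if (vm.count("output"))
        std::cout << "output file: " << vm["output"].as<std::string>() << "\n";
    return 0;
}
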
diff --git a/tools/wave/cpp.hpp b/tools/wave/cpp.hpp
deleted file mode 100644
index f2253f3c28..0000000000
--- a/tools/wave/cpp.hpp
+++ /dev/null
@@ -1,43 +0,0 @@
-/*=============================================================================
- Boost.Wave: A Standard compliant C++ preprocessor library
-
- http://www.boost.org/
-
- Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
- Software License, Version 1.0. (See accompanying file
- LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(CPP_HPP_920D0370_741F_44AF_BF86_F6104BDACF75_INCLUDED)
-#define CPP_HPP_920D0370_741F_44AF_BF86_F6104BDACF75_INCLUDED
-
-///////////////////////////////////////////////////////////////////////////////
-// This file may be used as a precompiled header (if applicable)
-
-///////////////////////////////////////////////////////////////////////////////
-// include often used files from the stdlib
-#include <iostream>
-#include <fstream>
-#include <string>
-#include <vector>
-#include <algorithm>
-#include <iterator>
-
-///////////////////////////////////////////////////////////////////////////////
-// include boost config
-#include <boost/config.hpp> // global configuration information
-
-///////////////////////////////////////////////////////////////////////////////
-// build version
-#include "cpp_version.hpp"
-
-///////////////////////////////////////////////////////////////////////////////
-// configure this app here (global configuration constants)
-#include "cpp_config.hpp"
-
-///////////////////////////////////////////////////////////////////////////////
-// include required boost libraries
-#include <boost/assert.hpp>
-#include <boost/pool/pool_alloc.hpp>
-
-#endif // !defined(CPP_HPP_920D0370_741F_44AF_BF86_F6104BDACF75_INCLUDED)
diff --git a/tools/wave/cpp_config.hpp b/tools/wave/cpp_config.hpp
deleted file mode 100644
index e6e4f63e2b..0000000000
--- a/tools/wave/cpp_config.hpp
+++ /dev/null
@@ -1,63 +0,0 @@
-/*=============================================================================
- Boost.Wave: A Standard compliant C++ preprocessor library
- Global application configuration of the Wave driver command
-
- http://www.boost.org/
-
- Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
- Software License, Version 1.0. (See accompanying file
- LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(CPP_CONFIG_HPP_F143F90A_A63F_4B27_AC41_9CA4F14F538D_INCLUDED)
-#define CPP_CONFIG_HPP_F143F90A_A63F_4B27_AC41_9CA4F14F538D_INCLUDED
-
-///////////////////////////////////////////////////////////////////////////////
-// Uncomment the following if you need debug output; the
-// BOOST_SPIRIT_DEBUG_FLAGS constants below help to fine-tune the amount of
-// generated debug output
-//#define BOOST_SPIRIT_DEBUG
-
-///////////////////////////////////////////////////////////////////////////////
-// debug rules, subrules and grammars only, for possible flags see
-// spirit/include/classic_debug.hpp
-#if defined(BOOST_SPIRIT_DEBUG)
-
-#define BOOST_SPIRIT_DEBUG_FLAGS ( \
- BOOST_SPIRIT_DEBUG_FLAGS_NODES | \
- BOOST_SPIRIT_DEBUG_FLAGS_CLOSURES \
- ) \
- /**/
-
-///////////////////////////////////////////////////////////////////////////////
-// Debug flags for the Wave library, possible flags (defined in
-// wave_config.hpp):
-//
-// #define BOOST_SPIRIT_DEBUG_FLAGS_CPP_GRAMMAR 0x0001
-// #define BOOST_SPIRIT_DEBUG_FLAGS_TIME_CONVERSION 0x0002
-// #define BOOST_SPIRIT_DEBUG_FLAGS_CPP_EXPR_GRAMMAR 0x0004
-// #define BOOST_SPIRIT_DEBUG_FLAGS_INTLIT_GRAMMAR 0x0008
-// #define BOOST_SPIRIT_DEBUG_FLAGS_CHLIT_GRAMMAR 0x0010
-// #define BOOST_SPIRIT_DEBUG_FLAGS_DEFINED_GRAMMAR 0x0020
-// #define BOOST_SPIRIT_DEBUG_FLAGS_PREDEF_MACROS_GRAMMAR 0x0040
-
-#define BOOST_SPIRIT_DEBUG_FLAGS_CPP ( 0 \
- /* insert the required flags from above */ \
- ) \
- /**/
-#endif
-
-///////////////////////////////////////////////////////////////////////////////
-// Include the configuration stuff for the Wave library itself
-#include <boost/wave/wave_config.hpp>
-
-///////////////////////////////////////////////////////////////////////////////
-// MSVC specific #pragma's
-#if defined(BOOST_MSVC)
-#pragma warning (disable: 4355) // 'this' used in base member initializer list
-#pragma warning (disable: 4800) // forcing value to bool 'true' or 'false'
-#pragma inline_depth(255)
-#pragma inline_recursion(on)
-#endif // defined(BOOST_MSVC)
-
-#endif // !defined(CPP_CONFIG_HPP_F143F90A_A63F_4B27_AC41_9CA4F14F538D_INCLUDED)
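
For reference, the debug switches documented in the removed header are enabled by defining the corresponding macros before any Wave or Spirit classic header is included, exactly as cpp_config.hpp did. A minimal sketch (the chosen flag values are examples only, taken from the tables above):

// define before including any Boost.Wave or Spirit classic header
#define BOOST_SPIRIT_DEBUG
#define BOOST_SPIRIT_DEBUG_FLAGS \
    (BOOST_SPIRIT_DEBUG_FLAGS_NODES | BOOST_SPIRIT_DEBUG_FLAGS_CLOSURES)
#define BOOST_SPIRIT_DEBUG_FLAGS_CPP 0x0001   // trace the C++ grammar only

#include <boost/wave/wave_config.hpp>
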
diff --git a/tools/wave/cpp_version.hpp b/tools/wave/cpp_version.hpp
deleted file mode 100644
index 43330c2c9d..0000000000
--- a/tools/wave/cpp_version.hpp
+++ /dev/null
@@ -1,25 +0,0 @@
-/*=============================================================================
- Boost.Wave: A Standard compliant C++ preprocessor library
- http://www.boost.org/
-
- Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
- Software License, Version 1.0. (See accompanying file
- LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(CPP_VERSION_HPP_CE4FE67F_63F9_468D_8364_C855F89D3C5D_INCLUDED)
-#define CPP_VERSION_HPP_CE4FE67F_63F9_468D_8364_C855F89D3C5D_INCLUDED
-
-#include <boost/wave/wave_version.hpp>
-
-#define CPP_VERSION_MAJOR BOOST_WAVE_VERSION_MAJOR
-#define CPP_VERSION_MINOR BOOST_WAVE_VERSION_MINOR
-#define CPP_VERSION_SUBMINOR BOOST_WAVE_VERSION_SUBMINOR
-#define CPP_VERSION_FULL BOOST_WAVE_VERSION
-
-#define CPP_VERSION_FULL_STR BOOST_PP_STRINGIZE(CPP_VERSION_FULL)
-
-#define CPP_VERSION_DATE 20120523L
-#define CPP_VERSION_DATE_STR "20120523"
-
-#endif // !defined(CPP_VERSION_HPP_CE4FE67F_63F9_468D_8364_C855F89D3C5D_INCLUDED)
diff --git a/tools/wave/stop_watch.hpp b/tools/wave/stop_watch.hpp
deleted file mode 100644
index e3285466e3..0000000000
--- a/tools/wave/stop_watch.hpp
+++ /dev/null
@@ -1,84 +0,0 @@
-/*=============================================================================
- Boost.Wave: A Standard compliant C++ preprocessor library
- http://www.boost.org/
-
- Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
- Software License, Version 1.0. (See accompanying file
- LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(STOP_WATCH_HPP_HK040911_INCLUDED)
-#define STOP_WATCH_HPP_HK040911_INCLUDED
-
-#include <boost/config.hpp>
-#include <boost/timer.hpp>
-
-///////////////////////////////////////////////////////////////////////////////
-//
-class stop_watch : public boost::timer {
-
- typedef boost::timer base_t;
-
-public:
- stop_watch() : is_suspended_since(0), suspended_overall(0) {}
-
- void suspend()
- {
- if (0 == is_suspended_since) {
- // if not already suspended
- is_suspended_since = this->base_t::elapsed();
- }
- }
- void resume()
- {
- if (0 != is_suspended_since) {
- // if really suspended
- suspended_overall += this->base_t::elapsed() - is_suspended_since;
- is_suspended_since = 0;
- }
- }
- double elapsed() const
- {
- if (0 == is_suspended_since) {
- // currently running
- return this->base_t::elapsed() - suspended_overall;
- }
-
- // currently suspended
- BOOST_ASSERT(is_suspended_since >= suspended_overall);
- return is_suspended_since - suspended_overall;
- }
-
- std::string format_elapsed_time() const
- {
- double current = elapsed();
- char time_buffer[sizeof("1234:56:78.90 abcd.")+1];
-
- using namespace std;
- if (current >= 3600) {
- // show hours
- sprintf (time_buffer, "%d:%02d:%02d.%03d hrs.",
- (int)(current) / 3600, ((int)(current) % 3600) / 60,
- ((int)(current) % 3600) % 60,
- (int)(current * 1000) % 1000);
- }
- else if (current >= 60) {
- // show minutes
- sprintf (time_buffer, "%d:%02d.%03d min.",
- (int)(current) / 60, (int)(current) % 60,
- (int)(current * 1000) % 1000);
- }
- else {
- // show seconds
- sprintf(time_buffer, "%d.%03d sec.", (int)current,
- (int)(current * 1000) % 1000);
- }
- return time_buffer;
- }
-
-private:
- double is_suspended_since;
- double suspended_overall;
-};
-
-#endif // !defined(STOP_WATCH_HPP_HK040911_INCLUDED)
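
A minimal usage sketch of the stop_watch helper removed above: the suspend()/resume() pair excludes a span of time from the reported total (the loops and the checkpoint output are placeholders for real work):

#include <iostream>
#include "stop_watch.hpp"   // the header shown above

int main()
{
    stop_watch watch;                   // starts timing immediately (boost::timer semantics)

    volatile double x = 0;
    for (int i = 0; i < 1000000; ++i) x += i;        // measured work

    watch.suspend();                    // everything until resume() is not counted
    std::cerr << "checkpoint reached" << std::endl;  // unmeasured I/O
    watch.resume();

    for (int i = 0; i < 1000000; ++i) x += i;        // measured again

    std::cerr << watch.format_elapsed_time() << std::endl;   // e.g. "0.012 sec."
    return 0;
}
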
diff --git a/tools/wave/trace_macro_expansion.hpp b/tools/wave/trace_macro_expansion.hpp
deleted file mode 100644
index a6e7c31526..0000000000
--- a/tools/wave/trace_macro_expansion.hpp
+++ /dev/null
@@ -1,1494 +0,0 @@
-/*=============================================================================
- Boost.Wave: A Standard compliant C++ preprocessor library
- http://www.boost.org/
-
- Copyright (c) 2001-2012 Hartmut Kaiser. Distributed under the Boost
- Software License, Version 1.0. (See accompanying file
- LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
-=============================================================================*/
-
-#if !defined(TRACE_MACRO_EXPANSION_HPP_D8469318_8407_4B9D_A19F_13CA60C1661F_INCLUDED)
-#define TRACE_MACRO_EXPANSION_HPP_D8469318_8407_4B9D_A19F_13CA60C1661F_INCLUDED
-
-#include <cstdio>
-#include <cstdlib>
-#include <ctime>
-
-#include <ostream>
-#include <string>
-#include <stack>
-#include <set>
-
-#include <boost/assert.hpp>
-#include <boost/config.hpp>
-#include <boost/filesystem/path.hpp>
-#include <boost/filesystem/operations.hpp>
-#include <boost/filesystem/convenience.hpp>
-
-#include <boost/wave/token_ids.hpp>
-#include <boost/wave/util/macro_helpers.hpp>
-#include <boost/wave/util/filesystem_compatibility.hpp>
-#include <boost/wave/preprocessing_hooks.hpp>
-#include <boost/wave/whitespace_handling.hpp>
-#include <boost/wave/language_support.hpp>
-#include <boost/wave/cpp_exceptions.hpp>
-
-#include "stop_watch.hpp"
-
-#ifdef BOOST_NO_STRINGSTREAM
-#include <strstream>
-#define BOOST_WAVE_OSSTREAM std::ostrstream
-std::string BOOST_WAVE_GETSTRING(std::ostrstream& ss)
-{
- ss << std::ends;
- std::string rval = ss.str();
- ss.freeze(false);
- return rval;
-}
-#else
-#include <sstream>
-#define BOOST_WAVE_GETSTRING(ss) ss.str()
-#define BOOST_WAVE_OSSTREAM std::ostringstream
-#endif
-
-// trace_flags: flags enabling the individual tracing functionalities
-enum trace_flags {
- trace_nothing = 0, // disable tracing
- trace_macros = 1, // enable macro tracing
- trace_macro_counts = 2, // enable invocation counting
- trace_includes = 4, // enable include file tracing
- trace_guards = 8 // enable include guard tracing
-};
-
-///////////////////////////////////////////////////////////////////////////////
-//
-// Special error thrown whenever the #pragma wave system() directive is
-// disabled
-//
-///////////////////////////////////////////////////////////////////////////////
-class bad_pragma_exception :
- public boost::wave::preprocess_exception
-{
-public:
- enum error_code {
- pragma_system_not_enabled =
- boost::wave::preprocess_exception::last_error_number + 1,
- pragma_mismatched_push_pop,
- };
-
- bad_pragma_exception(char const *what_, error_code code, int line_,
- int column_, char const *filename_) throw()
- : boost::wave::preprocess_exception(what_,
- (boost::wave::preprocess_exception::error_code)code, line_,
- column_, filename_)
- {
- }
- ~bad_pragma_exception() throw() {}
-
- virtual char const *what() const throw()
- {
- return "boost::wave::bad_pragma_exception";
- }
- virtual bool is_recoverable() const throw()
- {
- return true;
- }
- virtual int get_severity() const throw()
- {
- return boost::wave::util::severity_remark;
- }
-
- static char const *error_text(int code)
- {
- switch(code) {
- case pragma_system_not_enabled:
- return "the directive '#pragma wave system()' was not enabled, use the "
- "-x command line argument to enable the execution of";
-
- case pragma_mismatched_push_pop:
- return "unbalanced #pragma push/pop in input file(s) for option";
- }
- return "Unknown exception";
- }
- static boost::wave::util::severity severity_level(int code)
- {
- switch(code) {
- case pragma_system_not_enabled:
- return boost::wave::util::severity_remark;
-
- case pragma_mismatched_push_pop:
- return boost::wave::util::severity_error;
- }
- return boost::wave::util::severity_fatal;
- }
- static char const *severity_text(int code)
- {
- return boost::wave::util::get_severity(boost::wave::util::severity_remark);
- }
-};
-
-///////////////////////////////////////////////////////////////////////////////
-//
-//  The trace_macro_expansion policy is used to trace macro expansions whenever
-//  this is requested from inside the input stream being preprocessed, through
-//  the '#pragma wave_option(trace: enable)' directive. Macro tracing is
-//  disabled again with the '#pragma wave_option(trace: disable)' directive.
-//
-// This policy type is used as a template parameter to the boost::wave::context<>
-// object.
-//
-///////////////////////////////////////////////////////////////////////////////
-template <typename TokenT>
-class trace_macro_expansion
-: public boost::wave::context_policies::eat_whitespace<TokenT>
-{
- typedef boost::wave::context_policies::eat_whitespace<TokenT> base_type;
-
-public:
- trace_macro_expansion(
- bool preserve_whitespace_, bool preserve_bol_whitespace_,
- std::ofstream &output_, std::ostream &tracestrm_,
- std::ostream &includestrm_, std::ostream &guardstrm_,
- trace_flags flags_, bool enable_system_command_,
- bool& generate_output_, std::string const& default_outfile_)
- : outputstrm(output_), tracestrm(tracestrm_),
- includestrm(includestrm_), guardstrm(guardstrm_),
- level(0), flags(flags_), logging_flags(trace_nothing),
- enable_system_command(enable_system_command_),
- preserve_whitespace(preserve_whitespace_),
- preserve_bol_whitespace(preserve_bol_whitespace_),
- generate_output(generate_output_),
- default_outfile(default_outfile_),
- emit_relative_filenames(false)
- {
- }
- ~trace_macro_expansion()
- {
- }
-
- void enable_macro_counting()
- {
- logging_flags = trace_flags(logging_flags | trace_macro_counts);
- }
- std::map<std::string, std::size_t> const& get_macro_counts() const
- {
- return counts;
- }
-
- void enable_relative_names_in_line_directives(bool flag)
- {
- emit_relative_filenames = flag;
- }
- bool enable_relative_names_in_line_directives() const
- {
- return emit_relative_filenames;
- }
-
- // add a macro name, which should not be expanded at all (left untouched)
- void add_noexpandmacro(std::string const& name)
- {
- noexpandmacros.insert(name);
- }
-
- void set_license_info(std::string const& info)
- {
- license_info = info;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'expanding_function_like_macro' is called whenever a
- // function-like macro is to be expanded.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'macrodef' marks the position, where the macro to expand
- // is defined.
- //
- // The parameter 'formal_args' holds the formal arguments used during the
- // definition of the macro.
- //
- // The parameter 'definition' holds the macro definition for the macro to
- // trace.
- //
-    //  The parameter 'macrocall' marks the position where this macro is invoked.
- //
- // The parameter 'arguments' holds the macro arguments used during the
- // invocation of the macro
- //
- // The parameters 'seqstart' and 'seqend' point into the input token
-    //  stream, allowing access to the whole token sequence comprising the macro
- // invocation (starting with the opening parenthesis and ending after the
- // closing one).
- //
- // The return value defines whether the corresponding macro will be
- // expanded (return false) or will be copied to the output (return true).
- // Note: the whole argument list is copied unchanged to the output as well
- // without any further processing.
- //
- ///////////////////////////////////////////////////////////////////////////
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS != 0
- // old signature
- template <typename ContainerT>
- void expanding_function_like_macro(
- TokenT const &macrodef, std::vector<TokenT> const &formal_args,
- ContainerT const &definition,
- TokenT const &macrocall, std::vector<ContainerT> const &arguments)
- {
- if (enabled_macro_counting())
- count_invocation(macrodef.get_value().c_str());
-
- if (!enabled_macro_tracing())
- return;
-#else
- // new signature
- template <typename ContextT, typename ContainerT, typename IteratorT>
- bool
- expanding_function_like_macro(ContextT const& ctx,
- TokenT const &macrodef, std::vector<TokenT> const &formal_args,
- ContainerT const &definition,
- TokenT const &macrocall, std::vector<ContainerT> const &arguments,
- IteratorT const& seqstart, IteratorT const& seqend)
- {
- if (enabled_macro_counting() || !noexpandmacros.empty()) {
- std::string name (macrodef.get_value().c_str());
-
- if (noexpandmacros.find(name.c_str()) != noexpandmacros.end())
- return true; // do not expand this macro
-
- if (enabled_macro_counting())
- count_invocation(name.c_str());
- }
-
- if (!enabled_macro_tracing())
- return false;
-#endif
- if (0 == get_level()) {
- // output header line
- BOOST_WAVE_OSSTREAM stream;
-
- stream
- << macrocall.get_position() << ": "
- << macrocall.get_value() << "(";
-
- // argument list
- for (typename ContainerT::size_type i = 0; i < arguments.size(); ++i) {
- stream << boost::wave::util::impl::as_string(arguments[i]);
- if (i < arguments.size()-1)
- stream << ", ";
- }
- stream << ")" << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
- increment_level();
- }
-
- // output definition reference
- {
- BOOST_WAVE_OSSTREAM stream;
-
- stream
- << macrodef.get_position() << ": see macro definition: "
- << macrodef.get_value() << "(";
-
- // formal argument list
- for (typename std::vector<TokenT>::size_type i = 0;
- i < formal_args.size(); ++i)
- {
- stream << formal_args[i].get_value();
- if (i < formal_args.size()-1)
- stream << ", ";
- }
- stream << ")" << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
- }
-
- if (formal_args.size() > 0) {
- // map formal and real arguments
- open_trace_body("invoked with\n");
- for (typename std::vector<TokenT>::size_type j = 0;
- j < formal_args.size(); ++j)
- {
- using namespace boost::wave;
-
- BOOST_WAVE_OSSTREAM stream;
- stream << formal_args[j].get_value() << " = ";
-#if BOOST_WAVE_SUPPORT_VARIADICS_PLACEMARKERS != 0
- if (T_ELLIPSIS == token_id(formal_args[j])) {
- // ellipsis
- for (typename ContainerT::size_type k = j;
- k < arguments.size(); ++k)
- {
- stream << boost::wave::util::impl::as_string(arguments[k]);
- if (k < arguments.size()-1)
- stream << ", ";
- }
- }
- else
-#endif
- {
- stream << boost::wave::util::impl::as_string(arguments[j]);
- }
- stream << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
- }
- close_trace_body();
- }
- open_trace_body();
-
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS == 0
- return false;
-#endif
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
-    //  The function 'expanding_object_like_macro' is called whenever an
-    //  object-like macro is to be expanded.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'macrodef' marks the position, where the macro to expand
- // is defined.
- //
-    //  The parameter 'definition' holds the macro definition for the macro to
- // trace.
- //
-    //  The parameter 'macrocall' marks the position where this macro is invoked.
- //
- ///////////////////////////////////////////////////////////////////////////
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS != 0
- // old signature
- template <typename ContainerT>
- void expanding_object_like_macro(TokenT const &macrodef,
- ContainerT const &definition, TokenT const &macrocall)
- {
- if (enabled_macro_counting())
- count_invocation(macrodef.get_value().c_str());
-
- if (!enabled_macro_tracing())
- return;
-#else
- // new signature
- template <typename ContextT, typename ContainerT>
- bool
- expanding_object_like_macro(ContextT const& ctx,
- TokenT const &macrodef, ContainerT const &definition,
- TokenT const &macrocall)
- {
- if (enabled_macro_counting() || !noexpandmacros.empty()) {
- std::string name (macrodef.get_value().c_str());
-
- if (noexpandmacros.find(name.c_str()) != noexpandmacros.end())
- return true; // do not expand this macro
-
- if (enabled_macro_counting())
- count_invocation(name.c_str());
- }
-
- if (!enabled_macro_tracing())
- return false;
-#endif
- if (0 == get_level()) {
- // output header line
- BOOST_WAVE_OSSTREAM stream;
-
- stream
- << macrocall.get_position() << ": "
- << macrocall.get_value() << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
- increment_level();
- }
-
- // output definition reference
- {
- BOOST_WAVE_OSSTREAM stream;
-
- stream
- << macrodef.get_position() << ": see macro definition: "
- << macrodef.get_value() << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
- }
- open_trace_body();
-
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS == 0
- return false;
-#endif
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'expanded_macro' is called whenever the expansion of a
- // macro is finished but before the rescanning process starts.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'result' contains the token sequence generated as the
- // result of the macro expansion.
- //
- ///////////////////////////////////////////////////////////////////////////
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS != 0
- // old signature
- template <typename ContainerT>
- void expanded_macro(ContainerT const &result)
-#else
- // new signature
- template <typename ContextT, typename ContainerT>
- void expanded_macro(ContextT const& ctx,ContainerT const &result)
-#endif
- {
- if (!enabled_macro_tracing()) return;
-
- BOOST_WAVE_OSSTREAM stream;
- stream << boost::wave::util::impl::as_string(result) << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
-
- open_trace_body("rescanning\n");
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'rescanned_macro' is called whenever the rescanning of a
- // macro is finished.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'result' contains the token sequence generated as the
- // result of the rescanning.
- //
- ///////////////////////////////////////////////////////////////////////////
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS != 0
- // old signature
- template <typename ContainerT>
- void rescanned_macro(ContainerT const &result)
-#else
- // new signature
- template <typename ContextT, typename ContainerT>
- void rescanned_macro(ContextT const& ctx,ContainerT const &result)
-#endif
- {
- if (!enabled_macro_tracing() || get_level() == 0)
- return;
-
- BOOST_WAVE_OSSTREAM stream;
- stream << boost::wave::util::impl::as_string(result) << std::endl;
- output(BOOST_WAVE_GETSTRING(stream));
- close_trace_body();
- close_trace_body();
-
- if (1 == get_level())
- decrement_level();
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'interpret_pragma' is called whenever a #pragma command
- // directive is found which isn't known to the core Wave library, where
- // command is the value defined as the BOOST_WAVE_PRAGMA_KEYWORD constant
- // which defaults to "wave".
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'pending' may be used to push tokens back into the input
- // stream, which are to be used as the replacement text for the whole
- // #pragma directive.
- //
- // The parameter 'option' contains the name of the interpreted pragma.
- //
- // The parameter 'values' holds the values of the parameter provided to
- // the pragma operator.
- //
- // The parameter 'act_token' contains the actual #pragma token, which may
- // be used for error output.
- //
- // If the return value is 'false', the whole #pragma directive is
- // interpreted as unknown and a corresponding error message is issued. A
- // return value of 'true' signs a successful interpretation of the given
-    //  return value of 'true' signals a successful interpretation of the given
- //
- ///////////////////////////////////////////////////////////////////////////
- template <typename ContextT, typename ContainerT>
- bool
- interpret_pragma(ContextT &ctx, ContainerT &pending,
- typename ContextT::token_type const &option, ContainerT const &valuetokens,
- typename ContextT::token_type const &act_token)
- {
- typedef typename ContextT::token_type token_type;
-
- ContainerT values(valuetokens);
- boost::wave::util::impl::trim_sequence(values); // trim whitespace
-
- if (option.get_value() == "timer") {
- // #pragma wave timer(value)
- if (0 == values.size()) {
- // no value means '1'
- using namespace boost::wave;
- timer(token_type(T_INTLIT, "1", act_token.get_position()));
- }
- else {
- timer(values.front());
- }
- return true;
- }
- if (option.get_value() == "trace") {
- // enable/disable tracing option
- return interpret_pragma_trace(ctx, values, act_token);
- }
- if (option.get_value() == "system") {
- if (!enable_system_command) {
- // if the #pragma wave system() directive is not enabled, throw
-            // a corresponding error (actually it's a remark)
- typename ContextT::string_type msg(
- boost::wave::util::impl::as_string(values));
- BOOST_WAVE_THROW_CTX(ctx, bad_pragma_exception,
- pragma_system_not_enabled,
- msg.c_str(), act_token.get_position());
- return false;
- }
-
- // try to spawn the given argument as a system command and return the
- // std::cout of this process as the replacement of this _Pragma
- return interpret_pragma_system(ctx, pending, values, act_token);
- }
- if (option.get_value() == "stop") {
- // stop the execution and output the argument
- typename ContextT::string_type msg(
- boost::wave::util::impl::as_string(values));
- BOOST_WAVE_THROW_CTX(ctx, boost::wave::preprocess_exception,
- error_directive, msg.c_str(), act_token.get_position());
- return false;
- }
- if (option.get_value() == "option") {
- // handle different options
- return interpret_pragma_option(ctx, values, act_token);
- }
- return false;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'emit_line_directive' is called whenever a #line directive
- // has to be emitted into the generated output.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'pending' may be used to push tokens back into the input
- // stream, which are to be used instead of the default output generated
- // for the #line directive.
- //
-    //  The parameter 'act_token' contains the actual #line directive token, which may
- // be used for error output. The line number stored in this token can be
- // used as the line number emitted as part of the #line directive.
- //
- // If the return value is 'false', a default #line directive is emitted
- // by the library. A return value of 'true' will inhibit any further
- // actions, the tokens contained in 'pending' will be copied verbatim
- // to the output.
- //
- ///////////////////////////////////////////////////////////////////////////
- template <typename ContextT, typename ContainerT>
- bool
- emit_line_directive(ContextT const& ctx, ContainerT &pending,
- typename ContextT::token_type const& act_token)
- {
- if (!need_emit_line_directives(ctx.get_language()) ||
- !enable_relative_names_in_line_directives())
- {
- return false;
- }
-
- // emit a #line directive showing the relative filename instead
- typename ContextT::position_type pos = act_token.get_position();
- unsigned int column = 6;
-
- typedef typename ContextT::token_type result_type;
- using namespace boost::wave;
-
- pos.set_column(1);
- pending.push_back(result_type(T_PP_LINE, "#line", pos));
-
- pos.set_column(column); // account for '#line'
- pending.push_back(result_type(T_SPACE, " ", pos));
-
- // 21 is the max required size for a 64 bit integer represented as a
- // string
- char buffer[22];
-
- using namespace std; // for some systems sprintf is in namespace std
- sprintf (buffer, "%d", pos.get_line());
-
- pos.set_column(++column); // account for ' '
- pending.push_back(result_type(T_INTLIT, buffer, pos));
- pos.set_column(column += (unsigned int)strlen(buffer)); // account for <number>
- pending.push_back(result_type(T_SPACE, " ", pos));
- pos.set_column(++column); // account for ' '
-
- std::string file("\"");
- boost::filesystem::path filename(
- boost::wave::util::create_path(ctx.get_current_relative_filename().c_str()));
-
- using boost::wave::util::impl::escape_lit;
- file += escape_lit(boost::wave::util::native_file_string(filename)) + "\"";
-
- pending.push_back(result_type(T_STRINGLIT, file.c_str(), pos));
- pos.set_column(column += (unsigned int)file.size()); // account for filename
- pending.push_back(result_type(T_GENERATEDNEWLINE, "\n", pos));
-
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'opened_include_file' is called whenever a file referred
- // by an #include directive was successfully located and opened.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'filename' contains the file system path of the
- // opened file (this is relative to the directory of the currently
-    //  processed file or an absolute path depending on the paths given as the
- // include search paths).
- //
- // The include_depth parameter contains the current include file depth.
- //
-    //  The is_system_include parameter denotes whether the given file was
- // found as a result of a #include <...> directive.
- //
- ///////////////////////////////////////////////////////////////////////////
-#if BOOST_WAVE_USE_DEPRECIATED_PREPROCESSING_HOOKS != 0
- // old signature
- void
- opened_include_file(std::string const &relname, std::string const &absname,
- std::size_t include_depth, bool is_system_include)
- {
-#else
- // new signature
- template <typename ContextT>
- void
- opened_include_file(ContextT const& ctx, std::string const &relname,
- std::string const &absname, bool is_system_include)
- {
- std::size_t include_depth = ctx.get_iteration_depth();
-#endif
- if (enabled_include_tracing()) {
- // print indented filename
- for (std::size_t i = 0; i < include_depth; ++i)
- includestrm << " ";
-
- if (is_system_include)
- includestrm << "<" << relname << "> (" << absname << ")";
- else
- includestrm << "\"" << relname << "\" (" << absname << ")";
-
- includestrm << std::endl;
- }
- }
-
-#if BOOST_WAVE_SUPPORT_PRAGMA_ONCE != 0
- ///////////////////////////////////////////////////////////////////////////
- //
-    //  The function 'detected_include_guard' is called whenever an include
-    //  file is about to be added to the list of #pragma once headers.
- // That means this header file will not be opened and parsed again even
- // if it is specified in a later #include directive.
- // This function is called as the result of a detected include guard
- // scheme.
- //
-    //  The implemented heuristic for include guards detects two forms of
- // include guards:
- //
- // #ifndef INCLUDE_GUARD_MACRO
- // #define INCLUDE_GUARD_MACRO
- // ...
- // #endif
- //
- // or
- //
-    //       #if !defined(INCLUDE_GUARD_MACRO)
- // #define INCLUDE_GUARD_MACRO
- // ...
- // #endif
- //
-    //  Note that the parentheses are optional (i.e. !defined INCLUDE_GUARD_MACRO
-    //  works as well). The code allows for any whitespace, newline, and single
-    //  '#' tokens before the #if/#ifndef and after the final #endif.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'filename' contains the file system path of the
- // opened file (this is relative to the directory of the currently
-    //  processed file or an absolute path depending on the paths given as the
- // include search paths).
- //
-    //  The parameter 'include_guard' contains the name of the detected include guard.
- //
- ///////////////////////////////////////////////////////////////////////////
- template <typename ContextT>
- void
- detected_include_guard(ContextT const& ctx, std::string const& filename,
- std::string const& include_guard)
- {
- if (enabled_guard_tracing()) {
- guardstrm << include_guard << ":" << std::endl
- << " " << filename << std::endl;
- }
- }
-#endif
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'may_skip_whitespace' will be called by the
- // library whenever a token is about to be returned to the calling
- // application.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The 'token' parameter holds a reference to the current token. The policy
- // is free to change this token if needed.
- //
- // The 'skipped_newline' parameter holds a reference to a boolean value
- // which should be set to true by the policy function whenever a newline
- // is going to be skipped.
- //
- // If the return value is true, the given token is skipped and the
- // preprocessing continues to the next token. If the return value is
- // false, the given token is returned to the calling application.
- //
- // ATTENTION!
-    //  Caution is required, because by returning true the policy function can
-    //  force the skipping of even significant tokens, not only whitespace.
- //
- ///////////////////////////////////////////////////////////////////////////
- template <typename ContextT>
- bool may_skip_whitespace(ContextT const &ctx, TokenT &token,
- bool &skipped_newline)
- {
- return this->base_type::may_skip_whitespace(
- ctx, token, need_preserve_comments(ctx.get_language()),
- preserve_bol_whitespace, skipped_newline) ?
- !preserve_whitespace : false;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- //
- // The function 'throw_exception' will be called by the library whenever a
- // preprocessing exception occurs.
- //
- // The parameter 'ctx' is a reference to the context object used for
- // instantiating the preprocessing iterators by the user.
- //
- // The parameter 'e' is the exception object containing detailed error
- // information.
- //
- // The default behavior is to call the function boost::throw_exception.
- //
- ///////////////////////////////////////////////////////////////////////////
- template <typename ContextT>
- void
- throw_exception(ContextT const& ctx, boost::wave::preprocess_exception const& e)
- {
-#if BOOST_WAVE_SUPPORT_MS_EXTENSIONS != 0
- if (!is_import_directive_error(e))
- boost::throw_exception(e);
-#else
- boost::throw_exception(e);
-#endif
- }
- using base_type::throw_exception;
-
-protected:
-#if BOOST_WAVE_SUPPORT_MS_EXTENSIONS != 0
- ///////////////////////////////////////////////////////////////////////////
- // Avoid throwing an error from a #import directive
- bool is_import_directive_error(boost::wave::preprocess_exception const& e)
- {
- using namespace boost::wave;
- if (e.get_errorcode() != preprocess_exception::ill_formed_directive)
- return false;
-
- // the error string is formatted as 'severity: error: directive'
- std::string error(e.description());
- std::string::size_type p = error.find_last_of(":");
- return p != std::string::npos && error.substr(p+2) == "import";
- }
-#endif
-
- ///////////////////////////////////////////////////////////////////////////
- // Interpret the different Wave specific pragma directives/operators
- template <typename ContextT, typename ContainerT>
- bool
- interpret_pragma_trace(ContextT& ctx, ContainerT const &values,
- typename ContextT::token_type const &act_token)
- {
- typedef typename ContextT::token_type token_type;
- typedef typename token_type::string_type string_type;
-
- bool valid_option = false;
-
- if (1 == values.size()) {
- token_type const &value = values.front();
-
- if (value.get_value() == "enable" ||
- value.get_value() == "on" ||
- value.get_value() == "1")
- {
- // #pragma wave trace(enable)
- enable_tracing(static_cast<trace_flags>(
- tracing_enabled() | trace_macros));
- valid_option = true;
- }
- else if (value.get_value() == "disable" ||
- value.get_value() == "off" ||
- value.get_value() == "0")
- {
- // #pragma wave trace(disable)
- enable_tracing(static_cast<trace_flags>(
- tracing_enabled() & ~trace_macros));
- valid_option = true;
- }
- }
- if (!valid_option) {
- // unknown option value
- string_type option_str ("trace");
-
- if (values.size() > 0) {
- option_str += "(";
- option_str += boost::wave::util::impl::as_string(values);
- option_str += ")";
- }
- BOOST_WAVE_THROW_CTX(ctx, boost::wave::preprocess_exception,
- ill_formed_pragma_option, option_str.c_str(),
- act_token.get_position());
- return false;
- }
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // interpret the pragma wave option(preserve: [0|1|2|3|push|pop]) directive
- template <typename ContextT>
- static bool
- interpret_pragma_option_preserve_set(int mode, bool &preserve_whitespace,
- bool& preserve_bol_whitespace, ContextT &ctx)
- {
- switch(mode) {
- // preserve no whitespace
- case 0:
- preserve_whitespace = false;
- preserve_bol_whitespace = false;
- ctx.set_language(
- enable_preserve_comments(ctx.get_language(), false),
- false);
- break;
-
- // preserve BOL whitespace only
- case 1:
- preserve_whitespace = false;
- preserve_bol_whitespace = true;
- ctx.set_language(
- enable_preserve_comments(ctx.get_language(), false),
- false);
- break;
-
- // preserve comments and BOL whitespace only
- case 2:
- preserve_whitespace = false;
- preserve_bol_whitespace = true;
- ctx.set_language(
- enable_preserve_comments(ctx.get_language()),
- false);
- break;
-
- // preserve all whitespace
- case 3:
- preserve_whitespace = true;
- preserve_bol_whitespace = true;
- ctx.set_language(
- enable_preserve_comments(ctx.get_language()),
- false);
- break;
-
- default:
- return false;
- }
- return true;
- }
-
- template <typename ContextT, typename IteratorT>
- bool
- interpret_pragma_option_preserve(ContextT &ctx, IteratorT &it,
- IteratorT end, typename ContextT::token_type const &act_token)
- {
- using namespace boost::wave;
-
- token_id id = util::impl::skip_whitespace(it, end);
- if (T_COLON == id)
- id = util::impl::skip_whitespace(it, end);
-
- // implement push/pop
- if (T_IDENTIFIER == id) {
- if ((*it).get_value() == "push") {
- // push current preserve option onto the internal option stack
- if (need_preserve_comments(ctx.get_language())) {
- if (preserve_whitespace)
- preserve_options.push(3);
- else
- preserve_options.push(2);
- }
- else if (preserve_bol_whitespace) {
- preserve_options.push(1);
- }
- else {
- preserve_options.push(0);
- }
- return true;
- }
- else if ((*it).get_value() == "pop") {
- // test for mismatched push/pop #pragmas
- if (preserve_options.empty()) {
- BOOST_WAVE_THROW_CTX(ctx, bad_pragma_exception,
- pragma_mismatched_push_pop, "preserve",
- act_token.get_position());
- }
-
-                // pop the preserve option from the internal option stack
- bool result = interpret_pragma_option_preserve_set(
- preserve_options.top(), preserve_whitespace,
- preserve_bol_whitespace, ctx);
- preserve_options.pop();
- return result;
- }
- return false;
- }
-
- if (T_PP_NUMBER != id)
- return false;
-
- using namespace std; // some platforms have atoi in namespace std
- return interpret_pragma_option_preserve_set(
- atoi((*it).get_value().c_str()), preserve_whitespace,
- preserve_bol_whitespace, ctx);
- }
-
- // interpret the pragma wave option(line: [0|1|2|push|pop]) directive
- template <typename ContextT, typename IteratorT>
- bool
- interpret_pragma_option_line(ContextT &ctx, IteratorT &it,
- IteratorT end, typename ContextT::token_type const &act_token)
- {
- using namespace boost::wave;
-
- token_id id = util::impl::skip_whitespace(it, end);
- if (T_COLON == id)
- id = util::impl::skip_whitespace(it, end);
-
- // implement push/pop
- if (T_IDENTIFIER == id) {
- if ((*it).get_value() == "push") {
- // push current line option onto the internal option stack
- int mode = 0;
- if (need_emit_line_directives(ctx.get_language())) {
- mode = 1;
- if (enable_relative_names_in_line_directives())
- mode = 2;
- }
- line_options.push(mode);
- return true;
- }
- else if ((*it).get_value() == "pop") {
- // test for mismatched push/pop #pragmas
- if (line_options.empty()) {
- BOOST_WAVE_THROW_CTX(ctx, bad_pragma_exception,
- pragma_mismatched_push_pop, "line",
- act_token.get_position());
- }
-
-                // pop the line option from the internal option stack
- ctx.set_language(
- enable_emit_line_directives(ctx.get_language(), 0 != line_options.top()),
- false);
- enable_relative_names_in_line_directives(2 == line_options.top());
- line_options.pop();
- return true;
- }
- return false;
- }
-
- if (T_PP_NUMBER != id)
- return false;
-
- using namespace std; // some platforms have atoi in namespace std
- int emit_lines = atoi((*it).get_value().c_str());
- if (0 == emit_lines || 1 == emit_lines || 2 == emit_lines) {
- // set the new emit #line directive mode
- ctx.set_language(
- enable_emit_line_directives(ctx.get_language(), emit_lines),
- false);
- return true;
- }
- return false;
- }
-
- // interpret the pragma wave option(output: ["filename"|null|default|push|pop])
- // directive
- template <typename ContextT>
- bool
- interpret_pragma_option_output_open(boost::filesystem::path &fpath,
- ContextT& ctx, typename ContextT::token_type const &act_token)
- {
- namespace fs = boost::filesystem;
-
- // ensure all directories for this file do exist
- boost::wave::util::create_directories(
- boost::wave::util::branch_path(fpath));
-
-        // figure out whether we have already written to this file; if so, we
-        // append any output to it, otherwise we overwrite it
- std::ios::openmode mode = std::ios::out;
- if (fs::exists(fpath) && written_by_us.find(fpath) != written_by_us.end())
- mode = (std::ios::openmode)(std::ios::out | std::ios::app);
-
- written_by_us.insert(fpath);
-
- // close the current file
- if (outputstrm.is_open())
- outputstrm.close();
-
- // open the new file
- outputstrm.open(fpath.string().c_str(), mode);
- if (!outputstrm.is_open()) {
- BOOST_WAVE_THROW_CTX(ctx, boost::wave::preprocess_exception,
- could_not_open_output_file,
- fpath.string().c_str(), act_token.get_position());
- return false;
- }
-
- // write license text, if file was created and if requested
- if (mode == std::ios::out && !license_info.empty())
- outputstrm << license_info;
-
- generate_output = true;
- current_outfile = fpath;
- return true;
- }
-
- bool interpret_pragma_option_output_close(bool generate)
- {
- if (outputstrm.is_open())
- outputstrm.close();
- current_outfile = boost::filesystem::path();
- generate_output = generate;
- return true;
- }
-
- template <typename ContextT, typename IteratorT>
- bool
- interpret_pragma_option_output(ContextT &ctx, IteratorT &it,
- IteratorT end, typename ContextT::token_type const &act_token)
- {
- using namespace boost::wave;
- namespace fs = boost::filesystem;
-
- typedef typename ContextT::token_type token_type;
- typedef typename token_type::string_type string_type;
-
- token_id id = util::impl::skip_whitespace(it, end);
- if (T_COLON == id)
- id = util::impl::skip_whitespace(it, end);
-
- bool result = false;
- if (T_STRINGLIT == id) {
- namespace fs = boost::filesystem;
-
- string_type fname ((*it).get_value());
- fs::path fpath (boost::wave::util::create_path(
- util::impl::unescape_lit(fname.substr(1, fname.size()-2)).c_str()));
- fpath = boost::wave::util::complete_path(fpath, ctx.get_current_directory());
- result = interpret_pragma_option_output_open(fpath, ctx, act_token);
- }
- else if (T_IDENTIFIER == id) {
- if ((*it).get_value() == "null") {
- // suppress all output from this point on
- result = interpret_pragma_option_output_close(false);
- }
- else if ((*it).get_value() == "push") {
- // initialize the current_outfile, if appropriate
- if (output_options.empty() && current_outfile.empty() &&
- !default_outfile.empty() && default_outfile != "-")
- {
- current_outfile = boost::wave::util::complete_path(
- default_outfile, ctx.get_current_directory());
- }
-
- // push current output option onto the internal option stack
- output_options.push(
- output_option_type(generate_output, current_outfile));
- result = true;
- }
- else if ((*it).get_value() == "pop") {
- // test for mismatched push/pop #pragmas
- if (output_options.empty()) {
- BOOST_WAVE_THROW_CTX(ctx, bad_pragma_exception,
- pragma_mismatched_push_pop, "output",
- act_token.get_position());
- return false;
- }
-
- // pop output option from the internal option stack
- output_option_type const& opts = output_options.top();
- generate_output = opts.first;
- current_outfile = opts.second;
- if (!current_outfile.empty()) {
- // re-open the last file
- result = interpret_pragma_option_output_open(current_outfile,
- ctx, act_token);
- }
- else {
- // either no output or generate to std::cout
- result = interpret_pragma_option_output_close(generate_output);
- }
- output_options.pop();
- }
- }
- else if (T_DEFAULT == id) {
- // re-open the default output given on command line
- if (!default_outfile.empty()) {
- if (default_outfile == "-") {
- // the output was suppressed on the command line
- result = interpret_pragma_option_output_close(false);
- }
- else {
- // there was a file name on the command line
- fs::path fpath(boost::wave::util::create_path(default_outfile));
- result = interpret_pragma_option_output_open(fpath, ctx,
- act_token);
- }
- }
- else {
- // generate the output to std::cout
- result = interpret_pragma_option_output_close(true);
- }
- }
- return result;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // join all adjacent string tokens into the first one
- template <typename StringT>
- StringT unlit(StringT const& str)
- {
- return str.substr(1, str.size()-2);
- }
-
- template <typename StringT>
- StringT merge_string_lits(StringT const& lhs, StringT const& rhs)
- {
- StringT result ("\"");
-
- result += unlit(lhs);
- result += unlit(rhs);
- result += "\"";
- return result;
- }
-
- template <typename ContextT, typename ContainerT>
- void join_adjacent_string_tokens(ContextT &ctx, ContainerT const& values,
- ContainerT& joined_values)
- {
- using namespace boost::wave;
-
- typedef typename ContextT::token_type token_type;
- typedef typename token_type::string_type string_type;
- typedef typename ContainerT::const_iterator const_iterator;
- typedef typename ContainerT::iterator iterator;
-
- token_type* current = 0;
-
- const_iterator end = values.end();
- for (const_iterator it = values.begin(); it != end; ++it) {
- token_id id(*it);
-
- if (id == T_STRINGLIT) {
- if (!current) {
- joined_values.push_back(*it);
- current = &joined_values.back();
- }
- else {
- current->set_value(merge_string_lits(
- current->get_value(), (*it).get_value()));
- }
- }
- else if (current) {
- typedef util::impl::next_token<const_iterator> next_token_type;
- token_id next_id (next_token_type::peek(it, end, true));
-
- if (next_id != T_STRINGLIT) {
- current = 0;
- joined_values.push_back(*it);
- }
- }
- else {
- joined_values.push_back(*it);
- }
- }
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // interpret the pragma wave option() directives
- template <typename ContextT, typename ContainerT>
- bool
- interpret_pragma_option(ContextT &ctx, ContainerT const &cvalues,
- typename ContextT::token_type const &act_token)
- {
- using namespace boost::wave;
-
- typedef typename ContextT::token_type token_type;
- typedef typename token_type::string_type string_type;
- typedef typename ContainerT::const_iterator const_iterator;
-
- ContainerT values;
- join_adjacent_string_tokens(ctx, cvalues, values);
-
- const_iterator end = values.end();
- for (const_iterator it = values.begin(); it != end; /**/) {
- bool valid_option = false;
-
- token_type const &value = *it;
- if (value.get_value() == "preserve") {
- // #pragma wave option(preserve: [0|1|2|3|push|pop])
- valid_option = interpret_pragma_option_preserve(ctx, it, end,
- act_token);
- }
- else if (value.get_value() == "line") {
- // #pragma wave option(line: [0|1|2|push|pop])
- valid_option = interpret_pragma_option_line(ctx, it, end,
- act_token);
- }
- else if (value.get_value() == "output") {
- // #pragma wave option(output: ["filename"|null|default|push|pop])
- valid_option = interpret_pragma_option_output(ctx, it, end,
- act_token);
- }
-
- if (!valid_option) {
- // unknown option value
- string_type option_str ("option");
-
- if (values.size() > 0) {
- option_str += "(";
- option_str += util::impl::as_string(values);
- option_str += ")";
- }
- BOOST_WAVE_THROW_CTX(ctx, boost::wave::preprocess_exception,
- ill_formed_pragma_option,
- option_str.c_str(), act_token.get_position());
- return false;
- }
-
- token_id id = util::impl::skip_whitespace(it, end);
- if (id == T_COMMA)
- util::impl::skip_whitespace(it, end);
- }
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // interpret the #pragma wave system() directive
- template <typename ContextT, typename ContainerT>
- bool
- interpret_pragma_system(ContextT& ctx, ContainerT &pending,
- ContainerT const &values,
- typename ContextT::token_type const &act_token)
- {
- typedef typename ContextT::token_type token_type;
- typedef typename token_type::string_type string_type;
-
- if (0 == values.size()) return false; // ill_formed_pragma_option
-
- string_type stdout_file(std::tmpnam(0));
- string_type stderr_file(std::tmpnam(0));
- string_type system_str(boost::wave::util::impl::as_string(values));
- string_type native_cmd(system_str);
-
- system_str += " >" + stdout_file + " 2>" + stderr_file;
- if (0 != std::system(system_str.c_str())) {
- // unable to spawn the command
- string_type error_str("unable to spawn command: ");
-
- error_str += native_cmd;
- BOOST_WAVE_THROW_CTX(ctx, boost::wave::preprocess_exception,
- ill_formed_pragma_option,
- error_str.c_str(), act_token.get_position());
- return false;
- }
-
- // rescan the content of the stdout_file and insert it as the
- // _Pragma replacement
- typedef typename ContextT::lexer_type lexer_type;
- typedef typename ContextT::input_policy_type input_policy_type;
- typedef boost::wave::iteration_context<
- ContextT, lexer_type, input_policy_type>
- iteration_context_type;
-
- iteration_context_type iter_ctx(ctx, stdout_file.c_str(),
- act_token.get_position(), ctx.get_language());
- ContainerT pragma;
-
- for (/**/; iter_ctx.first != iter_ctx.last; ++iter_ctx.first)
- pragma.push_back(*iter_ctx.first);
-
- // prepend the newly generated token sequence to the 'pending' container
- pending.splice(pending.begin(), pragma);
-
- // erase the created tempfiles
- std::remove(stdout_file.c_str());
- std::remove(stderr_file.c_str());
- return true;
- }
-
- ///////////////////////////////////////////////////////////////////////////
-    //  The function enable_tracing is called whenever the tracing status
-    //  changes.
-    //  The parameter 'flags' holds the new tracing status.
- void enable_tracing(trace_flags flags)
- { logging_flags = flags; }
-
- // The function tracing_enabled should return the current tracing status.
- trace_flags tracing_enabled()
- { return logging_flags; }
-
- // Helper functions for generating the trace output
- void open_trace_body(char const *label = 0)
- {
- if (label)
- output(label);
- output("[\n");
- increment_level();
- }
- void close_trace_body()
- {
- if (get_level() > 0) {
- decrement_level();
- output("]\n");
- tracestrm << std::flush; // flush the stream buffer
- }
- }
-
- template <typename StringT>
- void output(StringT const &outstr) const
- {
- indent(get_level());
- tracestrm << outstr; // output the given string
- }
-
- void indent(int level) const
- {
- for (int i = 0; i < level; ++i)
- tracestrm << " "; // indent
- }
-
- int increment_level() { return ++level; }
- int decrement_level() { BOOST_ASSERT(level > 0); return --level; }
- int get_level() const { return level; }
-
- bool enabled_macro_tracing() const
- {
- return (flags & trace_macros) && (logging_flags & trace_macros);
- }
- bool enabled_include_tracing() const
- {
- return (flags & trace_includes);
- }
- bool enabled_guard_tracing() const
- {
- return (flags & trace_guards);
- }
- bool enabled_macro_counting() const
- {
- return logging_flags & trace_macro_counts;
- }
-
- void count_invocation(std::string const& name)
- {
- typedef std::map<std::string, std::size_t>::iterator iterator;
- typedef std::map<std::string, std::size_t>::value_type value_type;
-
- iterator it = counts.find(name);
- if (it == counts.end())
- {
- std::pair<iterator, bool> p = counts.insert(value_type(name, 0));
- if (p.second)
- it = p.first;
- }
-
- if (it != counts.end())
- ++(*it).second;
- }
-
- void timer(TokenT const &value)
- {
- if (value.get_value() == "0" || value.get_value() == "restart") {
- // restart the timer
- elapsed_time.restart();
- }
- else if (value.get_value() == "1") {
- // print out the current elapsed time
- std::cerr
- << value.get_position() << ": "
- << elapsed_time.format_elapsed_time()
- << std::endl;
- }
- else if (value.get_value() == "suspend") {
- // suspend the timer
- elapsed_time.suspend();
- }
- else if (value.get_value() == "resume") {
- // resume the timer
- elapsed_time.resume();
- }
- }
-
-private:
- std::ofstream &outputstrm; // main output stream
- std::ostream &tracestrm; // trace output stream
- std::ostream &includestrm; // included list output stream
- std::ostream &guardstrm; // include guard output stream
- int level; // indentation level
- trace_flags flags; // enabled globally
- trace_flags logging_flags; // enabled by a #pragma
- bool enable_system_command; // enable #pragma wave system() command
- bool preserve_whitespace; // enable whitespace preservation
- bool preserve_bol_whitespace; // enable begin of line whitespace preservation
- bool& generate_output; // allow generated tokens to be streamed to output
- std::string const& default_outfile; // name of the output file given on command line
- boost::filesystem::path current_outfile; // name of the current output file
-
- stop_watch elapsed_time; // trace timings
- std::set<boost::filesystem::path> written_by_us; // all files we have written to
-
- typedef std::pair<bool, boost::filesystem::path> output_option_type;
- std::stack<output_option_type> output_options; // output option stack
- std::stack<int> line_options; // line option stack
- std::stack<int> preserve_options; // preserve option stack
-
- std::map<std::string, std::size_t> counts; // macro invocation counts
- bool emit_relative_filenames; // emit relative names in #line directives
-
- std::set<std::string> noexpandmacros; // list of macros not to expand
-
- std::string license_info; // text to pre-pend to all generated output files
-};
-
-#undef BOOST_WAVE_GETSTRING
-#undef BOOST_WAVE_OSSTREAM
-
-#endif // !defined(TRACE_MACRO_EXPANSION_HPP_D8469318_8407_4B9D_A19F_13CA60C1661F_INCLUDED)
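
A policy like the trace_macro_expansion class removed above is attached to boost::wave::context as its hooks template parameter. The following self-contained sketch wires a much smaller hooks class (one that merely counts object-like macro expansions) into a context; the input file name and the hook body are illustrative only, and the program has to be linked against the Boost.Wave library and its dependencies:

#include <fstream>
#include <iostream>
#include <iterator>
#include <map>
#include <string>

#include <boost/wave.hpp>
#include <boost/wave/cpplexer/cpp_lex_token.hpp>
#include <boost/wave/cpplexer/cpp_lex_iterator.hpp>

// trivial hooks policy in the spirit of trace_macro_expansion
struct counting_hooks
  : boost::wave::context_policies::default_preprocessing_hooks
{
    std::map<std::string, std::size_t> counts;

    template <typename ContextT, typename TokenT, typename ContainerT>
    bool expanding_object_like_macro(ContextT const&, TokenT const& macrodef,
        ContainerT const&, TokenT const&)
    {
        ++counts[macrodef.get_value().c_str()];
        return false;                     // false: let the macro expand as usual
    }
};

int main()
{
    std::ifstream in("input.cpp");        // illustrative input file
    std::string src(std::istreambuf_iterator<char>(in.rdbuf()),
        std::istreambuf_iterator<char>());

    typedef boost::wave::cpplexer::lex_token<> token_type;
    typedef boost::wave::cpplexer::lex_iterator<token_type> lex_iterator_type;
    typedef boost::wave::context<std::string::iterator, lex_iterator_type,
        boost::wave::iteration_context_policies::load_file_to_string,
        counting_hooks> context_type;

    context_type ctx(src.begin(), src.end(), "input.cpp");
    for (context_type::iterator_type it = ctx.begin(); it != ctx.end(); ++it)
        std::cout << (*it).get_value();   // emit the preprocessed token stream

    std::map<std::string, std::size_t>::const_iterator cit = ctx.get_hooks().counts.begin();
    for (/**/; cit != ctx.get_hooks().counts.end(); ++cit)
        std::cerr << cit->first << ": " << cit->second << std::endl;
    return 0;
}
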